From 95096dc1eb214fcccdf2d379fee91ab6613c9df2 Mon Sep 17 00:00:00 2001
From: Ben Franksen
Date: Thu, 9 Jan 2025 20:29:54 +0100
Subject: [PATCH] screened 2025-01-09 19:29:54+00:00

---
 .github/workflows/build-and-test.yml | 38 +-
 CHANGELOG.md | 18 -
 README.md | 2 +-
 Setup.hs | 2 +-
 darcs.cabal | 48 +-
 harness/Darcs/Test/Patch.hs | 16 +-
 harness/Darcs/Test/Patch/Arbitrary/Generic.hs | 13 +-
 .../Arbitrary/{RepoPatch.hs => Mergeable.hs} | 27 +-
 harness/Darcs/Test/Patch/Arbitrary/Named.hs | 69 +-
 .../Test/Patch/Arbitrary/PrimFileUUID.hs | 4 +-
 harness/Darcs/Test/Patch/Arbitrary/PrimV1.hs | 17 +-
 .../Darcs/Test/Patch/Arbitrary/RepoPatchV1.hs | 6 +-
 .../Darcs/Test/Patch/Arbitrary/RepoPatchV2.hs | 5 +-
 .../Darcs/Test/Patch/Arbitrary/RepoPatchV3.hs | 6 +-
 harness/Darcs/Test/Patch/Examples/Set2.hs | 513 +++
 .../Test/Patch/Examples/Set2Unwitnessed.hs | 503 ---
 harness/Darcs/Test/Patch/Examples/Unwind.hs | 20 +-
 harness/Darcs/Test/Patch/Properties.hs | 231 +-
 .../Darcs/Test/Patch/Properties/Generic.hs | 18 +-
 .../Patch/Properties/GenericUnwitnessed.hs | 90 -
 .../Properties/{RepoPatch.hs => Mergeable.hs} | 95 +-
 .../Test/Patch/Properties/RepoPatchV3.hs | 4 +-
 harness/Darcs/Test/Patch/RepoModel.hs | 26 +-
 .../Test/Patch/Types/MergeableSequence.hs | 61 +-
 harness/Darcs/Test/Patch/Unwind.hs | 21 +-
 harness/Darcs/Test/Patch/WSub.hs | 139 -
 harness/Darcs/Test/Patch/WithState.hs | 28 +-
 harness/Darcs/Test/Shell.hs | 18 +-
 .../Test/UI/Commands/Test/IndexedApply.hs | 5 +-
 harness/test.hs | 26 +-
 release/distributed-context | 2 +-
 release/distributed-version | 2 +-
 release/release.sh | 2 +-
 shelly/ChangeLog.md | 41 -
 shelly/LICENSE | 30 -
 shelly/README.md | 187 --
 shelly/Setup.hs | 2 -
 shelly/shelly.cabal | 198 --
 shelly/src/Shelly.hs | 1473 ---------
 shelly/src/Shelly/Base.hs | 331 --
 shelly/src/Shelly/Find.hs | 79 -
 shelly/src/Shelly/Lifted.hs | 584 ----
 shelly/src/Shelly/Pipe.hs | 643 ----
 shelly/src/Shelly/Unix.hs | 12 -
 shelly/test/data/nonascii.txt | 1 -
 shelly/test/data/symlinked_dir/hoge_file | 0
 shelly/test/data/zshrc | 2795 -----------------
 shelly/test/examples/color.hs | 12 -
 shelly/test/examples/drain.hs | 19 -
 shelly/test/examples/drain.sh | 4 -
 shelly/test/examples/printer.sh | 6 -
 shelly/test/examples/run-handles.hs | 8 -
 shelly/test/examples/test.sh | 2 -
 shelly/test/src/CopySpec.hs | 85 -
 shelly/test/src/EnvSpec.hs | 36 -
 shelly/test/src/FailureSpec.hs | 29 -
 shelly/test/src/FindSpec.hs | 115 -
 shelly/test/src/Help.hs | 24 -
 shelly/test/src/LiftedSpec.hs | 23 -
 shelly/test/src/LogWithSpec.hs | 19 -
 shelly/test/src/MoveSpec.hs | 76 -
 shelly/test/src/ReadFileSpec.hs | 23 -
 shelly/test/src/RmSpec.hs | 82 -
 shelly/test/src/RunSpec.hs | 58 -
 shelly/test/src/SshSpec.hs | 39 -
 shelly/test/src/TestInit.hs | 10 -
 shelly/test/src/TestMain.hs | 32 -
 shelly/test/src/WhichSpec.hs | 17 -
 shelly/test/src/WriteSpec.hs | 41 -
 shelly/test/src/sleep.hs | 9 -
 shelly/test/testall | 57 -
 src/Darcs/Patch.hs | 2 +-
 src/Darcs/Patch/Annotate.hs | 2 +-
 src/Darcs/Patch/Apply.hs | 4 +-
 src/Darcs/Patch/ApplyMonad.hs | 8 +-
 src/Darcs/Patch/Conflict.hs | 6 +-
 src/Darcs/Patch/Effect.hs | 1 -
 src/Darcs/Patch/Format.hs | 2 +-
 src/Darcs/Patch/FromPrim.hs | 2 +-
 src/Darcs/Patch/Ident.hs | 2 +-
 src/Darcs/Patch/Info.hs | 74 +-
 src/Darcs/Patch/Match.hs | 147 +-
 src/Darcs/Patch/Named.hs | 183 +-
 src/Darcs/Patch/Object.hs | 4 +-
 src/Darcs/Patch/PatchInfoAnd.hs | 2 +-
 src/Darcs/Patch/Permutations.hs | 2 +-
 src/Darcs/Patch/Prim.hs | 1 -
 src/Darcs/Patch/Prim/FileUUID/Apply.hs | 2 +-
 src/Darcs/Patch/Prim/FileUUID/Commute.hs | 2 +-
 src/Darcs/Patch/Prim/FileUUID/Details.hs | 2 +-
 src/Darcs/Patch/Prim/FileUUID/ObjectMap.hs | 4 +-
 src/Darcs/Patch/Prim/FileUUID/Read.hs | 2 +-
 src/Darcs/Patch/Prim/FileUUID/Show.hs | 2 +-
 src/Darcs/Patch/Prim/V1/Apply.hs | 2 +-
 src/Darcs/Patch/Prim/V1/Coalesce.hs | 2 +-
 src/Darcs/Patch/Prim/V1/Commute.hs | 236 +-
 src/Darcs/Patch/Prim/V1/Details.hs | 2 +-
 src/Darcs/Patch/Prim/V1/Mangle.hs | 2 +-
 src/Darcs/Patch/Prim/V1/Read.hs | 2 +-
 src/Darcs/Patch/Prim/V1/Show.hs | 2 +-
 src/Darcs/Patch/RepoPatch.hs | 4 +-
 src/Darcs/Patch/Set.hs | 36 +-
 src/Darcs/Patch/Split.hs | 2 +-
 src/Darcs/Patch/Summary.hs | 83 +-
 src/Darcs/Patch/V1/Apply.hs | 2 +-
 src/Darcs/Patch/V1/Commute.hs | 7 +-
 src/Darcs/Patch/V1/Read.hs | 2 +-
 src/Darcs/Patch/V1/Show.hs | 2 +-
 src/Darcs/Patch/V1/Viewing.hs | 2 +-
 src/Darcs/Patch/V2/Non.hs | 3 +-
 src/Darcs/Patch/V2/RepoPatch.hs | 9 +-
 src/Darcs/Patch/V3.hs | 2 +-
 src/Darcs/Patch/V3/Resolution.hs | 46 +-
 src/Darcs/Patch/Viewing.hs | 2 +-
 src/Darcs/Prelude.hs | 12 +-
 src/Darcs/Repository.hs | 2 +
 src/Darcs/Repository/ApplyPatches.hs | 2 +-
 src/Darcs/Repository/Clone.hs | 15 +-
 src/Darcs/Repository/Hashed.hs | 1 -
 src/Darcs/Repository/InternalTypes.hs | 2 +-
 src/Darcs/Repository/Inventory.hs | 4 +
 src/Darcs/Repository/Match.hs | 34 +-
 src/Darcs/Repository/PatchIndex.hs | 11 +-
 src/Darcs/Repository/Paths.hs | 14 +-
 src/Darcs/Repository/Prefs.hs | 14 +-
 src/Darcs/Repository/Traverse.hs | 99 +-
 src/Darcs/UI/ApplyPatches.hs | 2 +-
 src/Darcs/UI/Commands.hs | 28 +-
 src/Darcs/UI/Commands/Amend.hs | 11 +-
 src/Darcs/UI/Commands/Annotate.hs | 4 +-
 src/Darcs/UI/Commands/Clone.hs | 19 +-
 src/Darcs/UI/Commands/Convert/Darcs2.hs | 10 +-
 src/Darcs/UI/Commands/Convert/Import.hs | 3 +-
 src/Darcs/UI/Commands/Diff.hs | 4 +-
 src/Darcs/UI/Commands/Help.hs | 3 +-
 src/Darcs/UI/Commands/Init.hs | 3 +-
 src/Darcs/UI/Commands/Log.hs | 54 +-
 src/Darcs/UI/Commands/Optimize.hs | 4 +-
 src/Darcs/UI/Commands/Rebase.hs | 4 +-
 src/Darcs/UI/Commands/Record.hs | 4 +-
 src/Darcs/UI/Commands/Send.hs | 10 +-
 src/Darcs/UI/Commands/ShowAuthors.hs | 3 +
 src/Darcs/UI/Commands/ShowRepo.hs | 206 +-
 src/Darcs/UI/Commands/ShowTags.hs | 27 +-
 src/Darcs/UI/Commands/Test/Impl.hs | 12 +-
 src/Darcs/UI/Commands/Unrecord.hs | 4 +-
 src/Darcs/UI/Commands/Util.hs | 30 +-
 src/Darcs/UI/External.hs | 8 +-
 src/Darcs/UI/Options/All.hs | 22 +
 src/Darcs/UI/Options/Flags.hs | 2 +
 src/Darcs/UI/PatchHeader.hs | 19 +-
 src/Darcs/UI/Prompt.hs | 32 +-
 src/Darcs/UI/SelectChanges.hs | 12 +-
 src/Darcs/Util/Lock.hs | 8 -
 src/Darcs/Util/Path.hs | 64 +-
 src/Darcs/Util/Printer.hs | 52 +-
 src/Darcs/Util/Ssh.hs | 13 +-
 src/Darcs/Util/Tree/Hashed.hs | 13 +-
 src/Darcs/Util/Tree/Monad.hs | 11 +-
 tests/clone.sh | 2 +-
 tests/conflict-fight.sh | 3 +
 ...issue1241-rollback-with-file-beyond-tag.sh | 18 +
 ...ling-issue1702-optimize-relink-vs-cache.sh | 2 +-
 tests/failing-issue2729-index-corner-case.sh | 14 +
 tests/failing-issue2729-index-corner-case2.sh | 18 +
 tests/issue1210-no-global-cache-in-sources.sh | 4 +-
 ...e2136-log_created_as_for_multiple_files.sh | 4 +-
 tests/issue2333.sh | 2 -
 tests/issue2378-moving-directory-to-file.sh | 11 +-
 tests/issue2380-rename-to-deleted-file.sh | 5 +-
 ...issue2382-mv-dir-to-file-confuses-darcs.sh | 2 +-
 .../issue2556-apply-fails-for-large-bundle.sh | 12 +
 ...ssue2727-resolutions-order-independent1.sh | 400 +++
 ...sue2727-resolutions-order-independent10.sh | 538 ++++
 ...sue2727-resolutions-order-independent11.sh | 516 +++
 ...ssue2727-resolutions-order-independent2.sh | 315 ++
 ...ssue2727-resolutions-order-independent3.sh | 393 +++
 ...ssue2727-resolutions-order-independent4.sh | 438 +++
 ...ssue2727-resolutions-order-independent5.sh | 852 +++++
 ...ssue2727-resolutions-order-independent6.sh | 236 ++
 ...ssue2727-resolutions-order-independent7.sh | 349 ++
 ...ssue2727-resolutions-order-independent8.sh | 555 ++++
 ...ssue2727-resolutions-order-independent9.sh | 393 +++
 tests/issue494-pending-sort.sh | 7 +-
 tests/latin9-input.sh | 14 +-
 tests/legacy-inverted.sh | 4 +-
 tests/match.sh | 2 +-
 tests/mutex-option-precedence.sh | 2 +-
 ...ng-issue2462-remote-darcs-transfer-mode.sh | 11 +
 tests/printer.sh | 17 +-
 tests/pull.sh | 2 +-
 tests/rename_shouldnt_affect_prefixes.sh | 2 +-
 ...ms.sh => repair-missing-pristine-files.sh} | 6 +-
 tests/resolve-conflicts-explicitly.sh | 10 +-
 194 files changed, 7061 insertions(+), 9192 deletions(-)
 rename harness/Darcs/Test/Patch/Arbitrary/{RepoPatch.hs => Mergeable.hs} (83%)
 create mode 100644 harness/Darcs/Test/Patch/Examples/Set2.hs
 delete mode 100644 harness/Darcs/Test/Patch/Examples/Set2Unwitnessed.hs
 delete mode 100644 harness/Darcs/Test/Patch/Properties/GenericUnwitnessed.hs
 rename harness/Darcs/Test/Patch/Properties/{RepoPatch.hs => Mergeable.hs} (71%)
 delete mode 100644 harness/Darcs/Test/Patch/WSub.hs
 delete mode 100644 shelly/ChangeLog.md
 delete mode 100644 shelly/LICENSE
 delete mode 100644 shelly/README.md
 delete mode 100644 shelly/Setup.hs
 delete mode 100644 shelly/shelly.cabal
 delete mode 100644 shelly/src/Shelly.hs
 delete mode 100644 shelly/src/Shelly/Base.hs
 delete mode 100644 shelly/src/Shelly/Find.hs
 delete mode 100644 shelly/src/Shelly/Lifted.hs
 delete mode 100644 shelly/src/Shelly/Pipe.hs
 delete mode 100644 shelly/src/Shelly/Unix.hs
 delete mode 100644 shelly/test/data/nonascii.txt
 delete mode 100644 shelly/test/data/symlinked_dir/hoge_file
 delete mode 100644 shelly/test/data/zshrc
 delete mode 100644 shelly/test/examples/color.hs
 delete mode 100644 shelly/test/examples/drain.hs
 delete mode 100755 shelly/test/examples/drain.sh
 delete mode 100755 shelly/test/examples/printer.sh
 delete mode 100644 shelly/test/examples/run-handles.hs
 delete mode 100755 shelly/test/examples/test.sh
 delete mode 100644 shelly/test/src/CopySpec.hs
 delete mode 100644 shelly/test/src/EnvSpec.hs
 delete mode 100644 shelly/test/src/FailureSpec.hs
 delete mode 100644 shelly/test/src/FindSpec.hs
 delete mode 100644 shelly/test/src/Help.hs
 delete mode 100644 shelly/test/src/LiftedSpec.hs
 delete mode 100644 shelly/test/src/LogWithSpec.hs
 delete mode 100644 shelly/test/src/MoveSpec.hs
 delete mode 100644 shelly/test/src/ReadFileSpec.hs
 delete mode 100644 shelly/test/src/RmSpec.hs
 delete mode 100644 shelly/test/src/RunSpec.hs
 delete mode 100644 shelly/test/src/SshSpec.hs
 delete mode 100644 shelly/test/src/TestInit.hs
 delete mode 100644 shelly/test/src/TestMain.hs
 delete mode 100644 shelly/test/src/WhichSpec.hs
 delete mode 100644 shelly/test/src/WriteSpec.hs
 delete mode 100644 shelly/test/src/sleep.hs
 delete mode 100755 shelly/test/testall
 create mode 100755 tests/failing-issue1241-rollback-with-file-beyond-tag.sh
 create mode 100644 tests/failing-issue2729-index-corner-case.sh
 create mode 100644 tests/failing-issue2729-index-corner-case2.sh
 mode change 100644 => 100755 tests/issue2378-moving-directory-to-file.sh
 create mode 100755 tests/issue2556-apply-fails-for-large-bundle.sh
 create mode 100755 tests/issue2727-resolutions-order-independent1.sh
 create mode 100755 tests/issue2727-resolutions-order-independent10.sh
 create mode 100755 tests/issue2727-resolutions-order-independent11.sh
 create mode 100755
tests/issue2727-resolutions-order-independent2.sh create mode 100755 tests/issue2727-resolutions-order-independent3.sh create mode 100755 tests/issue2727-resolutions-order-independent4.sh create mode 100755 tests/issue2727-resolutions-order-independent5.sh create mode 100755 tests/issue2727-resolutions-order-independent6.sh create mode 100755 tests/issue2727-resolutions-order-independent7.sh create mode 100755 tests/issue2727-resolutions-order-independent8.sh create mode 100755 tests/issue2727-resolutions-order-independent9.sh create mode 100755 tests/network/failing-issue2462-remote-darcs-transfer-mode.sh rename tests/{failing-pristine-problems.sh => repair-missing-pristine-files.sh} (88%) diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 63ce34a4..101eeb84 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -4,34 +4,40 @@ on: push jobs: build-with-cabal: - name: ${{ matrix.os }} / ghc-${{ matrix.ghc }} + name: ${{ matrix.os }} / cabal-${{ matrix.cabal }} / ghc-${{ matrix.ghc }} runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: os: # - ubuntu-20.04 - - ubuntu-22.04 -# - macos-11 +# - ubuntu-22.04 + - ubuntu-24.04 # - macos-12 - macos-13 # - windows-2019 - windows-2022 ghc: - 8.4.4 - - 8.6.5 - - 8.8.2 - - 8.10.7 - - 9.0.2 - - 9.2.8 - - 9.4.8 - - 9.6.4 - - 9.8.2 +# - 8.6.5 +# - 8.8.2 +# - 8.10.7 +# - 9.0.2 +# - 9.2.8 +# - 9.4.8 +# - 9.6.6 +# - 9.8.4 + - 9.10.1 cabal: - - '3.10' +# - '3.10' + - '3.12' + exclude: + # building text-2.1.2 fails with obscure assembler error + - os: windows-2022 + ghc: 8.10.7 env: builddocs: ${{ matrix.ghc == '9.4.5' }} - runtests: ${{ matrix.ghc == '8.4.4' || matrix.ghc == '9.8.2' }} + runtests: ${{ matrix.ghc == '8.4.4' || matrix.ghc == '9.10.1' }} warnaserror: ${{ matrix.ghc == '9.8.2'}} tests-to-run: # e.g. -t=EXAMPLE testcmd: cabal run -- darcs-test -j=6 -f=123 -i=yn -c=yn --hide @@ -70,8 +76,8 @@ jobs: uses: actions/cache/restore@v4 with: path: ${{ steps.setup-haskell-cabal.outputs.cabal-store }} - key: cabal-store-${{ matrix.os }}-ghc-${{ matrix.ghc }}-${{ hashFiles('cabal.project.freeze') }} - restore-keys: cabal-store-${{ matrix.os }}-ghc-${{ matrix.ghc }}- + key: cabal-store-${{ matrix.os }}-cabal-${{ matrix.cabal }}-ghc-${{ matrix.ghc }}-${{ hashFiles('cabal.project.freeze') }} + restore-keys: cabal-store-${{ matrix.os }}-cabal-${{ matrix.cabal }}-ghc-${{ matrix.ghc }}- - name: Build dependencies (with docs) if: env.builddocs == 'true' @@ -86,7 +92,7 @@ jobs: uses: actions/cache/save@v4 with: path: ${{ steps.setup-haskell-cabal.outputs.cabal-store }} - key: cabal-store-${{ matrix.os }}-ghc-${{ matrix.ghc }}-${{ hashFiles('cabal.project.freeze') }} + key: cabal-store-${{ matrix.os }}-cabal-${{ matrix.cabal }}-ghc-${{ matrix.ghc }}-${{ hashFiles('cabal.project.freeze') }} - name: Build run: cabal build diff --git a/CHANGELOG.md b/CHANGELOG.md index 491708cb..a1296fd8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,21 +1,3 @@ -Darcs 2.18.5, 9 Jan 2025 - -This release is done solely to keep up with (breaking) changes in some of -our dependencies. A plain `cabal update && cabal install darcs` should now -again succeed. 
- - * restrict upper limit on attoparsec from <0.15 to <14.4 for ghc versions <8.10 - * exclude system-fileio-0.3.16.5 which fails to build on Windows - * raise upper bound on tls, data-default-class -> data-default - -Darcs 2.18.4, 26 Oct 2024 - - * darcs can now be built with stack against stackage lts-22.34, which is - the version that the debian haskell team currently targets - - * resolve issue2725: the permissions of _darcs/patch_index are now - generically set to those of its parent directory - Darcs 2.18.3, 26 May 2024 * relax upper bounds for some dependencies diff --git a/README.md b/README.md index 61bf6ff6..839f3ff2 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ cabal update && cabal install darcs ``` with a recent cabal (version 3.2 or later is recommended). Any version of -ghc from 8.2 up to 9.8 should work. +ghc from 8.2 up to 9.10 should work. From inside a clone or a source dist, use diff --git a/Setup.hs b/Setup.hs index 2b1a4e63..1cef865b 100644 --- a/Setup.hs +++ b/Setup.hs @@ -8,7 +8,7 @@ import Distribution.PackageDescription ( PackageDescription ) import Distribution.Package ( packageVersion ) import Distribution.Version( Version ) import Distribution.Simple.LocalBuildInfo - ( LocalBuildInfo(..), absoluteInstallDirs ) + ( LocalBuildInfo(..), absoluteInstallDirs, buildDir ) import Distribution.Simple.InstallDirs (mandir, CopyDest (NoCopyDest)) import Distribution.Simple.Setup (buildVerbosity, copyDest, copyVerbosity, fromFlag, diff --git a/darcs.cabal b/darcs.cabal index 4b2bfcdf..539b2647 100644 --- a/darcs.cabal +++ b/darcs.cabal @@ -1,6 +1,6 @@ Cabal-Version: 2.4 Name: darcs -version: 2.18.5 +version: 2.19.1 License: GPL-2.0-or-later License-file: COPYING Author: David Roundy , @@ -61,9 +61,6 @@ extra-source-files: release/distributed-version release/distributed-context - -- bundled shelly (the bare minimum required) - shelly/LICENSE - -- testsuite tests/data/*.tgz tests/data/README @@ -123,8 +120,8 @@ flag warn-as-error -- ---------------------------------------------------------------------- custom-setup - setup-depends: base >= 4.10 && < 4.20, - Cabal >= 2.4 && < 3.11, + setup-depends: base >= 4.10 && < 4.21, + Cabal >= 2.4 && < 3.13, process >= 1.2.3.0 && < 1.7, filepath >= 1.4.1 && < 1.5.0.0, directory >= 1.2.7 && < 1.4 @@ -416,7 +413,7 @@ Library build-depends: unix >= 2.7.1.0 && < 2.9, directory >= 1.2.7 && < 1.4 - build-depends: base >= 4.10 && < 4.20, + build-depends: base >= 4.10 && < 4.21, safe >= 0.3.20 && < 0.4, stm >= 2.1 && < 2.6, binary >= 0.5 && < 0.11, @@ -448,7 +445,7 @@ Library async >= 2.0.2 && < 2.3, constraints >= 0.11 && < 0.15, unix-compat >= 0.6 && < 0.8, - bytestring >= 0.10.6 && < 0.13, + bytestring >= 0.11.3.0 && < 0.13, old-time >= 1.1.0.3 && < 1.2, time >= 1.9 && < 1.15, text >= 1.2.1.3 && < 2.2, @@ -458,6 +455,7 @@ Library hashable >= 1.2.3.3 && < 1.5, mmap >= 0.5.9 && < 0.6, zlib >= 0.6.1.2 && < 0.8, + xml >= 1.3.14 && < 1.4, network-uri >= 2.6 && < 2.8, network >= 2.6 && < 3.3, conduit >= 1.3.0 && < 1.4, @@ -483,12 +481,12 @@ Library ghc-options: -Wall -funbox-strict-fields -fwarn-tabs + -- It's a deliberate choice to use NoMonoLocalBinds and tolerate this warning long-term, + -- as otherwise we'd need 10s of extra type signatures in our code. + -- See https://mail.haskell.org/pipermail/glasgow-haskell-users/2010-November/019464.html if impl(ghc >= 9.4.1) ghc-options: -Wno-gadt-mono-local-binds - if impl(ghc >= 9.0.1) - ghc-options: -Wno-star-is-type - -- The terminfo package cannot be built on Windows. 
if flag(terminfo) && !os(windows) build-depends: terminfo >= 0.4.0.2 && < 0.5 @@ -573,7 +571,7 @@ test-suite darcs-test type: exitcode-stdio-1.0 default-language: Haskell2010 main-is: test.hs - hs-source-dirs: harness, shelly/src + hs-source-dirs: harness if os(windows) cpp-options: -DWIN32 @@ -603,17 +601,7 @@ test-suite darcs-test test-framework-leancheck >= 0.0.1 && < 0.1, vector, zip-archive, - -- additional dependencies needed by the shelly modules - async, - exceptions, - monad-control >= 0.3.2 && < 1.1, - process, - system-filepath >= 0.4.7 && < 0.5, - -- exclude 0.3.16.5 which fails to build on Windows - system-fileio < 0.3.16.5 || > 0.3.16.5 && < 0.4, - time, - transformers-base, - unix-compat, + shelly, -- the tests shell out to a built darcs binary, so we depend on it to make -- sure that it's built. It's not actually required for build, just at runtime, @@ -627,17 +615,15 @@ test-suite darcs-test Darcs.Test.Patch.Check Darcs.Test.Patch.Depends Darcs.Test.Patch.Examples.Set1 - Darcs.Test.Patch.Examples.Set2Unwitnessed + Darcs.Test.Patch.Examples.Set2 Darcs.Test.Patch.Examples.Unwind - Darcs.Test.Patch.WSub Darcs.Test.Patch.Info Darcs.Test.Patch.Properties Darcs.Test.Patch.Properties.V1Set1 Darcs.Test.Patch.Properties.V1Set2 Darcs.Test.Patch.Properties.Generic - Darcs.Test.Patch.Properties.GenericUnwitnessed Darcs.Test.Patch.Properties.Check - Darcs.Test.Patch.Properties.RepoPatch + Darcs.Test.Patch.Properties.Mergeable Darcs.Test.Patch.Properties.RepoPatchV3 Darcs.Test.Patch.Arbitrary.Generic Darcs.Test.Patch.Arbitrary.Named @@ -645,7 +631,7 @@ test-suite darcs-test Darcs.Test.Patch.Arbitrary.PatchTree Darcs.Test.Patch.Arbitrary.PrimFileUUID Darcs.Test.Patch.Arbitrary.PrimV1 - Darcs.Test.Patch.Arbitrary.RepoPatch + Darcs.Test.Patch.Arbitrary.Mergeable Darcs.Test.Patch.Arbitrary.RepoPatchV1 Darcs.Test.Patch.Arbitrary.RepoPatchV2 Darcs.Test.Patch.Arbitrary.RepoPatchV3 @@ -681,9 +667,6 @@ test-suite darcs-test Darcs.Test.UI.Commands.Test.Simple Darcs.Test.Util.TestResult Darcs.Test.Util.QuickCheck - Shelly - Shelly.Base - Shelly.Find if flag(warn-as-error) ghc-options: -Werror @@ -693,9 +676,6 @@ test-suite darcs-test if impl(ghc >= 9.4.1) ghc-options: -Wno-gadt-mono-local-binds - if impl(ghc >= 9.0.1) - ghc-options: -Wno-star-is-type - if flag(threaded) ghc-options: -threaded diff --git a/harness/Darcs/Test/Patch.hs b/harness/Darcs/Test/Patch.hs index 528db259..666bf38c 100644 --- a/harness/Darcs/Test/Patch.hs +++ b/harness/Darcs/Test/Patch.hs @@ -30,11 +30,12 @@ import Darcs.Patch.V1 ( RepoPatchV1 ) import Darcs.Patch.V2.RepoPatch ( RepoPatchV2 ) import Darcs.Patch.V3 ( RepoPatchV3 ) import Darcs.Patch.Commute ( Commute(..) ) +import qualified Darcs.Patch.RepoPatch as RP import Darcs.Test.Patch.Arbitrary.Generic import Darcs.Test.Patch.Arbitrary.Named () import Darcs.Test.Patch.Arbitrary.PrimFileUUID() -import Darcs.Test.Patch.Arbitrary.RepoPatch +import Darcs.Test.Patch.Arbitrary.Mergeable import Darcs.Test.Patch.Arbitrary.RepoPatchV1 () import Darcs.Test.Patch.Arbitrary.RepoPatchV2 () import Darcs.Test.Patch.Arbitrary.RepoPatchV3 () @@ -58,10 +59,12 @@ type Prim2 = V2.Prim -- tests (either QuickCheck or Unit) that should be run on any type of patch general_patchTests :: forall p - . ( ArbitraryRepoPatch p, CheckedMerge p + . 
( ArbitraryMergeable p, CheckedMerge p + , RP.RepoPatch p , PrimBased p, Commute (OnlyPrim p), ArbitraryPrim (OnlyPrim p) - , ShrinkModel (PrimOf p) - , Show1 (ModelOf (PrimOf p)), Show2 p + , ShrinkModel (ModelOf p) (PrimOf p) + , Show1 (ModelOf p), Show2 p + , RepoApply (PrimOf p) ) => [Test] general_patchTests = @@ -76,6 +79,7 @@ testSuite = , repoPatchV1Tests , repoPatchV2Tests , repoPatchV3Tests + , namedPatchV3Tests , Darcs.Test.Patch.Depends.testSuite , Darcs.Test.Patch.Info.testSuite , Darcs.Test.Patch.Selection.testSuite @@ -109,3 +113,7 @@ testSuite = qc_V3 (undefined :: FileUUID.Prim wX wY) ++ general_patchTests @(RepoPatchV3 FileUUID.Prim) ] + namedPatchV3Tests = testGroup "Named RepoPatchV3" + [ testGroup "using V2.Prim wrapper for Prim.V1" $ + qc_Named_V3 (undefined :: Prim2 wX wY) + ] diff --git a/harness/Darcs/Test/Patch/Arbitrary/Generic.hs b/harness/Darcs/Test/Patch/Arbitrary/Generic.hs index 397eefd7..31a29588 100644 --- a/harness/Darcs/Test/Patch/Arbitrary/Generic.hs +++ b/harness/Darcs/Test/Patch/Arbitrary/Generic.hs @@ -1,7 +1,6 @@ {-# LANGUAGE UndecidableInstances #-} module Darcs.Test.Patch.Arbitrary.Generic ( ArbitraryPrim(..) - , ShrinkPrim , TestablePrim , PrimBased(..) , NullPatch(..) @@ -28,7 +27,7 @@ import Darcs.Patch.Witnesses.Ordered import Darcs.Patch.Apply ( Apply, ApplyState ) import Darcs.Patch.Effect ( Effect(..) ) import Darcs.Patch.Format ( PatchListFormat ) -import Darcs.Patch.Merge ( Merge(..) ) +import Darcs.Patch.Merge ( CleanMerge, Merge(..) ) import Darcs.Patch.Invert ( Invert(..) ) import Darcs.Patch.Commute ( Commute(..) ) import Darcs.Patch.FromPrim ( PrimOf ) @@ -119,13 +118,8 @@ class ( ArbitraryState prim default usesV1Model :: ModelOf prim ~ V1Model => Maybe (Dict (ModelOf prim ~ V1Model)) usesV1Model = Just Dict -type ShrinkPrim prim = - ( ShrinkModel prim - , PropagateShrink prim prim - ) - type TestablePrim prim = - ( Apply prim, Commute prim, Invert prim, Eq2 prim + ( Apply prim, CleanMerge prim, Commute prim, Invert prim, Eq2 prim, Show2 prim , PatchListFormat prim, ShowPatchBasic prim, ReadPatch prim , RepoModel (ModelOf prim), ApplyState prim ~ RepoState (ModelOf prim) , ArbitraryPrim prim @@ -139,7 +133,7 @@ class ( Effect p, Show2 (OnlyPrim p), ArbitraryState (OnlyPrim p) , ModelOf p ~ ModelOf (OnlyPrim p) ) => PrimBased p where - type OnlyPrim p :: * -> * -> * + type OnlyPrim p :: Type -> Type -> Type primEffect :: OnlyPrim p wX wY -> FL (PrimOf p) wX wY liftFromPrim :: OnlyPrim p wX wY -> p wX wY @@ -147,4 +141,3 @@ instance (Commute (OnlyPrim p), PrimBased p) => PrimBased (FL p) where type OnlyPrim (FL p) = FL (OnlyPrim p) primEffect = concatFL . mapFL_FL (primEffect @p) liftFromPrim = mapFL_FL liftFromPrim - diff --git a/harness/Darcs/Test/Patch/Arbitrary/RepoPatch.hs b/harness/Darcs/Test/Patch/Arbitrary/Mergeable.hs similarity index 83% rename from harness/Darcs/Test/Patch/Arbitrary/RepoPatch.hs rename to harness/Darcs/Test/Patch/Arbitrary/Mergeable.hs index 78e79cc0..74590f0e 100644 --- a/harness/Darcs/Test/Patch/Arbitrary/RepoPatch.hs +++ b/harness/Darcs/Test/Patch/Arbitrary/Mergeable.hs @@ -1,14 +1,15 @@ {-# LANGUAGE UndecidableInstances, ViewPatterns #-} -- | Test case generator for patch with a Merge instance -module Darcs.Test.Patch.Arbitrary.RepoPatch +module Darcs.Test.Patch.Arbitrary.Mergeable ( withSingle , withPair , withTriple , withFork , withSequence + , withSequencePair , withAllSequenceItems , NotRepoPatchV1(..) - , ArbitraryRepoPatch(..) + , ArbitraryMergeable(..) 
) where import Darcs.Prelude @@ -17,13 +18,16 @@ import Darcs.Test.Patch.WithState import Darcs.Test.Patch.RepoModel import Darcs.Test.Patch.Arbitrary.Generic ( ArbitraryPrim(..), PrimBased ) import Darcs.Test.Patch.Merge.Checked ( CheckedMerge ) -import Darcs.Test.Patch.Types.MergeableSequence ( mergeableSequenceToRL, MergeableSequence(..) ) +import Darcs.Test.Patch.Types.MergeableSequence + ( MergeableSequence(..) + , WithSplit(..) + , mergeableSequenceToRL + ) import Darcs.Test.Patch.Types.Pair ( Pair(..) ) import Darcs.Patch.Witnesses.Sealed import Darcs.Patch.Witnesses.Ordered hiding ( Fork ) import Darcs.Patch.Apply ( Apply(..) ) import Darcs.Patch.FromPrim ( PrimOf ) -import Darcs.Patch.RepoPatch ( RepoPatch ) import Darcs.Patch.V1 ( RepoPatchV1 ) import Data.Constraint @@ -33,11 +37,10 @@ data NotRepoPatchV1 p = NotRepoPatchV1 (forall prim . Dict (p ~ RepoPatchV1 prim -- | Class to simplify type signatures and superclass constraints. class - ( RepoPatch p - , ArbitraryPrim (PrimOf p) - , ModelOf p ~ ModelOf (PrimOf p) + ( ArbitraryPrim (PrimOf p) + , RepoModel (ModelOf p) , ApplyState p ~ RepoState (ModelOf p) - ) => ArbitraryRepoPatch p where + ) => ArbitraryMergeable p where notRepoPatchV1 :: Maybe (NotRepoPatchV1 p) @@ -87,10 +90,16 @@ withSequence withSequence prop (Sealed2 (WithStartState2 _ ms)) = prop (mergeableSequenceToRL ms) +withSequencePair + :: (CheckedMerge p, PrimBased p) + => (forall wX wY. (RL p :> RL p) wX wY -> r) + -> Sealed2 (WithStartState2 (WithSplit (MergeableSequence p))) -> r +withSequencePair prop (Sealed2 (WithStartState2 _ (WithSplit n ms))) + = prop (splitAtRL n (mergeableSequenceToRL ms)) + withAllSequenceItems :: (CheckedMerge p, PrimBased p, Monoid r) => (forall wX wY. p wX wY -> r) -> Sealed2 (WithStartState2 (MergeableSequence p)) -> r withAllSequenceItems prop (Sealed2 (WithStartState2 _ ms)) = mconcat . mapRL prop . mergeableSequenceToRL $ ms - diff --git a/harness/Darcs/Test/Patch/Arbitrary/Named.hs b/harness/Darcs/Test/Patch/Arbitrary/Named.hs index afc61e25..cc83bc9f 100644 --- a/harness/Darcs/Test/Patch/Arbitrary/Named.hs +++ b/harness/Darcs/Test/Patch/Arbitrary/Named.hs @@ -1,52 +1,95 @@ {-# OPTIONS_GHC -Wno-orphans #-} {-# LANGUAGE UndecidableInstances #-} module Darcs.Test.Patch.Arbitrary.Named - ( + ( WithNames(..) ) where import Darcs.Prelude -import Darcs.Test.Patch.Info () import Darcs.Test.Patch.Arbitrary.Generic import Darcs.Test.Patch.Arbitrary.Shrink import Darcs.Test.Patch.RepoModel import Darcs.Test.Patch.WithState +import Darcs.Test.TestOnly.Instance () +import Darcs.Patch.Apply import Darcs.Patch.Commute +import Darcs.Patch.Info ( PatchInfo, rawPatchInfo ) import Darcs.Patch.Named import Darcs.Patch.Witnesses.Maybe import Darcs.Patch.Witnesses.Ordered import Darcs.Patch.Witnesses.Sealed +import Darcs.Patch.Witnesses.Show -import Control.Applicative ( (<|>) ) +import Data.List ( sort ) import Test.QuickCheck -type instance ModelOf (Named prim) = ModelOf prim +data WithNames m wX = WithNames + { primModel :: m wX + , appliedPatchNames :: [PatchInfo] + } -instance ArbitraryState prim => ArbitraryState (Named prim) where +instance Show1 m => Show1 (WithNames m) + +instance Show1 m => Show (WithNames m wX) where + showsPrec d (WithNames m ns) = + showParen (d > appPrec) + $ showString "WithNames " + . showsPrec1 (appPrec + 1) m + . showString " " + . 
showsPrec (appPrec + 1) ns + +instance RepoModel m => RepoModel (WithNames m) where + type RepoState (WithNames m) = RepoState m + aSmallRepo = WithNames <$> aSmallRepo <*> pure [] + repoApply m p = + WithNames <$> repoApply (primModel m) p <*> pure (appliedPatchNames m ++ patchNames p) + eqModel m1 m2 = + eqModel (primModel m1) (primModel m2) + && sort (appliedPatchNames m1) == sort (appliedPatchNames m2) + showModel = show + +type instance ModelOf (Named p) = WithNames (ModelOf p) + +instance (ArbitraryState p, RepoModel (ModelOf p)) => ArbitraryState (Named p) where arbitraryState rm = do - info <- arbitrary - Sealed (WithEndState prims rm') <- arbitraryState rm - return $ Sealed $ WithEndState (NamedP info [] prims) rm' + info <- arbitrarySimplePatchInfo + deps <- sublistOf (appliedPatchNames rm) + Sealed (WithEndState prims rm') <- arbitraryState (primModel rm) + return $ Sealed $ + WithEndState (NamedP info deps prims) (WithNames rm' (info : appliedPatchNames rm)) + where + -- generate only minimal, human readable (and always valid) 'PatchInfo's + -- with no need for shrinking + arbitrarySimplePatchInfo = do + -- this is hopefully random enough to avoid collisions in practice + name <- vectorOf 20 $ elements ['a'..'z'] + let date = "20240606010532" + author = "tester" + log = [] + inverted = False + return $ rawPatchInfo date name author log inverted instance (Commute p, Shrinkable p) => Shrinkable (Named p) where shrinkInternally (NamedP pi deps ps) = - -- TODO this isn't quite right because other patches might - -- explicitly depend on this one - (\pi' -> NamedP pi' deps ps) <$> shrink pi - <|> NamedP pi deps <$> shrinkInternally ps shrinkAtStart (NamedP pi deps ps) = mapFlipped (NamedP pi deps) <$> shrinkAtStart ps shrinkAtEnd (NamedP pi deps ps) = mapSeal (NamedP pi deps) <$> shrinkAtEnd ps +instance ShrinkModel model p => ShrinkModel (WithNames model) p where + shrinkModelPatch (WithNames m _) = shrinkModelPatch m + instance PropagateShrink prim p => PropagateShrink prim (Named p) where propagateShrink (prim :> NamedP pi deps ps) = do mps' :> mprim' <- propagateShrink (prim :> ps) return (mapMB_MB (NamedP pi deps) mps' :> mprim') -instance (Commute (OnlyPrim p), PrimBased p) => PrimBased (Named p) where +instance (Commute (OnlyPrim p), PrimBased p, RepoModel (ModelOf (OnlyPrim p))) => PrimBased (Named p) where type OnlyPrim (Named p) = Named (OnlyPrim p) primEffect (NamedP _ _ ps) = primEffect @(FL p) ps liftFromPrim (NamedP pi deps ps) = NamedP pi deps (liftFromPrim ps) + +instance Apply p => RepoApply (Named p) where + patchNames p = [patch2patchinfo p] diff --git a/harness/Darcs/Test/Patch/Arbitrary/PrimFileUUID.hs b/harness/Darcs/Test/Patch/Arbitrary/PrimFileUUID.hs index 0c6a3b08..48a7ec99 100644 --- a/harness/Darcs/Test/Patch/Arbitrary/PrimFileUUID.hs +++ b/harness/Darcs/Test/Patch/Arbitrary/PrimFileUUID.hs @@ -51,10 +51,12 @@ instance NullPatch Prim where instance PropagateShrink Prim Prim where propagateShrink = propagatePrim -instance ShrinkModel Prim where +instance ShrinkModel FileUUIDModel Prim where -- no shrinking for now shrinkModelPatch _ = [] +instance RepoApply Prim + ---------------------------------------------------------------------- -- * QuickCheck generators diff --git a/harness/Darcs/Test/Patch/Arbitrary/PrimV1.hs b/harness/Darcs/Test/Patch/Arbitrary/PrimV1.hs index 8c68de0f..adb539ab 100644 --- a/harness/Darcs/Test/Patch/Arbitrary/PrimV1.hs +++ b/harness/Darcs/Test/Patch/Arbitrary/PrimV1.hs @@ -82,6 +82,9 @@ instance Shrinkable Prim.Prim where deriving 
instance Shrinkable V1.Prim deriving instance Shrinkable V2.Prim +instance RepoApply Prim1 +instance RepoApply Prim2 + ---------------------------------------------------------------------- -- * QuickCheck generators @@ -236,8 +239,11 @@ aModelShrinkFileContent repo = do -- | Generates any type of 'prim' patch, except binary and setpref patches. -aPrim :: forall prim wX . (PrimPatch prim, ApplyState prim ~ RepoState V1Model) - => V1Model wX -> Gen (Sealed (WithEndState V1Model (prim wX))) +aPrim + :: forall prim wX + . (PrimPatch prim, ApplyState prim ~ RepoState V1Model, RepoApply prim) + => V1Model wX + -> Gen (Sealed (WithEndState V1Model (prim wX))) aPrim repo = do mbFile <- maybeOf repoFiles mbEmptyFile <- maybeOf $ filter (isEmpty . snd) repoFiles @@ -306,6 +312,7 @@ aPrimPair :: ( PrimPatch prim , ArbitraryState prim , ApplyState prim ~ RepoState V1Model , ModelOf prim ~ V1Model + , RepoApply prim ) => V1Model wX -> Gen (Sealed (WithEndState V1Model (Pair prim wX))) @@ -336,7 +343,7 @@ aPrimPair repo type instance ModelOf Prim.Prim = V1Model -instance ShrinkModel Prim.Prim where +instance ShrinkModel V1Model Prim.Prim where shrinkModelPatch s = aModelShrink s -- Prim1 @@ -345,7 +352,7 @@ instance ArbitraryState Prim1 where arbitraryState = aPrim arbitraryStatePair = aPrimPair -instance ShrinkModel Prim1 where +instance ShrinkModel V1Model Prim1 where shrinkModelPatch s = map (mapSeal V1.Prim) $ shrinkModelPatch s instance PropagateShrink Prim1 Prim1 where @@ -360,7 +367,7 @@ instance ArbitraryState Prim2 where arbitraryState = aPrim arbitraryStatePair = aPrimPair -instance ShrinkModel Prim2 where +instance ShrinkModel V1Model Prim2 where shrinkModelPatch s = map (mapSeal V2.Prim) $ shrinkModelPatch s instance PropagateShrink Prim2 Prim2 where diff --git a/harness/Darcs/Test/Patch/Arbitrary/RepoPatchV1.hs b/harness/Darcs/Test/Patch/Arbitrary/RepoPatchV1.hs index ffa18d8c..70a36aec 100644 --- a/harness/Darcs/Test/Patch/Arbitrary/RepoPatchV1.hs +++ b/harness/Darcs/Test/Patch/Arbitrary/RepoPatchV1.hs @@ -34,7 +34,7 @@ import Darcs.Patch.Witnesses.Sealed ( Sealed(Sealed) ) import Darcs.Test.Patch.Arbitrary.Generic ( MightHaveDuplicate, ArbitraryPrim, PrimBased(..) ) -import Darcs.Test.Patch.Arbitrary.RepoPatch +import Darcs.Test.Patch.Arbitrary.Mergeable import Darcs.Test.Patch.Merge.Checked ( CheckedMerge(..) ) import Darcs.Test.Patch.RepoModel ( RepoState, ModelOf ) import Darcs.Test.Patch.Types.Pair ( Pair(..) ) @@ -46,8 +46,8 @@ import Darcs.Test.Patch.WithState type Patch = RepoPatchV1 V1.Prim instance - (ArbitraryPrim prim, PrimPatch prim, ApplyState prim ~ RepoState (ModelOf prim)) - => ArbitraryRepoPatch (RepoPatchV1 prim) + (ArbitraryPrim prim, ApplyState prim ~ RepoState (ModelOf prim)) + => ArbitraryMergeable (RepoPatchV1 prim) where notRepoPatchV1 = Nothing diff --git a/harness/Darcs/Test/Patch/Arbitrary/RepoPatchV2.hs b/harness/Darcs/Test/Patch/Arbitrary/RepoPatchV2.hs index 3b227607..5895156e 100644 --- a/harness/Darcs/Test/Patch/Arbitrary/RepoPatchV2.hs +++ b/harness/Darcs/Test/Patch/Arbitrary/RepoPatchV2.hs @@ -12,7 +12,7 @@ import Darcs.Test.Patch.Arbitrary.Generic , MightHaveDuplicate(..) , PrimBased(..) ) -import Darcs.Test.Patch.Arbitrary.RepoPatch +import Darcs.Test.Patch.Arbitrary.Mergeable import Darcs.Test.Patch.Merge.Checked ( CheckedMerge(..) 
) import Darcs.Test.Patch.RepoModel ( RepoState, ModelOf ) import Darcs.Test.Patch.WithState ( PropagateShrink ) @@ -27,10 +27,9 @@ instance MightHaveDuplicate (RepoPatchV2 prim) where type instance ModelOf (RepoPatchV2 prim) = ModelOf prim instance ( ArbitraryPrim prim - , PrimPatch prim , ApplyState prim ~ RepoState (ModelOf prim) ) => - ArbitraryRepoPatch (RepoPatchV2 prim) where + ArbitraryMergeable (RepoPatchV2 prim) where notRepoPatchV1 = Just (NotRepoPatchV1 (\case {})) instance PrimPatch prim => CheckedMerge (RepoPatchV2 prim) where diff --git a/harness/Darcs/Test/Patch/Arbitrary/RepoPatchV3.hs b/harness/Darcs/Test/Patch/Arbitrary/RepoPatchV3.hs index 275a6e75..b41e2288 100644 --- a/harness/Darcs/Test/Patch/Arbitrary/RepoPatchV3.hs +++ b/harness/Darcs/Test/Patch/Arbitrary/RepoPatchV3.hs @@ -6,7 +6,7 @@ import Darcs.Prelude import Darcs.Test.Patch.Arbitrary.Generic ( MightHaveDuplicate(..), PrimBased(..), ArbitraryPrim ) import Darcs.Test.Patch.Arbitrary.NamedPrim () -import Darcs.Test.Patch.Arbitrary.RepoPatch +import Darcs.Test.Patch.Arbitrary.Mergeable import Darcs.Test.Patch.Merge.Checked ( CheckedMerge ) import Darcs.Test.Patch.RepoModel ( RepoState, ModelOf ) import Darcs.Test.Patch.WithState ( PropagateShrink ) @@ -25,8 +25,8 @@ instance MightHaveDuplicate (RepoPatchV3 prim) where type instance ModelOf (RepoPatchV3 prim) = ModelOf prim instance - (ArbitraryPrim prim, PrimPatch prim, ApplyState prim ~ RepoState (ModelOf prim)) - => ArbitraryRepoPatch (RepoPatchV3 prim) + (ArbitraryPrim prim, ApplyState prim ~ RepoState (ModelOf prim)) + => ArbitraryMergeable (RepoPatchV3 prim) where notRepoPatchV1 = Just (NotRepoPatchV1 (\case {})) diff --git a/harness/Darcs/Test/Patch/Examples/Set2.hs b/harness/Darcs/Test/Patch/Examples/Set2.hs new file mode 100644 index 00000000..b0480365 --- /dev/null +++ b/harness/Darcs/Test/Patch/Examples/Set2.hs @@ -0,0 +1,513 @@ +-- Copyright (C) 2007 David Roundy +-- +-- This program is free software; you can redistribute it and/or modify +-- it under the terms of the GNU General Public License as published by +-- the Free Software Foundation; either version 2, or (at your option) +-- any later version. +-- +-- This program is distributed in the hope that it will be useful, +-- but WITHOUT ANY WARRANTY; without even the implied warranty of +-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +-- GNU General Public License for more details. +-- +-- You should have received a copy of the GNU General Public License +-- along with this program; see the file COPYING. If not, write to +-- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, +-- Boston, MA 02110-1301, USA. + +{-# LANGUAGE OverloadedStrings #-} +{-# OPTIONS_GHC -Wno-orphans #-} +module Darcs.Test.Patch.Examples.Set2 + ( primPermutables + , primPatches + , commutables + , commutablesFL + , repov2Commutables + , repov2Mergeables + , repov2Triples + , repov2NonduplicateTriples + , repov2Patches + , repov2PatchLoopExamples + ) where + +import Darcs.Prelude + +import qualified Data.ByteString.Char8 as BC ( pack ) +import Data.Maybe ( catMaybes ) +import Data.String ( IsString(..) ) +import qualified Data.ByteString as B ( ByteString ) + +import Darcs.Patch ( hunk, invert ) +import Darcs.Patch.Commute ( Commute(..) 
) +import Darcs.Patch.FromPrim ( fromAnonymousPrim ) +import Darcs.Patch.Invert ( Invert ) +import Darcs.Patch.Merge ( Merge, merge, mergeFL ) +import Darcs.Patch.Prim ( PrimPatch ) +import Darcs.Patch.V2 ( RepoPatchV2 ) +import qualified Darcs.Patch.V2.Prim as V2 +import Darcs.Patch.Witnesses.Ordered +import Darcs.Patch.Witnesses.Sealed +import Darcs.Patch.Witnesses.Unsafe ( unsafeCoercePEnd, unsafeCoercePStart ) +import Darcs.Util.Path ( AnchoredPath, makeName, unsafeFloatPath ) + +import Darcs.Test.Patch.Arbitrary.Generic ( notDuplicatestriple ) +import Darcs.Test.Patch.Arbitrary.PatchTree + ( Tree(..) + , TreeWithFlattenPos(..) + , canonizeTree + , commutePairFromTWFP + , commutePairFromTree + , commuteTripleFromTree + , getPairs + , getTriples + , mergePairFromCommutePair + ) +import Darcs.Test.Patch.Arbitrary.PrimV1 () +import Darcs.Test.Patch.Arbitrary.RepoPatchV2 () +import Darcs.Test.Patch.Types.Merged ( Merged ) +import Darcs.Test.Patch.V1Model ( Content, V1Model, makeFile, makeRepo ) +import Darcs.Test.Patch.WithState ( WithStartState(..) ) + +instance IsString AnchoredPath where + fromString = unsafeFloatPath + +type Prim2 = V2.Prim + +type Patch = RepoPatchV2 Prim2 + +makeSimpleRepo :: String -> Content -> V1Model wX +makeSimpleRepo filename content = + makeRepo [(either error id $ makeName filename, makeFile content)] + +withStartState :: s wX -> p wX -> Sealed (WithStartState s p) +withStartState s p = seal (WithStartState s p) + +tripleExamples :: [Sealed2 (Patch :> Patch :> Patch)] +tripleExamples = + catMaybes + [ commuteTripleFromTree seal2 $ + withStartState + (makeSimpleRepo "file" []) + (ParTree + (SeqTree + (hunk "file" 1 [] ["g"]) + (SeqTree + (hunk "file" 2 [] ["j"]) + (SeqTree (hunk "file" 1 [] ["s"]) NilTree))) + (SeqTree (hunk "file" 1 [] ["e"]) NilTree)) + , commuteTripleFromTree seal2 $ + withStartState + (makeSimpleRepo "file" ["j"]) + (ParTree + (SeqTree + (hunk "file" 1 [] ["s"]) + (ParTree + (SeqTree (hunk "file" 2 ["j"] []) NilTree) + (SeqTree (hunk "file" 2 ["j"] []) NilTree))) + (SeqTree (hunk "file" 1 ["j"] []) NilTree)) + ] + +mergeExamples :: [Sealed2 (Patch :\/: Patch)] +mergeExamples = map (unseal2 (mergePairFromCommutePair seal2)) commuteExamples + +commuteExamples :: [Sealed2 (Patch :> Patch)] +commuteExamples = + catMaybes + [ commutePairFromTWFP seal2 $ + withStartState (makeSimpleRepo "file" []) + (TWFP 3 + (ParTree + (SeqTree (hunk "file" 1 [] ["h"]) NilTree) + (SeqTree (hunk "file" 1 [] ["b"]) + (SeqTree (hunk "file" 1 [] ["f"]) + (SeqTree (hunk "file" 1 [] ["v"]) + (SeqTree (hunk "file" 2 ["f"] []) NilTree)))))) + , commutePairFromTWFP seal2 $ + withStartState + (makeSimpleRepo "file" ["f","s","d"]) + (TWFP 3 + (ParTree + (SeqTree (hunk "file" 3 ["d"] []) NilTree) + (ParTree + (SeqTree (hunk "file" 1 ["f"] []) NilTree) + (SeqTree (hunk "file" 1 ["f"] []) + (SeqTree (hunk "file" 1 ["s","d"] []) + (SeqTree (hunk "file" 1 [] ["v"]) NilTree)))))) +{- , commutePairFromTWFP seal2 $ + withStartState + (makeSimpleRepo "file" ["f","u", + "s","d"]) + (TWFP 5 + (ParTree + (SeqTree (hunk "file" 5 [] ["x"]) + (SeqTree (hunk "file" 4 ["d"] []) NilTree)) + (ParTree + (SeqTree (hunk "file" 1 ["f","u"] []) NilTree) + (SeqTree (hunk "file" 1 ["f"] []) + (SeqTree (hunk "file" 1 ["u","s","d"] []) + (SeqTree (hunk "file" 1 [] ["a"]) + (SeqTree (hunk "file" 1 ["a"] []) NilTree))))))) +-} + , commutePairFromTree seal2 $ + withStartState (makeSimpleRepo "file" ["n","t","h"]) + (ParTree + (SeqTree (hunk "file" 1 ["n","t","h"] []) + NilTree) + (SeqTree (hunk "file" 
3 ["h"] []) + (SeqTree (hunk "file" 1 ["n"] []) + (SeqTree (hunk "file" 1 ["t"] []) NilTree)))) + , commutePairFromTree seal2 $ + withStartState (makeSimpleRepo "file" []) + (ParTree + (SeqTree (hunk "file" 1 [] ["n"]) NilTree) + (SeqTree (hunk "file" 1 [] ["i"]) + (SeqTree (hunk "file" 1 [] ["i"]) NilTree))) + , commutePairFromTree seal2 $ + withStartState (makeSimpleRepo "file" []) + (ParTree + (SeqTree (hunk "file" 1 [] ["c"]) + (ParTree + (SeqTree (hunk "file" 1 ["c"] ["r"]) NilTree) + (SeqTree (hunk "file" 1 [] ["h"]) + (SeqTree (hunk "file" 1 [] ["d"]) NilTree)))) + (SeqTree (hunk "file" 1 [] ["f"]) NilTree)) + , commutePairFromTWFP seal2 $ + withStartState (makeSimpleRepo "file" []) + (TWFP 1 + (ParTree + (ParTree + (SeqTree (hunk "file" 1 [] ["t"]) NilTree) + (SeqTree (hunk "file" 1 [] ["t"]) NilTree)) + (SeqTree (hunk "file" 1 [] ["f"]) NilTree))) + , commutePairFromTWFP seal2 $ + withStartState + (makeSimpleRepo "file" ["f", " r", "c", "v"]) + (TWFP 4 + (ParTree + (SeqTree (hunk "file" 3 ["c","v"] []) + (ParTree + (SeqTree (hunk "file" 2 ["r"] []) + (SeqTree (hunk "fi le" 1 ["f"] []) NilTree)) + (SeqTree (hunk "file" 1 ["f","r"] []) + (SeqTree (hunk "file" 1 [] ["y"]) NilTree)))) + (SeqTree (hunk "file" 4 ["v"] []) NilTree))) + , commutePairFromTree seal2 $ + withStartState (makeSimpleRepo "file" []) + (ParTree + (SeqTree (hunk "file" 1 [] ["z"]) NilTree) + (ParTree + (SeqTree (hunk "file" 1 [] ["f"]) NilTree) + (ParTree + (SeqTree (hunk "file" 1 [] ["r"]) NilTree) + (SeqTree (hunk "file" 1 [] ["d"]) NilTree)))) + , commutePairFromTree seal2 $ + withStartState (makeSimpleRepo "file" ["t","r","h"]) + (ParTree + (ParTree + (SeqTree (hunk "file" 1 ["t","r","h"] []) + NilTree) + (SeqTree (hunk "file" 1 [] ["o"]) NilTree)) + (SeqTree (hunk "file" 1 ["t"] []) + (SeqTree (hunk "file" 2 ["h"] []) NilTree))) + , commutePairFromTWFP seal2 $ + withStartState (makeSimpleRepo "file" []) $ + TWFP 2 + (ParTree + (SeqTree (hunk "file" 1 [] ["h"]) NilTree) + (SeqTree (hunk "file" 1 [] ["y"]) + (SeqTree (hunk "file" 2 [] ["m"]) + (SeqTree (hunk "file" 1 [] ["v"]) NilTree)))) + , commutePairFromTree seal2 $ + withStartState (makeSimpleRepo "file" []) + (ParTree + (SeqTree (hunk "file" 1 [] ["p"]) + (SeqTree (hunk "file" 1 ["p"] []) + (SeqTree (hunk "file" 1 [] ["c"]) NilTree))) + (SeqTree (hunk "file" 1 [] ["z"]) NilTree)) + , commutePairFromTree seal2 $ + withStartState (makeSimpleRepo "file" []) + (ParTree + (SeqTree (hunk "file" 1 [] ["j" ]) + (SeqTree (hunk "file" 1 ["j"] []) NilTree)) + (SeqTree (hunk "file" 1 [] ["v"]) NilTree)) + , commutePairFromTree seal2 $ + withStartState (makeSimpleRepo "file" []) + (ParTree + (SeqTree (hunk "file" 1 [] ["v"]) NilTree) + (SeqTree (hunk "file" 1 [] ["j" ]) + (SeqTree (hunk "file" 1 ["j"] []) NilTree))) + , commutePairFromTree seal2 $ + withStartState (makeSimpleRepo "file" ["x","c"]) + (ParTree + (SeqTree (hunk "file" 1 [] ["h"]) + (ParTree + (SeqTree (hunk "file" 3 ["c"] []) NilTree) + (SeqTree (hunk "file" 2 ["x"] []) + (SeqTree (hunk "file" 1 [] ["j"]) NilTree)))) + (SeqTree (hunk "file" 1 [] ["l"]) NilTree)) + , commutePairFromTree seal2 $ + withStartState (makeSimpleRepo "file" []) + (ParTree + (SeqTree (hunk "file" 1 [] (packStringLetters "s")) NilTree) + (SeqTree (hunk "file" 1 [] (packStringLetters "k")) + (SeqTree (hunk "file" 1 (packStringLetters "k") []) + (SeqTree (hunk "file" 1 [] (packStringLetters "m")) + (SeqTree (hunk "file" 1 (packStringLetters "m") []) NilTree))))) + ] + +packStringLetters :: String -> [B.ByteString] +packStringLetters 
s = [ BC.pack [c] | c <- s ] + +repov2PatchLoopExamples :: [Sealed (WithStartState V1Model (Tree Prim2))] +repov2PatchLoopExamples = + [Sealed (WithStartState (makeSimpleRepo fx []) + $ canonizeTree + (ParTree + (SeqTree (hunk fx 1 [] (packStringLetters "pkotufogbvdabnmbzajvolwviqebieonxvcvuvigkfgybmqhzuaaurjspd")) + (ParTree + (SeqTree (hunk fx 47 (packStringLetters "qhzu") (packStringLetters "zafybdcokyjskcgnvhkbzpysaafnjjhcstgrczplxsfwagmh")) + (ParTree + (ParTree + NilTree + (ParTree + (ParTree + (ParTree + (SeqTree (hunk fx 15 (packStringLetters "mbzajvolwviqebieonxvcvuvigkfgyb") (packStringLetters "vujnxnhvybvpouyciaabszfmgssezlwwjgnethvrpnfrkubphzvdgymjjoacppqps")) + (ParTree + NilTree + (ParTree + (SeqTree (hunk fx 40 (packStringLetters "ssezlwwjgnethvrpnfrkubphzvdgymjjoacppqpsmzafybdcokyjskcgnvhkbz") (packStringLetters "wnesidpccwoiqiichxaaejdsyrhrusqljlcoro")) + (ParTree + (ParTree + (SeqTree (hunk fx 12 (packStringLetters "abnvujnxnhvybvpouyciaabszfmgwnesidpccwoiqii") (packStringLetters "czfdhqkipdstfjycqaxwnbxrihrufdeyneqiiiafwzlmg")) NilTree) + NilTree) + NilTree)) + (SeqTree (hunk fx 25 [] (packStringLetters "dihgmsotezucqdgxczvcivijootyvhlwymbiueufnvpwpeukmskqllalfe")) NilTree)))) + (SeqTree (hunk fx 56 (packStringLetters "yjskcgnvhkbzpysaafnjjhcstgrczplxsfwagmhaaurjsp") (packStringLetters "xldhrutyhcyaqeezwujiguawfyawjjqlirxshjddvq")) NilTree)) + (SeqTree (hunk fx 20 [] (packStringLetters "ooygwiyogqrqnytixqtmvdxx")) + (SeqTree (hunk fx 26 (packStringLetters "yogqrqnytixqtmvdxxvolwviqebieonxvcvuvigkfgybmzafybdcokyjskcgnvhkbz") (packStringLetters "akhsmlbkdxnvfoikmiatfbpzdrsyykkpoxvvddeaspzxe")) + (SeqTree (hunk fx 39 [] (packStringLetters "ji")) + (ParTree + NilTree + (ParTree + NilTree + (ParTree + (ParTree + NilTree + (SeqTree (hunk fx 26 (packStringLetters "akhsmlbkdxnvfjioikmiatfbpzdrsyykkpoxvvddeaspzxepysaafnjjhcstgrczplxs") (packStringLetters "onjbhddskcj")) + (SeqTree (hunk fx 39 [] (packStringLetters "fyscunxxxjjtyqpfxeznhtwvlphmp")) NilTree))) + (ParTree + NilTree + (SeqTree (hunk fx 44 [] (packStringLetters "xcchzwmzoezxkmkhcmesplnjpqriypshgiqklgdnbmmkldnydiy")) + (ParTree + NilTree + (SeqTree (hunk fx 64 (packStringLetters "plnjpqriypshgiqklgdnbmmkldnydiymiatfbpzdrsyykkpoxvvddeaspzxepysaafn") (packStringLetters "anjlzfdqbjqbcplvqvkhwjtkigp")) NilTree))))))))))) + (ParTree + NilTree + NilTree))) + NilTree)) + NilTree)) + (ParTree + NilTree + (SeqTree (hunk fx 1 [] (packStringLetters "ti")) + (SeqTree (hunk fx 1 (packStringLetters "t") (packStringLetters "ybcop")) + (SeqTree (hunk fx 2 [] (packStringLetters "dvlhgwqlpaeweerqrhnjtfolczbqbzoccnvdsyqiefqitrqneralf")) + (SeqTree (hunk fx 15 [] (packStringLetters "yairbjphwtnaerccdlfewujvjvmjakbc")) + (SeqTree (hunk fx 51 [] (packStringLetters "xayvfuwaiiogginufnhsrmktpmlbvxiakjwllddkiyofyfw")) + (ParTree + NilTree + NilTree)))))))))] + where + fx :: IsString a => a + fx = "F" + +quickhunk :: PrimPatch prim => Int -> String -> String -> prim wX wY +quickhunk l o n = + hunk "test" l (map (\c -> BC.pack [c]) o) (map (\c -> BC.pack [c]) n) + +primPermutables :: [(Prim2 :> Prim2 :> Prim2) wX wY] +primPermutables = + [quickhunk 0 "e" "bo" :> quickhunk 3 "" "x" :> quickhunk 2 "f" "qljo"] + +mergeables :: [(Prim2 :\/: Prim2) wX wY] +mergeables = + [ quickhunk 1 "a" "b" :\/: quickhunk 1 "a" "c" + , quickhunk 1 "a" "b" :\/: quickhunk 3 "z" "c" + , quickhunk 0 "" "a" :\/: quickhunk 1 "" "b" + , quickhunk 0 "a" "" :\/: quickhunk 1 "" "b" + , quickhunk 0 "a" "" :\/: quickhunk 1 "b" "" + , quickhunk 0 "" "a" :\/: quickhunk 1 "b" "" 
+ ] + +mergeablesFL :: [(FL Prim2 :\/: FL Prim2) wX wY] +mergeablesFL = map (\(x :\/: y) -> (x :>: NilFL) :\/: (y :>: NilFL)) mergeables + -- ++ [(quickhunk 1 "a" "b" :>: quickhunk 3 "z" "c" :>: NilFL) + -- :\/: (quickhunk 1 "a" "z" :>: NilFL), + -- (quickhunk 1 "a" "b" :>: quickhunk 1 "b" "c" :>: NilFL) + -- :\/: (quickhunk 1 "a" "z" :>: NilFL)] + +mergeable2commutable :: Invert p => (p :\/: p) wX wY -> (p :> p) wX wY +mergeable2commutable (x :\/: y) = (invert x) :> y + +commutablesFL :: [(FL Prim2 :> FL Prim2) wX wY] +commutablesFL = map mergeable2commutable mergeablesFL + +commutables :: [(Prim2 :> Prim2) wX wY] +commutables = map mergeable2commutable mergeables + +primPatches :: [Sealed2 Prim2] +primPatches = concatMap mergeable2patches mergeables + where + mergeable2patches (x :\/: y) = [Sealed2 x, Sealed2 y] + +repov2Patches :: [Sealed2 Patch] +repov2Patches = concatMap commutable2patches repov2Commutables + where + commutable2patches (Sealed2 (x :> y)) = [Sealed2 x, Sealed2 y] + +typedMerge + :: Merge p => (p :\/: p) wA wB -> (p wA (Merged wA wB), p wB (Merged wA wB)) +typedMerge (p :\/: q) = + case merge (p :\/: q) of + (q' :/\: p') -> (unsafeCoercePEnd q', unsafeCoercePEnd p') + +repov2Triples :: [Sealed2 (Patch :> Patch :> Patch)] +repov2Triples + | oa <- fromAnonymousPrim $ quickhunk 1 "o" "aa" + , oa2 <- fromAnonymousPrim $ quickhunk 1 "o" "aa" + , a2 <- fromAnonymousPrim $ quickhunk 2 "a34" "2xx" + , ob <- fromAnonymousPrim $ quickhunk 1 "o" "bb" + , (ob', oa') <- typedMerge (oa :\/: ob) + , (a2', _) <- typedMerge (ob' :\/: a2) + , (a2'', _) <- typedMerge (oa2 :\/: a2') = + [Sealed2 (ob' :> oa2 :> a2''), Sealed2 (oa' :> oa2 :> a2'')] ++ + tripleExamples ++ getTriples repov2FL + +repov2NonduplicateTriples :: [Sealed2 (Patch :> Patch :> Patch)] +repov2NonduplicateTriples = filter (unseal2 notDuplicatestriple) repov2Triples + +repov2FL :: FL Patch wX wX +repov2FL + | oa <- fromAnonymousPrim $ quickhunk 1 "o" "a" + , ps :/\: _ <- + merge (oa :>: invert oa :>: nilFL :\/: oa :>: invert oa :>: nilFL) = + oa :>: invert oa :>: oa :>: invert oa :>: + unsafeCoercePEnd ps +>+ oa :>: invert oa :>: nilFL + +repov2Commutables :: [Sealed2 (Patch :> Patch)] +repov2Commutables + | oa <- fromAnonymousPrim $ quickhunk 1 "o" "a" + , ob <- fromAnonymousPrim $ quickhunk 1 "o" "b" + , _ :/\: ob' <- mergeFL (ob :\/: oa :>: invert oa :>: nilFL) = + commuteExamples ++ + map (mapSeal2 mergeable2commutable) repov2Mergeables ++ + [Sealed2 (invert oa :> ob')] ++ + getPairs repov2FL + +repov2Mergeables :: [Sealed2 (Patch :\/: Patch)] +repov2Mergeables + | oa <- fromAnonymousPrim $ quickhunk 1 "o" "aa" + , a2 <- fromAnonymousPrim $ quickhunk 2 "a34" "2xx" + , og <- fromAnonymousPrim $ quickhunk 3 "4" "g" + , ob <- fromAnonymousPrim $ quickhunk 1 "o" "bb" + , b2 <- fromAnonymousPrim $ quickhunk 2 "b" "2" + , oc <- fromAnonymousPrim $ quickhunk 1 "o" "cc" + , od <- fromAnonymousPrim $ quickhunk 7 "x" "d" + , oe <- fromAnonymousPrim $ quickhunk 7 "x" "e" + , pf <- fromAnonymousPrim $ quickhunk 7 "x" "f" + , od'' <- fromAnonymousPrim $ quickhunk 8 "x" "d" + , ob' :>: b2' :>: _ :/\: _ <- mergeFL (oa :\/: ob :>: b2 :>: nilFL) + , a2' :/\: _ <- merge (ob' :\/: a2) + , ob'' :/\: _ <- merge (a2 :\/: ob') + , og' :/\: _ <- merge (oa :\/: og) + , og'' :/\: _ <- merge (a2 :\/: og') + , og''' :/\: _ <- merge (ob' :\/: og') + , oc' :/\: _ <- merge (oa :\/: oc) + , oc'' :/\: _ <- merge (a2 :\/: oc) + , oc''' :/\: _ <- merge (ob' :\/: oc') + , oe' :/\: _ <- merge (od :\/: oe) + , of' :/\: _ <- merge (od :\/: pf) = + map + 
(\(x :\/: y) -> Sealed2 (fromAnonymousPrim x :\/: fromAnonymousPrim y)) + mergeables ++ + repov2IglooMergeables ++ + repov2QuickcheckMergeables ++ + mergeExamples ++ + catMaybes (map pair2m (getPairs repov2FL)) ++ + [ Sealed2 (oa :\/: od) + , Sealed2 (oa :\/: unsafeCoercePStart a2') + , Sealed2 (ob' :\/: od'') + , Sealed2 (oe :\/: od) + , Sealed2 (of' :\/: oe') + , Sealed2 (ob' :\/: oe') + , Sealed2 (oa :\/: oe') + , Sealed2 (ob' :\/: oc') + , Sealed2 (b2' :\/: oc''') + , Sealed2 (ob' :\/: a2) + , Sealed2 (b2' :\/: og''') + , Sealed2 (oc''' :\/: og''') + , Sealed2 (oc'' :\/: og'') + , Sealed2 (ob'' :\/: og'') + , Sealed2 (ob'' :\/: oc'') + , Sealed2 (oc' :\/: od'') + ] + | otherwise = error "impossible" + +repov2IglooMergeables :: [Sealed2 (Patch :\/: Patch)] +repov2IglooMergeables + | a <- fromAnonymousPrim $ quickhunk 1 "1" "A" + , b <- fromAnonymousPrim $ quickhunk 2 "2" "B" + , c <- fromAnonymousPrim $ quickhunk 3 "3" "C" + , x <- fromAnonymousPrim $ quickhunk 1 "1BC" "xbc" + , y <- fromAnonymousPrim $ quickhunk 1 "A2C" "ayc" + , z <- fromAnonymousPrim $ quickhunk 1 "AB3" "abz" + , x' :/\: _ <- merge (a :\/: x) + , y' :/\: _ <- merge (b :\/: y) + , z' :/\: _ <- merge (c :\/: z) = + [ Sealed2 (a :\/: b) + , Sealed2 (b :\/: c) + , Sealed2 (a :\/: c) + , Sealed2 (x :\/: a) + , Sealed2 (y :\/: b) + , Sealed2 (z :\/: c) + , Sealed2 (x' :\/: y') + , Sealed2 (z' :\/: y') + , Sealed2 (x' :\/: z') + , Sealed2 (a :\/: a) + ] + +repov2QuickcheckMergeables :: [Sealed2 (Patch :\/: Patch)] +repov2QuickcheckMergeables + | hb <- fromAnonymousPrim $ quickhunk 0 "" "hb" + , k <- fromAnonymousPrim $ quickhunk 0 "" "k" + , n <- fromAnonymousPrim $ quickhunk 0 "" "n" + , b <- fromAnonymousPrim $ quickhunk 1 "b" "" + , d <- fromAnonymousPrim $ quickhunk 2 "" "d" + , d' :/\: _ <- merge (b :\/: d) + -- , k1 :>: n1 :>: _ :/\: _ <- mergeFL (hb :\/: k :>: n :>: nilFL) + -- , k2 :>: n2 :>: _ :/\: _ <- merge (hb :>: b :>: nilFL :\/: k :>: n :>: nilFL) + , k' :>: n' :>: _ :/\: _ :>: b' :>: _ <- + merge (hb :>: b :>: d' :>: nilFL :\/: k :>: n :>: nilFL) + , i <- fromAnonymousPrim $ quickhunk 0 "" "i" + , x <- fromAnonymousPrim $ quickhunk 0 "" "x" + , xi <- fromAnonymousPrim $ quickhunk 0 "xi" "" + , d3 :/\: _ <- merge (xi :\/: d) + , _ :/\: k3 <- mergeFL (k :\/: i :>: x :>: xi :>: d3 :>: nilFL) = + -- Merging inverted RepoPatchV2 is no longer supported: + -- [Sealed2 (invert k1 :\/: n1), Sealed2 (invert k2 :\/: n2)] ++ + [ Sealed2 (hb :\/: k) + , Sealed2 (b' :\/: b') + , Sealed2 (n' :\/: n') + , Sealed2 (b :\/: d) + , Sealed2 (k' :\/: k') + , Sealed2 (k3 :\/: k3) + ] ++ catMaybes (map pair2m (getPairs (hb :>: b :>: d' :>: k' :>: n' :>: nilFL))) + | otherwise = error "impossible" + +pair2m :: Sealed2 (Patch :> Patch) -> Maybe (Sealed2 (Patch :\/: Patch)) +pair2m (Sealed2 (xx :> y)) = do + y' :> _ <- commute (xx :> y) + return $ Sealed2 (xx :\/: y') + +nilFL :: FL Patch wX wX +nilFL = NilFL diff --git a/harness/Darcs/Test/Patch/Examples/Set2Unwitnessed.hs b/harness/Darcs/Test/Patch/Examples/Set2Unwitnessed.hs deleted file mode 100644 index 01cffe60..00000000 --- a/harness/Darcs/Test/Patch/Examples/Set2Unwitnessed.hs +++ /dev/null @@ -1,503 +0,0 @@ --- Copyright (C) 2007 David Roundy --- --- This program is free software; you can redistribute it and/or modify --- it under the terms of the GNU General Public License as published by --- the Free Software Foundation; either version 2, or (at your option) --- any later version. 
--- --- This program is distributed in the hope that it will be useful, --- but WITHOUT ANY WARRANTY; without even the implied warranty of --- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the --- GNU General Public License for more details. --- --- You should have received a copy of the GNU General Public License --- along with this program; see the file COPYING. If not, write to --- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, --- Boston, MA 02110-1301, USA. - -{-# LANGUAGE OverloadedStrings #-} -{-# OPTIONS_GHC -Wno-orphans #-} -{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} - -module Darcs.Test.Patch.Examples.Set2Unwitnessed - ( primPermutables, primPatches - , commutables, commutablesFL - , repov2Commutables , repov2Mergeables, repov2Triples - , repov2NonduplicateTriples, repov2Patches, repov2PatchLoopExamples - ) where - -import Darcs.Prelude - -import Data.Maybe ( catMaybes ) -import qualified Data.ByteString.Char8 as BC ( pack ) -import Data.String ( IsString(..) ) - -import Darcs.Patch.Witnesses.Sealed -import Darcs.Patch ( invert, hunk ) -import Darcs.Patch.Commute ( Commute ) -import Darcs.Patch.Invert ( Invert ) -import Darcs.Patch.FromPrim ( fromAnonymousPrim ) -import Darcs.Patch.Prim ( PrimPatch ) -import Darcs.Patch.V2 ( RepoPatchV2 ) --- import Darcs.Test.Patch.Test () -- for instance Eq Patch --- import Darcs.Test.Patch.Examples.Set2Unwitnessed -import Darcs.Patch.Witnesses.Unsafe ( unsafeCoerceP ) -import qualified Darcs.Test.Patch.Arbitrary.Generic as W ( notDuplicatestriple ) -import Darcs.Test.Patch.Arbitrary.RepoPatchV2 () -import Darcs.Test.Patch.Arbitrary.PrimV1 () ---import Darcs.Util.Printer ( greenText ) ---import Darcs.Util.Printer.Color ( traceDoc ) ---import Darcs.Util.Printer.Color ( errorDoc ) -import Darcs.Util.Printer.Color () -- for instance Show Doc -import Darcs.Test.Patch.WSub - - -import qualified Darcs.Patch.Witnesses.Ordered as W ( (:>), (:\/:) ) -import qualified Data.ByteString as B ( ByteString ) -import Darcs.Test.Patch.V1Model ( V1Model, Content - , makeRepo, makeFile) -import Darcs.Test.Patch.WithState ( WithStartState(..) ) -import Darcs.Util.Path ( AnchoredPath, unsafeFloatPath, makeName ) -import Darcs.Patch.FromPrim ( PrimPatchBase(..), FromPrim ) -import Darcs.Patch.Merge ( Merge ) -import Darcs.Test.Patch.Arbitrary.PatchTree - ( Tree(..) - , TreeWithFlattenPos(..) 
- , commutePairFromTree, commuteTripleFromTree - , mergePairFromCommutePair, commutePairFromTWFP - , canonizeTree - ) - -instance IsString AnchoredPath where - fromString = unsafeFloatPath - - --- import Debug.Trace - -type Patch = RepoPatchV2 Prim2 - -makeSimpleRepo :: String -> Content -> V1Model wX -makeSimpleRepo filename content = - makeRepo [(either error id $ makeName filename, makeFile content)] - -withStartState :: s wX -> p wX -> Sealed (WithStartState s p) -withStartState s p = seal (WithStartState s p) - -w_tripleExamples :: (FromPrim p, Merge p, PrimPatchBase p) - => [Sealed2 (p W.:> p W.:> p)] -w_tripleExamples = catMaybes [commuteTripleFromTree seal2 $ - withStartState (makeSimpleRepo "file" []) - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "g"]) - (SeqTree (hunk "file" 2 [] [BC.pack "j"]) - (SeqTree (hunk "file" 1 [] [BC.pack "s"]) NilTree))) - (SeqTree (hunk "file" 1 [] [BC.pack "e"]) NilTree)) - ,commuteTripleFromTree seal2 $ - withStartState (makeSimpleRepo "file" [BC.pack "j"]) - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "s"]) - (ParTree - (SeqTree (hunk "file" 2 [BC.pack "j"] []) NilTree) - (SeqTree (hunk "file" 2 [BC.pack "j"] []) NilTree))) - (SeqTree (hunk "file" 1 [BC.pack "j"] []) NilTree)) - ] - - -w_mergeExamples :: (FromPrim p, Commute p, Merge p, PrimPatchBase p) - => [Sealed2 (p W.:\/: p)] -w_mergeExamples = map (unseal2 (mergePairFromCommutePair seal2)) w_commuteExamples - -w_commuteExamples :: (FromPrim p, Merge p, PrimPatchBase p) => [Sealed2 (p W.:> p)] -w_commuteExamples = catMaybes [ - commutePairFromTWFP seal2 $ - withStartState (makeSimpleRepo "file" []) - (TWFP 3 - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "h"]) NilTree) - (SeqTree (hunk "file" 1 [] [BC.pack "b"]) - (SeqTree (hunk "file" 1 [] [BC.pack "f"]) - (SeqTree (hunk "file" 1 [] [BC.pack "v"]) - (SeqTree (hunk "file" 2 [BC.pack "f"] []) NilTree)))))), - commutePairFromTWFP seal2 $ - withStartState - (makeSimpleRepo "file" [BC.pack "f",BC.pack "s",BC.pack "d"]) - (TWFP 3 - (ParTree - (SeqTree (hunk "file" 3 [BC.pack "d"] []) NilTree) - (ParTree - (SeqTree (hunk "file" 1 [BC.pack "f"] []) NilTree) - (SeqTree (hunk "file" 1 [BC.pack "f"] []) - (SeqTree (hunk "file" 1 [BC.pack "s",BC.pack "d"] []) - (SeqTree (hunk "file" 1 [] [BC.pack "v"]) NilTree)))))), -{- commutePairFromTWFP seal2 $ - withStartState - (makeSimpleRepo "file" [BC.pack "f",BC.pack "u", - BC.pack "s",BC.pack "d"]) - (TWFP 5 - (ParTree - (SeqTree (hunk "file" 5 [] [BC.pack "x"]) - (SeqTree (hunk "file" 4 [BC.pack "d"] []) NilTree)) - (ParTree - (SeqTree (hunk "file" 1 [BC.pack "f",BC.pack "u"] []) NilTree) - (SeqTree (hunk "file" 1 [BC.pack "f"] []) - (SeqTree (hunk "file" 1 [BC.pack "u",BC.pack "s",BC.pack "d"] []) - (SeqTree (hunk "file" 1 [] [BC.pack "a"]) - (SeqTree (hunk "file" 1 [BC.pack "a"] []) NilTree))))))),-} - commutePairFromTree seal2 $ - withStartState (makeSimpleRepo "file" [BC.pack "n",BC.pack "t",BC.pack "h"]) - (ParTree - (SeqTree (hunk "file" 1 [BC.pack "n",BC.pack "t",BC.pack "h"] []) - NilTree) - (SeqTree (hunk "file" 3 [BC.pack "h"] []) - (SeqTree (hunk "file" 1 [BC.pack "n"] []) - (SeqTree (hunk "file" 1 [BC.pack "t"] []) NilTree)))), - commutePairFromTree seal2 $ - withStartState (makeSimpleRepo "file" []) - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "n"]) NilTree) - (SeqTree (hunk "file" 1 [] [BC.pack "i"]) - (SeqTree (hunk "file" 1 [] [BC.pack "i"]) NilTree))), - commutePairFromTree seal2 $ - withStartState (makeSimpleRepo "file" []) - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "c"]) - 
(ParTree - (SeqTree (hunk "file" 1 [BC.pack "c"] [BC.pack "r"]) NilTree) - (SeqTree (hunk "file" 1 [] [BC.pack "h"]) - (SeqTree (hunk "file" 1 [] [BC.pack "d"]) NilTree)))) - (SeqTree (hunk "file" 1 [] [BC.pack "f"]) NilTree)), - commutePairFromTWFP seal2 $ - withStartState (makeSimpleRepo "file" []) - (TWFP 1 - (ParTree - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "t"]) NilTree) - (SeqTree (hunk "file" 1 [] [BC.pack "t"]) NilTree)) - (SeqTree (hunk "file" 1 [] [BC.pack "f"]) NilTree))), - commutePairFromTWFP seal2 $ - withStartState (makeSimpleRepo "file" [BC.pack "f",BC.pack " r", - BC.pack "c",BC.pack "v"]) - (TWFP 4 - (ParTree - (SeqTree (hunk "file" 3 [BC.pack "c",BC.pack "v"] []) - (ParTree - (SeqTree (hunk "file" 2 [BC.pack "r"] []) - (SeqTree (hunk "fi le" 1 [BC.pack "f"] []) NilTree)) - (SeqTree (hunk "file" 1 [BC.pack "f",BC.pack "r"] []) - (SeqTree (hunk "file" 1 [] [BC.pack "y"]) NilTree)))) - (SeqTree (hunk "file" 4 [BC.pack "v"] []) NilTree))), - commutePairFromTree seal2 $ - withStartState (makeSimpleRepo "file" []) - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "z"]) NilTree) - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "f"]) NilTree) - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "r"]) NilTree) - (SeqTree (hunk "file" 1 [] [BC.pack "d"]) NilTree)))) - , commutePairFromTree seal2 $ - withStartState (makeSimpleRepo "file" [BC.pack "t",BC.pack "r",BC.pack "h"]) - (ParTree - (ParTree - (SeqTree (hunk "file" 1 [BC.pack "t",BC.pack "r",BC.pack "h"] []) - NilTree) - (SeqTree (hunk "file" 1 [] [BC.pack "o"]) NilTree)) - (SeqTree (hunk "file" 1 [BC.pack "t"] []) - (SeqTree (hunk "file" 2 [BC.pack "h"] []) NilTree))) - , commutePairFromTWFP seal2 $ - withStartState (makeSimpleRepo "file" []) $ - TWFP 2 - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "h"]) NilTree) - (SeqTree (hunk "file" 1 [] [BC.pack "y"]) - (SeqTree (hunk "file" 2 [] [BC.pack "m"]) - (SeqTree (hunk "file" 1 [] [BC.pack "v"]) NilTree)))) - , commutePairFromTree seal2 $ - withStartState (makeSimpleRepo "file" []) - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "p"]) - (SeqTree (hunk "file" 1 [BC.pack "p"] []) - (SeqTree (hunk "file" 1 [] [BC.pack "c"]) NilTree))) - (SeqTree (hunk "file" 1 [] [BC.pack "z"]) NilTree)) - , commutePairFromTree seal2 $ - withStartState (makeSimpleRepo "file" []) - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "j" ]) - (SeqTree (hunk "file" 1 [BC.pack "j"] []) NilTree)) - (SeqTree (hunk "file" 1 [] [BC.pack "v"]) NilTree)) - , commutePairFromTree seal2 $ - withStartState (makeSimpleRepo "file" []) - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "v"]) NilTree) - (SeqTree (hunk "file" 1 [] [BC.pack "j" ]) - (SeqTree (hunk "file" 1 [BC.pack "j"] []) NilTree))) - , commutePairFromTree seal2 $ - withStartState (makeSimpleRepo "file" [BC.pack "x",BC.pack "c"]) - (ParTree - (SeqTree (hunk "file" 1 [] [BC.pack "h"]) - (ParTree - (SeqTree (hunk "file" 3 [BC.pack "c"] []) NilTree) - (SeqTree (hunk "file" 2 [BC.pack "x"] []) - (SeqTree (hunk "file" 1 [] [BC.pack "j"]) NilTree)))) - (SeqTree (hunk "file" 1 [] [BC.pack "l"]) NilTree)) - , commutePairFromTree seal2 $ - withStartState (makeSimpleRepo "file" []) - (ParTree - (SeqTree (hunk "file" 1 [] (packStringLetters "s")) NilTree) - (SeqTree (hunk "file" 1 [] (packStringLetters "k")) - (SeqTree (hunk "file" 1 (packStringLetters "k") []) - (SeqTree (hunk "file" 1 [] (packStringLetters "m")) - (SeqTree (hunk "file" 1 (packStringLetters "m") []) NilTree))))) - ] - -packStringLetters :: String -> [B.ByteString] -packStringLetters = map 
(BC.pack . (:[])) - -w_repov2PatchLoopExamples :: [Sealed (WithStartState V1Model (Tree Prim2))] -w_repov2PatchLoopExamples = - [Sealed (WithStartState (makeSimpleRepo fx []) - $ canonizeTree - (ParTree - (SeqTree (hunk fx 1 [] (packStringLetters "pkotufogbvdabnmbzajvolwviqebieonxvcvuvigkfgybmqhzuaaurjspd")) - (ParTree - (SeqTree (hunk fx 47 (packStringLetters "qhzu") (packStringLetters "zafybdcokyjskcgnvhkbzpysaafnjjhcstgrczplxsfwagmh")) - (ParTree - (ParTree - NilTree - (ParTree - (ParTree - (ParTree - (SeqTree (hunk fx 15 (packStringLetters "mbzajvolwviqebieonxvcvuvigkfgyb") (packStringLetters "vujnxnhvybvpouyciaabszfmgssezlwwjgnethvrpnfrkubphzvdgymjjoacppqps")) - (ParTree - NilTree - (ParTree - (SeqTree (hunk fx 40 (packStringLetters "ssezlwwjgnethvrpnfrkubphzvdgymjjoacppqpsmzafybdcokyjskcgnvhkbz") (packStringLetters "wnesidpccwoiqiichxaaejdsyrhrusqljlcoro")) - (ParTree - (ParTree - (SeqTree (hunk fx 12 (packStringLetters "abnvujnxnhvybvpouyciaabszfmgwnesidpccwoiqii") (packStringLetters "czfdhqkipdstfjycqaxwnbxrihrufdeyneqiiiafwzlmg")) NilTree) - NilTree) - NilTree)) - (SeqTree (hunk fx 25 [] (packStringLetters "dihgmsotezucqdgxczvcivijootyvhlwymbiueufnvpwpeukmskqllalfe")) NilTree)))) - (SeqTree (hunk fx 56 (packStringLetters "yjskcgnvhkbzpysaafnjjhcstgrczplxsfwagmhaaurjsp") (packStringLetters "xldhrutyhcyaqeezwujiguawfyawjjqlirxshjddvq")) NilTree)) - (SeqTree (hunk fx 20 [] (packStringLetters "ooygwiyogqrqnytixqtmvdxx")) - (SeqTree (hunk fx 26 (packStringLetters "yogqrqnytixqtmvdxxvolwviqebieonxvcvuvigkfgybmzafybdcokyjskcgnvhkbz") (packStringLetters "akhsmlbkdxnvfoikmiatfbpzdrsyykkpoxvvddeaspzxe")) - (SeqTree (hunk fx 39 [] (packStringLetters "ji")) - (ParTree - NilTree - (ParTree - NilTree - (ParTree - (ParTree - NilTree - (SeqTree (hunk fx 26 (packStringLetters "akhsmlbkdxnvfjioikmiatfbpzdrsyykkpoxvvddeaspzxepysaafnjjhcstgrczplxs") (packStringLetters "onjbhddskcj")) - (SeqTree (hunk fx 39 [] (packStringLetters "fyscunxxxjjtyqpfxeznhtwvlphmp")) NilTree))) - (ParTree - NilTree - (SeqTree (hunk fx 44 [] (packStringLetters "xcchzwmzoezxkmkhcmesplnjpqriypshgiqklgdnbmmkldnydiy")) - (ParTree - NilTree - (SeqTree (hunk fx 64 (packStringLetters "plnjpqriypshgiqklgdnbmmkldnydiymiatfbpzdrsyykkpoxvvddeaspzxepysaafn") (packStringLetters "anjlzfdqbjqbcplvqvkhwjtkigp")) NilTree))))))))))) - (ParTree - NilTree - NilTree))) - NilTree)) - NilTree)) - (ParTree - NilTree - (SeqTree (hunk fx 1 [] (packStringLetters "ti")) - (SeqTree (hunk fx 1 (packStringLetters "t") (packStringLetters "ybcop")) - (SeqTree (hunk fx 2 [] (packStringLetters "dvlhgwqlpaeweerqrhnjtfolczbqbzoccnvdsyqiefqitrqneralf")) - (SeqTree (hunk fx 15 [] (packStringLetters "yairbjphwtnaerccdlfewujvjvmjakbc")) - (SeqTree (hunk fx 51 [] (packStringLetters "xayvfuwaiiogginufnhsrmktpmlbvxiakjwllddkiyofyfw")) - (ParTree - NilTree - NilTree)))))))))] - where - fx :: IsString a => a - fx = "F" - -mergeExamples :: [Sealed2 (Patch :\/: Patch)] -mergeExamples = map (mapSeal2 fromW) w_mergeExamples - -repov2PatchLoopExamples :: [Sealed (WithStartState V1Model (Tree Prim2))] -repov2PatchLoopExamples = w_repov2PatchLoopExamples - -commuteExamples :: [Sealed2 (Patch :> Patch)] -commuteExamples = map (mapSeal2 fromW) w_commuteExamples - -tripleExamples :: [Sealed2 (Patch :> Patch :> Patch)] -tripleExamples = map (mapSeal2 fromW) w_tripleExamples - -notDuplicatestriple :: (Patch :> Patch :> Patch) wX wY -> Bool -notDuplicatestriple = W.notDuplicatestriple . 
toW - -quickhunk :: PrimPatch prim => Int -> String -> String -> prim wX wY -quickhunk l o n = hunk "test" l (map (\c -> BC.pack [c]) o) - (map (\c -> BC.pack [c]) n) - -primPermutables :: [(Prim2 :> Prim2 :> Prim2) wX wY] -primPermutables = - [quickhunk 0 "e" "bo" :> quickhunk 3 "" "x" :> quickhunk 2 "f" "qljo"] - -mergeables :: [(Prim2 :\/: Prim2) wX wY] -mergeables = [quickhunk 1 "a" "b" :\/: quickhunk 1 "a" "c", - quickhunk 1 "a" "b" :\/: quickhunk 3 "z" "c", - quickhunk 0 "" "a" :\/: quickhunk 1 "" "b", - quickhunk 0 "a" "" :\/: quickhunk 1 "" "b", - quickhunk 0 "a" "" :\/: quickhunk 1 "b" "", - quickhunk 0 "" "a" :\/: quickhunk 1 "b" "" - ] - -mergeablesFL :: [(FL Prim2 :\/: FL Prim2) wX wY] -mergeablesFL = map (\ (x:\/:y) -> (x :>: NilFL) :\/: (y :>: NilFL)) mergeables ++ - [] -- [(quickhunk 1 "a" "b" :>: quickhunk 3 "z" "c" :>: NilFL) - -- :\/: (quickhunk 1 "a" "z" :>: NilFL), - -- (quickhunk 1 "a" "b" :>: quickhunk 1 "b" "c" :>: NilFL) - -- :\/: (quickhunk 1 "a" "z" :>: NilFL)] - -mergeable2commutable :: Invert p => (p :\/: p) wX wY -> (p :> p) wX wY -mergeable2commutable (x :\/: y) = unsafeCoerceP (invert x) :> y - -commutablesFL :: [(FL Prim2 :> FL Prim2) wX wY] -commutablesFL = map mergeable2commutable mergeablesFL -commutables :: [(Prim2 :> Prim2) wX wY] -commutables = map mergeable2commutable mergeables - -primPatches :: [Prim2 wX wY] -primPatches = concatMap mergeable2patches mergeables - where mergeable2patches (x:\/:y) = [x,y] - -repov2Patches :: [Patch wX wY] -repov2Patches = concatMap commutable2patches repov2Commutables - where commutable2patches (x:>y) = [x,y] - -repov2Triples :: [(Patch :> Patch :> Patch) wX wY] -repov2Triples = [ob' :> oa2 :> a2'', - oa' :> oa2 :> a2''] - ++ map (unseal2 unsafeCoerceP) tripleExamples - ++ map (unseal2 unsafeCoerceP) (concatMap getTriples repov2FLs) - where oa = fromAnonymousPrim $ quickhunk 1 "o" "aa" - oa2 = oa - a2 = fromAnonymousPrim $ quickhunk 2 "a34" "2xx" - ob = fromAnonymousPrim $ quickhunk 1 "o" "bb" - ob' :/\: oa' = merge (oa :\/: ob) - a2' :/\: _ = merge (ob' :\/: a2) - a2'' :/\: _ = merge (oa2 :\/: a2') - -repov2NonduplicateTriples :: [(Patch :> Patch :> Patch) wX wY] -repov2NonduplicateTriples = filter (notDuplicatestriple) repov2Triples - -repov2FLs :: [FL (Patch) wX wY] -repov2FLs = [oa :>: invert oa :>: oa :>: invert oa :>: ps +>+ oa :>: invert oa :>: NilFL] - where oa = fromAnonymousPrim $ quickhunk 1 "o" "a" - ps :/\: _ = merge (oa :>: invert oa :>: NilFL :\/: oa :>: invert oa :>: NilFL) - -repov2Commutables :: [(Patch :> Patch) wX wY] -repov2Commutables = map (unseal2 unsafeCoerceP) commuteExamples++ - map mergeable2commutable repov2Mergeables++ - [invert oa :> ob'] ++ map (unseal2 unsafeCoerceP) (concatMap getPairs repov2FLs) - where oa = fromAnonymousPrim $ quickhunk 1 "o" "a" - ob = fromAnonymousPrim $ quickhunk 1 "o" "b" - _ :/\: ob' = mergeFL (ob :\/: oa :>: invert oa :>: NilFL) - -repov2Mergeables :: [(Patch :\/: Patch) wX wY] -repov2Mergeables = map (\ (x :\/: y) -> fromAnonymousPrim x :\/: fromAnonymousPrim y) mergeables - ++ repov2IglooMergeables - ++ repov2QuickcheckMergeables - ++ map (unseal2 unsafeCoerceP) mergeExamples - ++ catMaybes (map pair2m (concatMap getPairs repov2FLs)) - ++ [(oa :\/: od), - (oa :\/: a2'), - (ob' :\/: od''), - (oe :\/: od), - (of' :\/: oe'), - (ob' :\/: oe'), - (oa :\/: oe'), - (ob' :\/: oc'), - (b2' :\/: oc'''), - (ob' :\/: a2), - (b2' :\/: og'''), - (oc''' :\/: og'''), - (oc'' :\/: og''), - (ob'' :\/: og''), - (ob'' :\/: oc''), - (oc' :\/: od'')] - where oa = 
fromAnonymousPrim $ quickhunk 1 "o" "aa" - a2 = fromAnonymousPrim $ quickhunk 2 "a34" "2xx" - og = fromAnonymousPrim $ quickhunk 3 "4" "g" - ob = fromAnonymousPrim $ quickhunk 1 "o" "bb" - b2 = fromAnonymousPrim $ quickhunk 2 "b" "2" - oc = fromAnonymousPrim $ quickhunk 1 "o" "cc" - od = fromAnonymousPrim $ quickhunk 7 "x" "d" - oe = fromAnonymousPrim $ quickhunk 7 "x" "e" - pf = fromAnonymousPrim $ quickhunk 7 "x" "f" - od'' = fromAnonymousPrim $ quickhunk 8 "x" "d" - ob' :>: b2' :>: NilFL :/\: _ = mergeFL (oa :\/: ob :>: b2 :>: NilFL) - a2' :/\: _ = merge (ob' :\/: a2) - ob'' :/\: _ = merge (a2 :\/: ob') - og' :/\: _ = merge (oa :\/: og) - og'' :/\: _ = merge (a2 :\/: og') - og''' :/\: _ = merge (ob' :\/: og') - oc' :/\: _ = merge (oa :\/: oc) - oc'' :/\: _ = merge (a2 :\/: oc) - oc''' :/\: _ = merge (ob' :\/: oc') - oe' :/\: _ = merge (od :\/: oe) - of' :/\: _ = merge (od :\/: pf) - pair2m :: Sealed2 (Patch :> Patch) - -> Maybe ((Patch :\/: Patch) wX wY) - pair2m (Sealed2 (xx :> y)) = do y' :> _ <- commute (xx :> y) - return $ unsafeCoerceP (xx :\/: y') - -repov2IglooMergeables :: [(Patch :\/: Patch) wX wY] -repov2IglooMergeables = [(a :\/: b), - (b :\/: c), - (a :\/: c), - (x :\/: a), - (y :\/: b), - (z :\/: c), - (x' :\/: y'), - (z' :\/: y'), - (x' :\/: z'), - (a :\/: a)] - where a = fromAnonymousPrim $ quickhunk 1 "1" "A" - b = fromAnonymousPrim $ quickhunk 2 "2" "B" - c = fromAnonymousPrim $ quickhunk 3 "3" "C" - x = fromAnonymousPrim $ quickhunk 1 "1BC" "xbc" - y = fromAnonymousPrim $ quickhunk 1 "A2C" "ayc" - z = fromAnonymousPrim $ quickhunk 1 "AB3" "abz" - x' :/\: _ = merge (a :\/: x) - y' :/\: _ = merge (b :\/: y) - z' :/\: _ = merge (c :\/: z) - -repov2QuickcheckMergeables :: [(Patch :\/: Patch) wX wY] -repov2QuickcheckMergeables = [-- invert k1 :\/: n1 - --, invert k2 :\/: n2 - hb :\/: k - , b' :\/: b' - , n' :\/: n' - , b :\/: d - , k' :\/: k' - , k3 :\/: k3 - ] ++ catMaybes (map pair2m pairs) - where hb = fromAnonymousPrim $ quickhunk 0 "" "hb" - k = fromAnonymousPrim $ quickhunk 0 "" "k" - n = fromAnonymousPrim $ quickhunk 0 "" "n" - b = fromAnonymousPrim $ quickhunk 1 "b" "" - d = fromAnonymousPrim $ quickhunk 2 "" "d" - d':/\:_ = merge (b :\/: d) - --k1 :>: n1 :>: NilFL :/\: _ = mergeFL (hb :\/: k :>: n :>: NilFL) - --k2 :>: n2 :>: NilFL :/\: _ = - -- merge (hb :>: b :>: NilFL :\/: k :>: n :>: NilFL) - k' :>: n' :>: NilFL :/\: _ :>: b' :>: _ = merge (hb :>: b :>: d' :>: NilFL :\/: k :>: n :>: NilFL) - pairs = getPairs (hb :>: b :>: d' :>: k' :>: n' :>: NilFL) - pair2m :: Sealed2 (Patch :> Patch) - -> Maybe ((Patch :\/: Patch) wX wY) - pair2m (Sealed2 (xx :> y)) = do y' :> _ <- commute (xx :> y) - return $ unsafeCoerceP (xx :\/: y') - - i = fromAnonymousPrim $ quickhunk 0 "" "i" - x = fromAnonymousPrim $ quickhunk 0 "" "x" - xi = fromAnonymousPrim $ quickhunk 0 "xi" "" - d3 :/\: _ = merge (xi :\/: d) - _ :/\: k3 = mergeFL (k :\/: i :>: x :>: xi :>: d3 :>: NilFL) - diff --git a/harness/Darcs/Test/Patch/Examples/Unwind.hs b/harness/Darcs/Test/Patch/Examples/Unwind.hs index 30db88de..7ee2cab7 100644 --- a/harness/Darcs/Test/Patch/Examples/Unwind.hs +++ b/harness/Darcs/Test/Patch/Examples/Unwind.hs @@ -13,7 +13,6 @@ module Darcs.Test.Patch.Examples.Unwind where import Darcs.Prelude -import Darcs.Patch.FromPrim import Darcs.Patch.Info import Darcs.Patch.Merge import Darcs.Patch.Named @@ -28,9 +27,9 @@ import Darcs.Util.Tree import Darcs.Test.HashedStorage ( unsafeMakeName ) import Darcs.Test.Patch.Arbitrary.Generic -import Darcs.Test.Patch.Arbitrary.Named () +import 
Darcs.Test.Patch.Arbitrary.Named ( WithNames(..) ) import Darcs.Test.Patch.Arbitrary.PrimV1 () -import Darcs.Test.Patch.Arbitrary.RepoPatch +import Darcs.Test.Patch.Arbitrary.Mergeable import Darcs.Test.Patch.RepoModel import Darcs.Test.Patch.Types.MergeableSequence ( MergeableSequence(..) ) @@ -48,10 +47,10 @@ import Data.String examples :: forall p - . (ArbitraryRepoPatch p, ArbitraryPrim (OnlyPrim p)) + . (ArbitraryMergeable p, ArbitraryPrim (OnlyPrim p), ModelOf p ~ ModelOf (OnlyPrim p)) => [Sealed2 (WithStartState2 (MergeableSequence (Named p)))] examples = - case (hasPrimConstruct @(OnlyPrim p), usesV1Model @(PrimOf p), notRepoPatchV1 @p) of + case (hasPrimConstruct @(OnlyPrim p), usesV1Model @(OnlyPrim p), notRepoPatchV1 @p) of (Just Dict, Just Dict, Just _) -> [example1, example2, example3, example4] (Just Dict, Just Dict, Nothing) -> [example1, example2, example3] _ -> [] @@ -62,8 +61,11 @@ mkNamed hash = NamedP (rawPatchInfo "" "" "" ["Ignore-this: "++hash] False) [] path :: String -> AnchoredPath path s = AnchoredPath [unsafeMakeName s] -repo :: [(String, [BLC.ByteString])] -> V1Model wX -repo entries = +repo :: [(String, [BLC.ByteString])] -> WithNames V1Model wX +repo entries = WithNames (primRepo entries) [] + +primRepo :: [(String, [BLC.ByteString])] -> V1Model wX +primRepo entries = makeRepo [ (unsafeMakeName s, RepoItem (File (makeBlob (BLC.unlines thelines)))) | (s, thelines) <- entries ] @@ -178,7 +180,7 @@ example4guts = in Sealed2 (WithStartState2 - (repo [("a", s3++s5++s7)]) + (primRepo [("a", s3++s5++s7)]) ( (NilMS `SeqMS` hunk (path "a") (off [s3,s5]) [] (map pack s10) @@ -206,7 +208,7 @@ example4 = case example4guts @p of Sealed2 (WithStartState2 model (((NilMS `SeqMS` a1 `SeqMS` a2) `ParMS` (NilMS `SeqMS` a3 `SeqMS` a4)) `ParMS` (NilMS `SeqMS` a5 `SeqMS` a6))) -> Sealed2 - (WithStartState2 model + (WithStartState2 (WithNames model []) ((NilMS `SeqMS` mkNamed "91b78b39ea6649b6e43ea74a57070480c87d7053" diff --git a/harness/Darcs/Test/Patch/Properties.hs b/harness/Darcs/Test/Patch/Properties.hs index 6892f265..23c022dd 100644 --- a/harness/Darcs/Test/Patch/Properties.hs +++ b/harness/Darcs/Test/Patch/Properties.hs @@ -18,12 +18,14 @@ -- UndecidableInstances was added because GHC 8.6 needed it -- even though GHC 8.2 didn't {-# LANGUAGE UndecidableInstances #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Test.Patch.Properties ( unit_V1P1 , unit_V2P1 , qc_V1P1 , qc_V2 , qc_V3 + , qc_Named_V3 , qc_prim , qc_named_prim ) where @@ -63,14 +65,16 @@ import Darcs.Patch.V3 ( RepoPatchV3 ) import Darcs.Patch.Commute ( Commute(..) ) import Darcs.Patch.Invert ( Invert ) import Darcs.Patch.Show ( ShowPatchBasic ) -import Darcs.Patch.Apply( Apply, ApplyState ) +import Darcs.Patch.Apply( ApplyState ) import Darcs.Patch.Merge ( Merge ) +import Darcs.Patch.Named ( Named ) +import qualified Darcs.Patch.RepoPatch as RP import Darcs.Test.Patch.Arbitrary.Generic import Darcs.Test.Patch.Arbitrary.Named () import Darcs.Test.Patch.Arbitrary.PatchTree import Darcs.Test.Patch.Arbitrary.PrimFileUUID() -import Darcs.Test.Patch.Arbitrary.RepoPatch +import Darcs.Test.Patch.Arbitrary.Mergeable import Darcs.Test.Patch.Arbitrary.RepoPatchV1 () import Darcs.Test.Patch.Arbitrary.RepoPatchV2 () import Darcs.Test.Patch.Arbitrary.RepoPatchV3 () @@ -85,69 +89,77 @@ import Darcs.Test.Patch.WithState , ArbitraryWS(..) 
) -import qualified Darcs.Test.Patch.Examples.Set1 as Ex -import qualified Darcs.Test.Patch.Examples.Set2Unwitnessed as ExU +import qualified Darcs.Test.Patch.Examples.Set1 as Ex1 +import qualified Darcs.Test.Patch.Examples.Set2 as Ex2 import Darcs.Test.Patch.Properties.Check( Check(..) ) -import Darcs.Test.Patch.Properties.Generic ( PatchProperty, MergeProperty, SequenceProperty ) +import Darcs.Test.Patch.Properties.Generic + ( MergeProperty + , PatchProperty + , SequenceProperty + , SequencePairProperty + ) import qualified Darcs.Test.Patch.Properties.Generic as PropG -import qualified Darcs.Test.Patch.Properties.RepoPatch as PropR +import qualified Darcs.Test.Patch.Properties.Mergeable as PropM import qualified Darcs.Test.Patch.Properties.RepoPatchV3 as PropR3 -import qualified Darcs.Test.Patch.Properties.GenericUnwitnessed as PropU import qualified Darcs.Test.Patch.Properties.V1Set1 as Prop1 import qualified Darcs.Test.Patch.Properties.V1Set2 as Prop2 import Darcs.Test.Patch.Types.Triple (Triple(..)) -import qualified Darcs.Test.Patch.WSub as WSub type Prim2 = V2.Prim +instance PrimPatch prim => RepoApply (NamedPrim prim) +instance PrimPatch prim => RepoApply (RepoPatchV1 prim) +instance PrimPatch prim => RepoApply (RepoPatchV2 prim) +instance PrimPatch prim => RepoApply (RepoPatchV3 prim) + unit_V1P1:: [Test] unit_V1P1 = - [ testCases "known commutes" Prop1.checkCommute Ex.knownCommutes - , testCases "known non-commutes" Prop1.checkCantCommute Ex.knownCantCommutes - , testCases "known merges" Prop1.checkMerge Ex.knownMerges - , testCases "known merges (equiv)" Prop1.checkMergeEquiv Ex.knownMergeEquivs - , testCases "known canons" Prop1.checkCanon Ex.knownCanons - , testCases "merge swaps" Prop1.checkMergeSwap Ex.mergePairs2 - , testCases "the patch validation works" Prop1.tTestCheck Ex.validPatches - , testCases "commute/recommute" (PropG.recommute commute) Ex.commutePairs - , testCases "merge properties: merge either way valid" PropG.mergeEitherWayValid Ex.mergePairs - , testCases "merge properties: merge swap" PropG.mergeEitherWay Ex.mergePairs - , testCases "primitive patch IO functions" (Prop1.tShowRead unsafeCompare) Ex.primitiveTestPatches - , testCases "IO functions (test patches)" (Prop1.tShowRead unsafeCompare) Ex.testPatches - , testCases "IO functions (named test patches)" (Prop1.tShowRead unsafeCompare) Ex.testPatchesNamed - , testCases "primitive commute/recommute" (PropG.recommute commute) Ex.primitiveCommutePairs + [ testCases "known commutes" Prop1.checkCommute Ex1.knownCommutes + , testCases "known non-commutes" Prop1.checkCantCommute Ex1.knownCantCommutes + , testCases "known merges" Prop1.checkMerge Ex1.knownMerges + , testCases "known merges (equiv)" Prop1.checkMergeEquiv Ex1.knownMergeEquivs + , testCases "known canons" Prop1.checkCanon Ex1.knownCanons + , testCases "merge swaps" Prop1.checkMergeSwap Ex1.mergePairs2 + , testCases "the patch validation works" Prop1.tTestCheck Ex1.validPatches + , testCases "commute/recommute" (PropG.recommute commute) Ex1.commutePairs + , testCases "merge properties: merge either way valid" PropG.mergeEitherWayValid Ex1.mergePairs + , testCases "merge properties: merge swap" PropG.mergeEitherWay Ex1.mergePairs + , testCases "primitive patch IO functions" (Prop1.tShowRead unsafeCompare) Ex1.primitiveTestPatches + , testCases "IO functions (test patches)" (Prop1.tShowRead unsafeCompare) Ex1.testPatches + , testCases "IO functions (named test patches)" (Prop1.tShowRead unsafeCompare) Ex1.testPatchesNamed + , testCases "primitive 
commute/recommute" (PropG.recommute commute) Ex1.primitiveCommutePairs ] unit_V2P1 :: [Test] unit_V2P1 = - [ testCases "coalesce commute" (PropU.coalesceCommute WSub.coalesce) ExU.primPermutables - , testCases "prim recommute" (PropU.recommute WSub.commute) ExU.commutables - , testCases "square commute law" (PropU.squareCommuteLaw WSub.commute) ExU.commutables - , testCases "prim inverses commute" (PropU.commuteInverses WSub.commute) ExU.commutables - , testCases "FL prim recommute" (PropU.recommute WSub.commute) ExU.commutablesFL - , testCases "FL square commute law" (PropU.squareCommuteLaw WSub.commute) ExU.commutablesFL - , testCases "FL prim inverses commute" (PropU.commuteInverses WSub.commute) $ ExU.commutablesFL - , testCases "fails" (PropU.commuteFails WSub.commute) ([] :: [(Prim2 WSub.:> Prim2) wX wY]) - , testCases "read and show work on Prim" PropU.showRead ExU.primPatches - , testCases "read and show work on RepoPatchV2" PropU.showRead ExU.repov2Patches - , testCases "example flattenings work" (PropR.propConsistentTreeFlattenings fromPrim2) ExU.repov2PatchLoopExamples - , testCases "V2 merge input consistent" (PropU.mergeArgumentsConsistent isConsistent) ExU.repov2Mergeables - , testCases "V2 merge input is forward" (PropU.mergeArgumentsConsistent isForward) ExU.repov2Mergeables - , testCases "V2 merge output is forward" (PropU.mergeConsistent isForward) ExU.repov2Mergeables - , testCases "V2 merge output consistent" (PropU.mergeConsistent isConsistent) ExU.repov2Mergeables - , testCases "V2 merge either way" PropU.mergeEitherWay ExU.repov2Mergeables - , testCases "V2 merge and commute" PropU.mergeCommute ExU.repov2Mergeables - - , testCases "V2 recommute" (PropU.recommute WSub.commute) ExU.repov2Commutables - , testCases "V2 inverses commute" (PropU.commuteInverses WSub.commute) ExU.repov2Commutables - , testCases "V2 permutivity" (PropU.permutivity WSub.commute) ExU.repov2NonduplicateTriples + [ testCases "coalesce commute" (PropG.coalesceCommute (fmap maybeToFL . coalesce)) Ex2.primPermutables + , testCases "prim recommute" (PropG.recommute commute . Pair) Ex2.commutables + , testCases "square commute law" (PropG.squareCommuteLaw commute . Pair) Ex2.commutables + , testCases "prim inverses commute" (PropG.commuteInverses commute . Pair) Ex2.commutables + , testCases "FL prim recommute" (PropG.recommute commute . Pair) Ex2.commutablesFL + , testCases "FL square commute law" (PropG.squareCommuteLaw commute . Pair) Ex2.commutablesFL + , testCases "FL prim inverses commute" (PropG.commuteInverses commute . Pair) $ Ex2.commutablesFL + , sealedCases "read and show work on Prim" PropG.showRead Ex2.primPatches + , sealedCases "read and show work on RepoPatchV2" PropG.showRead Ex2.repov2Patches + , testCases "example flattenings work" (PropM.propConsistentTreeFlattenings fromPrim2) Ex2.repov2PatchLoopExamples + , sealedCases "V2 merge input consistent" (PropG.mergeArgumentsConsistent isConsistent) Ex2.repov2Mergeables + , sealedCases "V2 merge input is forward" (PropG.mergeArgumentsConsistent isForward) Ex2.repov2Mergeables + , sealedCases "V2 merge output is forward" (PropG.mergeConsistent isForward) Ex2.repov2Mergeables + , sealedCases "V2 merge output consistent" (PropG.mergeConsistent isConsistent) Ex2.repov2Mergeables + , sealedCases "V2 merge either way" PropG.mergeEitherWay Ex2.repov2Mergeables + , sealedCases "V2 merge and commute" PropG.mergeCommute Ex2.repov2Mergeables + , sealedCases "V2 recommute" (PropG.recommute commute . 
Pair) Ex2.repov2Commutables + , sealedCases "V2 inverses commute" (PropG.commuteInverses commute . Pair) Ex2.repov2Commutables + , sealedCases "V2 permutivity" (PropG.permutivity commute) Ex2.repov2NonduplicateTriples ] where - fromPrim2 :: PropR.FromPrimT RepoPatchV2 Prim2 + fromPrim2 :: PropM.FromPrimT RepoPatchV2 Prim2 fromPrim2 = fromAnonymousPrim + sealedCases :: String -> (forall wX wY. p wX wY -> TestResult) -> [Sealed2 p] -> Test + sealedCases name prop = testCases name (unseal2 prop) arbitraryThing :: TestGenerator thing (Sealed2 thing) arbitraryThing = TestGenerator (\f p -> Just (unseal2 f p)) @@ -157,11 +169,11 @@ arbitraryWSThing = TestGenerator (\f wsp -> Just (unseal2 (f . wsPatch) wsp)) qc_prim :: forall prim. ( TestablePrim prim - , Show2 prim , Show1 (ModelOf prim) , MightBeEmptyHunk prim , MightHaveDuplicate prim , ArbitraryWS prim + , RepoApply prim ) => [Test] qc_prim = [testProperty "prim pair coverage" (unseal2 (PropG.propPrimPairCoverage @prim . wsPatch))] ++ @@ -195,7 +207,7 @@ qc_prim = qc_named_prim :: forall prim. ( TestablePrim prim - , Show2 prim + , PrimPatch prim , Show1 (ModelOf (NamedPrim prim)) , MightBeEmptyHunk prim ) => [Test] @@ -209,7 +221,7 @@ qc_named_prim = qc_V1P1 :: [Test] qc_V1P1 = - repoPatchProperties @(RepoPatchV1 V1.Prim) ++ + mergeablePatchProperties @(RepoPatchV1 V1.Prim) ++ [ testProperty "commuting by patch and its inverse is ok" (Prop2.propCommuteInverse . mapSeal2 (getPair . wsPatch)) , testProperty "a patch followed by its inverse is identity" (Prop2.propPatchAndInverseIsIdentity . mapSeal2 (getPair . wsPatch)) , testProperty "'simple smart merge'" Prop2.propSimpleSmartMergeGoodEnough @@ -232,17 +244,18 @@ qc_V1P1 = qc_V2 :: forall prim wXx wYy. ( PrimPatch prim , Show1 (ModelOf prim) - , ShrinkModel prim + , ShrinkModel (ModelOf prim) prim , PropagateShrink prim prim , ArbitraryPrim prim , RepoState (ModelOf prim) ~ ApplyState prim + , RepoApply prim ) => prim wXx wYy -> [Test] qc_V2 _ = [ testProperty "with quickcheck that patches are consistent" (withSingle consistent) ] - ++ repoPatchProperties @(RepoPatchV2 prim) + ++ mergeablePatchProperties @(RepoPatchV2 prim) ++ concat [ merge_properties @(RepoPatchV2 prim) "tree" (TestGenerator mergePairFromTree) , merge_properties @(RepoPatchV2 prim) "twfp" (TestGenerator mergePairFromTWFP) @@ -258,10 +271,11 @@ qc_V2 _ = qc_V3 :: forall prim wXx wYy. ( PrimPatch prim , Show1 (ModelOf prim) - , ShrinkModel prim + , ShrinkModel (ModelOf prim) prim , PropagateShrink prim prim , ArbitraryPrim prim , RepoState (ModelOf prim) ~ ApplyState prim + , RepoApply prim ) => prim wXx wYy -> [Test] @@ -269,55 +283,82 @@ qc_V3 _ = [ testProperty "repo invariants" (withSequence (PropR3.prop_repoInvariants :: SequenceProperty (RepoPatchV3 prim))) ] - ++ repoPatchProperties @(RepoPatchV3 prim) - ++ difficultRepoPatchProperties @(RepoPatchV3 prim) - -repoPatchProperties :: forall p. - ( ArbitraryRepoPatch p - , Show2 p - , Show1 (ModelOf p) - , CheckedMerge p - , ShrinkModel (PrimOf p) - , PrimBased p - ) - => [Test] -repoPatchProperties = + ++ mergeablePatchProperties @(RepoPatchV3 prim) + ++ difficultPatchProperties @(RepoPatchV3 prim) + +instance (ArbitraryPrim prim, ApplyState prim ~ RepoState (ModelOf prim)) => + ArbitraryMergeable (Named (RepoPatchV3 prim)) where + notRepoPatchV1 = Just (NotRepoPatchV1 (\case {})) + +instance MightHaveDuplicate p => MightHaveDuplicate (Named p) + +qc_Named_V3 + :: forall prim wX wY + . 
( PrimPatch prim + , Show1 (ModelOf prim) + , ShrinkModel (ModelOf prim) prim + , PropagateShrink prim prim + , ArbitraryPrim prim + , RepoState (ModelOf prim) ~ ApplyState prim + , RepoApply prim + ) + => prim wX wY + -> [Test] +qc_Named_V3 _ = + mergeablePatchProperties @(Named (RepoPatchV3 prim)) ++ + difficultPatchProperties @(Named (RepoPatchV3 prim)) + +-- | Similar to 'RepoPatch' but with constraints reduced to what is needed for +-- generation and property testing of mergeable patches, so that we have +-- instances for @'Named' p@ for all 'RepoPatch' types @p@. +type MergeablePatch p = + ( ApplyState (PrimOf p) ~ ApplyState p + , CheckedMerge p + , PrimPatch (PrimOf p) + , RP.Conflict p + , RP.PatchListFormat p + , RP.ReadPatch p + , Show2 p + , ShowPatchBasic p + ) + +mergeablePatchProperties + :: forall p + . ( ArbitraryMergeable p + , MergeablePatch p + , Show1 (ModelOf p) + , ShrinkModel (ModelOf p) (PrimOf p) + , PrimBased p + , RepoApply p + , RepoApply (PrimOf p) + ) + => [Test] +mergeablePatchProperties = [ testProperty "readPatch/showPatch" (withSingle (PropG.showRead :: PatchProperty p)) , testProperty "readPatch/showPatch (RL)" (withSequence (PropG.showRead :: SequenceProperty p)) -{- we no longer support inversion for RepoPatches - , testProperty "invert involution" - (withSingle (PropG.invertInvolution :: PatchProperty p)) - , testProperty "inverse composition" - (withPair (PropG.inverseComposition :: PairProperty p)) --} , testProperty "resolutions don't conflict" - (withSequence (PropR.propResolutionsDontConflict :: SequenceProperty p)) + (withSequence (PropM.propResolutionsDontConflict :: SequenceProperty p)) ] -- | These properties regularly fail for RepoPatchV2 with the standard test -- case generator when we crank up the number of tests (to e.g. 10000). -difficultRepoPatchProperties :: forall p. - ( ArbitraryRepoPatch p - , ShrinkModel (PrimOf p) - , Show2 p - , CheckedMerge p - , MightHaveDuplicate p - , Show1 (ModelOf p) - , PrimBased p - ) - => [Test] -difficultRepoPatchProperties = +difficultPatchProperties + :: forall p + . ( ArbitraryMergeable p + , MergeablePatch p + , ShrinkModel (ModelOf p) (PrimOf p) + , MightHaveDuplicate p + , Show1 (ModelOf p) + , PrimBased p + , RepoApply p + , RepoApply (PrimOf p) + ) + => [Test] +difficultPatchProperties = [ testProperty "reorderings are consistent" - (PropR.propConsistentReorderings @p) -{- we no longer support inversion for RepoPatches - , testProperty "inverses commute" - (withPair (PropG.commuteInverses com)) - , testConditional "nontrivial inverses commute" - (withPair nontrivialCommute) - (withPair (PropG.commuteInverses com)) --} + (PropM.propConsistentReorderings @p) , testProperty "recommute" (withPair (PropG.recommute com)) , testConditional "nontrivial recommute" @@ -331,17 +372,13 @@ difficultRepoPatchProperties = (withTriple (PropG.permutivity com)) , testProperty "merge either way" (withFork (PropG.mergeEitherWay :: MergeProperty p)) -{- this test relies on inversion and is thereore only valid for prims - , testProperty "merge either way valid" - (withFork (PropG.mergeEitherWayValid :: MergeProperty p)) --} , testConditional "nontrivial merge either way" (fromMaybe False . 
withFork nontrivialMerge) (withFork (PropG.mergeEitherWay :: MergeProperty p)) , testProperty "merge commute" (withFork (PropG.mergeCommute :: MergeProperty p)) , testProperty "resolutions are invariant under reorderings" - (withSequence (PropR.propResolutionsOrderIndependent :: SequenceProperty p)) + (withSequencePair (PropM.propResolutionsOrderIndependent :: SequencePairProperty p)) ] where com :: (p :> p) wA wB -> Maybe ((p :> p) wA wB) @@ -403,9 +440,10 @@ patch_properties genname gen = patch_repo_properties :: forall p gen . ( Show gen, Arbitrary gen - , Invert p, Apply p, ShowPatchBasic p + , Invert p, ShowPatchBasic p , RepoModel (ModelOf p) , RepoState (ModelOf p) ~ ApplyState p + , RepoApply p ) => PropList (WithState p) gen patch_repo_properties genname gen = @@ -448,11 +486,11 @@ pair_repo_properties ( Show gen , Arbitrary gen , Commute p - , Apply p , ShowPatchBasic p , MightBeEmptyHunk p , RepoModel (ModelOf p) , RepoState (ModelOf p) ~ ApplyState p + , RepoApply p ) => PropList (WithState (Pair p)) gen pair_repo_properties genname gen = @@ -461,4 +499,3 @@ pair_repo_properties genname gen = , TestCondition (const True) , TestCheck (PropG.effectPreserving commute)) ] - diff --git a/harness/Darcs/Test/Patch/Properties/Generic.hs b/harness/Darcs/Test/Patch/Properties/Generic.hs index 8834c4cb..3d7b9081 100644 --- a/harness/Darcs/Test/Patch/Properties/Generic.hs +++ b/harness/Darcs/Test/Patch/Properties/Generic.hs @@ -36,6 +36,7 @@ module Darcs.Test.Patch.Properties.Generic , PatchProperty , MergeProperty , SequenceProperty + , SequencePairProperty , propPrimPairCoverage ) where @@ -73,11 +74,12 @@ import Darcs.Patch.Read ( ReadPatch ) import Darcs.Patch.Show ( ShowPatchBasic, displayPatch, showPatch, ShowPatchFor(ForStorage) ) import Darcs.Patch () -import Darcs.Patch.Apply ( Apply, ApplyState ) +import Darcs.Patch.Apply ( ApplyState ) import Darcs.Patch.Commute ( Commute, commute, commuteFL ) import Darcs.Patch.CommuteFn ( CommuteFn ) import Darcs.Patch.Merge ( Merge(merge) ) import Darcs.Patch.Read ( readPatch ) +import Darcs.Test.Patch.RepoModel ( RepoApply ) import Darcs.Patch.Invert ( Invert(..) ) import Darcs.Patch.Witnesses.Eq ( Eq2(..), EqCheck(..) ) import Darcs.Patch.Witnesses.Ordered @@ -98,6 +100,7 @@ type PatchProperty p = forall wA wB. p wA wB -> TestResult -- type PairProperty p = forall wA wB. (p :> p) wA wB -> TestResult type MergeProperty p = forall wA wB. (FL p :\/: FL p) wA wB -> TestResult type SequenceProperty p = forall wA wB. RL p wA wB -> TestResult +type SequencePairProperty p = forall wA wB. (RL p :> RL p) wA wB -> TestResult -- | @A^^=A@ invertInvolution :: (Invert p, Eq2 p, ShowPatchBasic p) => p wA wB -> TestResult @@ -132,10 +135,10 @@ inverseComposition (Pair (a :> b)) = invertRollback :: ( ApplyState p ~ RepoState model , Invert p - , Apply p , ShowPatchBasic p , RepoModel model , model ~ ModelOf p + , RepoApply p ) => WithState p wA wB -> TestResult @@ -230,12 +233,12 @@ commuteInverses c (Pair (x :> y)) = -- | effect preserving AB <--> B'A' then effect(AB) = effect(B'A') effectPreserving - :: ( Apply p - , MightBeEmptyHunk p + :: ( MightBeEmptyHunk p , RepoModel model , model ~ ModelOf p , ApplyState p ~ RepoState model , ShowPatchBasic p + , RepoApply p ) => CommuteFn p p -> WithState (Pair p) wA wB @@ -508,9 +511,10 @@ mergeCommute (x :\/: y) = -- | coalesce effect preserving coalesceEffectPreserving - :: TestablePrim prim - => (forall wX wY . 
(prim :> prim) wX wY -> Maybe (FL prim wX wY)) - -> WithState (Pair prim) wA wB -> TestResult + :: (TestablePrim prim, RepoApply prim) + => (forall wX wY . (prim :> prim) wX wY -> Maybe (FL prim wX wY)) + -> WithState (Pair prim) wA wB + -> TestResult coalesceEffectPreserving j (WithState r (Pair (a :> b)) r') = case j (a :> b) of Nothing -> rejected diff --git a/harness/Darcs/Test/Patch/Properties/GenericUnwitnessed.hs b/harness/Darcs/Test/Patch/Properties/GenericUnwitnessed.hs deleted file mode 100644 index ed277712..00000000 --- a/harness/Darcs/Test/Patch/Properties/GenericUnwitnessed.hs +++ /dev/null @@ -1,90 +0,0 @@ -module Darcs.Test.Patch.Properties.GenericUnwitnessed where - -import Darcs.Prelude - -import qualified Darcs.Test.Patch.Properties.Generic as W - ( permutivity - , mergeConsistent, mergeArgumentsConsistent, mergeEitherWay - , mergeCommute, squareCommuteLaw, coalesceCommute, commuteInverses - , recommute - , showRead ) -import Darcs.Test.Patch.Arbitrary.Generic ( MightHaveDuplicate ) -import Darcs.Test.Patch.Arbitrary.PrimV1 () - -import Darcs.Test.Patch.Types.Pair ( Pair(..) ) -import Darcs.Test.Patch.WSub -import Darcs.Test.Util.TestResult - -import Darcs.Patch.Commute ( Commute ) -import Darcs.Patch.Invert ( Invert ) -import Darcs.Patch.Read ( ReadPatch ) -import Darcs.Patch.Show ( ShowPatchBasic, displayPatch ) -import Darcs.Patch.Witnesses.Show -import Darcs.Patch.Witnesses.Eq -import Darcs.Patch.Merge ( Merge ) -import Darcs.Util.Printer ( Doc, redText, ($$) ) - - -permutivity :: (ShowPatchBasic wp, Eq2 wp, WSub wp p) - => (forall wX wY . (p :> p) wX wY -> Maybe ((p :> p) wX wY)) - -> (p :> p :> p) wA wB -> TestResult -permutivity f = W.permutivity (fmap toW . f . fromW) . toW - -mergeEitherWay :: (ShowPatchBasic wp, Eq2 wp, Merge wp, WSub wp p) => (p :\/: p) wX wY -> TestResult -mergeEitherWay = W.mergeEitherWay . toW - -commuteInverses :: (Invert wp, ShowPatchBasic wp, Eq2 wp, WSub wp p) - => (forall wX wY . (p :> p) wX wY -> Maybe ((p :> p) wX wY)) - -> (p :> p) wA wB -> TestResult -commuteInverses f = W.commuteInverses (fmap toW . f . fromW) . Pair . toW - -recommute :: (ShowPatchBasic wp, MightHaveDuplicate wp, Eq2 wp, WSub wp p) - => (forall wX wY . ((p :> p) wX wY -> Maybe ((p :> p) wX wY))) - -> (p :> p) wA wB -> TestResult -recommute f = W.recommute (fmap toW . f . fromW) . Pair . toW - -mergeCommute :: ( MightHaveDuplicate wp - , ShowPatchBasic wp - , Eq2 wp - , Commute wp - , Merge wp - , WSub wp p - ) - => (p :\/: p) wX wY - -> TestResult -mergeCommute = W.mergeCommute . toW - -mergeConsistent :: (Merge wp, ShowPatchBasic wp, WSub wp p) => - (forall wX wY . p wX wY -> Maybe Doc) - -> (p :\/: p) wA wB -> TestResult -mergeConsistent f = W.mergeConsistent (f . fromW) . toW - -mergeArgumentsConsistent :: (ShowPatchBasic wp, WSub wp p) => - (forall wX wY . p wX wY -> Maybe Doc) - -> (p :\/: p) wA wB -> TestResult -mergeArgumentsConsistent f = W.mergeArgumentsConsistent (f . fromW) . toW - -showRead :: (ShowPatchBasic p, ReadPatch p, Eq2 p, Show2 p) => p wX wY -> TestResult -showRead = W.showRead - -squareCommuteLaw :: (Invert wp, ShowPatchBasic wp, Eq2 wp, WSub wp p) => - (forall wX wY . (p :> p) wX wY -> Maybe ((p :> p) wX wY)) - -> (p :> p) wA wB -> TestResult -squareCommuteLaw f = W.squareCommuteLaw (fmap toW . f . fromW) . Pair . toW - - -coalesceCommute :: (forall wX wY . (Prim2 :> Prim2) wX wY -> Maybe (FL Prim2 wX wY)) - -> (Prim2 :> Prim2 :> Prim2) wA wB -> TestResult -coalesceCommute f = W.coalesceCommute (fmap toW . f . fromW) . 
toW - -commuteFails :: ShowPatchBasic p - => ((p :> p) wX wY -> Maybe ((p :> p) wX wY)) - -> (p :> p) wX wY - -> TestResult -commuteFails c (x :> y) = case c (x :> y) of - Nothing -> succeeded - Just (y' :> x') -> - failed $ redText "x" $$ displayPatch x $$ - redText ":> y" $$ displayPatch y $$ - redText "y'" $$ displayPatch y' $$ - redText ":> x'" $$ displayPatch x' diff --git a/harness/Darcs/Test/Patch/Properties/RepoPatch.hs b/harness/Darcs/Test/Patch/Properties/Mergeable.hs similarity index 71% rename from harness/Darcs/Test/Patch/Properties/RepoPatch.hs rename to harness/Darcs/Test/Patch/Properties/Mergeable.hs index d0e1cccc..7bad8d7a 100644 --- a/harness/Darcs/Test/Patch/Properties/RepoPatch.hs +++ b/harness/Darcs/Test/Patch/Properties/Mergeable.hs @@ -1,4 +1,4 @@ -module Darcs.Test.Patch.Properties.RepoPatch +module Darcs.Test.Patch.Properties.Mergeable ( propConsistentTreeFlattenings , propConsistentReorderings , propResolutionsDontConflict @@ -18,21 +18,22 @@ import Darcs.Test.Patch.Arbitrary.PatchTree import Darcs.Test.Patch.Merge.Checked ( CheckedMerge ) import Darcs.Test.Patch.Types.MergeableSequence ( MergeableSequence, mergeableSequenceToRL ) import Darcs.Test.Patch.WithState -import Darcs.Test.Patch.RepoModel ( RepoModel, repoApply, showModel, eqModel, RepoState - , Fail, maybeFail, ModelOf ) +import Darcs.Test.Patch.RepoModel + ( Fail, ModelOf, RepoApply, RepoModel, RepoState + , eqModel, maybeFail, repoApply, showModel + ) import Darcs.Test.Util.TestResult ( TestResult, failed, rejected, succeeded ) import Darcs.Util.Printer ( text, redText, ($$), vsep ) -import Darcs.Patch.Conflict ( Conflict(..), ConflictDetails(..), Unravelled ) -import Darcs.Patch.Apply ( Apply(..) ) -import Darcs.Patch.Merge ( Merge, mergeList ) +import Darcs.Patch.Conflict ( ConflictDetails(..), Unravelled ) +import Darcs.Patch.Merge ( CleanMerge, mergeList ) import Darcs.Patch.Permutations ( permutationsRL, (=\~/=) ) -import Darcs.Patch.RepoPatch ( Commute, RepoPatch ) +import Darcs.Patch.RepoPatch import Darcs.Patch.Show ( displayPatch ) -import Darcs.Patch.Witnesses.Eq ( Eq2, isIsEq ) -import Darcs.Patch.Witnesses.Ordered ( RL(..) ) +import Darcs.Patch.Witnesses.Eq ( isIsEq ) +import Darcs.Patch.Witnesses.Ordered ( RL(..), (:>)(..) ) import Darcs.Patch.Witnesses.Sealed ( Sealed(..), unseal, Sealed2(..) ) import Darcs.Patch.Witnesses.Show ( Show2 ) @@ -56,13 +57,14 @@ propConsistentTreeFlattenings :: forall rp prim model. , RepoState model ~ ApplyState prim , ApplyState (rp prim) ~ ApplyState prim , Merge (rp prim) - , Apply (rp prim) - , Show2 (rp prim) ) + , Show2 (rp prim) + , RepoApply (rp prim) + ) => FromPrimT rp prim -> Sealed (WithStartState model (Tree prim)) -> TestResult -propConsistentTreeFlattenings fromPrim (Sealed (WithStartState start t)) = - case flattenTree (mapTree fromPrim t) of +propConsistentTreeFlattenings fromPrim' (Sealed (WithStartState start t)) = + case flattenTree (mapTree fromPrim' t) of Sealed (G2 flat') -> -- Limit the number of tree flattenings to something sane, as -- the length of the original list can grow exponentially. @@ -75,14 +77,15 @@ propConsistentTreeFlattenings fromPrim (Sealed (WithStartState start t)) = -- | This property states that all reorderings of a sequence of patches, -- when applied to the same state, give the same result state. 
-propConsistentReorderings :: ( RepoPatch p - , RepoModel (ModelOf p) - , RepoState (ModelOf p) ~ ApplyState p - , CheckedMerge p - , PrimBased p - ) - => Sealed2 (WithStartState2 (MergeableSequence p)) - -> TestResult +propConsistentReorderings + :: ( RepoModel (ModelOf p) + , RepoState (ModelOf p) ~ ApplyState p + , CheckedMerge p + , PrimBased p + , RepoApply p + ) + => Sealed2 (WithStartState2 (MergeableSequence p)) + -> TestResult propConsistentReorderings (Sealed2 (WithStartState2 start ms)) = case mapM (repoApply start) $ permutationsRL ps of Left e -> failed $ redText "could not apply all reorderings:" $$ text (show e) @@ -104,31 +107,50 @@ propConsistentReorderings (Sealed2 (WithStartState2 start ms)) = -- | This property states that the standard conflict resolutions for a -- sequence of patches are independent of any reordering of the sequence. -propResolutionsOrderIndependent :: RepoPatch p => RL p wX wY -> TestResult -propResolutionsOrderIndependent ps = - check $ map withConflictParts pss +propResolutionsOrderIndependent + :: ( Commute p + , Conflict p + , ShowPatchBasic p + , PatchListFormat p + , PatchListFormat (PrimOf p) + , ShowPatchBasic (PrimOf p) + , Eq2 (PrimOf p) + , Show2 (PrimOf p) + , Commute (PrimOf p) + ) + => (RL p :> RL p) wX wY + -> TestResult +propResolutionsOrderIndependent (ctx :> ps) = + check [withConflictParts cs qs | cs <- css, qs <- pss] where - withConflictParts qs = - (Sealed qs, map conflictParts $ resolveConflicts NilRL qs) + withConflictParts cs qs = + (Sealed (cs :> qs), map conflictParts $ resolveConflicts cs qs) pss = permutationsRL ps + css = permutationsRL ctx check [] = error "we should have at least one permutation!" check [_] = rejected check xs = eql xs eql [] = error "impossible" eql [_] = succeeded - eql ((ps1,r1):(ps2,r2):rs) - | listEqBy eqUnravelled r1 r2 = eql ((ps2,r2):rs) + eql ((cps1,r1):(cps2,r2):rs) + | listEqBy eqUnravelled r1 r2 = eql ((cps2,r2):rs) | otherwise = failed $ vsep [ redText "resolutions differ: r1=" , text (show r1) , redText "r2=" , text (show r2) - , text "for patches" - , unseal displayPatch ps1 + , unseal displayPair cps1 , text "versus" - , unseal displayPatch ps2 + , unseal displayPair cps2 ] + displayPair (as :> bs) = + vsep + [ text "for context" + , displayPatch as + , text "and patches" + , displayPatch bs + ] -- | Equality for 'Unravelled' is modulo order of patches. eqUnravelled :: (Commute p, Eq2 p) => Unravelled p wX -> Unravelled p wX -> Bool @@ -143,7 +165,16 @@ listEqBy _ _ _ = False -- | This property states that the standard conflict resolutions for a -- sequence of patches do not themselves conflict with each other. 
-propResolutionsDontConflict :: RepoPatch p => RL p wX wY -> TestResult +propResolutionsDontConflict + :: ( Conflict p + , PatchListFormat p + , ShowPatchBasic p + , CleanMerge (PrimOf p) + , PatchListFormat (PrimOf p) + , ShowPatchBasic (PrimOf p) + ) + => RL p wX wY + -> TestResult propResolutionsDontConflict patches = case mergeList $ catMaybes $ map conflictMangled $ resolveConflicts NilRL patches of Right _ -> succeeded diff --git a/harness/Darcs/Test/Patch/Properties/RepoPatchV3.hs b/harness/Darcs/Test/Patch/Properties/RepoPatchV3.hs index f8a391e6..c6db597c 100644 --- a/harness/Darcs/Test/Patch/Properties/RepoPatchV3.hs +++ b/harness/Darcs/Test/Patch/Properties/RepoPatchV3.hs @@ -95,9 +95,9 @@ prop_conflictsCommutePastConflictor ps p = failed $ text "conflicting patches not found in repo:" $$ vcat (mapRL displayPatch (ps :<: p)) - | not (revertedIds p `S.isSubsetOf` rids) + | not (revertedIds p `S.isSubsetOf` xids) = failed - $ text "undone patches not found in repo:" + $ text "undone patches not a subset of conflicting patches:" $$ vcat (mapRL displayPatch (ps :<: p)) | otherwise = case partitionRL' ((`S.member` xids) . ident) ps of diff --git a/harness/Darcs/Test/Patch/RepoModel.hs b/harness/Darcs/Test/Patch/RepoModel.hs index e2607a16..cabbaf89 100644 --- a/harness/Darcs/Test/Patch/RepoModel.hs +++ b/harness/Darcs/Test/Patch/RepoModel.hs @@ -5,7 +5,9 @@ import Darcs.Prelude import Control.Exception ( SomeException ) import Darcs.Patch.Apply ( Apply, ApplyState ) -import Darcs.Patch.Witnesses.Ordered ( FL, RL ) +import Darcs.Patch.Info ( PatchInfo ) +import Darcs.Patch.Witnesses.Ordered ( FL, RL, mapFL, mapRL ) +import Darcs.Patch.Witnesses.Show ( Show1 ) import Test.QuickCheck ( Gen ) @@ -17,14 +19,28 @@ unFail = either (error.show) id maybeFail :: Fail a -> Maybe a maybeFail = either (const Nothing) Just -class RepoModel model where - type RepoState model :: (* -> *) -> * +-- | Class of patch types that can be applied to a model +class Apply p => RepoApply p where + -- | This method exists so that we can keep track of the names of patches + -- that have been applied to the model. This allows us to generate 'Named' + -- patches with meaningful explicit dependencies. + patchNames :: p wX wY -> [PatchInfo] + patchNames _ = [] + +instance RepoApply p => RepoApply (FL p) where + patchNames = concat . mapFL patchNames + +instance RepoApply p => RepoApply (RL p) where + patchNames = concat . mapRL patchNames + +class Show1 model => RepoModel model where + type RepoState model :: (Type -> Type) -> Type showModel :: model x -> String eqModel :: model x -> model x -> Bool aSmallRepo :: Gen (model x) - repoApply :: (Apply p, ApplyState p ~ RepoState model) => model x -> p x y -> Fail (model y) + repoApply :: (RepoApply p, ApplyState p ~ RepoState model) => model x -> p x y -> Fail (model y) -type family ModelOf (p :: * -> * -> *) :: * -> * +type family ModelOf (p :: Type -> Type -> Type) :: Type -> Type type instance ModelOf (FL p) = ModelOf p type instance ModelOf (RL p) = ModelOf p diff --git a/harness/Darcs/Test/Patch/Types/MergeableSequence.hs b/harness/Darcs/Test/Patch/Types/MergeableSequence.hs index d3f55fc7..ff720bfa 100644 --- a/harness/Darcs/Test/Patch/Types/MergeableSequence.hs +++ b/harness/Darcs/Test/Patch/Types/MergeableSequence.hs @@ -3,6 +3,7 @@ module Darcs.Test.Patch.Types.MergeableSequence ( MergeableSequence(..) , arbitraryMergeableSequence , mergeableSequenceToRL + , WithSplit(..) 
) where import Darcs.Prelude @@ -21,7 +22,7 @@ import Darcs.Patch.Witnesses.Maybe import Darcs.Patch.Witnesses.Sealed import Darcs.Patch.Witnesses.Unsafe import Darcs.Patch.Witnesses.Ordered -import Darcs.Patch.Apply ( Apply, ApplyState ) +import Darcs.Patch.Apply ( ApplyState ) import Darcs.Patch.Effect ( Effect(..) ) import Darcs.Patch.Invert ( Invert(..) ) import Darcs.Patch.FromPrim ( PrimPatchBase, PrimOf ) @@ -205,7 +206,7 @@ arbitraryMergeableSequence . ( RepoModel model , CheckedMerge p , PrimBased p - , Apply p, ApplyState p ~ RepoState model + , RepoApply p, ApplyState p ~ RepoState model ) => (forall wA . model wA -> Gen (Sealed (WithEndState model (OnlyPrim p wA)))) -> model wX @@ -237,7 +238,7 @@ arbitraryMergeableSequence arbitrarySingle = go instance ( RepoModel model - , Apply p, ApplyState p ~ RepoState model + , RepoApply p, ApplyState p ~ RepoState model , model ~ ModelOf (OnlyPrim p) , model ~ ModelOf p , CheckedMerge p @@ -245,3 +246,57 @@ instance ) => ArbitraryState (MergeableSequence p) where arbitraryState rm = bSized 3 0.035 9 $ arbitraryMergeableSequence arbitraryState rm + +data WithSplit p wX wY = WithSplit Int (p wX wY) + +type instance ModelOf (WithSplit p) = ModelOf p + +instance PrimPatchBase p => PrimPatchBase (WithSplit p) where + type PrimOf (WithSplit p) = PrimOf p + +instance Effect p => Effect (WithSplit p) where + effect (WithSplit _ p) = effect p + +instance Shrinkable p => Shrinkable (WithSplit p) where + shrinkInternally (WithSplit n ms) = map (WithSplit n) (shrinkInternally ms) + shrinkAtStart (WithSplit n ms) = map (mapFlipped (WithSplit n)) (shrinkAtStart ms) + shrinkAtEnd (WithSplit n ms) = map (mapSeal (WithSplit n)) (shrinkAtEnd ms) + +instance + ( PropagateShrink prim (OnlyPrim p) + , CheckedMerge p, Effect p, PrimOf p ~ prim + , Invert prim, PrimCoalesce prim + , PrimBased p + ) + => PropagateShrink prim (WithSplit (MergeableSequence p)) where + propagateShrink (x :> WithSplit n p) = + case propagateShrink (x :> p) of + Nothing -> Nothing + Just (Just2 p' :> x') -> Just (Just2 (WithSplit n p') :> x') + Just (Nothing2 :> x') -> Just (Nothing2 :> x') + +instance Show2 p => Show2 (WithSplit p) + +instance Show2 p => Show (WithSplit p wX wY) where + showsPrec d (WithSplit n p) = + showParen (d > appPrec) $ + showString "WithSplit " . shows n . showString " " . 
showsPrec2 (appPrec + 1) p + +instance + ( RepoModel model + , RepoApply p, ApplyState p ~ RepoState model + , model ~ ModelOf (OnlyPrim p) + , model ~ ModelOf p + , CheckedMerge p + , PrimBased p + ) + => ArbitraryState (WithSplit (MergeableSequence p)) where + arbitraryState s = do + Sealed (WithEndState ms s') <- arbitraryState s + n <- chooseInt (0, lengthMS ms) + return $ seal $ WithEndState (WithSplit n ms) s' + +lengthMS :: MergeableSequence p wX wY -> Int +lengthMS NilMS = 0 +lengthMS (ParMS a b) = lengthMS a + lengthMS b +lengthMS (SeqMS a _) = lengthMS a + 1 diff --git a/harness/Darcs/Test/Patch/Unwind.hs b/harness/Darcs/Test/Patch/Unwind.hs index 53cc824a..679d61d4 100644 --- a/harness/Darcs/Test/Patch/Unwind.hs +++ b/harness/Darcs/Test/Patch/Unwind.hs @@ -5,13 +5,13 @@ module Darcs.Test.Patch.Unwind import Darcs.Prelude import Darcs.Patch -import Darcs.Patch.Commute +import Darcs.Patch.RepoPatch import Darcs.Patch.Unwind import Darcs.Patch.Witnesses.Ordered import Darcs.Patch.Witnesses.Show import Darcs.Test.Patch.Arbitrary.Generic -import Darcs.Test.Patch.Arbitrary.RepoPatch +import Darcs.Test.Patch.Arbitrary.Mergeable import Darcs.Test.Patch.Examples.Unwind import Darcs.Test.Patch.Merge.Checked import Darcs.Test.Patch.Properties.Generic @@ -43,10 +43,19 @@ numberedTestCases text runTest = zipWith numbered [1..] testSuite :: forall p - . ( ArbitraryRepoPatch p, PrimBased p, ArbitraryPrim (OnlyPrim p) - , ShrinkModel (PrimOf p) - , Show1 (ModelOf (PrimOf p)), Show2 p - , CheckedMerge p, Commute (OnlyPrim p) + . ( ArbitraryMergeable p + , Apply p + , ApplyState (PrimOf p) ~ RepoState (ModelOf p) + , Unwind p + , PrimPatchBase p + , PrimBased p + , ArbitraryPrim (OnlyPrim p) + , ShrinkModel (ModelOf p) (PrimOf p) + , Show1 (ModelOf p) + , Show2 p + , CheckedMerge p + , Commute (OnlyPrim p) + , RepoApply (PrimOf p) ) => [Test] testSuite = diff --git a/harness/Darcs/Test/Patch/WSub.hs b/harness/Darcs/Test/Patch/WSub.hs deleted file mode 100644 index fe54757e..00000000 --- a/harness/Darcs/Test/Patch/WSub.hs +++ /dev/null @@ -1,139 +0,0 @@ -{-# LANGUAGE FunctionalDependencies, UndecidableInstances #-} -module Darcs.Test.Patch.WSub where - -{- -The Examples.Set2Unwitnessed module builds a lot of test cases by pattern matching -on the results of merge/commute in where clauses. This would -be very painful to switch to using witnesses properly, because -we'd have to make them use case in series. - -So instead we give up on witnesses for this module, but instead -of preprocessor hacks which make incompatible code with the rest -of darcs, we build a fresh set of witnesses constructors (FL etc) -which aren't actually GADTs or existentials. So the pattern matching -works as before, but we need to translate back and forth a lot. - -We call the normal darcs constructors the 'W' variants. 
--} - -import Darcs.Prelude - -import qualified Darcs.Test.Patch.Arbitrary.PatchTree as W - ( getPairs, getTriples ) - -import qualified Darcs.Patch as W ( commute ) -import qualified Darcs.Patch.Merge as W ( merge, mergeFL ) -import qualified Darcs.Patch.Prim as W ( coalesce ) - -import qualified Darcs.Patch.Witnesses.Ordered as W -import Darcs.Patch.Witnesses.Sealed -import Darcs.Patch.Witnesses.Eq -import Darcs.Patch.Witnesses.Maybe -import Darcs.Patch.Witnesses.Show -import Darcs.Patch.Witnesses.Unsafe ( unsafeCoerceP, unsafeCoercePStart, unsafeCoercePEnd ) - -import Darcs.Patch.Merge ( Merge ) -import Darcs.Patch.V2 ( RepoPatchV2 ) -import qualified Darcs.Patch.V2.Prim as V2 -import Darcs.Patch.Commute ( Commute ) -import Darcs.Patch.Invert ( Invert(..) ) - -type Prim2 = V2.Prim - - -infixr 5 :>: -infixr 5 +>+ -infixr 1 :> -infix 1 :/\: -infix 1 :\/: - -data FL p wX wY where - NilFL :: FL p wX wY - (:>:) :: p wX wY -> FL p wX wY -> FL p wX wY - -(+>+) :: FL p wX wY -> FL p wX wY -> FL p wX wY -NilFL +>+ ps = ps -(p :>: ps) +>+ qs = p :>: (ps +>+ qs) - -data (p :> q) wX wY where - (:>) :: p wX wY -> q wX wY -> (p :> q) wX wY - -data (p :\/: q) wX wY where - (:\/:) :: p wX wY -> q wX wY -> (p :\/: q) wX wY - -data (p :/\: q) wX wY where - (:/\:) :: p wX wY -> q wX wY -> (p :/\: q) wX wY - -class WSub wp p | p -> wp, wp -> p where - fromW :: wp wX wY -> p wX wY - toW :: p wX wY -> wp wX wY - -instance (WSub wp1 p1, WSub wp2 p2) => WSub (wp1 W.:\/: wp2) (p1 :\/: p2) where - fromW (x W.:\/: y) = unsafeCoerceP (fromW x) :\/: unsafeCoerceP (fromW y) - toW (x :\/: y) = unsafeCoerceP (toW x) W.:\/: unsafeCoerceP (toW y) - -instance (WSub wp1 p1, WSub wp2 p2) => WSub (wp1 W.:/\: wp2) (p1 :/\: p2) where - fromW (x W.:/\: y) = unsafeCoerceP (fromW x) :/\: unsafeCoerceP (fromW y) - toW (x :/\: y) = unsafeCoerceP (toW x) W.:/\: unsafeCoerceP (toW y) - -instance (WSub wp1 p1, WSub wp2 p2) => WSub (wp1 W.:> wp2) (p1 :> p2) where - fromW (x W.:> y) = unsafeCoercePEnd (fromW x) :> unsafeCoercePStart (fromW y) - toW (x :> y) = unsafeCoercePEnd (toW x) W.:> unsafeCoercePStart (toW y) - -instance WSub wp p => WSub (W.FL wp) (FL p) where - fromW W.NilFL = unsafeCoerceP NilFL - fromW (x W.:>: xs) = unsafeCoercePEnd (fromW x) :>: unsafeCoercePStart (fromW xs) - - toW NilFL = unsafeCoerceP W.NilFL - toW (x :>: xs) = unsafeCoercePEnd (toW x) W.:>: unsafeCoercePStart (toW xs) - -instance WSub (RepoPatchV2 prim) (RepoPatchV2 prim) where - fromW = id - toW = id - -instance WSub Prim2 Prim2 where - fromW = id - toW = id - -instance (WSub wp p, WSub wq q, Show2 wp, Show2 wq) => Show ((p :> q) wX wY) where - show = show . toW - -instance (WSub wp p, WSub wq q, Show2 wp, Show2 wq) => Show2 (p :> q) - -instance (WSub wp p, WSub wq q, Show2 wp, Show2 wq) => Show ((p :\/: q) wX wY) where - show = show . toW - -instance (WSub wp p, WSub wq q, Show2 wp, Show2 wq) => Show2 (p :\/: q) - -instance (WSub wp p, Show2 wp) => Show (FL p wX wY) where - show = show . toW - -instance (WSub wp p, Show2 wp) => Show2 (FL p) - -instance (WSub wp p, Commute wp, Eq2 wp) => Eq2 (FL p) where - unsafeCompare x y = unsafeCompare (toW x) (toW y) - -instance (WSub wp p, Invert wp) => Invert (FL p) where - invert = fromW . invert . toW - -instance (WSub wp p, Commute wp) => Commute (FL p) where - commute (xs W.:> ys) = do ys' W.:> xs' <- W.commute (toW xs W.:> toW ys) - return (fromW ys' W.:> fromW xs') - -mergeFL :: (WSub wp p, Merge wp) => (p :\/: FL p) wX wY -> (FL p :/\: p) wX wY -mergeFL = fromW . W.mergeFL . 
toW - -merge :: (WSub wp p, Merge wp) => (p :\/: p) wX wY -> (p :/\: p) wX wY -merge = fromW . W.merge . toW - -commute :: (WSub wp p, Commute wp) => (p :> p) wX wY -> Maybe ((p :> p) wX wY) -commute = fmap fromW . W.commute . toW - -getPairs :: FL (RepoPatchV2 Prim2) wX wY -> [Sealed2 (RepoPatchV2 Prim2 :> RepoPatchV2 Prim2)] -getPairs = map (mapSeal2 fromW) . W.getPairs . toW - -getTriples :: FL (RepoPatchV2 Prim2) wX wY -> [Sealed2 (RepoPatchV2 Prim2 :> RepoPatchV2 Prim2 :> RepoPatchV2 Prim2)] -getTriples = map (mapSeal2 fromW) . W.getTriples . toW - -coalesce :: (Prim2 :> Prim2) wX wY -> Maybe (FL Prim2 wX wY) -coalesce = fmap (fromW . maybeToFL) . W.coalesce . toW diff --git a/harness/Darcs/Test/Patch/WithState.hs b/harness/Darcs/Test/Patch/WithState.hs index ea814a1d..abe37527 100644 --- a/harness/Darcs/Test/Patch/WithState.hs +++ b/harness/Darcs/Test/Patch/WithState.hs @@ -189,7 +189,7 @@ makeWSGen stGen = do s <- stGen -- | A class to help with shrinking complex test cases by simplifying -- the starting state of the test case. See also 'PropagateShrink'. -class ShrinkModel prim where +class ShrinkModel s prim where -- |Given a repository state, produce a patch that simplifies the -- repository state. The inverse of the patch can be passed as the -- "shrinking fixup" to 'propagateShrink'. @@ -214,15 +214,20 @@ class ShrinkModel prim where -- V V -- s wX1 ----------------> s wY1 -- p1 wX1 wY1 - shrinkModelPatch :: ModelOf prim wX -> [Sealed (prim wX)] + shrinkModelPatch :: s wX -> [Sealed (prim wX)] checkOK :: Fail a -> [a] checkOK = maybe [] (\x -> [x]) . maybeFail shrinkModel :: forall s prim wX - . (Apply prim, ApplyState prim ~ RepoState s, ModelOf prim ~ s, RepoModel s, ShrinkModel prim) - => s wX -> [Sealed (WithEndState s (prim wX))] + . ( ApplyState prim ~ RepoState s + , RepoModel s + , ShrinkModel s prim + , RepoApply prim + ) + => s wX + -> [Sealed (WithEndState s (prim wX))] shrinkModel s = do Sealed prim <- shrinkModelPatch s endState <- checkOK $ repoApply s prim @@ -266,11 +271,11 @@ propagateShrinkMaybe (Just2 prim :> p) = propagateShrink (prim :> p) -- patch type of the test case. shrinkState :: forall s prim p - . ( Invert prim, Apply prim, RepoModel s - , ShrinkModel prim, PropagateShrink prim p + . 
( Invert prim, RepoModel s
+     , ShrinkModel s prim, PropagateShrink prim p
      , ApplyState prim ~ RepoState s
      , ModelOf p ~ s
-     , ModelOf prim ~ s
+     , RepoApply prim
      )
  => Sealed2 (WithStartState2 p)
  -> [Sealed2 (WithStartState2 p)]
@@ -281,8 +286,9 @@ shrinkState (Sealed2 (WithStartState2 s p)) = do
 
 shrinkAtStartState
   :: ( Shrinkable p, RepoModel (ModelOf p), Effect p
-     , prim ~ PrimOf p, Invert prim, Apply prim
+     , prim ~ PrimOf p, Invert prim
      , ApplyState prim ~ RepoState (ModelOf p)
+     , RepoApply prim
      )
   => WithStartState2 p wX wY
   -> [FlippedSeal (WithStartState2 p) wY]
@@ -295,10 +301,10 @@ shrinkAtStartState (WithStartState2 s p) = do
 
 instance ( ArbitraryState p, Shrinkable p, RepoModel s
          , s ~ ModelOf p
-         , s ~ ModelOf prim
          , Effect p
-         , Apply prim, ApplyState prim ~ RepoState s
-         , prim ~ PrimOf p, Invert prim, ShrinkModel prim, PropagateShrink prim p
+         , ApplyState prim ~ RepoState s
+         , prim ~ PrimOf p, Invert prim, ShrinkModel s prim, PropagateShrink prim p
+         , RepoApply prim
          )
   => ArbitraryS2 (WithStartState2 p) where
   arbitraryS2 = do
diff --git a/harness/Darcs/Test/Shell.hs b/harness/Darcs/Test/Shell.hs
index 71585d50..ef4f68ad 100644
--- a/harness/Darcs/Test/Shell.hs
+++ b/harness/Darcs/Test/Shell.hs
@@ -1,5 +1,5 @@
 {-# LANGUAGE CPP, OverloadedStrings, ExtendedDefaultRules, RecordWildCards #-}
-{-# OPTIONS_GHC -fno-warn-type-defaults #-}
+{-# OPTIONS_GHC -Wno-type-defaults #-}
 module Darcs.Test.Shell
   ( Format(..)
   , DiffAlgorithm(..)
@@ -29,7 +29,6 @@ import Shelly
   , mkdir_p
   , onCommandHandles
   , pwd
-  , setenv
   , shelly
   , silently
   , sub
@@ -38,7 +37,7 @@ import Shelly
   , writefile
   , (</>)
   )
-import qualified System.FilePath as Native ( searchPathSeparator, splitSearchPath )
+import qualified System.FilePath as Native ( splitSearchPath )
 import System.FilePath ( makeRelative, takeBaseName, takeDirectory )
 import qualified System.FilePath.Posix as Posix ( searchPathSeparator )
 import System.IO ( hSetBinaryMode )
@@ -113,6 +112,7 @@ runtest' ShellTest{..} srcdir =
       , ("TESTDATA", EnvFilePath (srcdir </> "tests" </> "data"))
       , ("TESTBIN", EnvFilePath (srcdir </> "tests" </> "bin"))
       , ("DARCS_TESTING_PREFS_DIR" , EnvFilePath $ wd </> ".darcs")
+      , ("DARCS_CACHE_DIR" , EnvFilePath $ wd </> ".cache/darcs")
       , ("EMAIL" , EnvString "tester")
       , ("GIT_AUTHOR_NAME" , EnvString "tester")
       , ("GIT_AUTHOR_EMAIL" , EnvString "tester")
@@ -129,6 +129,9 @@ runtest' ShellTest{..} srcdir =
       , ("GHC_VERSION", EnvString $ show (__GLASGOW_HASKELL__ :: Int))
       -- https://www.joshkel.com/2018/01/18/symlinks-in-windows/
       , ("MSYS" , EnvString "winsymlinks:nativestrict")
+#ifdef WIN32
+      , ("OS" , EnvString "windows")
+#endif
       ]
     -- we write the variables to a shell script and source them from there in
     -- ./lib, so that it's easy to reproduce a test failure after running the
@@ -136,8 +139,6 @@ runtest' ShellTest{..} srcdir =
     writefile "env" $
       T.unlines $
        map (\(k, v) -> T.concat ["export ", k, "=", envItemForScript v]) env
-    -- just in case the test script doesn't source ./lib:
-    mapM_ (\(k, v) -> setenv k (envItemForEnv v)) env
 
     mkdir ".darcs"
     writefile ".darcs/defaults" defaults
@@ -174,13 +175,6 @@ runtest' ShellTest{..} srcdir =
         WithCache -> []
         NoCache -> ["ALL no-cache"]
 
-    -- convert an 'EnvItem' to a string you can put in the environment directly
-    envItemForEnv :: EnvItem -> Text
-    envItemForEnv (EnvString v) = pack v
-    envItemForEnv (EnvFilePath v) = toTextIgnore v
-    envItemForEnv (EnvSearchPath vs) =
-      T.intercalate (T.singleton Native.searchPathSeparator) $ map toTextIgnore vs
-
     -- convert an 'EnvItem' to a string that will evaluate to 
the right value
     -- when embedded in a bash script
     envItemForScript :: EnvItem -> Text
diff --git a/harness/Darcs/Test/UI/Commands/Test/IndexedApply.hs b/harness/Darcs/Test/UI/Commands/Test/IndexedApply.hs
index a9c964ad..e0e117f1 100644
--- a/harness/Darcs/Test/UI/Commands/Test/IndexedApply.hs
+++ b/harness/Darcs/Test/UI/Commands/Test/IndexedApply.hs
@@ -2,6 +2,9 @@ module Darcs.Test.UI.Commands.Test.IndexedApply
   ( IndexedApply(..)
   ) where
+
+import Darcs.Prelude hiding ( Monad(..) )
+
 import Darcs.Util.IndexedMonad
 
 import Darcs.Patch.Witnesses.Ordered ( FL(..), RL(..) )
 
@@ -10,7 +13,7 @@ import Darcs.UI.Commands.Test.Impl ( PatchSeq(..) )
 
 -- our own indexed monad Apply class
 class IndexedApply p where
-  type ApplyState p :: * -> * -> * -> *
+  type ApplyState p :: Type -> Type -> Type -> Type
   apply :: Monad (ApplyState p) => p wX wY -> ApplyState p wX wY ()
   unapply :: Monad (ApplyState p) => p wX wY -> ApplyState p wY wX ()
 
diff --git a/harness/test.hs b/harness/test.hs
index 7a2cb5ee..bd7dce7b 100644
--- a/harness/test.hs
+++ b/harness/test.hs
@@ -1,4 +1,4 @@
-{-# OPTIONS_GHC -fno-warn-missing-fields #-}
+{-# OPTIONS_GHC -Wno-missing-fields #-}
 module Main ( main, run, defaultConfig, Config(..) ) where
 
 import Darcs.Prelude
@@ -12,6 +12,7 @@ import Darcs.Test.Shell
 import qualified Darcs.Test.UI
 import Darcs.Util.Exception ( die )
 
+import Control.Concurrent ( setNumCapabilities )
 import Control.Monad ( filterM, unless, when )
 import Data.List ( isPrefixOf, isSuffixOf, sort )
 import GHC.IO.Encoding ( textEncodingName )
@@ -34,6 +35,7 @@ data Config = Config { suites :: String
                      , diffalgs :: String
                      , index :: String
                      , cache :: String
+                     , failing :: String
                      , full :: Bool
                      , darcs :: String
                      , tests :: [String]
@@ -51,11 +53,12 @@ data Config = Config { suites :: String
 
 defaultConfigAnn :: Annotate Ann
 defaultConfigAnn = record Config{}
-  [ suites := "snu" += help "Select which test suites to run: (s=shell, n=network, u=unit, f=failing, h=hashed) [snu]" += typ "SET"
+  [ suites := "snu" += help "Select which test suites to run: (s=shell, n=network, u=unit, h=hashed) [snu]" += typ "SET"
   , formats := "123" += help "Select which darcs formats to test: (1=darcs-1, 2=darcs-2, 3=darcs-3) [123]" += name "f" += typ "SET"
   , diffalgs := "p" += help "Select which diff alorithms to use (p=patience, m=myers) [p]" += name "a" += typ "SET"
   , index := "y" += help "Select whether to use the index (n=no, y=yes) [y]" += typ "SET"
   , cache := "y" += help "Select whether to use the cache (n=no, y=yes) [y]" += typ "SET"
+  , failing := "n" += help "Select whether to use failing tests (n=no, y=yes) [n]" += typ "SET"
   , full := False += help "Shortcut for -s=snu -f=123 -a=mp -c=yn -i=yn"
   , darcs := "" += help "Darcs binary path" += typ "PATH"
   , tests := [] += help "Pattern to limit the tests to run" += typ "PATTERN" += name "t"
@@ -64,7 +67,7 @@ defaultConfigAnn
   , plain := False += help "Use plain-text output [no]"
   , hideSuccesses := False += help "Hide successes [no]"
   , threads := 1 += help "Number of threads [1]" += name "j"
-  , qcCount := 100 += help "Number of QuickCheck iterations per test [100]" += name "q"
+  , qcCount := 1000 += help "Number of QuickCheck iterations per test [1000]" += name "q"
   , replay := Nothing += help "Replay QC tests with given seed" += typ "SEED"
   ] += summary "Darcs test harness"
 
@@ -120,7 +123,6 @@ run conf = do
   when e $ die ("Directory " ++ d ++ " already exists. 
Cowardly exiting") let hashed = 'h' `elem` suites conf - failing = 'f' `elem` suites conf shell = 's' `elem` suites conf network = 'n' `elem` suites conf unit = 'u' `elem` suites conf @@ -138,11 +140,14 @@ run conf = do nocache = 'n' `elem` cache conf withcache = 'y' `elem` cache conf + withFailing = 'y' `elem` failing conf + withSucceeding = 'n' `elem` failing conf + darcsBin <- case darcs conf of "" -> findDarcs v -> return v - when (shell || network || failing) $ do + when (shell || network) $ do unless (isAbsolute $ darcsBin) $ die ("Argument to --darcs should be an absolute path") unless (exeExtension `isSuffixOf` darcsBin) $ @@ -169,9 +174,13 @@ run conf = do let findTestFiles dir = select . map (dir ) <$> listDirectory dir where filter_failing = - if failing - then id - else filter $ not . ("failing-" `isPrefixOf`) . takeBaseName + case (withFailing, withSucceeding) of + (True,True) -> id -- "yn" + (False,True) -> -- "n" + filter $ not . ("failing-" `isPrefixOf`) . takeBaseName + (True,False) -> -- "y" + filter $ ("failing-" `isPrefixOf`) . takeBaseName + (False,False) -> const [] -- "" select = sort . filter_failing . filter (".sh" `isSuffixOf`) stests <- @@ -222,6 +231,7 @@ run conf = do main :: IO () main = do hSetBuffering stdout NoBuffering clp <- cmdArgs_ defaultConfigAnn + setNumCapabilities (threads clp) run $ if full clp then clp { formats = "123" diff --git a/release/distributed-context b/release/distributed-context index 6639e33f..4672618a 100644 --- a/release/distributed-context +++ b/release/distributed-context @@ -1 +1 @@ -Just "\nContext:\n\n\n[TAG 2.18.5\nBen Franksen **20250109094054\n Ignore-this: 87740960ae17ffb53882f6c942ff13164d84a18f70a5a70cf457c1595e2ae064927ad4def40a9673\n] \n" \ No newline at end of file +Just "\nContext:\n\n\n[refactor getOnePatchset -> matchOnePatchset\nBen Franksen **20210916064826\n Ignore-this: fdfd1c83a97bdf09bc6cd59bf9dad24bf918520d2192a1e977a6bcbefbde465e4fd8eb44e78577d7\n \n Darcs.Repository.Match.getOnePatchSet only needed the Repository argument to\n call readPatches. However, in all use cases we have already done that, so we\n can as well pass the PatchSet directly. The new function matchOnePatchSet\n was then moved to Darcs.Patch.Match as it no longer depends on the\n Repository layer.\n] \n\n[fix in Darcs.UI.Prompt: show default help only if we have a default\nBen Franksen **20230707091117\n Ignore-this: 53904b23064c6be7a2c99978ae36a96ab0d40cbbcbb33a5f1668f222f378f9acaf4912fc0d44379c\n] \n\n[allow user to cancel command when prompted about long comment\nBen Franksen **20230707084448\n Ignore-this: f79a18ff1ca1fd0a9a2310a811d9d9923c5604de82a129c56b43420bd14fce4197446f783ffc103e\n] \n\n[accept issue2462: --remote-darcs gets ignored for transfer-mode\nBen Franksen **20210627083540\n Ignore-this: ca0cce019f7d5fe53757eb0184e4ec878e39f63d5679afbda2df6f70f05b736f79b6d2f09893332b\n] \n\n[use colored output when piping it through a pager\nBen Franksen **20240714180050\n Ignore-this: 852bf6abf2c24076675d1a77b0893f01bf72f9e91e45686a31a7391433d3d3d37f316fd570c87611\n \n I previously had to set DARCS_ALWAYS_COLOR=1 to get colored output when a\n pager is invoked, which has become the rule rather than the exception. But\n that makes darcs output color codes even when the user redirects the output\n to a file, which is not something anybody wants to happen. 
The trick is to\n pass the printers the stdout /before/ we invoke the pager (and ignore the\n handle it gets passed later when stdout is the write end of the pipe).\n] \n\n[remove unused or trivial exports from Darcs.Util.Printer\nBen Franksen **20240629145415\n Ignore-this: 8159040c0a1302ea21bf0b032f847e411f565bf659f29716b14f715fccc8bbf074742f3096017f76\n] \n\n[ci: exclude ghc-8.10.7 on windows due to obscure assembler error\nBen Franksen **20241217154912\n Ignore-this: e95e32de639fb8815ac51fb6db098f08306394af15450707315932c27f3c3c99d124f6680fd42fa6\n] \n\n[harness: do not setenv, instead make sure all test scripts source ./lib\nBen Franksen **20241209183720\n Ignore-this: ae2c8e17e1508a8eceb058240509fb159268473425c2b2aabe608c44f4bdce8f850c803006cda47e\n] \n\n[tests/latin9-input.sh: simplify creation of interaction_script.txt\nBen Franksen **20241216180240\n Ignore-this: 22d472a25e0ffc5a3854c4adc550b244a5ebb8794d428c762e8934e612185b6e61395239eea83759\n] \n\n[tests/printer.sh: simplify unsetting of env vars\nBen Franksen **20241216175948\n Ignore-this: 26774f3c69da4034a5fc5070f665ceeca738b17468cb93c27bf36805c6c8a9b51cf2d05f3ed586f9\n] \n\n[unbundle the shelly dependency\nBen Franksen **20241202155019\n Ignore-this: b1ca0e8e58eeea0f958bd65d279571741e43cee17a6094eb6388b17fe92f6761b17d041e4872c8b9\n] \n\n[resolve issue2732: generate XML using the xml package\nBen Franksen **20241104182444\n Ignore-this: d7133c3369471936c930b9eb572d5ff2aa5498bc9975e243599756c8a3cd9551c347a4d1d150336f\n \n This patch is supposed to replace all the manually generated XML with uses\n of the xml package.\n] \n\n[fix: require bytestring >= 0.11.3.0\nBen Franksen **20241216192231\n Ignore-this: f63cc0a56768c2cc7bd23c81eb8f9a6e55ca2f3ccc62f8cf0cd086d50daa475a6860cbedda657b62\n \n Earlier versions don't export Data.ByteString.Short.map. 
This fixes building with\n ghc < 9.2, after being broken by\n \n patch 22b35c6b822281bea274838a96d5fdbc37c9d24e\n Author: Ben Franksen \n Date: Fri May 12 11:36:46 CEST 2023\n * use ShortByteString for Name (and thus AnchoredPath)\n] \n\n[optimize eqAnycase\nBen Franksen **20230513063608\n Ignore-this: 3c4495a2b69f482beb5c67d89a1649e524da7c9d09cc6ed6960127803647729966519579d9b7c4ba\n] \n\n[remove unused name2fp\nBen Franksen **20230513062054\n Ignore-this: 51ec70e89ea336f859e378a52f002e48ee845cbf1a1ad555b542b0447e213224c437ea21741df0e7\n] \n\n[use ShortByteString for Name (and thus AnchoredPath)\nBen Franksen **20230512093646\n Ignore-this: 27ce412afbfea9dc4e85adb442859c5bd9490404aeeb7fa215c64efafac8c03a22453969eb1b420b\n \n This is the minimum necessary change that keeps the external API as is.\n] \n\n[document Name and turn unName from record accessor to a separate function\nBen Franksen **20230423163723\n Ignore-this: 32a37ed0123f01b0c3ce5d1edb06cf9645b849d0058d86abe9f393459ced320791278a1e52e2f3af\n \n This makes the derived Show instance less verbose, which is helpful when\n debugging failing test cases.\n] \n\n[improve help text for --covering option\nBen Franksen **20241130131159\n Ignore-this: 3e66699252b53e87656255ce54747d27a771be440ed4cfa74d768c63c6c4c933c3bc66badac2de\n] \n\n[resolve issue2288: add --covering PATTERN option to show tags command\nBen Franksen **20240622073733\n Ignore-this: b95a7a678b4871fb6dbbaff90e19d781cca1beeae80d12cd8bcb49e0d94403567bcbfa714418f638\n] \n\n[ci: include cabal version in job name\nBen Franksen **20241130121253\n Ignore-this: b50e418bc05155070b0eb4efd8c36e95f4266a15237a978e20dd0d2128434c6213f0d01cd64d1cec\n] \n\n[patch index: missing fid is not necessarily a bug\nBen Franksen **20241128135439\n Ignore-this: 47e37be6f94623abd78a02fcc397d423918627a7d2a0bad707ce82acf9ffb331095eee33b7b17f78\n \n Such an error can happen if the repo contains broken patches, especially\n broken move patches with either missing source or target. 
So it makes more\n sense to ask the user to try repair.\n] \n\n[check/repair: avoid newlines in progress messages\nBen Franksen **20241128132655\n Ignore-this: 9518036436c726e6d948444ec7f363d4d1efb9e73db1c88f576628127950acd7427330c6dbaaf488\n \n It doesn't make sense to output the complete patch info in a progress\n message and our progress machinery can't properly handle that anyway.\n] \n\n[check/repair: display patch info of repaired patches\nBen Franksen **20241128132432\n Ignore-this: 63b1df0f1639942c81a294439898d999e775f105e75a439f1918be1ba0c71806d2f0b2d6ef914872\n] \n\n[restrict upper limit on attoparsec from <0.15 to <14.4 for ghc versions <8.10\nBen Franksen **20241127163543\n Ignore-this: 1b481a11e4f5a2b081651647dc27c15e715d86ea656c30aad893fd6d47379ae7841d1e81161eaa68\n \n Otherwise we get errors when building the attoparsec dependency.\n] \n\n[exclude system-fileio-0.3.16.5 which fails to build on Windows\nBen Franksen **20241130111802\n Ignore-this: 13aef8e0eb295e383c8020fac8ecec3cd1cd7915b78d68e6d5169736e3fd080eb1f76207768235d2\n] \n\n[ci: remove macos-11, add (and use) ubuntu-24.04\nBen Franksen **20241111201735\n Ignore-this: 5aa6c932e3a5e2bd07f85b66c2c587a3a44df7b4533ef86aef9aff1730c1cbb6e54d536b61214c42\n] \n\n[ci: use latest version of ghc-9.6\nBen Franksen **20241111201622\n Ignore-this: 5dfb1c6a5dac3d567afb907595942b955af3ec990ad1737976b2c4b578ba472836de394fc5942ad1\n] \n\n[raise upper bound on tls, data-default-class -> data-default\nBen Franksen **20241107080831\n Ignore-this: e1af6546320196398b9801d9b21983783bf8751cb8ff57085e6b5970580866165db46223f5c83291\n \n This allows building against current stackage nightly and restores building\n with ghc-9.8 which broke due to PVP violating updates of these packages.\n] \n\n[ci: run tests for ghc-9.10 instead of ghc-9.8\nBen Franksen **20241107094641\n Ignore-this: 185eb4aa9c52a10433851600af6bd656ed3e32e1cc3491423f973316d0cdc28921a31082678c638f\n] \n\n[support building with cabal-3.12\nBen Franksen **20241107091827\n Ignore-this: 8beb3dbdc41fcd665448c95e500e684650757454d2229bf46cea737e17902433b4ebc6c94ef8cf5e\n] \n\n[stack.yml: target lts-22.34, remove extra-deps\nBen Franksen **20241014073102\n Ignore-this: 797697e1275126dcda57053d2036fd00dda8235a90d7cbda0c6fbdc58ec8b62e2fda470d6ada1bdb\n \n This is the version (\"resolver\") that the debian haskell team currently uses\n to build haskell packages. To make darcs build against it (with no\n extra-deps), we also have to limit usage of tls>=2.0 to ghc>=9.8 (instead of\n ghc>=9.6) because lts-22.34 comes with ghc-9.6.6 but it still on tls-1.8.0.\n \n From now on, whenever we make changes to our cabal file, we should be\n careful to check that\n - stack build still works (i.e. finds a valid build plan) and\n - we adapt our stack.yml to whatever the debian haskell team targets\n These two steps should ensure continued availability of darcs in debian.\n] \n\n[exclude certain versions of directory dependency only on windows\nBen Franksen **20241012212249\n Ignore-this: 265d0bf2d75afc200fa1d8e1d6d74b7b4f80f0282c95f43d6207f8773c7eccdd8638ebe3f43a84ef\n \n The bug in the directory package that forces us to exclude these versions\n appears only on windows. 
This makes it possible to build darcs with stack\n against current stackage nightly, hopefully allowing darcs to get back into\n stackage and afterwards back into debian.\n] \n\n[remove dependency on strict-identity\nBen Franksen **20241012211305\n Ignore-this: 832020d9445251ff5374a524fe50af9dd3d42f06ebcfe4207cdd586a206e580a5869690c2bddc23b\n \n This is done by bundling (only) the single module as\n src/Darcs/Util/StrictIdentity.hs.\n] \n\n[accept issue2729: index crashes with zero size, zero timestamp file\nBen Franksen **20240809091510\n Ignore-this: 1846ec6368b2753ac38adf88beb94b3e58ef61666bbd856820de0a80d736b4a4e186d6059a5d6de0\n] \n\n[support GHC 9.10\nGanesh Sittampalam **20240720165737\n Ignore-this: cedeb71c11245d1d992f93c7e2c6d94830a206805c51b1597abd52dd9e13ddd8e3109fc1aa613bf9\n] \n\n[remove progress reporting from writeDarcsHashed\nBen Franksen **20240628063734\n Ignore-this: fdc80ae085f447abcefe800f10271fa15ce7a8f8dd1319d3ec2d8a4f6e48e2f700de7506d61e23d5\n \n This gets rid of useless \"Getting pristine ... done\" messages, since (apart\n from a few exceptions) this function is only used to write an empty tree to\n a newly created repo. Almost all non-trivial interaction with hashed trees\n happens via hashedTreeIO instead.\n] \n\n[remove unnecessary call to writePristine from clone command\nBen Franksen **20240628215424\n Ignore-this: 9cdb2723dd33a331fb44d74c5575295d20381565583f66b4e47cd3d326609c0a7f68e21bc31617c9\n] \n\n[remove unnecessary call to writeDarcsHashed from diff command\nBen Franksen **20240628064110\n Ignore-this: a0e15211f621d1995dcf9f77c8fc3024f9e29762922828883058e099fecb72054394ef57ed00f400\n] \n\n[accept issue1241: rollback with file beyond tag\nBen Franksen **20240623075709\n Ignore-this: 40b0f648b367614c96af3fc2a009781ba463c474e0f4e5543143cd0fb7e865289d06d13d0ca6dc21\n] \n\n[add (succeeding) regression test for issue2556\nBen Franksen **20240629105452\n Ignore-this: b7db22f54cf740e2f6063683ea075e8230c8c6ea0d160fbe71810dcd28c0a0a7a8d3f2dfa6f8630c\n] \n\n[(mostly) fix the detrimental side-effects of the fix for issue2727\nBen Franksen **20240626144339\n Ignore-this: 9fa4ed19d87819fda62fbf22169f30a630f3040e828b2210501d06c8df24f8295debbc617c5bd768\n \n This re-implements onlyRealConflicts by counting and comparing conflicts\n before and after commuting conflicting Named patches back to before the\n patch in question. We also handle the case where this commute fails (due to\n bugs in V1 or V2): instead of calling error i.e. crashing darcs, we prefer\n to create potentially inconsistent resolutions by assuming that the rest of\n the conflicting patches are real conflicts.\n \n Apart from the tests for issue2727 there remains just one failing test,\n where we run into the infamous \"Non-exhaustive patterns in Just a2'o\" bug in\n RepoPatchV2, namely tests/conflict-fight.sh. Skipping that one for darcs-2\n seems to be the only solution. Some of the tests for issue2727 that were\n previously skipped (for darcs-1 or darcs-2) now also pass for those formats.\n] \n\n[a number of new regression tests for issue2727\nBen Franksen **20240621183816\n Ignore-this: ffba8e536edf416f91f9ee66d5504e862a88b5354fd781e227eaed15889aecf4817388c1fc88e3c2\n \n These all came up during my attempts at fixing issue2727 and are based on\n failing QC test cases, which I manually reconstructed. 
Some of them (notably\n 4, 5, 8, and 11) pass even before applying the final fix for issue2727 (but\n after the fixes for RepoPatchV3); they represent bugs I introduced on my way\n toward the final solution. I decided to include them anyway, in case we ever\n have to touch that code again.\n] \n\n[rollback \"disable propResolutionsOrderIndependent for Named patches\"\nBen Franksen **20240621183624\n Ignore-this: 9a3e518a900492bf370fc2eec1ba01a50cc5496955e15c371f84ecb9afdedb6a962553d788fed223\n \n The property no longer fails for Named patches.\n] \n\n[a few local renamings in Darcs.Patch.Named for better clarity\nBen Franksen **20240621190231\n Ignore-this: aae320f1ebad3f29a15d352ac8683aa09a50824811be84fe57a4993e416b4e7993eddb55f78ce8eb\n] \n\n[resolve issue2727: conflict resolution should be invariant under reordering\nBen Franksen **20240621182317\n Ignore-this: dac56d011593c300cea87487623c700ed8b4157ac84f543c7aeb0db4fee47bf1317c0bf8f2f75142\n \n This is almost, though not quite, a re-write of 'resolveConflicts' for Named\n patches. There were many things wrong with the old version. For instance,\n reliably determining the patches that a Named patch conflicts with is more\n involved than just a call to findConflicting. Also, lessons learned from the\n latest bug fixes to resolveConflicts for RepoPatchV3 have been applied here\n as well. Particularly, the question of which conflicts have been resolved at\n the Named patch layer via explicit dependencies can and should be answered\n for direct conflicts between two patches. Transitive conflicts do /not/ play\n any role here, they are only important to finally generate the conflicting\n alternatives, and that is (as before) done at the RepoPatch level. For\n clarity, I decided to split the algorithm into two passes, one which gathers\n the necessary information (conflicts, dependencies), and another one to\n separate patch contents into resolved (by explicit dependencies) and\n unresolved (by same). This makes it more verbose overall, and perhaps\n slightly less efficient. But it should be clearer now what is happening and\n why. It is also much easier to debug in this form, since we can trace\n intermediate results to locate where things start to go wrong.\n] \n\n[minor changes to a regression test for issue2727\nBen Franksen **20240621181548\n Ignore-this: b354eb207bcf016f56be5310be8937ca6fd04452326fd009212022ba225d0c46ada05fd153223cf8\n] \n\n[make ./tests/resolve-conflicts-explicitly.sh slightly more robust\nBen Franksen **20240615151350\n Ignore-this: e1ed35ba7027bda587a6e4b195ff526da7ab503c43ba2d7aa9c520258a1fcb139c424c33f9921cb5\n \n It now doesn't get quite as easily confused when you add tracing to dump\n internal data for debugging.\n] \n\n[harness: allow parallel execution of unit tests, particularly QC tests\nBen Franksen **20240621082622\n Ignore-this: bad743e80fecef3783f5e24e6b6731e512e4efac04ada4c9c9875d224ca2f043b66dfc16407946\n] \n\n[regression test for issue2727, regarding RepoPatchV3 resolution\nBen Franksen **20240621061630\n Ignore-this: d5c86ac2a1f30c1e3aea7fd65bcde4a15ea271d19e26172977a2ce9441307d20f13c908ed8f33666\n \n It originated as a failing QC test for Named RepoPatchV3 with -q=100000. 
As\n with the previous regression tests for that issue, it turned out that\n explicit dependencies play no role at all here and the inputs to the\n instance for RepoPatchV3 were in fact identical up to commutation.\n] \n\n[fix another bug in RepoPatchV3 conflict resolution\nBen Franksen **20240619222433\n Ignore-this: b8d4fd671a2fecfa5e40ed6ba27b8b45034bafd6b135127fcf4ee62ce94d42db0727c8026e505fd7\n \n The error this patch fixes should have been obvious, given the motivating\n example scenarios discussed in the doc comment to 'findComponents'. Indeed,\n I concluded there that \"it would be wrong to first join conflict sets and\n then use those to join components\". (Note that this remark is about\n /resolved/ conflicts.) Yet, I failed to consistently follow that reasoning\n in the code, and at one point lumped together the patch and the ones it\n conflicts with into a single set. The correct view is that a single \"direct\n conflict\" is an edge in the conflict graph and the corresponding \"conflict\n set\" always contains exactly two patches.\n] \n\n[add --canonize/--no-canonize option to record and amend commands\nBen Franksen **20240612092154\n Ignore-this: ba8a22be137a4d453d4d33fa7b441516f020b0623e0e92fe8b771e083d77e8b7897f00d1ac6e2d07\n \n This is only so we can reconstruct failing QC test cases for named patches.\n] \n\n[Switch from * to Type\nGanesh Sittampalam **20240617000741\n Ignore-this: 1e88216d0279d4df38b38c951a77f2fd278faa418d1ee340fef851893aeaa1b16f3397c5535d5201\n] \n\n[two regression tests related to issue2727\nBen Franksen **20240617164207\n Ignore-this: 9ef7c33e7a89ffd64bfe2d88945d822a6aaff109ff35f584a50081ad68dd8357ef53e4368b1894ab\n \n These contain no --add-deps and thus guard against errors in the RepoPatch\n implementations of resolveConflicts. One of them fails for darcs-1 patches\n and is therefore skipped (I can't be bothered to even look at the\n RepoPatchV1 code anymore, much less make any changes to it).\n] \n\n[bugfix in Darcs.Patch.V3.Resolution.findComponents\nBen Franksen **20240617081546\n Ignore-this: 9b898d8be11e28b5b80d203370ffc8f0117a3ec2dc4d6ba9b7a4258af50e1980411a4f4bcd7cf55b\n \n The error causes propResolutionsOrderIndependent to fail and also generates\n more conflict markup than wanted, in in the case of a non-empty context. the\n point is that we should /not/ add conflicts contained in the patch under\n consideration to our todo set if we have exhausted the patches in the\n trailing \"interesting\" segment and the patch under consideration is in the\n \"context\". We regard those as uninteresting, either because they are not\n \"new\" conflicts e.g. when pulling, or because we consider conflicts\n contained in them as already resolved e.g. due to explicit dependencies.\n] \n\n[fix/generalize propResolutionsOrderIndependent\nBen Franksen **20240616163408\n Ignore-this: 5b828199754bf78bd282a7f940c2b4410184fd146c6e8745651e284f85c6e0cb8117e041ab3eaba8\n \n It now tests the property (as it should) on a pair of mergeable RLs, whereas\n before we fixed the context (the first sequence) to be empty.\n \n The generator for the pair requires a wrapper type so we can generate a\n split point for the sequence i.e. a random number between 0 and the length\n of the sequence. 
This wrapper then needs various instances which are mostly\n trivial but nevertheless tedious to implement.\n \n Note that the simpler method of directly generating a pair of\n MergeableSequences is /not/ general enough, and indeed does not catch the\n kind of errors we want to guard against. This is because it confines\n conflicts to each sequence, whereas we need them to spread over both sides\n of the split.\n] \n\n[more useful instance Arbitrary PatchInfo for Named patches\nBen Franksen **20240611112817\n Ignore-this: 33f18cbd9bb09ef0e5b0fc7a186a86e8ba7ff42fab501fb31bac19bc26aee1fb055f50d552c06907\n \n Shrinking patch names is problematic due to explicit dependencies and not\n needed if we generate only minimal, human readable (and always valid) names.\n] \n\n[add WithNames repo model wrapper for Named patches\nBen Franksen **20240604195912\n Ignore-this: 1b241be6e3636b11ce21608209697715d1cc1c3d4ab4eaa79c19201d94b0bf393917d637ab186a68\n \n This is much cleaner than hacking the applied patch names into the existing\n repo models. Also led to a clean instance ArbitraryState (Named p) (which\n was buggy before) and may in the future allow to shrink Named patches by\n removing some of their explicit dependencies.\n] \n\n[drop warning suppressions not needed for GHC 9.8\nGanesh Sittampalam **20240607092446\n Ignore-this: f1f76162956699a3e63cc4ca4bfb2e1a4cfef7a8344cf7d57a25f815ea0c1820ef653c837f4df4a9\n] \n\n[drop CPP in Darcs.Prelude\nGanesh Sittampalam **20240606224053\n Ignore-this: 9467b821ad524390fecfcd119217f3863d040d51d850d035e67543e22dc6fdda66fb907570a48df1\n \n instead we'll have warnings in older GHCs\n] \n\n[standardise on -Wno rather than -fno-warn\nGanesh Sittampalam **20240606223609\n Ignore-this: 1dfc85feba425df972c729585921eea1ddec85897e4bea98acafc9c8f01025f0c4321736c6b6e454\n] \n\n[only aim to be warnings clean on one version of GHC\nGanesh Sittampalam **20240606223521\n Ignore-this: baa0a229b2cf58978c3d04518ae0640aa270c191ab20a60650bbc69a8e3d4579afb96fa3b17a3b09\n \n For now this is GHC 9.8\n] \n\n[add a comment about why we have NoMonoLocalBinds etc\nGanesh Sittampalam **20240608165426\n Ignore-this: 83127c5035b3db42e89580809c8673b7b2c95e2a4e673efbeda4afe96b177fb2a7454cbd089517ef\n] \n\n[drop general ModelOf p ~ ModelOf (PrimOf p) constraint\nGanesh Sittampalam **20240606072935\n Ignore-this: e30a0cd87edc08f98077a9624cb4feda599a039e168115c65ef710ef14c3d14b9a54b9bf79bb235b\n] \n\n[make ShrinkModel into a MPTC\nGanesh Sittampalam **20240606004332\n Ignore-this: 8e1c06050946a0c68695aef78e7901fa4297271a22edbe20e7609a9b9322880a8fa6ca4cc5505fea\n] \n\n[remove unused alias\nGanesh Sittampalam **20240606003200\n Ignore-this: f1b086535d9383e6e4fcda9991e8344bf93bf98664d7890a463a5dc4a264db97124f05fa75368d9\n] \n\n[GHC 9.10 fix - need to import Prelude for ~\nGanesh Sittampalam **20240603135359\n Ignore-this: b3a0649bd00b0ebb1ed2a3e71d136972a3d1d83aef49a9e8538ab3330713d5dd6052c3e676f3e909\n] \n\n[manage migration of foldl' to the Prelude\nGanesh Sittampalam **20240602220515\n Ignore-this: af9d8a5d54929e11f8c37409e9b5e5c1491ef8a7d454afb78bc061ef5d3254ef82538fa09c6946fe\n] \n\n[suppress new warning about NE.unzip\nGanesh Sittampalam **20240602220502\n Ignore-this: 928b977611f86b70a2bf74aa00dda28d75d97884489e905f9e9120cd6091da1150aa66046568966d\n] \n\n[Fix incorrect imports of PrimOf(..)\nGanesh Sittampalam **20240602215744\n Ignore-this: eb7fd6dff96aabb00be2f37096ca3172c5fbd24a3df3a231f3d5fbc43024160aaeff4318392050b7\n] \n\n[drop checks for GHC>=7.6\nGanesh Sittampalam 
**20240603152317\n Ignore-this: acfccce3b9f74ae6a251a159eddbb9a115c4c83909ec7183138eea98cbc53420fa005ba7d490bb55\n] \n\n[resolve issue2725: permissions of _darcs/patch_index\nBen Franksen **20240601110447\n Ignore-this: 4c32dc9d6248db9b378547bcdaa05c6ae105a6487088694174503863d4c05576b7ebded1ff27ddbb\n \n The cheap solution here is to set the permissions of the newly created\n _darcs/patch_index/ to that of its parent directory _darcs/. Using _darcs/\n has the advantage that darcs now automatically fixes the permissions of\n existing patch_index directories.\n] \n\n[harness: disable propResolutionsOrderIndependent for Named patches\nBen Franksen **20240526061208\n Ignore-this: 2751ae5d06eda95ac4050acb34bbc82583473ec9be72323955078874502ec6f3a2cab770095f56fc\n \n This property occasionally fails with Named patches. Furthermore, we get\n exceptions when trying to shrink the test complaining about invalid patch\n names. This needs further investigation.\n] \n\n[D.T.Patch.Properties.RepoPatch -> D.T.Patch.Properties.Mergeable\nBen Franksen **20240528083133\n Ignore-this: dd696d6e978b6c0fdc39174eb29ca87503ca84c4c7cefbef3bfdfa7e6a51450d1ac7080b96e5033c\n \n These properties now work for any Mergeable patch type.\n] \n\n[harness: add proper QC tests for Named patches\nBen Franksen **20220615072000\n Ignore-this: b4a289444828db79b5628daa1e5d8d9f5540050a1eec6db99ccbcea48544b7a2d9b7553d3e5bceed\n \n This generalizes RepoPatch properties to any Mergeable patch type. Since we\n want to generate Named patches with meaningful explicit dependencies, we\n have to keep track of the names of patches that have been applied to a repo\n (model). This requires the introduction of yet another (somewhat ad-hoc)\n class RepoApply to add a method 'patchNames' to all testable patch types.\n The default implementation that returns [] is used for all patch types\n except for Named patches.\n] \n\n[harness: prepare generalization of RepoPatch to Mergeable\nBen Franksen **20220615080811\n Ignore-this: 6c073dcc7f970f71267697424a7927bc927eecd345415149b242c524bb3059f9697781539ee73874\n \n Recorded as a separate patch for easier review.\n] \n\n[harness: cleanup RepoPatchV2 examples\nBen Franksen **20230506154716\n Ignore-this: a9a21b4ddbb96d00a53adb513b459a43dc025881ed73df99d8c6da8b02d7a206d1b792a23f21f3c0\n \n This gets rid of the whole Unwitnessed stuff (Darcs.Test.Patch.WSub,\n Darcs.Test.Patch.Properties.GenericUnwitnessed), renames the module,\n and mostly returns list of Sealed2 to avoid coercions.\n] \n\n[rename CHANGELOG to CHANGELOG.md\nBen Franksen **20240526104913\n Ignore-this: 668375a211d970dfd5c8c00d9506300f9e81f9856f33e18a6d6fc1250d5c44450a00be60cc549d00\n] \n\n[add release notes for 2.18.3\nBen Franksen **20240526082420\n Ignore-this: 2ffddb319d036b32296b6591c3fecff16d9492092218038f38b996cd64b04da4d5a29395933a300f\n] \n\n[runghc -> cabal run in release/gen-version-info.hs, too\nBen Franksen **20240526083259\n Ignore-this: 9c91135b8fd2f0ffd2e92ee32f6252fde8ead972a9e72ce3f4188a99cbed0063da2926639a84e977\n] \n\n[make execution of release/gen-version-info.hs more robust\nBen Franksen **20240519114710\n Ignore-this: caa8562971e2e6306233650fe5742950df2f2db415dfc9ab34a84dbc9510f94f1b0bc9370553343c\n \n Using cabal run instead of runghc inside of Setup.hs ensures that we use the\n same ghc version that cabal uses, avoiding dependency errors.\n] \n\n[simplify configureHttpConnectionManager using tls >= 2.0.6\nBen Franksen **20240521074728\n Ignore-this: 
adc9870bceee32b93a7cd3a28527818e3f3a39a6c99856bc52ffdd4a8011288fc4b8c92da12d33ad\n \n They changed the default for TLS.CLientSupported to use\n TLS.ciphersuite_strong instead of the empty list.\n] \n\n[work around issue2720\nBen Franksen **20240522135059\n Ignore-this: fb31c68de86d2a133b3d5738eaa747db783b6da7c22f0469025c7ba64adba7e0ca7f7cac64031fb4\n \n This creates a symbolic link to the system's \"security\" command (if it\n exists) and adapts the reduced PATH so that it is found by darcs.\n] \n\n[clean up tests/issue1465_ortryrunning.sh\nBen Franksen **20240523053950\n Ignore-this: 936c57a8f3ac70d6d7b5c2afa6074f6d386d1060f71c244166de48fc96857e1ce0e00cbe423ef6bf\n \n - remove test repo before creating it\n - remove creation of unused fake 'vi' program (and simplify PATH)\n - add comments to the second (\"bad editor\") test\n] \n\n[downgrade stack.yaml to use lts-21.25\nBen Franksen **20240520090637\n Ignore-this: cc3e3f97a982ed9707769c2af2019cdc99cb844887c63d570cb812cc4af717f8005a22998feafe31\n \n Otherwise we would have to add quite a number of extra-deps, even with\n nightly-2024-05-20. This also changes the conditional for depending on\n crypton-connection-0.4.0 in the cabal file to ghc >= 9.6.\n] \n\n[resolve issue2721 by excluding certain versions of directory package\nBen Franksen **20240520062804\n Ignore-this: 67331fd6560d9665f1c7cdb58ffdd6de3e02b6e26e64779cdd127aa7aefbd1ce2b8d8cc9a559c093\n \n Versions 1.3.8 up to 1.3.8.4 of the directory package have a bug in their\n implementation of copyFile on Windows.\n] \n\n[update stack.yaml to use lts-22.22\nBen Franksen **20240519174557\n Ignore-this: 10ab7b145bf6b5f95f23bf8da21ef993e947c2f4a14ab608c9e0833ddfe1a69d9a2e22248393df8c\n] \n\n[avoid setting TLS.supportedCiphers in configureHttpConnectionManager\nBen Franksen **20240519115132\n Ignore-this: 8ca9b39858387544beadb30cf66af4a3729dff55574ba810fc5750be979cbeeec7d285681727517d\n \n The NC.settingClientSupported member of the default value for NC.TLSSettings\n already contains this setting, so we use that default and only change\n TLS.supportedExtendedMainSecret. This is semantically simpler and more\n robust in case TLSSettingsSimple acquires additional members. 
Unfortunately,\n doing nested record updates is a rather verbose affair in Haskell w/o\n lenses, so the code is somewhat less readable now.\n] \n\n[fix comment in darcs.cabal\nBen Franksen **20240519114637\n Ignore-this: 2699dd3037972d0c28a7b36363e64ec1d539c2da3a5e8ce033a850b7474872cf5399e135af329351\n] \n\n[add docs to configureHttpConnectionManager\nBen Franksen **20240519110146\n Ignore-this: ec69432373410c479f90ddda39b98a25a754db4d9e14055197cb47cfde85596faf2026e34f8fb11a\n] \n\n[add missing constraints data-default-class\nBen Franksen **20240519102829\n Ignore-this: 9d3f5afdeea6274f2532c2aa7fa407461bcddb3516a47a9fccb069500275e587d5cc252f41f6b96b\n] \n\n[Fix missing import in D.R.PatchIndex\nGanesh Sittampalam **20240513100533\n Ignore-this: bea59b7dfbd7e591a57f4bfcc7983003f79db4b4fe41f6072d904e9947a38d5936e75c9cdec0df86\n \n GHC 9.10 complains about it, not sure why earlier\n versions don't.\n] \n\n[ci: run tests with latest supported ghc version\nBen Franksen **20240517115615\n Ignore-this: 574d1033079aa6d6f5f42739ff36e353baf9b5503134f9b0cff839a02a64e101a778781f401661dc\n] \n\n[adapt supported ghc versions in README.md\nBen Franksen **20240517113218\n Ignore-this: c4e2e475e9c439bf0b240fd5dfffc6f57b43b94ecb57329e2686286b47f214a562150f0b336fce27\n] \n\n[ci: add ghc-9.8.2 to build matrix\nBen Franksen **20240517113141\n Ignore-this: a3103d91ec762a45b0e5b8a2a4f317d4358c58c0a94420f43784c623050af0381cdb9dc65a87bbc7\n] \n\n[resolve issue2715: hub.darcs.net does not support \"Extended Main Secret\"\nBen Franksen **20240515142402\n Ignore-this: 60c6b240cd1563fe7b3fc83845610e2c4d2ec00a6d5a4ada2e53e9fff4ff1022780a3dfa64d31a3e\n \n We use a new addition to crypton-connection in order to change the default\n setting back to old (tls < 2.0) default. For ghc < 8.8 crypton-connection\n is not available, so instead we constrain tls as before.\n] \n\n[support time 1.14\nGanesh Sittampalam **20240513000056\n Ignore-this: 929d96355713e762a5e77549597b6bd816992f93af1ef7125cdcf9caa396f6f5d82b2a8d71ff51e8\n \n This currently needs --allow-newer to use\n] \n\n[support QuickCheck 2.15\nGanesh Sittampalam **20240513000037\n Ignore-this: f078dc88530d2fdc51610fcadca12d0b355ae5c6594e76643f1592ccd329b5a8c817374f0522e0cc\n \n This currently needs --allow-newer to use\n] \n\n[support network 3.2\nGanesh Sittampalam **20240512234608\n Ignore-this: e50507030aecbb0cb90d369f331765cde093c746c257d6a301ea0d06ba91353b8d7aff9369e52bd3\n] \n\n[Resolve conflict in version bump from 2.18 branch\nGanesh Sittampalam **20240324221309\n Ignore-this: d035acfcb3339c22e37a8b4bc8f662258886e1f5ad786cd87d08edf9624cb3888606bf38d8177d26\n] \n\n[TAG 2.18.2\nGanesh Sittampalam **20240324205714\n Ignore-this: d03c480e509ce62eacaf0f1ad40f0cb7821163cac313d5ee4c977e7f77f9ec020b9e61214ddafe49\n] \n\n[bump version to 2.18.2\nGanesh Sittampalam **20240324205701\n Ignore-this: dd7bfdf697e6bc700288ee4a9406ed5d76e7ee049bc13a483c9ca3321903620c775e6fc133a12608\n] \n\n[CHANGELOG for 2.18.2\nGanesh Sittampalam **20240324205626\n Ignore-this: a7ea5d6b7ff3712201eb93651cdbf8653872da99839f527a6a6aa4a6f685b0707e867484ba805a57\n] \n\n[ci: upgrade actions/cache, fix cache/save warnings\nBen Franksen **20240510155801\n Ignore-this: 1b2f726a24693810ec0fe995af9a0be968ca86728cc26758ca9582516968397ca1d3b4be1c478a8b\n] \n\n[resolve issue2718: avoid using conflictedPaths to detect whether we have conflicts\nBen Franksen **20240510130208\n Ignore-this: 6aeca7588b2418053dd27039d0f2bcbecbee965e2e8a270f24f89b427fe787f9d811ef1eaf1a61bb\n \n This adds an explicit test named 
haveConflicts, which looks at the \"sum\" of\n the mangled and unmangled conflicts. This is cleaner because in principle\n (albeit not in practice) there could be conflicts that do not involve any\n file paths. Consequently, the warning message for conflicts is now separate\n from the listing of the affected paths.\n] \n\n[two optimizations in remove command\nBen Franksen **20240510071822\n Ignore-this: b4d3049af67f9824469de5692e6337f5dd0586e921b973632e74c9d180287f8d30460777119bc81f\n \n Frst, avoid duplicate call to readPristineAndPending by passing the pristine\n tree to makeRemovePatch. Also, use a path filter before expanding the tree,\n to avoid expansion of irrelevant parts of the tree.\n] \n\n[ci: remove ghc-8.2.2 from build matrix\nBen Franksen **20240509190406\n Ignore-this: f0a8e2f11701fc743212fda56d6106d5364cefead256b63d06d363e787b409a74318611cf1ce06da\n \n Building the dependencies now fails on macos-13 with ghc-8.2.2. We already\n excluded it for the other systems due to various build problems, so can as\n well remove it entirely.\n] \n\n[resolve issue2714: cannot remove recursively\nBen Franksen **20240506183723\n Ignore-this: fcab2e01d6fe9dc16facda2a569eeb4454c3c1208cdcd31bb9c630334290de1de6dba8d5611666d9\n] \n\n[put back the set-default hint\nGanesh Sittampalam **20240309154509\n Ignore-this: 7f52711f8776994dcb902a5d23741d6c6af6f3113c1ec30a2691348235d16cd81689a21eff8d334d\n \n This rolls back patch 8c1131290443248d423cabe0b5566f840e6cb3b3\n] \n\n[Add a --ghcflags/-g option to the test suite\nGanesh Sittampalam **20240309145422\n Ignore-this: aba41611871b0ee986beee2e1bd6fb9af6690bf7315737fdb1c166222685ee8be703003d492eb010\n \n This allows arbitrary GHC flags to be passed on to the ghc\n invocations used to build helper exes in various test scripts.\n \n For example this allows -dynamic to be passed on systems with\n shared libraries.\n \n Based on a patch by Vekhir \n] \n\n[Darcs.UI.External: fix warnings about head\nGanesh Sittampalam **20240225215257\n Ignore-this: 35ed1b4cd0f4c1af70133228547dac7fce5067a1482cbe6810c515f82389d70cb1f2bed0a7a32fc9\n] \n\n[Darcs.UI.Commands.ShowAuthors: fix warnings about head\nGanesh Sittampalam **20240225215214\n Ignore-this: ee64bb835284ac05403ea170915e8ed2191de27042e69edea61f5743bd752ea14e44082f54d90f9a\n] \n\n[use headErr/tailErr from safe for several head/tails\nGanesh Sittampalam **20240225215204\n Ignore-this: bb7a9c0ef437f841d13a7eb2a0a24abcf5ee4d26a2856feac2966a4f89a52a5b20560bea920fbc42\n \n They aren't trivial to fix in a concise way, and safe\n is generally a useful package to have around anyway.\n] \n\n[Darcs.Test.Patch.FileUUIDModel: fix warning about head/tail\nGanesh Sittampalam **20240225215135\n Ignore-this: 740956f48bd3127b4f77c0c7350b8d229e0fd7be3bda48932a61341169cc976d6e7ed3dedc7cad11\n] \n\n[Darcs.UI.Commands.Record: fix warning about head\nGanesh Sittampalam **20240225205552\n Ignore-this: e288796d05da69107f281ccd311e1980cfc7d71aaab72b3c2ab2619c0f1ceaff76a497ae37705689\n] \n\n[Darcs.UI.Commands.Convert.Import: fix warning about head\nGanesh Sittampalam **20240225205524\n Ignore-this: a8c51b94ff04c6bed7959f43c3d518d6171048755646fd1146a7996ac6d5c5013db900b51ff29504\n] \n\n[Darcs.UI.Commands.Convert.Export: fix warnings about tail\nGanesh Sittampalam **20240225204214\n Ignore-this: a49d956016082487081f71fcace360f65cb92ad17e10e195651821d6e009b70b9d01b49a2b0c8708\n] \n\n[add some tests for Darcs.UI.Commands.Convert.Export.patchAuthor\nGanesh Sittampalam **20240225204119\n Ignore-this: 
4a3a731c86732c64a27b9e9719950e01a94ef320b37259113c693e75080283806a23835c423e40b9\n] \n\n[darcs.hs: fix warnings about head/tail\nGanesh Sittampalam **20240225201516\n Ignore-this: 8f6fec34b8e77474a3b1ee5e252fb08fe8439c57a4a0ad35df64c06fe15df18f83c82855d8435e15\n] \n\n[Darcs.Patch.Info: fix warning about head\nGanesh Sittampalam **20240225195147\n Ignore-this: f50bcf1a170c6c8941c688021e20d1eee24e59b2afe7a5f01488c9c4f2cb92d58aed586f146a0a98\n] \n\n[Darcs.Util.Diff.Patience: fix warning about head\nGanesh Sittampalam **20240225194009\n Ignore-this: ccfaccc22725d81839f5caf605742eb7f3ef916572fdd7a53bcc7f7fa837ef9d71729e7c71d4503\n] \n\n[Darcs.Patch.Annotate: fix warning about head\nGanesh Sittampalam **20240225182503\n Ignore-this: 4c48b8aadee71311e2a01f5b53a678e07f53339d98e843ebc01ebd381163fe0a9d7f3721c2c5dc58\n] \n\n[bump version to 2.19.1\nGanesh Sittampalam **20240225175930\n Ignore-this: b8e8df00de306f7de2363d1805c152be15cc5b5ab99a57be8f9db19bb8552d0d25a196eeb2dc20c4\n] \n\n[TAG 2.18.1\nGanesh Sittampalam **20240225173219\n Ignore-this: dc3a92eafcab9d4fa9f53b1811d4c99d330615a8c202bff1b77f44d551d21684eee81e96c69be62b\n] \n" \ No newline at end of file diff --git a/release/distributed-version b/release/distributed-version index 4e25a0bd..51d6b98c 100644 --- a/release/distributed-version +++ b/release/distributed-version @@ -1 +1 @@ -Just 0 \ No newline at end of file +Just 105 \ No newline at end of file diff --git a/release/release.sh b/release/release.sh index 1c38f9c0..e8c58cdd 100755 --- a/release/release.sh +++ b/release/release.sh @@ -23,7 +23,7 @@ rm -rf $packagename cabal unpack $tarballpath cd $packagename -cabal test --enable-tests --test-option="-j3" +cabal test --enable-tests --test-option="-j3 --hidesuccesses" cabal install --disable-optimisation --install-method=copy --installdir=./bin ./bin/darcs --version diff --git a/shelly/ChangeLog.md b/shelly/ChangeLog.md deleted file mode 100644 index 5af3f35e..00000000 --- a/shelly/ChangeLog.md +++ /dev/null @@ -1,41 +0,0 @@ -# 1.7.2 - -* Support exceptions-0.9 - -# 1.7.0.1 - -* Fix FindSpec.hs tests. Fixes [#150](https://github.com/yesodweb/Shelly.hs/issues/150), [#162](https://github.com/yesodweb/Shelly.hs/issues/162) - -# 1.6.8.7 - -* Relax unix-compat constraints - -# 1.6.8.6 - -* Fix Build issue [#156](https://github.com/yesodweb/Shelly.hs/issues/156) - -# 1.6.8.5 - -* Fix Windows build [#155](https://github.com/yesodweb/Shelly.hs/pull/155) - -# 1.6.8 - -* added sshPairsWithOptions function - -# 1.6.7 - -* flush stdout when using `echo`, not just `echo_n` -* fix should be able to silence stderr when using `runHandle` -* expose RunFailed - -# 1.6.6 - -* add prependToPath function - -# 1.6.5 - -* expose MonadShControl - -# 1.6.4.1 - -* add writeBinary function diff --git a/shelly/LICENSE b/shelly/LICENSE deleted file mode 100644 index fe9721fa..00000000 --- a/shelly/LICENSE +++ /dev/null @@ -1,30 +0,0 @@ -Copyright (c) 2017, Petr Rockai - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. 
- - * Neither the name of Petr Rockai nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/shelly/README.md b/shelly/README.md deleted file mode 100644 index cefb77c8..00000000 --- a/shelly/README.md +++ /dev/null @@ -1,187 +0,0 @@ -# Shelly - -[![Build Status](https://travis-ci.org/yesodweb/Shelly.hs.svg?branch=master)](https://travis-ci.org/yesodweb/Shelly.hs) -[![Hackage](https://img.shields.io/hackage/v/shelly.svg)](https://hackage.haskell.org/package/shelly) -[![Stackage Nightly](http://stackage.org/package/shelly/badge/nightly)](http://stackage.org/nightly/package/shelly) -[![Stackage LTS](http://stackage.org/package/shelly/badge/lts)](http://stackage.org/lts/package/shelly) - -Shelly provides a single module for convenient systems programming in Haskell. - -* is aimed at convenience and getting things done rather than being a demonstration of elegance. -* has detailed and useful error messages -* maintains its own environment, making it thread-safe -* is modern, using Text and system-filepath/system-fileio -* has low memory usage - * `run_` and other underscore variants that don't return stdout - * `runFoldLines` to run a fold operation over each line rather than loading all of stdout into memory - * `runHandle` and `runHandles` for complete control over handles - -Looking to put your Haskell learning to immediate practical use? You don't have to create artifical intelligence, try just automating some of your boring tasks. - -The focus of this library on convenience combined with good error messages should make shelly approachable for newer users of Haskell. -I have published [an introductory article to scripting with shelly, targeted towards those not familiar with Haskell](http://www.linux-magazin.de/Online-Artikel/Shell-scripting-with-type-safety-using-Haskell/). There is a paid version in German from Linux Magazin. -That article uses the version `shelly < 1.0` which uses lazy text. `shelly > 1.0` uses strict text. - - - -## More shelly packages - -The [shelly-extra](http://hackage.haskell.org/package/shelly-extra) package has some additional functionality that requires additional dependencies, currently including a convenient concurrency/futures implementation. If you are following along the above article you need to install it. 
- - -## Examples - -* [A small deployment script](http://www.alfredodinapoli.com/posts/2015-11-03-how-i-deploy-haskell-code.html) -* [Yesod development installer](https://github.com/yesodweb/scripts/blob/master/install.hs) -* [cabal-meta, a haskell install tool](https://github.com/yesodweb/cabal-meta/blob/master/main.hs) -* [antigen-hs, a zsh plugin manager](https://github.com/Tarrasch/antigen-hs) - - -### Blog Posts - -* [Shelly automation with Literate Haskell](http://www.scholarslab.org/dh-developer/shell-programming-in-haskell-converting-s5-slides-to-pdf/) - - -### Testimonials - -* [a beginning Haskeller does automation](http://www.reddit.com/r/haskell/comments/w86gu/my_current_job_task_is_boring_so_i_wrote_a_simple/) - -### Help - -* [google group for Haskell shell scripting](https://groups.google.com/forum/#!forum/haskell-shell-scripting) - -## Alternatives - -### Haskell shell scripting libraries - - -* [HSH](http://hackage.haskell.org/package/HSH) - A good alternative if you want to mixup usage of String and ByteString rather than just use Text. -* [HsShellScript](http://hackage.haskell.org/packages/archive/hsshellscript/3.1.0/doc/html/HsShellScript.html) - Has extensive low-level shell capabilities. -* [shell-conduit](http://hackage.haskell.org/package/shell-conduit) - efficient streaming via conduits. Makes some portability sacrifices by - * encouraging one to just use the shell instead of cross-platform Haskell code - * encouraging one to use a convenience function that searches the PATH at compile-time -* [shell-monad](http://hackage.haskell.org/package/shell-monad) - compile Haskell code down to shell script. This is a different approach from all the rest of the libraries. Writing your script is not as user-friendly as the other Haskell libraries, but it nicely solves the deployment issue. -* [turtle](http://hackage.haskell.org/package/turtle) - In some sense a [redesign of Shelly designed for beginner-friendliness](http://www.reddit.com/r/haskell/comments/2u6b8m/use_haskell_for_shell_scripting/co5ucq9) - -HSH and HsShellScript (unlike Shelly currently) implement very efficient mechanisms for piping/redirecting in the system. -turtle, like Shelly offers folding as a way to efficiently deal with a stream. - -None of the alternatives to Shelly offer command tracing. -For some this is an absolutely critical feature, particularly given that Haskell does not yet offer up stack traces. - - -### Haskell file-finding supplements - -* [find-conduit](http://hackage.haskell.org/package/find-conduit) - uses conduits, similar speed to GNU find -* [FileManip](hackage.haskell.org/package/FileManip) - uses Lazy IO - -Shelly's finders load all files into memory. This is simpler to use if you control the filesystem structure and know the system is bounded in size. However, if the filesystem structure is unbounded it consumes unbounded memory. - - -### Shell commands with richer input/output - -Shelly does not change the nature of shell scripting (text in, text out). -If you want something more revolutionary you might try these: - -* PowerShell is probably the best known. -* [Haskell project](https://github.com/pkamenarsky/ytools) using typed JSON -* [RecordStream](https://github.com/benbernard/RecordStream) untyped JSON] - - -## Usage - -Shelly's main goal is ease of use. -There should be a primitive for every shell operation you need so you can easily build abstractions, so there are many of the usual file and environment operations. 
- -There are 2 main entry points for running arbitrary commands: `run` and `cmd`. -They take a FilePath as their first argument. `run` takes a [Text] as its second argument. -`cmd` takes a variadic number of arguments, and they can be either Text or FilePath. - -Fun Example: shows an infectious script: it uploads itself to a server and runs itself over ssh. -Of course, the development machine may need to be exactly the same OS as the server. - -I recommend using the boilerplate at the top of this example in your projects. -This includes setting line buffering if you are dealing with text and not binary data. - -~~~~~ {.haskell} - {-# LANGUAGE OverloadedStrings #-} - {-# LANGUAGE ExtendedDefaultRules #-} - {-# OPTIONS_GHC -fno-warn-type-defaults #-} - import Shelly - import Data.Text as T - default (T.Text) - - main = do - hSetBuffering stdout LineBuffering - shelly $ verbosely $ do - host <- run "uname" ["-n"] - if T.stripEnd host === "local-machine" - then do d <- cmd "date" - c <- escaping False $ cmd "git" "log -1 | head -1 | awk '{print $2}'" - appendfile "log/deploy.log" $ T.intercalate " - " [T.stripEnd d, c] - uploads "my-server:/remote/path/" ["deploy"] - sshPairs_ "my-server" [("cd", ["/remote/path"]), ("./deploy", [])] - else do - cmd "./script/angel" - - -- same path on remote host - -- will create directories - uploads :: Text -> [Text] -> Sh () - uploads remote locals = rsync $ ["--relative"] ++ locals ++ [remote] - - rsync args = run_ "rsync" $ ["--delete", "-avz", "--no-g"] ++ args -~~~~~ - -### Variadic arguments to cmd - -Yes, as seen above you can write variadic functions in Haskell quite easily, you just can't compose them as easily. -I find `cmd` to be more convenient, but I often use `run` and `command` variants when I am building up abstractions. -Building up abstractions with cmd will require type signatures. - - -- easy signature, but only allows one argument - let cabal = cmd "cabal" :: Text -> Sh Text - - -- more complex signature that allows partial application of cmd - let cabal = cmd "cabal" :: Shelly.ShellCmd result => result - - - -### Escaping - -By default, all commands are shell escaped. -If you want the shell to interpret special characters such as `*`, just use `escaping False $ do ...` - -### Using Text and FilePath together - -Shelly's usage of system-filepath means you may need to convert between Text and FilePath sometimes. -This should be infrequent though because - -* `cmd` will convert FilePath to Text -* The `` and `<.>` combinators convert String/Text into a FilePath automatically - -Manual conversion is done through `toTextIgnore` or `toTextWarn`. - - -### Thread-safe working directory and relative paths - -`cd` does not change the process working directory (essentially a global variable), but instead changes the shelly state (which is thread safe). -All of the Shelly API takes this into account, internally shelly converts all paths to absolute paths. You can turn a relative path into an absolute with `absPath` or `canonic` or you can make a path relative to the Shelly working directory with `relPath`. - - -### Good error messages - -Haskell's #1 weakness for IO code is a lack of stack traces. -Shelly gives you something different: detailed logging. -In most cases this should be more useful than a stack trace. -Shelly keeps a log of API usage and saves it to a .shelly directory on failure. -If you use `shellyNoDir`, the log will instead be printed to stderr. 
-
-
-### Good error messages
-
-Haskell's #1 weakness for IO code is a lack of stack traces.
-Shelly gives you something different: detailed logging.
-In most cases this should be more useful than a stack trace.
-Shelly keeps a log of API usage and saves it to a .shelly directory on failure.
-If you use `shellyNoDir`, the log will instead be printed to stderr.
-This is in addition to the `verbosely` settings that will print out commands and their output as the program is running.
-Shelly's own error messages are detailed, and in some cases Shelly will catch Haskell exceptions and re-throw them with better messages.
-
-If you make your own primitive functions that don't use the existing Shelly API, you can create a wrapper in the Sh monad that uses `trace` or `tag` to log what it is doing.
-You can turn tracing off (not generally recommended) by setting `tracing False`.
-
-
-## Future plans
-
-* Don't use the filepath library
diff --git a/shelly/Setup.hs b/shelly/Setup.hs
deleted file mode 100644
index 9a994af6..00000000
--- a/shelly/Setup.hs
+++ /dev/null
@@ -1,2 +0,0 @@
-import Distribution.Simple
-main = defaultMain
diff --git a/shelly/shelly.cabal b/shelly/shelly.cabal
deleted file mode 100644
index 55abe8b6..00000000
--- a/shelly/shelly.cabal
+++ /dev/null
@@ -1,198 +0,0 @@
-Name: shelly
-
-Version: 1.7.1.1
-Synopsis: shell-like (systems) programming in Haskell
-
-Description: Shelly provides convenient systems programming in Haskell,
-  similar in spirit to POSIX shells. Shelly:
-  .
-  * is aimed at convenience and getting things done rather than
-    being a demonstration of elegance.
-  .
-  * has detailed and useful error messages
-  .
-  * maintains its own environment, making it thread-safe.
-  .
-  * is modern, using Text and system-filepath/system-fileio
-  .
-  Shelly is originally forked from the Shellish package.
-  .
-  See the shelly-extra package for additional functionality.
-  .
-  An overview is available in the README:
-
-Homepage: https://github.com/yesodweb/Shelly.hs
-License: BSD3
-License-file: LICENSE
-Author: Greg Weber, Petr Rockai
-Maintainer: Greg Weber
-Category: Development
-Build-type: Simple
-Cabal-version: >=1.8
-
--- for the sdist of the test suite
-extra-source-files: test/src/*.hs
-  test/examples/*.sh
-  test/examples/*.hs
-  test/data/zshrc
-  test/data/nonascii.txt
-  test/data/symlinked_dir/hoge_file
-  test/testall
-  README.md
-  ChangeLog.md
-
-Library
-  Exposed-modules: Shelly, Shelly.Lifted, Shelly.Pipe, Shelly.Unix
-  other-modules: Shelly.Base, Shelly.Find
-  hs-source-dirs: src
-  other-extensions: InstanceSigs
-
-  Build-depends:
-    containers >= 0.4.2.0,
-    time >= 1.3 && < 2,
-    directory >= 1.1.0.0 && < 1.4.0.0,
-    mtl >= 2,
-    process >= 1.0,
-    unix-compat < 0.6,
-    system-filepath >= 0.4.7 && < 0.5,
-    system-fileio < 0.4,
-    monad-control >= 0.3.2 && < 1.1,
-    lifted-base,
-    lifted-async,
-    exceptions >= 0.6,
-    enclosed-exceptions,
-    text, bytestring, async, transformers, transformers-base
-
-  if impl(ghc >= 7.6.1)
-    build-depends:
-      base >= 4.6 && < 5
-  else
-    build-depends:
-      base >= 4 && < 5
-
-  ghc-options: -Wall
-
-  if impl(ghc >= 7.6.1)
-    CPP-Options: -DNO_PRELUDE_CATCH
-
-  extensions:
-    CPP
-
-source-repository head
-  type: git
-  location: https://github.com/yesodweb/Shelly.hs
-
-Flag lifted
-  Description: run the tests against Shelly.Lifted
-  Default: False
-
-Test-Suite shelly-testsuite
-  type: exitcode-stdio-1.0
-  hs-source-dirs: src test/src
-  main-is: TestMain.hs
-  other-modules:
-    CopySpec
-    EnvSpec
-    FailureSpec
-    FindSpec
-    Help
-    LiftedSpec
-    MoveSpec
-    ReadFileSpec
-    RmSpec
-    RunSpec
-    SshSpec
-    Shelly
-    Shelly.Base
-    Shelly.Find
-    Shelly.Lifted
-    TestInit
-    WhichSpec
-    WriteSpec
-
-  ghc-options: -O2 -Wall -fwarn-tabs -funbox-strict-fields -threaded
-               -fno-warn-unused-do-bind -fno-warn-type-defaults
-
-
-  extensions: OverloadedStrings, ExtendedDefaultRules
-
-  if flag(lifted)
-    cpp-options: -DLIFTED
-
-
build-depends: - base >= 4.6, - text >= 0.11, - async, - bytestring >= 0.10, - containers >= 0.5.0.0, - directory >= 1.1.0.0 && < 1.4.0.0, - process >= 1.1.0, - unix-compat < 0.6, - system-filepath >= 0.4.7 && < 0.5, - system-fileio < 0.4, - time >= 1.3 && < 2, - mtl >= 2, - HUnit >= 1.2, - hspec >= 1.5, - transformers, - transformers-base, - filepath, - monad-control, - lifted-base, - lifted-async, - enclosed-exceptions, - exceptions - - extensions: - CPP - -Flag build-examples - Description: build some example programs - Default: False - Manual: True - --- demonstarated that command output in Shellish was not shown until after the command finished --- not necessary anymore -Executable drain - hs-source-dirs: test/examples - main-is: drain.hs - if flag(build-examples) - buildable: True - - build-depends: base >= 4.6 - , shelly - , text - - extensions: - CPP - else - buildable: False - -Executable run-handles - hs-source-dirs: test/examples - main-is: run-handles.hs - if flag(build-examples) - buildable: True - - build-depends: base >= 4.6 - , shelly - , text - - extensions: - CPP - else - buildable: False - -Executable Color - hs-source-dirs: test/examples - main-is: color.hs - if flag(build-examples) - buildable: True - - build-depends: base >= 4.6 - , process - , shelly - , text - else - buildable: False diff --git a/shelly/src/Shelly.hs b/shelly/src/Shelly.hs deleted file mode 100644 index 87ae6c8b..00000000 --- a/shelly/src/Shelly.hs +++ /dev/null @@ -1,1473 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables, DeriveDataTypeable, OverloadedStrings, - FlexibleInstances, IncoherentInstances, CPP, - TypeFamilies, ExistentialQuantification #-} - --- | A module for shell-like programming in Haskell. --- Shelly's focus is entirely on ease of use for those coming from shell scripting. --- However, it also tries to use modern libraries and techniques to keep things efficient. --- --- The functionality provided by --- this module is (unlike standard Haskell filesystem functionality) --- thread-safe: each Sh maintains its own environment and its own working --- directory. --- --- Recommended usage includes putting the following at the top of your program, --- otherwise you will likely need either type annotations or type conversions --- --- > {-# LANGUAGE OverloadedStrings #-} --- > {-# LANGUAGE ExtendedDefaultRules #-} --- > {-# OPTIONS_GHC -fno-warn-type-defaults #-} --- > import Shelly --- > import qualified Data.Text as T --- > default (T.Text) -module Shelly - ( - -- * Entering Sh. - Sh, ShIO, shelly, shellyNoDir, shellyFailDir, asyncSh, sub - , silently, verbosely, escaping, print_stdout, print_stderr, print_commands - , onCommandHandles - , tracing, errExit - , log_stdout_with, log_stderr_with - - -- * Running external commands. - , run, run_, runFoldLines, cmd, FoldCallback - , bash, bash_, bashPipeFail - , (-|-), lastStderr, setStdin, lastExitCode - , command, command_, command1, command1_ - , sshPairs,sshPairsPar, sshPairs_,sshPairsPar_, sshPairsWithOptions - , sshCommandText, SshMode(..) - , ShellCmd(..), CmdArg (..) - - -- * Running commands Using handles - , runHandle, runHandles, transferLinesAndCombine, transferFoldHandleLines - , StdHandle(..), StdStream(..) - - -- * Handle manipulation - , HandleInitializer, StdInit(..), initOutputHandles, initAllHandles - - -- * Modifying and querying environment. 
- , setenv, get_env, get_env_text, getenv, get_env_def, get_env_all, get_environment, appendToPath, prependToPath - - -- * Environment directory - , cd, chdir, chdir_p, pwd - - -- * Printing - , echo, echo_n, echo_err, echo_n_err, inspect, inspect_err - , tag, trace, show_command - - -- * Querying filesystem. - , ls, lsT, test_e, test_f, test_d, test_s, test_px, which - - -- * Filename helpers - , absPath, (), (<.>), canonic, canonicalize, relPath, relativeTo, path - , hasExt - - -- * Manipulating filesystem. - , mv, rm, rm_f, rm_rf, cp, cp_r, mkdir, mkdir_p, mkdirTree - - -- * reading/writing Files - , readfile, readBinary, writefile, writeBinary, appendfile, touchfile, withTmpDir - - -- * exiting the program - , exit, errorExit, quietExit, terror - - -- * Exceptions - , bracket_sh, catchany, catch_sh, handle_sh, handleany_sh, finally_sh, ShellyHandler(..), catches_sh, catchany_sh - , ReThrownException(..) - , RunFailed(..) - - -- * convert between Text and FilePath - , toTextIgnore, toTextWarn, FP.fromText - - -- * Utility Functions - , whenM, unlessM, time, sleep - - -- * Re-exported for your convenience - , liftIO, when, unless, FilePath, (<$>) - - -- * internal functions for writing extensions - , get, put - - -- * find functions - , find, findWhen, findFold, findDirFilter, findDirFilterWhen, findFoldDirFilter - , followSymlink - ) where - -import Shelly.Base -import Shelly.Find -import Control.Monad ( when, unless, void, forM, filterM, liftM2 ) -import Control.Monad.Trans ( MonadIO ) -import Control.Monad.Reader (ask) -#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 706 -import Prelude hiding ( readFile, FilePath, catch) -#else -import Prelude hiding ( readFile, FilePath) -#endif -import Data.Char ( isAlphaNum, isSpace ) -import Data.Typeable -import Data.IORef -import Data.Sequence (Seq, (|>)) -import Data.Foldable (toList) -import Data.Maybe -import System.IO ( hClose, stderr, stdout, openTempFile) -import System.IO.Error (isPermissionError, catchIOError, isEOFError, isIllegalOperation) -import System.Exit -import System.Environment -import Control.Applicative -import Control.Exception -import Control.Concurrent -import Control.Concurrent.Async (async, wait, Async) -import Data.Time.Clock( getCurrentTime, diffUTCTime ) - -import qualified Data.Text.IO as TIO -import qualified Data.Text.Encoding as TE -import qualified Data.Text.Encoding.Error as TE -import System.Process( CmdSpec(..), StdStream(CreatePipe, UseHandle), CreateProcess(..), createProcess, waitForProcess, terminateProcess, ProcessHandle, StdStream(..) ) - -import qualified Data.Text as T -import qualified Data.ByteString as BS -import Data.ByteString (ByteString) - -#if !MIN_VERSION_base(4,13,0) -import Data.Monoid (mempty, mappend, (<>)) -#endif - -import Filesystem.Path.CurrentOS hiding (concat, fromText, (), (<.>)) -import Filesystem hiding (canonicalizePath) -import qualified Filesystem.Path.CurrentOS as FP - -import System.Directory ( setPermissions, getPermissions, Permissions(..), getTemporaryDirectory ) -import Data.Char (isDigit) - -import Data.Tree(Tree(..)) -import qualified Data.Set as S -import qualified Data.List as L - -searchPathSeparator :: Char -#if defined(mingw32_HOST_OS) -searchPathSeparator = ';' -#else -searchPathSeparator = ':' -#endif - -{- GHC won't default to Text with this, even with extensions! 
- - see: http://hackage.haskell.org/trac/ghc/ticket/6030 -class CmdArgs a where - toTextArgs :: a -> [Text] - -instance CmdArgs Text where toTextArgs t = [t] -instance CmdArgs FilePath where toTextArgs t = [toTextIgnore t] -instance CmdArgs [Text] where toTextArgs = id -instance CmdArgs [FilePath] where toTextArgs = map toTextIgnore - -instance CmdArgs (Text, Text) where - toTextArgs (t1,t2) = [t1, t2] -instance CmdArgs (FilePath, FilePath) where - toTextArgs (fp1,fp2) = [toTextIgnore fp1, toTextIgnore fp2] -instance CmdArgs (Text, FilePath) where - toTextArgs (t1, fp1) = [t1, toTextIgnore fp1] -instance CmdArgs (FilePath, Text) where - toTextArgs (fp1,t1) = [toTextIgnore fp1, t1] - -cmd :: (CmdArgs args) => FilePath -> args -> Sh Text -cmd fp args = run fp $ toTextArgs args --} - --- | Argument converter for the variadic argument version of 'run' called 'cmd'. --- Useful for a type signature of a function that uses 'cmd' -class CmdArg a where toTextArg :: a -> Text -instance CmdArg Text where toTextArg = id -instance CmdArg FilePath where toTextArg = toTextIgnore -instance CmdArg String where toTextArg = T.pack - --- | For the variadic function 'cmd' --- --- partially applied variadic functions require type signatures -class ShellCmd t where - cmdAll :: FilePath -> [Text] -> t - -instance ShellCmd (Sh Text) where - cmdAll = run - -instance (s ~ Text, Show s) => ShellCmd (Sh s) where - cmdAll = run - --- note that Sh () actually doesn't work for its case (_<- cmd) when there is no type signature -instance ShellCmd (Sh ()) where - cmdAll = run_ - -instance (CmdArg arg, ShellCmd result) => ShellCmd (arg -> result) where - cmdAll fp acc x = cmdAll fp (acc ++ [toTextArg x]) - -instance (CmdArg arg, ShellCmd result) => ShellCmd ([arg] -> result) where - cmdAll fp acc x = cmdAll fp (acc ++ map toTextArg x) - - - --- | variadic argument version of 'run'. --- Please see the documenation for 'run'. --- --- The syntax is more convenient, but more importantly it also allows the use of a FilePath as a command argument. --- So an argument can be a Text or a FilePath without manual conversions. --- a FilePath is automatically converted to Text with 'toTextIgnore'. --- --- Convenient usage of 'cmd' requires the following: --- --- > {-# LANGUAGE OverloadedStrings #-} --- > {-# LANGUAGE ExtendedDefaultRules #-} --- > {-# OPTIONS_GHC -fno-warn-type-defaults #-} --- > import Shelly --- > import qualified Data.Text as T --- > default (T.Text) --- -cmd :: (ShellCmd result) => FilePath -> result -cmd fp = cmdAll fp [] - --- | Helper to convert a Text to a FilePath. Used by '()' and '(<.>)' -class ToFilePath a where - toFilePath :: a -> FilePath - -instance ToFilePath FilePath where toFilePath = id -instance ToFilePath Text where toFilePath = FP.fromText -instance ToFilePath String where toFilePath = FP.fromText . T.pack - - --- | uses System.FilePath.CurrentOS, but can automatically convert a Text -() :: (ToFilePath filepath1, ToFilePath filepath2) => filepath1 -> filepath2 -> FilePath -x y = toFilePath x FP. toFilePath y - --- | uses System.FilePath.CurrentOS, but can automatically convert a Text -(<.>) :: (ToFilePath filepath) => filepath -> Text -> FilePath -x <.> y = toFilePath x FP.<.> y - - -toTextWarn :: FilePath -> Sh Text -toTextWarn efile = case toText efile of - Left f -> encodeError f >> return f - Right f -> return f - where - encodeError f = echo ("non-unicode file name: " <> f) - --- | Transfer from one handle to another --- For example, send contents of a process output to stdout. 
--- does not close the write handle. --- --- Also, return the complete contents being streamed line by line. -transferLinesAndCombine :: Handle -> (Text -> IO ()) -> IO Text -transferLinesAndCombine readHandle putWrite = - transferFoldHandleLines mempty (|>) readHandle putWrite >>= - return . lineSeqToText - -lineSeqToText :: Seq Text -> Text --- extra append puts a newline at the end -lineSeqToText = T.intercalate "\n" . toList . flip (|>) "" - -type FoldCallback a = (a -> Text -> a) - --- | Transfer from one handle to another --- For example, send contents of a process output to stdout. --- does not close the write handle. --- --- Also, fold over the contents being streamed line by line -transferFoldHandleLines :: a -> FoldCallback a -> Handle -> (Text -> IO ()) -> IO a -transferFoldHandleLines start foldLine readHandle putWrite = go start - where - go acc = do - mLine <- filterIOErrors $ TIO.hGetLine readHandle - case mLine of - Nothing -> return acc - Just line -> putWrite line >> go (foldLine acc line) - -filterIOErrors :: IO a -> IO (Maybe a) -filterIOErrors action = catchIOError - (fmap Just action) - (\e -> if isEOFError e || isIllegalOperation e -- handle was closed - then return Nothing - else ioError e) - -foldHandleLines :: a -> FoldCallback a -> Handle -> IO a -foldHandleLines start foldLine readHandle = go start - where - go acc = do - mLine <- filterIOErrors $ TIO.hGetLine readHandle - case mLine of - Nothing -> return acc - Just line -> go $ foldLine acc line - --- | same as 'trace', but use it combinator style -tag :: Sh a -> Text -> Sh a -tag action msg = do - trace msg - action - -put :: State -> Sh () -put newState = do - stateVar <- ask - liftIO (writeIORef stateVar newState) - -runCommandNoEscape :: [StdHandle] -> State -> FilePath -> [Text] -> Sh (Handle, Handle, Handle, ProcessHandle) -runCommandNoEscape handles st exe args = liftIO $ shellyProcess handles st $ - ShellCommand $ T.unpack $ T.intercalate " " (toTextIgnore exe : args) - -runCommand :: [StdHandle] -> State -> FilePath -> [Text] -> Sh (Handle, Handle, Handle, ProcessHandle) -runCommand handles st exe args = findExe exe >>= \fullExe -> - liftIO $ shellyProcess handles st $ - RawCommand (encodeString fullExe) (map T.unpack args) - where - findExe :: FilePath -> Sh FilePath - findExe -#if defined(mingw32_HOST_OS) || (defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 708) - fp -#else - _fp -#endif - = do - mExe <- whichEith exe - case mExe of - Right execFp -> return execFp - -- windows looks in extra places besides the PATH, so just give - -- up even if the behavior is not properly specified anymore - -- - -- non-Windows < 7.8 has a bug for read-only file systems - -- https://github.com/yesodweb/Shelly.hs/issues/56 - -- it would be better to specifically detect that bug -#if defined(mingw32_HOST_OS) || (defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 708) - Left _ -> return fp -#else - Left err -> liftIO $ throwIO $ userError err -#endif - - - - -shellyProcess :: [StdHandle] -> State -> CmdSpec -> IO (Handle, Handle, Handle, ProcessHandle) -shellyProcess reusedHandles st cmdSpec = do - (createdInH, createdOutH, createdErrorH, pHandle) <- createProcess CreateProcess { - cmdspec = cmdSpec - , cwd = Just $ encodeString $ sDirectory st - , env = Just $ sEnvironment st - , std_in = createUnless mInH - , std_out = createUnless mOutH - , std_err = createUnless mErrorH - , close_fds = False -#if MIN_VERSION_process(1,1,0) - , create_group = False -#endif -#if MIN_VERSION_process(1,2,0) - , delegate_ctlc = 
False -#endif -#if MIN_VERSION_process(1,3,0) - , detach_console = False - , create_new_console = False - , new_session = False -#endif -#if MIN_VERSION_process(1,4,0) - , child_group = Nothing - , child_user = Nothing -#endif -#if MIN_VERSION_process(1,5,0) - , use_process_jobs = False -#endif - } - return ( just $ createdInH <|> toHandle mInH - , just $ createdOutH <|> toHandle mOutH - , just $ createdErrorH <|> toHandle mErrorH - , pHandle - ) - where - just :: Maybe a -> a - just Nothing = error "error in shelly creating process" - just (Just j) = j - - toHandle (Just (UseHandle h)) = Just h - toHandle _ = error "shellyProcess/toHandle: internal error" - - createUnless Nothing = CreatePipe - createUnless (Just stream) = stream - - mInH = getStream mIn reusedHandles - mOutH = getStream mOut reusedHandles - mErrorH = getStream mError reusedHandles - - getStream :: (StdHandle -> Maybe StdStream) -> [StdHandle] -> Maybe StdStream - getStream _ [] = Nothing - getStream mHandle (h:hs) = mHandle h <|> getStream mHandle hs - - mIn, mOut, mError :: (StdHandle -> Maybe StdStream) - mIn (InHandle h) = Just h - mIn _ = Nothing - mOut (OutHandle h) = Just h - mOut _ = Nothing - mError (ErrorHandle h) = Just h - mError _ = Nothing - -{- --- | use for commands requiring usage of sudo. see 'run_sudo'. --- Use this pattern for priveledge separation -newtype Sudo a = Sudo { sudo :: Sh a } - --- | require that the caller explicitly state 'sudo' -run_sudo :: Text -> [Text] -> Sudo Text -run_sudo cmd args = Sudo $ run "/usr/bin/sudo" (cmd:args) --} - --- | Same as a normal 'catch' but specialized for the Sh monad. -catch_sh :: (Exception e) => Sh a -> (e -> Sh a) -> Sh a -catch_sh action handler = do - ref <- ask - liftIO $ catch (runSh action ref) (\e -> runSh (handler e) ref) - --- | Same as a normal 'handle' but specialized for the Sh monad. -handle_sh :: (Exception e) => (e -> Sh a) -> Sh a -> Sh a -handle_sh handler action = do - ref <- ask - liftIO $ handle (\e -> runSh (handler e) ref) (runSh action ref) - - --- | Same as a normal 'finally' but specialized for the 'Sh' monad. -finally_sh :: Sh a -> Sh b -> Sh a -finally_sh action handler = do - ref <- ask - liftIO $ finally (runSh action ref) (runSh handler ref) - -bracket_sh :: Sh a -> (a -> Sh b) -> (a -> Sh c) -> Sh c -bracket_sh acquire release main = do - ref <- ask - liftIO $ bracket (runSh acquire ref) - (\resource -> runSh (release resource) ref) - (\resource -> runSh (main resource) ref) - - - --- | You need to wrap exception handlers with this when using 'catches_sh'. -data ShellyHandler a = forall e . Exception e => ShellyHandler (e -> Sh a) - --- | Same as a normal 'catches', but specialized for the 'Sh' monad. -catches_sh :: Sh a -> [ShellyHandler a] -> Sh a -catches_sh action handlers = do - ref <- ask - let runner a = runSh a ref - liftIO $ catches (runner action) $ map (toHandler runner) handlers - where - toHandler :: (Sh a -> IO a) -> ShellyHandler a -> Handler a - toHandler runner (ShellyHandler handler) = Handler (\e -> runner (handler e)) - --- | Catch any exception in the Sh monad. -catchany_sh :: Sh a -> (SomeException -> Sh a) -> Sh a -catchany_sh = catch_sh - --- | Handle any exception in the Sh monad. -handleany_sh :: (SomeException -> Sh a) -> Sh a -> Sh a -handleany_sh = handle_sh - --- | Change current working directory of Sh. This does *not* change the --- working directory of the process we are running it. 
Instead, Sh keeps --- track of its own working directory and builds absolute paths internally --- instead of passing down relative paths. -cd :: FilePath -> Sh () -cd = traceCanonicPath ("cd " <>) >=> cd' - where - cd' dir = do - unlessM (test_d dir) $ errorExit $ "not a directory: " <> tdir - modify $ \st -> st { sDirectory = dir, sPathExecutables = Nothing } - where - tdir = toTextIgnore dir - --- | 'cd', execute a Sh action in the new directory and then pop back to the original directory -chdir :: FilePath -> Sh a -> Sh a -chdir dir action = do - d <- gets sDirectory - cd dir - action `finally_sh` cd d - --- | 'chdir', but first create the directory if it does not exit -chdir_p :: FilePath -> Sh a -> Sh a -chdir_p d action = mkdir_p d >> chdir d action - - --- | apply a String IO operations to a Text FilePath -{- -liftStringIO :: (String -> IO String) -> FilePath -> Sh FilePath -liftStringIO f = liftIO . f . unpack >=> return . pack - --- | @asString f = pack . f . unpack@ -asString :: (String -> String) -> FilePath -> FilePath -asString f = pack . f . unpack --} - -pack :: String -> FilePath -pack = decodeString - --- | Move a file. The second path could be a directory, in which case the --- original file is moved into that directory. --- wraps system-fileio 'FileSystem.rename', which may not work across FS boundaries -mv :: FilePath -> FilePath -> Sh () -mv from' to' = do - trace $ "mv " <> toTextIgnore from' <> " " <> toTextIgnore to' - from <- absPath from' - to <- absPath to' - to_dir <- test_d to - let to_loc = if not to_dir then to else to FP. filename from - liftIO $ rename from to_loc - `catchany` (\e -> throwIO $ - ReThrownException e (extraMsg to_loc from) - ) - where - extraMsg t f = "during copy from: " ++ encodeString f ++ " to: " ++ encodeString t - --- | Get back [Text] instead of [FilePath] -lsT :: FilePath -> Sh [Text] -lsT = ls >=> mapM toTextWarn - --- | Obtain the current (Sh) working directory. -pwd :: Sh FilePath -pwd = gets sDirectory `tag` "pwd" - --- | exit 0 means no errors, all other codes are error conditions -exit :: Int -> Sh a -exit 0 = liftIO exitSuccess `tag` "exit 0" -exit n = liftIO (exitWith (ExitFailure n)) `tag` ("exit " <> T.pack (show n)) - --- | echo a message and exit with status 1 -errorExit :: Text -> Sh a -errorExit msg = echo msg >> exit 1 - --- | for exiting with status > 0 without printing debug information -quietExit :: Int -> Sh a -quietExit 0 = exit 0 -quietExit n = throw $ QuietExit n - --- | fail that takes a Text -terror :: Text -> Sh a -terror = fail . T.unpack - --- | Create a new directory (fails if the directory exists). -mkdir :: FilePath -> Sh () -mkdir = traceAbsPath ("mkdir " <>) >=> - liftIO . createDirectory False - --- | Create a new directory, including parents (succeeds if the directory --- already exists). -mkdir_p :: FilePath -> Sh () -mkdir_p = traceAbsPath ("mkdir -p " <>) >=> - liftIO . createTree - --- | Create a new directory tree. You can describe a bunch of directories as --- a tree and this function will create all subdirectories. An example: --- --- > exec = mkTree $ --- > "package" # [ --- > "src" # [ --- > "Data" # leaves ["Tree", "List", "Set", "Map"] --- > ], --- > "test" # leaves ["QuickCheck", "HUnit"], --- > "dist/doc/html" # [] --- > ] --- > where (#) = Node --- > leaves = map (# []) --- -mkdirTree :: Tree FilePath -> Sh () -mkdirTree = mk . 
unrollPath - where mk :: Tree FilePath -> Sh () - mk (Node a ts) = do - b <- test_d a - unless b $ mkdir a - chdir a $ mapM_ mkdirTree ts - - unrollPath :: Tree FilePath -> Tree FilePath - unrollPath (Node v ts) = unrollRoot v $ map unrollPath ts - where unrollRoot x = foldr1 phi $ map Node $ splitDirectories x - phi a b = a . return . b - - -isExecutable :: FilePath -> IO Bool -isExecutable f = (executable `fmap` getPermissions (encodeString f)) `catch` (\(_ :: IOError) -> return False) - --- | Get a full path to an executable by looking at the @PATH@ environement --- variable. Windows normally looks in additional places besides the --- @PATH@: this does not duplicate that behavior. -which :: FilePath -> Sh (Maybe FilePath) -which fp = either (const Nothing) Just <$> whichEith fp - --- | Get a full path to an executable by looking at the @PATH@ environement --- variable. Windows normally looks in additional places besides the --- @PATH@: this does not duplicate that behavior. -whichEith :: FilePath -> Sh (Either String FilePath) -whichEith originalFp = whichFull -#if defined(mingw32_HOST_OS) - $ case extension originalFp of - Nothing -> originalFp <.> "exe" - Just _ -> originalFp -#else - originalFp -#endif - where - whichFull fp = do - (trace . mappend "which " . toTextIgnore) fp >> whichUntraced - where - whichUntraced | absolute fp = checkFile - | dotSlash splitOnDirs = checkFile - | length splitOnDirs > 0 = lookupPath >>= leftPathError - | otherwise = lookupCache >>= leftPathError - - splitOnDirs = splitDirectories fp - dotSlash ("./":_) = True - dotSlash _ = False - - checkFile :: Sh (Either String FilePath) - checkFile = do - exists <- liftIO $ isFile fp - return $ if exists then Right fp else - Left $ "did not find file: " <> encodeString fp - - leftPathError :: Maybe FilePath -> Sh (Either String FilePath) - leftPathError Nothing = Left <$> pathLookupError - leftPathError (Just x) = return $ Right x - - pathLookupError :: Sh String - pathLookupError = do - pATH <- get_env_text "PATH" - return $ - "shelly did not find " `mappend` encodeString fp `mappend` - " in the PATH: " `mappend` T.unpack pATH - - lookupPath :: Sh (Maybe FilePath) - lookupPath = (pathDirs >>=) $ findMapM $ \dir -> do - let fullFp = dir fp - res <- liftIO $ isExecutable fullFp - return $ if res then Just fullFp else Nothing - - lookupCache :: Sh (Maybe FilePath) - lookupCache = do - pathExecutables <- cachedPathExecutables - return $ fmap (flip () fp . fst) $ - L.find (S.member fp . snd) pathExecutables - - - pathDirs = mapM absPath =<< ((map FP.fromText . filter (not . T.null) . T.split (== searchPathSeparator)) `fmap` get_env_text "PATH") - - cachedPathExecutables :: Sh [(FilePath, S.Set FilePath)] - cachedPathExecutables = do - mPathExecutables <- gets sPathExecutables - case mPathExecutables of - Just pExecutables -> return pExecutables - Nothing -> do - dirs <- pathDirs - executables <- forM dirs (\dir -> do - files <- (liftIO . listDirectory) dir `catch_sh` (\(_ :: IOError) -> return []) - exes <- fmap (map snd) $ liftIO $ filterM (isExecutable . 
fst) $ - map (\f -> (f, filename f)) files - return $ S.fromList exes - ) - let cachedExecutables = zip dirs executables - modify $ \x -> x { sPathExecutables = Just cachedExecutables } - return $ cachedExecutables - - --- | A monadic findMap, taken from MissingM package -findMapM :: Monad m => (a -> m (Maybe b)) -> [a] -> m (Maybe b) -findMapM _ [] = return Nothing -findMapM f (x:xs) = do - mb <- f x - if (isJust mb) - then return mb - else findMapM f xs - --- | A monadic-conditional version of the 'unless' guard. -unlessM :: Monad m => m Bool -> m () -> m () -unlessM c a = c >>= \res -> unless res a - --- | Does a path point to an existing filesystem object? -test_e :: FilePath -> Sh Bool -test_e = absPath >=> \f -> - liftIO $ do - file <- isFile f - if file then return True else isDirectory f - --- | Does a path point to an existing file? -test_f :: FilePath -> Sh Bool -test_f = absPath >=> liftIO . isFile - --- | Test that a file is in the PATH and also executable -test_px :: FilePath -> Sh Bool -test_px exe = do - mFull <- which exe - case mFull of - Nothing -> return False - Just full -> liftIO $ isExecutable full - --- | A swiss army cannon for removing things. Actually this goes farther than a --- normal rm -rf, as it will circumvent permission problems for the files we --- own. Use carefully. --- Uses 'removeTree' -rm_rf :: FilePath -> Sh () -rm_rf infp = do - f <- traceAbsPath ("rm -rf " <>) infp - isDir <- (test_d f) - if not isDir then whenM (test_f f) $ rm_f f - else - (liftIO_ $ removeTree f) `catch_sh` (\(e :: IOError) -> - when (isPermissionError e) $ do - find f >>= mapM_ (\file -> liftIO_ $ fixPermissions (encodeString file) `catchany` \_ -> return ()) - liftIO $ removeTree f - ) - where fixPermissions file = - do permissions <- liftIO $ getPermissions file - let deletable = permissions { readable = True, writable = True, executable = True } - liftIO $ setPermissions file deletable - --- | Remove a file. Does not fail if the file does not exist. --- Does fail if the file is not a file. -rm_f :: FilePath -> Sh () -rm_f = traceAbsPath ("rm -f " <>) >=> \f -> - whenM (test_e f) $ liftIO $ removeFile f - --- | Remove a file. --- Does fail if the file does not exist (use 'rm_f' instead) or is not a file. -rm :: FilePath -> Sh () -rm = traceAbsPath ("rm " <>) >=> - -- TODO: better error message for removeFile (give filename) - liftIO . removeFile - --- | Set an environment variable. The environment is maintained in Sh --- internally, and is passed to any external commands to be executed. -setenv :: Text -> Text -> Sh () -setenv k v = if k == path_env then setPath v else setenvRaw k v - -setenvRaw :: Text -> Text -> Sh () -setenvRaw k v = modify $ \x -> x { sEnvironment = wibble $ sEnvironment x } - where - (kStr, vStr) = (T.unpack k, T.unpack v) - wibble environment = (kStr, vStr) : filter ((/=kStr) . fst) environment - -setPath :: Text -> Sh () -setPath newPath = do - modify $ \x -> x{ sPathExecutables = Nothing } - setenvRaw path_env newPath - -path_env :: Text -path_env = "PATH" - --- | add the filepath onto the PATH env variable -appendToPath :: FilePath -> Sh () -appendToPath = traceAbsPath ("appendToPath: " <>) >=> \filepath -> do - tp <- toTextWarn filepath - pe <- get_env_text path_env - setPath $ pe <> T.singleton searchPathSeparator <> tp - --- | prepend the filepath to the PATH env variable --- similar to `appendToPath` but gives high priority to the filepath instead of low priority. 
-prependToPath :: FilePath -> Sh () -prependToPath = traceAbsPath ("prependToPath: " <>) >=> \filepath -> do - tp <- toTextWarn filepath - pe <- get_env_text path_env - setPath $ tp <> T.singleton searchPathSeparator <> pe - -get_environment :: Sh [(String, String)] -get_environment = gets sEnvironment -{-# DEPRECATED get_environment "use get_env_all" #-} - --- | get the full environment -get_env_all :: Sh [(String, String)] -get_env_all = gets sEnvironment - --- | Fetch the current value of an environment variable. --- if non-existant or empty text, will be Nothing -get_env :: Text -> Sh (Maybe Text) -get_env k = do - mval <- return . fmap T.pack . lookup (T.unpack k) =<< gets sEnvironment - return $ case mval of - Nothing -> Nothing - Just val -> if (not $ T.null val) then Just val else Nothing - --- | deprecated -getenv :: Text -> Sh Text -getenv k = get_env_def k "" -{-# DEPRECATED getenv "use get_env or get_env_text" #-} - --- | Fetch the current value of an environment variable. Both empty and --- non-existent variables give empty string as a result. -get_env_text :: Text -> Sh Text -get_env_text = get_env_def "" - --- | Fetch the current value of an environment variable. Both empty and --- non-existent variables give the default Text value as a result -get_env_def :: Text -> Text -> Sh Text -get_env_def d = get_env >=> return . fromMaybe d -{-# DEPRECATED get_env_def "use fromMaybe DEFAULT get_env" #-} - --- | Apply a single initializer to the two output process handles (stdout and stderr) -initOutputHandles :: HandleInitializer -> StdInit -initOutputHandles f = StdInit (const $ return ()) f f - --- | Apply a single initializer to all three standard process handles (stdin, stdout and stderr) -initAllHandles :: HandleInitializer -> StdInit -initAllHandles f = StdInit f f f - --- | When running an external command, apply the given initializers to --- the specified handles for that command. --- This can for example be used to change the encoding of the --- handles or set them into binary mode. -onCommandHandles :: StdInit -> Sh a -> Sh a -onCommandHandles initHandles a = - sub $ modify (\x -> x { sInitCommandHandles = initHandles }) >> a - --- | Create a sub-Sh in which external command outputs are not echoed and --- commands are not printed. --- See 'sub'. -silently :: Sh a -> Sh a -silently a = sub $ modify (\x -> x - { sPrintStdout = False - , sPrintStderr = False - , sPrintCommands = False - }) >> a - --- | Create a sub-Sh in which external command outputs are echoed and --- Executed commands are printed --- See 'sub'. -verbosely :: Sh a -> Sh a -verbosely a = sub $ modify (\x -> x - { sPrintStdout = True - , sPrintStderr = True - , sPrintCommands = True - }) >> a - --- | Create a sub-Sh in which stdout is sent to the user-defined --- logger. When running with 'silently' the given log will not be --- called for any output. Likewise the log will also not be called for --- output from 'run_' and 'bash_' commands. -log_stdout_with :: (Text -> IO ()) -> Sh a -> Sh a -log_stdout_with logger a = sub $ modify (\s -> s { sPutStdout = logger }) - >> a - --- | Create a sub-Sh in which stderr is sent to the user-defined --- logger. When running with 'silently' the given log will not be --- called for any output. However, unlike 'log_stdout_with' the log --- will be called for output from 'run_' and 'bash_' commands. 
-log_stderr_with :: (Text -> IO ()) -> Sh a -> Sh a -log_stderr_with logger a = sub $ modify (\s -> s { sPutStderr = logger }) - >> a - --- | Create a sub-Sh with stdout printing on or off --- Defaults to True. -print_stdout :: Bool -> Sh a -> Sh a -print_stdout shouldPrint a = - sub $ modify (\x -> x { sPrintStdout = shouldPrint }) >> a - --- | Create a sub-Sh with stderr printing on or off --- Defaults to True. -print_stderr :: Bool -> Sh a -> Sh a -print_stderr shouldPrint a = - sub $ modify (\x -> x { sPrintStderr = shouldPrint }) >> a - - --- | Create a sub-Sh with command echoing on or off --- Defaults to False, set to True by 'verbosely' -print_commands :: Bool -> Sh a -> Sh a -print_commands shouldPrint a = sub $ modify (\st -> st { sPrintCommands = shouldPrint }) >> a - --- | Enter a sub-Sh that inherits the environment --- The original state will be restored when the sub-Sh completes. --- Exceptions are propagated normally. -sub :: Sh a -> Sh a -sub a = do - oldState <- get - modify $ \st -> st { sTrace = T.empty } - a `finally_sh` restoreState oldState - where - restoreState oldState = do - newState <- get - put oldState { - -- avoid losing the log - sTrace = sTrace oldState <> sTrace newState - -- latest command execution: not make sense to restore these to old settings - , sCode = sCode newState - , sStderr = sStderr newState - -- it is questionable what the behavior of stdin should be - , sStdin = sStdin newState - } - --- | Create a sub-Sh where commands are not traced --- Defaults to True. --- You should only set to False temporarily for very specific reasons -tracing :: Bool -> Sh a -> Sh a -tracing shouldTrace action = sub $ do - modify $ \st -> st { sTracing = shouldTrace } - action - --- | Create a sub-Sh with shell character escaping on or off. --- Defaults to @True@. --- --- Setting to @False@ allows for shell wildcard such as * to be expanded by the shell along with any other special shell characters. --- As a side-effect, setting to @False@ causes changes to @PATH@ to be ignored: --- see the 'run' documentation. -escaping :: Bool -> Sh a -> Sh a -escaping shouldEscape action = sub $ do - modify $ \st -> st { sCommandEscaping = shouldEscape } - action - --- | named after bash -e errexit. Defaults to @True@. --- When @True@, throw an exception on a non-zero exit code. --- When @False@, ignore a non-zero exit code. --- Not recommended to set to @False@ unless you are specifically checking the error code with 'lastExitCode'. -errExit :: Bool -> Sh a -> Sh a -errExit shouldExit action = sub $ do - modify $ \st -> st { sErrExit = shouldExit } - action - --- | 'find'-command follows symbolic links. Defaults to @False@. --- When @True@, follow symbolic links. --- When @False@, never follow symbolic links. -followSymlink :: Bool -> Sh a -> Sh a -followSymlink enableFollowSymlink action = sub $ do - modify $ \st -> st { sFollowSymlink = enableFollowSymlink } - action - - -defReadOnlyState :: ReadOnlyState -defReadOnlyState = ReadOnlyState { rosFailToDir = False } - --- | Deprecated now, just use 'shelly', whose default has been changed. --- Using this entry point does not create a @.shelly@ directory in the case --- of failure. Instead it logs directly into the standard error stream (@stderr@). -shellyNoDir :: MonadIO m => Sh a -> m a -shellyNoDir = shelly' ReadOnlyState { rosFailToDir = False } -{-# DEPRECATED shellyNoDir "Just use shelly. 
The default settings have changed" #-} - --- | Using this entry point creates a @.shelly@ directory in the case --- of failure where errors are recorded. -shellyFailDir :: MonadIO m => Sh a -> m a -shellyFailDir = shelly' ReadOnlyState { rosFailToDir = True } - --- | Enter a Sh from (Monad)IO. The environment and working directories are --- inherited from the current process-wide values. Any subsequent changes in --- processwide working directory or environment are not reflected in the --- running Sh. -shelly :: MonadIO m => Sh a -> m a -shelly = shelly' defReadOnlyState - -shelly' :: MonadIO m => ReadOnlyState -> Sh a -> m a -shelly' ros action = do - environment <- liftIO getEnvironment - dir <- liftIO getWorkingDirectory - let def = State { sCode = 0 - , sStdin = Nothing - , sStderr = T.empty - , sPutStdout = TIO.hPutStrLn stdout - , sPutStderr = TIO.hPutStrLn stderr - , sPrintStdout = True - , sPrintStderr = True - , sPrintCommands = False - , sInitCommandHandles = initAllHandles (const $ return ()) - , sCommandEscaping = True - , sEnvironment = environment - , sTracing = True - , sTrace = T.empty - , sDirectory = dir - , sPathExecutables = Nothing - , sErrExit = True - , sReadOnly = ros - , sFollowSymlink = False - } - stref <- liftIO $ newIORef def - let caught = - action `catches_sh` [ - ShellyHandler (\ex -> - case ex of - ExitSuccess -> liftIO $ throwIO ex - ExitFailure _ -> throwExplainedException ex - ) - , ShellyHandler (\ex -> case ex of - QuietExit n -> liftIO $ throwIO $ ExitFailure n) - , ShellyHandler (\(ex::SomeException) -> throwExplainedException ex) - ] - liftIO $ runSh caught stref - where - throwExplainedException :: Exception exception => exception -> Sh a - throwExplainedException ex = get >>= errorMsg >>= liftIO . throwIO . ReThrownException ex - - errorMsg st = - if not (rosFailToDir $ sReadOnly st) then ranCommands else do - d <- pwd - sf <- shellyFile - let logFile = dshelly_dirsf - (writefile logFile trc >> return ("log of commands saved to: " <> encodeString logFile)) - `catchany_sh` (\_ -> ranCommands) - - where - trc = sTrace st - ranCommands = return . mappend "Ran commands: \n" . T.unpack $ trc - - shelly_dir = ".shelly" - shellyFile = chdir_p shelly_dir $ do - fs <- ls "." - return $ pack $ show (nextNum fs) <> ".txt" - - nextNum :: [FilePath] -> Int - nextNum [] = 1 - nextNum fs = (+ 1) . maximum . map (readDef 1 . filter isDigit . encodeString . filename) $ fs - --- from safe package -readDef :: Read a => a -> String -> a -readDef def = fromMaybe def . readMay - where - readMay :: Read a => String -> Maybe a - readMay s = case [x | (x,t) <- reads s, ("","") <- lex t] of - [x] -> Just x - _ -> Nothing - -data RunFailed = RunFailed FilePath [Text] Int Text deriving (Typeable) - -instance Show RunFailed where - show (RunFailed exe args code errs) = - let codeMsg = case code of - 127 -> ". exit code 127 usually means the command does not exist (in the PATH)" - _ -> "" - in "error running: " ++ T.unpack (show_command exe args) ++ - "\nexit status: " ++ show code ++ codeMsg ++ "\nstderr: " ++ T.unpack errs - -instance Exception RunFailed - -show_command :: FilePath -> [Text] -> Text -show_command exe args = - T.intercalate " " $ map quote (toTextIgnore exe : args) - where - quote t | T.any (== '\'') t = t - quote t | T.any isSpace t = surround '\'' t - quote t | otherwise = t - --- quote one argument -quoteOne :: Text -> Text -quoteOne t = - surround '\'' $ T.replace "'" "'\\''" t - - --- returns a string that can be executed by a shell. 
--- NOTE: all parts are treated literally, which means that --- things like variable expansion will not be available. -quoteCommand :: FilePath -> [Text] -> Text -quoteCommand exe args = - T.intercalate " " $ map quoteOne (toTextIgnore exe : args) - -surround :: Char -> Text -> Text -surround c t = T.cons c $ T.snoc t c - -data SshMode = ParSsh | SeqSsh - --- | same as 'sshPairs', but returns () -sshPairs_ :: Text -> [(FilePath, [Text])] -> Sh () -sshPairs_ _ [] = return () -sshPairs_ server cmds = sshPairs' run_ server cmds - --- | same as 'sshPairsP', but returns () - -sshPairsPar_ :: Text -> [(FilePath, [Text])] -> Sh () -sshPairsPar_ _ [] = return () -sshPairsPar_ server cmds = sshPairsPar' run_ server cmds - --- | run commands over SSH. --- An ssh executable is expected in your path. --- Commands are in the same form as 'run', but given as pairs --- --- > sshPairs "server-name" [("cd", "dir"), ("rm",["-r","dir2"])] --- --- This interface is crude, but it works for now. --- --- Please note this sets 'escaping' to False, and the remote commands are --- quoted with single quotes, in a way such that the remote commands will see --- the literal values you passed, this means that no variable expansion and --- alike will done on either the local shell or the remote shell, and that --- if there are a single or double quotes in your arguments, they need not --- to be quoted manually. --- --- Internally the list of commands are combined with the string @&&@ before given to ssh. -sshPairs :: Text -> [(FilePath, [Text])] -> Sh Text -sshPairs _ [] = return "" -sshPairs server cmds = sshPairsWithOptions' run server [] cmds SeqSsh - --- | Same as sshPairs, but combines commands with the string @&@, so they will be started in parallell. -sshPairsPar :: Text -> [(FilePath, [Text])] -> Sh Text -sshPairsPar _ [] = return "" -sshPairsPar server cmds = sshPairsWithOptions' run server [] cmds ParSsh - -sshPairsPar' :: (FilePath -> [Text] -> Sh a) -> Text -> [(FilePath, [Text])] -> Sh a -sshPairsPar' run' server actions = sshPairsWithOptions' run' server [] actions ParSsh - -sshPairs' :: (FilePath -> [Text] -> Sh a) -> Text -> [(FilePath, [Text])] -> Sh a -sshPairs' run' server actions = sshPairsWithOptions' run' server [] actions SeqSsh - --- | Like 'sshPairs', but allows for arguments to the call to ssh. -sshPairsWithOptions :: Text -- ^ Server name. - -> [Text] -- ^ Arguments to ssh (e.g. ["-p","22"]). - -> [(FilePath, [Text])] -- ^ Pairs of commands to run on the remote. - -> Sh Text -- ^ Returns the standard output. 
-sshPairsWithOptions _ _ [] = return "" -sshPairsWithOptions server sshargs cmds = sshPairsWithOptions' run server sshargs cmds SeqSsh - -sshPairsWithOptions' :: (FilePath -> [Text] -> Sh a) -> Text -> [Text] -> [(FilePath, [Text])] -> SshMode -> Sh a -sshPairsWithOptions' run' server sshargs actions mode = escaping False $ do - run' "ssh" ([server] ++ sshargs ++ [sshCommandText actions mode]) - -sshCommandText :: [(FilePath, [Text])] -> SshMode -> Text -sshCommandText actions mode = - quoteOne (foldl1 joiner (map (uncurry quoteCommand) actions)) - where - joiner memo next = case mode of - SeqSsh -> memo <> " && " <> next - ParSsh -> memo <> " & " <> next - -data QuietExit = QuietExit Int deriving (Show, Typeable) -instance Exception QuietExit - --- | Shelly's wrapper around exceptions thrown in its monad -data ReThrownException e = ReThrownException e String deriving (Typeable) -instance Exception e => Exception (ReThrownException e) -instance Exception e => Show (ReThrownException e) where - show (ReThrownException ex msg) = "\n" ++ - msg ++ "\n" ++ "Exception: " ++ show ex - --- | Execute an external command. --- Takes the command name and arguments. --- --- You may prefer using 'cmd' instead, which is a variadic argument version --- of this function. --- --- 'stdout' and 'stderr' are collected. The 'stdout' is returned as --- a result of 'run', and complete stderr output is available after the fact using --- 'lastStderr' --- --- All of the stdout output will be loaded into memory. --- You can avoid this if you don't need stdout by using 'run_', --- If you want to avoid the memory and need to process the output then use 'runFoldLines' or 'runHandle' or 'runHandles'. --- --- By default shell characters are escaped and --- the command name is a name of a program that can be found via @PATH@. --- Shelly will look through the @PATH@ itself to find the command. --- --- When 'escaping' is set to @False@, shell characters are allowed. --- Since there is no longer a guarantee that a single program name is --- given, Shelly cannot look in the @PATH@ for it. --- a @PATH@ modified by setenv is not taken into account when finding the exe name. --- Instead the original Haskell program @PATH@ is used. --- On a Posix system the @env@ command can be used to make the 'setenv' PATH used when 'escaping' is set to False. @env echo hello@ instead of @echo hello@ --- -run :: FilePath -> [Text] -> Sh Text -run fp args = return . lineSeqToText =<< runFoldLines mempty (|>) fp args - --- | Like `run`, but it invokes the user-requested program with _bash_. -bash :: FilePath -> [Text] -> Sh Text -bash fp args = escaping False $ run "bash" $ bashArgs fp args - -bash_ :: FilePath -> [Text] -> Sh () -bash_ fp args = escaping False $ run_ "bash" $ bashArgs fp args - -bashArgs :: FilePath -> [Text] -> [Text] -bashArgs fp args = ["-c", "'" <> sanitise (toTextIgnore fp : args) <> "'"] - where - sanitise = T.replace "'" "\'" . T.intercalate " " - --- | Use this with `bash` to set _pipefail_ --- --- > bashPipeFail $ bash "echo foo | echo" -bashPipeFail :: (FilePath -> [Text] -> Sh a) -> FilePath -> [Text] -> Sh a -bashPipeFail runner fp args = runner "set -o pipefail;" (toTextIgnore fp : args) - --- | bind some arguments to run for re-use. Example: --- --- > monit = command "monit" ["-c", "monitrc"] --- > monit ["stop", "program"] -command :: FilePath -> [Text] -> [Text] -> Sh Text -command com args more_args = run com (args ++ more_args) - --- | bind some arguments to 'run_' for re-use. 
Example: --- --- > monit_ = command_ "monit" ["-c", "monitrc"] --- > monit_ ["stop", "program"] -command_ :: FilePath -> [Text] -> [Text] -> Sh () -command_ com args more_args = run_ com (args ++ more_args) - --- | bind some arguments to run for re-use, and require 1 argument. Example: --- --- > git = command1 "git" []; git "pull" ["origin", "master"] -command1 :: FilePath -> [Text] -> Text -> [Text] -> Sh Text -command1 com args one_arg more_args = run com (args ++ [one_arg] ++ more_args) - --- | bind some arguments to run for re-use, and require 1 argument. Example: --- --- > git_ = command1_ "git" []; git "pull" ["origin", "master"] -command1_ :: FilePath -> [Text] -> Text -> [Text] -> Sh () -command1_ com args one_arg more_args = run_ com (args ++ [one_arg] ++ more_args) - --- | the same as 'run', but return @()@ instead of the stdout content --- stdout will be read and discarded line-by-line -run_ :: FilePath -> [Text] -> Sh () -run_ exe args = do - state <- get - if sPrintStdout state - then runWithColor_ - else runFoldLines () (\_ _ -> ()) exe args - where - -- same a runFoldLines except Inherit Stdout - -- That allows color to show up - runWithColor_ = - runHandles exe args [OutHandle Inherit] $ \inH _ errH -> do - state <- get - errs <- liftIO $ do - hClose inH -- setStdin was taken care of before the process even ran - errVar <- (putHandleIntoMVar mempty (|>) errH (sPutStderr state) (sPrintStderr state)) - lineSeqToText `fmap` wait errVar - modify $ \state' -> state' { sStderr = errs } - return () - -liftIO_ :: IO a -> Sh () -liftIO_ = void . liftIO - --- | Similar to 'run' but gives the raw stdout handle in a callback. --- If you want even more control, use 'runHandles'. -runHandle :: FilePath -- ^ command - -> [Text] -- ^ arguments - -> (Handle -> Sh a) -- ^ stdout handle - -> Sh a -runHandle exe args withHandle = runHandles exe args [] $ \_ outH errH -> do - state <- get - errVar <- liftIO $ - (putHandleIntoMVar mempty (|>) errH (sPutStderr state) (sPrintStderr state)) - res <- withHandle outH - errs <- liftIO $ lineSeqToText `fmap` wait errVar - modify $ \state' -> state' { sStderr = errs } - return res - --- | Similar to 'run' but gives direct access to all input and output handles. --- --- Be careful when using the optional input handles. 
--- If you specify Inherit for a handle then attempting to access the handle in your --- callback is an error -runHandles :: FilePath -- ^ command - -> [Text] -- ^ arguments - -> [StdHandle] -- ^ optionally connect process i/o handles to existing handles - -> (Handle -> Handle -> Handle -> Sh a) -- ^ stdin, stdout and stderr - -> Sh a -runHandles exe args reusedHandles withHandles = do - -- clear stdin before beginning command execution - origstate <- get - let mStdin = sStdin origstate - put $ origstate { sStdin = Nothing, sCode = 0, sStderr = T.empty } - state <- get - - let cmdString = show_command exe args - when (sPrintCommands state) $ echo cmdString - trace cmdString - - let doRun = if sCommandEscaping state then runCommand else runCommandNoEscape - - bracket_sh - (doRun reusedHandles state exe args) - (\(_,_,_,procH) -> (liftIO $ terminateProcess procH)) - (\(inH,outH,errH,procH) -> do - - liftIO $ do - inInit (sInitCommandHandles state) inH - outInit (sInitCommandHandles state) outH - errInit (sInitCommandHandles state) errH - - liftIO $ case mStdin of - Just input -> TIO.hPutStr inH input - Nothing -> return () - - result <- withHandles inH outH errH - - (ex, code) <- liftIO $ do - ex' <- waitForProcess procH - - -- TODO: specifically catch our own error for Inherit pipes - hClose outH `catchany` (const $ return ()) - hClose errH `catchany` (const $ return ()) - hClose inH `catchany` (const $ return ()) - - return $ case ex' of - ExitSuccess -> (ex', 0) - ExitFailure n -> (ex', n) - - modify $ \state' -> state' { sCode = code } - - case (sErrExit state, ex) of - (True, ExitFailure n) -> do - newState <- get - liftIO $ throwIO $ RunFailed exe args n (sStderr newState) - _ -> return result - ) - - --- | used by 'run'. fold over stdout line-by-line as it is read to avoid keeping it in memory --- stderr is still being placed in memory under the assumption it is always relatively small -runFoldLines :: a -> FoldCallback a -> FilePath -> [Text] -> Sh a -runFoldLines start cb exe args = - runHandles exe args [] $ \inH outH errH -> do - state <- get - (errVar, outVar) <- liftIO $ do - hClose inH -- setStdin was taken care of before the process even ran - liftM2 (,) - (putHandleIntoMVar mempty (|>) errH (sPutStderr state) (sPrintStderr state)) - (putHandleIntoMVar start cb outH (sPutStdout state) (sPrintStdout state)) - errs <- liftIO $ lineSeqToText `fmap` wait errVar - modify $ \state' -> state' { sStderr = errs } - liftIO $ wait outVar - - -putHandleIntoMVar :: a -> FoldCallback a - -> Handle -- ^ out handle - -> (Text -> IO ()) -- ^ in handle - -> Bool -- ^ should it be printed while transfered? - -> IO (Async a) -putHandleIntoMVar start cb outH putWrite shouldPrint = liftIO $ async $ do - if shouldPrint - then transferFoldHandleLines start cb outH putWrite - else foldHandleLines start cb outH - - --- | The output of last external command. See 'run'. -lastStderr :: Sh Text -lastStderr = gets sStderr - --- | The exit code from the last command. --- Unless you set 'errExit' to False you won't get a chance to use this: a non-zero exit code will throw an exception. -lastExitCode :: Sh Int -lastExitCode = gets sCode - --- | set the stdin to be used and cleared by the next 'run'. -setStdin :: Text -> Sh () -setStdin input = modify $ \st -> st { sStdin = Just input } - --- | Pipe operator. set the stdout the first command as the stdin of the second. --- This does not create a shell-level pipe, but hopefully it will in the future. 
--- To create a shell level pipe you can set @escaping False@ and use a pipe @|@ character in a command. -(-|-) :: Sh Text -> Sh b -> Sh b -one -|- two = do - res <- print_stdout False one - setStdin res - two - --- | Copy a file, or a directory recursively. --- uses 'cp' -cp_r :: FilePath -> FilePath -> Sh () -cp_r from' to' = do - from <- absPath from' - fromIsDir <- (test_d from) - if not fromIsDir then cp from' to' else do - trace $ "cp -r " <> toTextIgnore from <> " " <> toTextIgnore to' - to <- absPath to' - toIsDir <- test_d to - - when (from == to) $ liftIO $ throwIO $ userError $ show $ "cp_r: " <> - toTextIgnore from <> " and " <> toTextIgnore to <> " are identical" - - finalTo <- if not toIsDir then mkdir to >> return to else do - let d = to dirname (addTrailingSlash from) - mkdir_p d >> return d - - ls from >>= mapM_ (\item -> cp_r (from FP. filename item) (finalTo FP. filename item)) - --- | Copy a file. The second path could be a directory, in which case the --- original file name is used, in that directory. -cp :: FilePath -> FilePath -> Sh () -cp from' to' = do - from <- absPath from' - to <- absPath to' - trace $ "cp " <> toTextIgnore from <> " " <> toTextIgnore to - to_dir <- test_d to - let to_loc = if to_dir then to FP. filename from else to - liftIO $ copyFile from to_loc `catchany` (\e -> throwIO $ - ReThrownException e (extraMsg to_loc from) - ) - where - extraMsg t f = "during copy from: " ++ encodeString f ++ " to: " ++ encodeString t - - - --- | Create a temporary directory and pass it as a parameter to a Sh --- computation. The directory is nuked afterwards. -withTmpDir :: (FilePath -> Sh a) -> Sh a -withTmpDir act = do - trace "withTmpDir" - dir <- liftIO getTemporaryDirectory - tid <- liftIO myThreadId - (pS, fhandle) <- liftIO $ openTempFile dir ("tmp" ++ filter isAlphaNum (show tid)) - let p = pack pS - liftIO $ hClose fhandle -- required on windows - rm_f p - mkdir p - act p `finally_sh` rm_rf p - --- | Write a Text to a file. -writefile :: FilePath -> Text -> Sh () -writefile f' bits = do - f <- traceAbsPath ("writefile " <>) f' - liftIO (TIO.writeFile (encodeString f) bits) - -writeBinary :: FilePath -> ByteString -> Sh () -writeBinary f' bytes = do - f <- traceAbsPath ("writeBinary " <>) f' - liftIO (BS.writeFile (encodeString f) bytes) - --- | Update a file, creating (a blank file) if it does not exist. -touchfile :: FilePath -> Sh () -touchfile = traceAbsPath ("touch " <>) >=> flip appendfile "" - --- | Append a Text to a file. -appendfile :: FilePath -> Text -> Sh () -appendfile f' bits = do - f <- traceAbsPath ("appendfile " <>) f' - liftIO (TIO.appendFile (encodeString f) bits) - -readfile :: FilePath -> Sh Text -readfile = traceAbsPath ("readfile " <>) >=> \fp -> - readBinary fp >>= - return . TE.decodeUtf8With TE.lenientDecode - --- | wraps ByteSting readFile -readBinary :: FilePath -> Sh ByteString -readBinary = traceAbsPath ("readBinary " <>) - >=> liftIO . BS.readFile . encodeString - --- | flipped hasExtension for Text -hasExt :: Text -> FilePath -> Bool -hasExt = flip hasExtension - --- | Run a Sh computation and collect timing information. --- The value returned is the amount of _real_ time spent running the computation --- in seconds, as measured by the system clock. --- The precision is determined by the resolution of `getCurrentTime`. 
-time :: Sh a -> Sh (Double, a) -time what = sub $ do - trace "time" - t <- liftIO getCurrentTime - res <- what - t' <- liftIO getCurrentTime - return (realToFrac $ diffUTCTime t' t, res) - --- | threadDelay wrapper that uses seconds -sleep :: Int -> Sh () -sleep = liftIO . threadDelay . (1000 * 1000 *) - --- | spawn an asynchronous action with a copy of the current state -asyncSh :: Sh a -> Sh (Async a) -asyncSh proc = do - state <- get - liftIO $ async $ shelly (put state >> proc) - --- helper because absPath can throw exceptions --- This helps give clear tracing messages -tracePath :: (FilePath -> Sh FilePath) -- ^ filepath conversion - -> (Text -> Text) -- ^ tracing statement - -> FilePath - -> Sh FilePath -- ^ converted filepath -tracePath convert tracer infp = - (convert infp >>= \fp -> traceIt fp >> return fp) - `catchany_sh` (\e -> traceIt infp >> liftIO (throwIO e)) - where traceIt = trace . tracer . toTextIgnore - -traceAbsPath :: (Text -> Text) -> FilePath -> Sh FilePath -traceAbsPath = tracePath absPath - -traceCanonicPath :: (Text -> Text) -> FilePath -> Sh FilePath -traceCanonicPath = tracePath canonic diff --git a/shelly/src/Shelly/Base.hs b/shelly/src/Shelly/Base.hs deleted file mode 100644 index a8e08d69..00000000 --- a/shelly/src/Shelly/Base.hs +++ /dev/null @@ -1,331 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE CPP #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE UndecidableInstances #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE InstanceSigs#-} --- | I started exposing multiple module (starting with one for finding) --- Base prevented circular dependencies --- However, Shelly went back to exposing a single module -module Shelly.Base - ( - Sh(..), ShIO, runSh, State(..), ReadOnlyState(..), StdHandle(..), - HandleInitializer, StdInit(..), - FilePath, Text, - relPath, path, absPath, canonic, canonicalize, - test_d, test_s, - unpack, gets, get, modify, trace, - ls, lsRelAbs, - toTextIgnore, - echo, echo_n, echo_err, echo_n_err, inspect, inspect_err, - catchany, - liftIO, (>=>), - eitherRelativeTo, relativeTo, maybeRelativeTo, - whenM - -- * utilities not yet exported - , addTrailingSlash - ) where - -#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 706 -import Prelude hiding (FilePath, catch) -#else -import Prelude hiding (FilePath) -#endif - -import Data.Text (Text) -import System.Process( StdStream(..) 
) -import System.IO ( Handle, hFlush, stderr, stdout ) - -import Control.Monad (when, (>=>)) -import Control.Monad.Base -import Control.Monad.Trans.Control -#if !MIN_VERSION_base(4,13,0) -import Control.Applicative (Applicative, (<$>)) -#endif -import Filesystem (isDirectory, listDirectory) -import System.PosixCompat.Files( getSymbolicLinkStatus, isSymbolicLink ) -import Filesystem.Path.CurrentOS (FilePath, encodeString, relative) -import qualified Filesystem.Path.CurrentOS as FP -import qualified Filesystem as FS -import Data.IORef (readIORef, modifyIORef, IORef) -#if !MIN_VERSION_base(4,13,0) -import Data.Monoid (mappend) -#endif -import qualified Data.Text as T -import qualified Data.Text.IO as TIO -import Control.Exception (SomeException, catch, throwIO, Exception) -import Data.Maybe (fromMaybe) -import qualified Control.Monad.Catch as Catch -import Control.Monad.Trans ( MonadIO, liftIO ) -import Control.Monad.Reader.Class (MonadReader, ask) -import Control.Monad.Trans.Reader (runReaderT, ReaderT(..)) -import qualified Data.Set as S -import Data.Typeable (Typeable) - --- | ShIO is Deprecated in favor of 'Sh', which is easier to type. -type ShIO a = Sh a -{-# DEPRECATED ShIO "Use Sh instead of ShIO" #-} - -newtype Sh a = Sh { - unSh :: ReaderT (IORef State) IO a - } deriving (Applicative, Monad, MonadIO, MonadReader (IORef State), Functor, Catch.MonadMask) - -#if MIN_VERSION_base(4,13,0) -instance MonadFail Sh where - fail = liftIO . fail -#endif - -instance MonadBase IO Sh where - liftBase = Sh . ReaderT . const - -instance MonadBaseControl IO Sh where -#if MIN_VERSION_monad_control(1,0,0) - type StM Sh a = StM (ReaderT (IORef State) IO) a - liftBaseWith f = - Sh $ liftBaseWith $ \runInBase -> f $ \k -> - runInBase $ unSh k - restoreM = Sh . restoreM -#else - newtype StM Sh a = StMSh (StM (ReaderT (IORef State) IO) a) - liftBaseWith f = - Sh $ liftBaseWith $ \runInBase -> f $ \k -> - liftM StMSh $ runInBase $ unSh k - restoreM (StMSh m) = Sh . restoreM $ m -#endif - -instance Catch.MonadThrow Sh where - throwM = liftIO . Catch.throwM - -instance Catch.MonadCatch Sh where - catch (Sh (ReaderT m)) c = - Sh $ ReaderT $ \r -> m r `Catch.catch` \e -> runSh (c e) r - -runSh :: Sh a -> IORef State -> IO a -runSh = runReaderT . 
unSh - -data ReadOnlyState = ReadOnlyState { rosFailToDir :: Bool } -data State = State - { sCode :: Int -- ^ exit code for command that ran - , sStdin :: Maybe Text -- ^ stdin for the command to be run - , sStderr :: Text -- ^ stderr for command that ran - , sDirectory :: FilePath -- ^ working directory - , sPutStdout :: Text -> IO () -- ^ by default, hPutStrLn stdout - , sPrintStdout :: Bool -- ^ print stdout of command that is executed - , sPutStderr :: Text -> IO () -- ^ by default, hPutStrLn stderr - , sPrintStderr :: Bool -- ^ print stderr of command that is executed - , sPrintCommands :: Bool -- ^ print command that is executed - , sInitCommandHandles :: StdInit -- ^ initializers for the standard process handles - -- when running a command - , sCommandEscaping :: Bool -- ^ when running a command, escape shell characters such as '*' rather - -- than passing to the shell for expansion - , sEnvironment :: [(String, String)] - , sPathExecutables :: Maybe [(FilePath, S.Set FilePath)] -- ^ cache of executables in the PATH - , sTracing :: Bool -- ^ should we trace command execution - , sTrace :: Text -- ^ the trace of command execution - , sErrExit :: Bool -- ^ should we exit immediately on any error - , sReadOnly :: ReadOnlyState - , sFollowSymlink :: Bool -- ^ 'find'-command follows symlinks. - } - -data StdHandle = InHandle StdStream - | OutHandle StdStream - | ErrorHandle StdStream - --- | Initialize a handle before using it -type HandleInitializer = Handle -> IO () - --- | A collection of initializers for the three standard process handles -data StdInit = - StdInit { - inInit :: HandleInitializer, - outInit :: HandleInitializer, - errInit :: HandleInitializer - } - --- | A monadic-conditional version of the "when" guard. -whenM :: Monad m => m Bool -> m () -> m () -whenM c a = c >>= \res -> when res a - --- | Makes a relative path relative to the current Sh working directory. --- An absolute path is returned as is. --- To create an absolute path, use 'absPath' -relPath :: FilePath -> Sh FilePath -relPath fp = do - wd <- gets sDirectory - rel <- eitherRelativeTo wd fp - return $ case rel of - Right p -> p - Left p -> p - -eitherRelativeTo :: FilePath -- ^ anchor path, the prefix - -> FilePath -- ^ make this relative to anchor path - -> Sh (Either FilePath FilePath) -- ^ Left is canonic of second path -eitherRelativeTo relativeFP fp = do - let fullFp = relativeFP FP. 
fp - let relDir = addTrailingSlash relativeFP - stripIt relativeFP fp $ - stripIt relativeFP fullFp $ - stripIt relDir fp $ - stripIt relDir fullFp $ do - relCan <- canonic relDir - fpCan <- canonic fullFp - stripIt relCan fpCan $ return $ Left fpCan - where - stripIt rel toStrip nada = - case FP.stripPrefix rel toStrip of - Just stripped -> - if stripped == toStrip then nada - else return $ Right stripped - Nothing -> nada - --- | make the second path relative to the first --- Uses 'Filesystem.stripPrefix', but will canonicalize the paths if necessary -relativeTo :: FilePath -- ^ anchor path, the prefix - -> FilePath -- ^ make this relative to anchor path - -> Sh FilePath -relativeTo relativeFP fp = - fmap (fromMaybe fp) $ maybeRelativeTo relativeFP fp - -maybeRelativeTo :: FilePath -- ^ anchor path, the prefix - -> FilePath -- ^ make this relative to anchor path - -> Sh (Maybe FilePath) -maybeRelativeTo relativeFP fp = do - epath <- eitherRelativeTo relativeFP fp - return $ case epath of - Right p -> Just p - Left _ -> Nothing - - --- | add a trailing slash to ensure the path indicates a directory -addTrailingSlash :: FilePath -> FilePath -addTrailingSlash p = - if FP.null (FP.filename p) then p else - p FP. FP.empty - --- | makes an absolute path. --- Like 'canonicalize', but on an exception returns 'absPath' -canonic :: FilePath -> Sh FilePath -canonic fp = do - p <- absPath fp - liftIO $ canonicalizePath p `catchany` \_ -> return p - --- | Obtain a (reasonably) canonic file path to a filesystem object. Based on --- "canonicalizePath" in system-fileio. -canonicalize :: FilePath -> Sh FilePath -canonicalize = absPath >=> liftIO . canonicalizePath - --- | bugfix older version of canonicalizePath (system-fileio <= 0.3.7) loses trailing slash -canonicalizePath :: FilePath -> IO FilePath -canonicalizePath p = let was_dir = FP.null (FP.filename p) in - if not was_dir then FS.canonicalizePath p - else addTrailingSlash `fmap` FS.canonicalizePath p - -data EmptyFilePathError = EmptyFilePathError deriving Typeable -instance Show EmptyFilePathError where - show _ = "Empty filepath" -instance Exception EmptyFilePathError - --- | Make a relative path absolute by combining with the working directory. --- An absolute path is returned as is. --- To create a relative path, use 'relPath'. -absPath :: FilePath -> Sh FilePath -absPath p | FP.null p = liftIO $ throwIO EmptyFilePathError - | relative p = (FP. p) <$> gets sDirectory - | otherwise = return p - --- | deprecated -path :: FilePath -> Sh FilePath -path = absPath -{-# DEPRECATED path "use absPath, canonic, or relPath instead" #-} - --- | Does a path point to an existing directory? -test_d :: FilePath -> Sh Bool -test_d = absPath >=> liftIO . isDirectory - --- | Does a path point to a symlink? -test_s :: FilePath -> Sh Bool -test_s = absPath >=> liftIO . \f -> do - stat <- getSymbolicLinkStatus (encodeString f) - return $ isSymbolicLink stat - -unpack :: FilePath -> String -unpack = encodeString - -gets :: (State -> a) -> Sh a -gets f = f <$> get - -get :: Sh State -get = do - stateVar <- ask - liftIO (readIORef stateVar) - -modify :: (State -> State) -> Sh () -modify f = do - state <- ask - liftIO (modifyIORef state f) - --- | internally log what occurred. --- Log will be re-played on failure. -trace :: Text -> Sh () -trace msg = - whenM (gets sTracing) $ modify $ - \st -> st { sTrace = sTrace st `mappend` msg `mappend` "\n" } - --- | List directory contents. Does *not* include \".\" and \"..\", but it does --- include (other) hidden files. 
-ls :: FilePath -> Sh [FilePath] --- it is important to use path and not absPath so that the listing can remain relative -ls fp = do - trace $ "ls " `mappend` toTextIgnore fp - fmap fst $ lsRelAbs fp - -lsRelAbs :: FilePath -> Sh ([FilePath], [FilePath]) -lsRelAbs f = absPath f >>= \fp -> do - filt <- if not (relative f) then return return - else do - wd <- gets sDirectory - return (relativeTo wd) - absolute <- liftIO $ listDirectory fp - relativized <- mapM filt absolute - return (relativized, absolute) - --- | silently uses the Right or Left value of "Filesystem.Path.CurrentOS.toText" -toTextIgnore :: FilePath -> Text -toTextIgnore fp = case FP.toText fp of - Left f -> f - Right f -> f - --- | a print lifted into 'Sh' -inspect :: (Show s) => s -> Sh () -inspect x = do - (trace . T.pack . show) x - liftIO $ print x - --- | a print lifted into 'Sh' using stderr -inspect_err :: (Show s) => s -> Sh () -inspect_err x = do - let shown = T.pack $ show x - trace shown - echo_err shown - --- | Echo text to standard (error, when using _err variants) output. The _n --- variants do not print a final newline. -echo, echo_n, echo_err, echo_n_err :: Text -> Sh () -echo msg = traceEcho msg >> liftIO (TIO.putStrLn msg >> hFlush stdout) -echo_n msg = traceEcho msg >> liftIO (TIO.putStr msg >> hFlush stdout) -echo_err msg = traceEcho msg >> liftIO (TIO.hPutStrLn stderr msg >> hFlush stdout) -echo_n_err msg = traceEcho msg >> liftIO (TIO.hPutStr stderr msg >> hFlush stderr) - -traceEcho :: Text -> Sh () -traceEcho msg = trace ("echo " `mappend` "'" `mappend` msg `mappend` "'") - --- | A helper to catch any exception (same as --- @... `catch` \(e :: SomeException) -> ...@). -catchany :: IO a -> (SomeException -> IO a) -> IO a -catchany = catch - diff --git a/shelly/src/Shelly/Find.hs b/shelly/src/Shelly/Find.hs deleted file mode 100644 index 08b89d99..00000000 --- a/shelly/src/Shelly/Find.hs +++ /dev/null @@ -1,79 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE OverloadedStrings #-} --- | File finding utiliites for Shelly --- The basic 'find' takes a dir and gives back a list of files. --- If you don't just want a list, use the folding variants like 'findFold'. --- If you want to avoid traversing certain directories, use the directory filtering variants like 'findDirFilter' -module Shelly.Find - ( - find, findWhen, findFold, findDirFilter, findDirFilterWhen, findFoldDirFilter - ) where - -import Prelude hiding (FilePath) -import Shelly.Base -import Control.Monad (foldM) -#if !MIN_VERSION_base(4,13,0) -import Data.Monoid (mappend) -#endif -import System.PosixCompat.Files( getSymbolicLinkStatus, isSymbolicLink ) -import Filesystem (isDirectory) -import Filesystem.Path.CurrentOS (encodeString) - --- | List directory recursively (like the POSIX utility "find"). --- listing is relative if the path given is relative. --- If you want to filter out some results or fold over them you can do that with the returned files. --- A more efficient approach is to use one of the other find functions. -find :: FilePath -> Sh [FilePath] -find = findFold (\paths fp -> return $ paths ++ [fp]) [] - --- | 'find' that filters the found files as it finds. --- Files must satisfy the given filter to be returned in the result. -findWhen :: (FilePath -> Sh Bool) -> FilePath -> Sh [FilePath] -findWhen = findDirFilterWhen (const $ return True) - --- | Fold an arbitrary folding function over files froma a 'find'. --- Like 'findWhen' but use a more general fold rather than a filter. 
-findFold :: (a -> FilePath -> Sh a) -> a -> FilePath -> Sh a -findFold folder startValue = findFoldDirFilter folder startValue (const $ return True) - --- | 'find' that filters out directories as it finds --- Filtering out directories can make a find much more efficient by avoiding entire trees of files. -findDirFilter :: (FilePath -> Sh Bool) -> FilePath -> Sh [FilePath] -findDirFilter filt = findDirFilterWhen filt (const $ return True) - --- | similar 'findWhen', but also filter out directories --- Alternatively, similar to 'findDirFilter', but also filter out files --- Filtering out directories makes the find much more efficient -findDirFilterWhen :: (FilePath -> Sh Bool) -- ^ directory filter - -> (FilePath -> Sh Bool) -- ^ file filter - -> FilePath -- ^ directory - -> Sh [FilePath] -findDirFilterWhen dirFilt fileFilter = findFoldDirFilter filterIt [] dirFilt - where - filterIt paths fp = do - yes <- fileFilter fp - return $ if yes then paths ++ [fp] else paths - --- | like 'findDirFilterWhen' but use a folding function rather than a filter --- The most general finder: you likely want a more specific one -findFoldDirFilter :: (a -> FilePath -> Sh a) -> a -> (FilePath -> Sh Bool) -> FilePath -> Sh a -findFoldDirFilter folder startValue dirFilter dir = do - absDir <- absPath dir - trace ("find " `mappend` toTextIgnore absDir) - filt <- dirFilter absDir - if not filt then return startValue - -- use possible relative path, not absolute so that listing will remain relative - else do - (rPaths, aPaths) <- lsRelAbs dir - foldM traverse' startValue (zip rPaths aPaths) - where - traverse' acc (relativePath, absolutePath) = do - -- optimization: don't use Shelly API since our path is already good - isDir <- liftIO $ isDirectory absolutePath - sym <- liftIO $ fmap isSymbolicLink $ getSymbolicLinkStatus (encodeString absolutePath) - newAcc <- folder acc relativePath - follow <- fmap sFollowSymlink get - if isDir && (follow || not sym) - then findFoldDirFilter folder newAcc - dirFilter relativePath - else return newAcc diff --git a/shelly/src/Shelly/Lifted.hs b/shelly/src/Shelly/Lifted.hs deleted file mode 100644 index 2b2f9bac..00000000 --- a/shelly/src/Shelly/Lifted.hs +++ /dev/null @@ -1,584 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables, DeriveDataTypeable, OverloadedStrings, - FlexibleInstances, FlexibleContexts, IncoherentInstances, - TypeFamilies, ExistentialQuantification, RankNTypes, - ImpredicativeTypes #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} - --- | A module for shell-like programming in Haskell. --- Shelly's focus is entirely on ease of use for those coming from shell scripting. --- However, it also tries to use modern libraries and techniques to keep things efficient. --- --- The functionality provided by --- this module is (unlike standard Haskell filesystem functionality) --- thread-safe: each Sh maintains its own environment and its own working --- directory. --- --- Recommended usage includes putting the following at the top of your program, --- otherwise you will likely need either type annotations or type conversions --- --- > {-# LANGUAGE OverloadedStrings #-} --- > {-# LANGUAGE ExtendedDefaultRules #-} --- > {-# OPTIONS_GHC -fno-warn-type-defaults #-} --- > import Shelly --- > import qualified Data.Text as T --- > default (T.Text) -module Shelly.Lifted - ( - MonadSh(..), - MonadShControl(..), - - -- This is copied from Shelly.hs, so that we are sure to export the - -- exact same set of symbols. 
Whenever that export list is updated, - -- please make the same updates here and implements the corresponding - -- lifted functions. - - -- * Entering Sh. - Sh, ShIO, S.shelly, S.shellyNoDir, S.shellyFailDir, sub - , silently, verbosely, escaping, print_stdout, print_stderr, print_commands - , tracing, errExit - , log_stdout_with, log_stderr_with - - -- * Running external commands. - , run, run_, runFoldLines, S.cmd, S.FoldCallback - , (-|-), lastStderr, setStdin, lastExitCode - , command, command_, command1, command1_ - , sshPairs, sshPairs_ - , S.ShellCmd(..), S.CmdArg (..) - - -- * Running commands Using handles - , runHandle, runHandles, transferLinesAndCombine, S.transferFoldHandleLines - , S.StdHandle(..), S.StdStream(..) - - - -- * Modifying and querying environment. - , setenv, get_env, get_env_text, get_env_all, appendToPath, prependToPath - - -- * Environment directory - , cd, chdir, chdir_p, pwd - - -- * Printing - , echo, echo_n, echo_err, echo_n_err, inspect, inspect_err - , tag, trace, S.show_command - - -- * Querying filesystem. - , ls, lsT, test_e, test_f, test_d, test_s, test_px, which - - -- * Filename helpers - , absPath, (S.), (S.<.>), canonic, canonicalize, relPath, relativeTo - , S.hasExt - - -- * Manipulating filesystem. - , mv, rm, rm_f, rm_rf, cp, cp_r, mkdir, mkdir_p, mkdirTree - - -- * reading/writing Files - , readfile, readBinary, writefile, appendfile, touchfile, withTmpDir - - -- * exiting the program - , exit, errorExit, quietExit, terror - - -- * Exceptions - , bracket_sh, catchany, catch_sh, handle_sh, handleany_sh, finally_sh, catches_sh, catchany_sh - - -- * convert between Text and FilePath - , S.toTextIgnore, toTextWarn, FP.fromText - - -- * Utility Functions - , S.whenM, S.unlessM, time, sleep - - -- * Re-exported for your convenience - , liftIO, S.when, S.unless, FilePath, (S.<$>) - - -- * internal functions for writing extensions - , Shelly.Lifted.get, Shelly.Lifted.put - - -- * find functions - , S.find, S.findWhen, S.findFold, S.findDirFilter, S.findDirFilterWhen, S.findFoldDirFilter - , followSymlink - ) where - -import qualified Shelly as S -import Shelly.Base (Sh(..), ShIO, Text, (>=>), FilePath) -import qualified Shelly.Base as S -import Control.Monad ( liftM ) -import Prelude hiding ( FilePath ) -import Data.ByteString ( ByteString ) -import System.IO ( Handle ) -import Data.Tree ( Tree ) -import qualified Filesystem.Path.CurrentOS as FP - -import Control.Exception.Lifted -import Control.Exception.Enclosed -import Control.Monad.IO.Class -import Control.Monad.Trans.Control -import Control.Monad.Trans.Identity -import Control.Monad.Trans.Maybe -import Control.Monad.Trans.Cont -import Control.Monad.Trans.Except -import Control.Monad.Trans.Reader -import Control.Monad.Trans.State -import qualified Control.Monad.Trans.State.Strict as Strict -import Control.Monad.Trans.Writer -import qualified Control.Monad.Trans.Writer.Strict as Strict -import qualified Control.Monad.Trans.RWS as RWS -import qualified Control.Monad.Trans.RWS.Strict as Strict - -class Monad m => MonadSh m where - liftSh :: Sh a -> m a - -instance MonadSh Sh where - liftSh = id - -instance MonadSh m => MonadSh (IdentityT m) where - liftSh = IdentityT . liftSh -instance MonadSh m => MonadSh (MaybeT m) where - liftSh = MaybeT . liftM Just . 
liftSh -instance MonadSh m => MonadSh (ContT r m) where - liftSh m = ContT (liftSh m >>=) -instance MonadSh m => MonadSh (ExceptT e m) where - liftSh m = ExceptT $ do - a <- liftSh m - return (Right a) -instance MonadSh m => MonadSh (ReaderT r m) where - liftSh = ReaderT . const . liftSh -instance MonadSh m => MonadSh (StateT s m) where - liftSh m = StateT $ \s -> do - a <- liftSh m - return (a, s) -instance MonadSh m => MonadSh (Strict.StateT s m) where - liftSh m = Strict.StateT $ \s -> do - a <- liftSh m - return (a, s) -instance (Monoid w, MonadSh m) => MonadSh (WriterT w m) where - liftSh m = WriterT $ do - a <- liftSh m - return (a, mempty :: w) -instance (Monoid w, MonadSh m) => MonadSh (Strict.WriterT w m) where - liftSh m = Strict.WriterT $ do - a <- liftSh m - return (a, mempty :: w) -instance (Monoid w, MonadSh m) => MonadSh (RWS.RWST r w s m) where - liftSh m = RWS.RWST $ \_ s -> do - a <- liftSh m - return (a, s, mempty :: w) -instance (Monoid w, MonadSh m) => MonadSh (Strict.RWST r w s m) where - liftSh m = Strict.RWST $ \_ s -> do - a <- liftSh m - return (a, s, mempty :: w) - -instance MonadSh m => S.ShellCmd (m Text) where - cmdAll = (liftSh .) . S.run - -instance (MonadSh m, s ~ Text, Show s) => S.ShellCmd (m s) where - cmdAll = (liftSh .) . S.run - -instance MonadSh m => S.ShellCmd (m ()) where - cmdAll = (liftSh .) . S.run_ - -class Monad m => MonadShControl m where - data ShM m a :: * - liftShWith :: ((forall x. m x -> Sh (ShM m x)) -> Sh a) -> m a - restoreSh :: ShM m a -> m a - -instance MonadShControl Sh where - newtype ShM Sh a = ShSh a - liftShWith f = f $ liftM ShSh - restoreSh (ShSh x) = return x - {-# INLINE liftShWith #-} - {-# INLINE restoreSh #-} - -instance MonadShControl m => MonadShControl (MaybeT m) where - newtype ShM (MaybeT m) a = MaybeTShM (ShM m (Maybe a)) - liftShWith f = - MaybeT $ liftM return $ liftShWith $ \runInSh -> f $ \k -> - liftM MaybeTShM $ runInSh $ runMaybeT k - restoreSh (MaybeTShM m) = MaybeT . restoreSh $ m - {-# INLINE liftShWith #-} - {-# INLINE restoreSh #-} - -instance MonadShControl m - => MonadShControl (IdentityT m) where - newtype ShM (IdentityT m) a = IdentityTShM (ShM m a) - liftShWith f = - IdentityT $ liftM id $ liftShWith $ \runInSh -> f $ \k -> - liftM IdentityTShM $ runInSh $ runIdentityT k - restoreSh (IdentityTShM m) = IdentityT . restoreSh $ m - {-# INLINE liftShWith #-} - {-# INLINE restoreSh #-} - -instance (MonadShControl m, Monoid w) - => MonadShControl (WriterT w m) where - newtype ShM (WriterT w m) a = WriterTShM (ShM m (a, w)) - liftShWith f = - WriterT $ liftM (\x -> (x, mempty :: w)) $ liftShWith $ \runInSh -> f $ \k -> - liftM WriterTShM $ runInSh $ runWriterT k - restoreSh (WriterTShM m) = WriterT . restoreSh $ m - {-# INLINE liftShWith #-} - {-# INLINE restoreSh #-} - -instance (MonadShControl m, Monoid w) - => MonadShControl (Strict.WriterT w m) where - newtype ShM (Strict.WriterT w m) a = StWriterTShM (ShM m (a, w)) - liftShWith f = - Strict.WriterT $ liftM (\x -> (x, mempty :: w)) $ liftShWith $ \runInSh -> f $ \k -> - liftM StWriterTShM $ runInSh $ Strict.runWriterT k - restoreSh (StWriterTShM m) = Strict.WriterT . restoreSh $ m - {-# INLINE liftShWith #-} - {-# INLINE restoreSh #-} - -instance MonadShControl m - => MonadShControl (ExceptT e m) where - newtype ShM (ExceptT e m) a = ErrorTShM (ShM m (Either e a)) - liftShWith f = - ExceptT $ liftM return $ liftShWith $ \runInSh -> f $ \k -> - liftM ErrorTShM $ runInSh $ runExceptT k - restoreSh (ErrorTShM m) = ExceptT . 
restoreSh $ m - {-# INLINE liftShWith #-} - {-# INLINE restoreSh #-} - -instance MonadShControl m => MonadShControl (StateT s m) where - newtype ShM (StateT s m) a = StateTShM (ShM m (a, s)) - liftShWith f = StateT $ \s -> - liftM (\x -> (x,s)) $ liftShWith $ \runInSh -> f $ \k -> - liftM StateTShM $ runInSh $ runStateT k s - restoreSh (StateTShM m) = StateT . const . restoreSh $ m - {-# INLINE liftShWith #-} - {-# INLINE restoreSh #-} - -instance MonadShControl m => MonadShControl (Strict.StateT s m) where - newtype ShM (Strict.StateT s m) a = StStateTShM (ShM m (a, s)) - liftShWith f = Strict.StateT $ \s -> - liftM (\x -> (x,s)) $ liftShWith $ \runInSh -> f $ \k -> - liftM StStateTShM $ runInSh $ Strict.runStateT k s - restoreSh (StStateTShM m) = Strict.StateT . const . restoreSh $ m - {-# INLINE liftShWith #-} - {-# INLINE restoreSh #-} - -instance MonadShControl m => MonadShControl (ReaderT r m) where - newtype ShM (ReaderT r m) a = ReaderTShM (ShM m a) - liftShWith f = ReaderT $ \r -> - liftM id $ liftShWith $ \runInSh -> f $ \k -> - liftM ReaderTShM $ runInSh $ runReaderT k r - restoreSh (ReaderTShM m) = ReaderT . const . restoreSh $ m - {-# INLINE liftShWith #-} - {-# INLINE restoreSh #-} - -instance (MonadShControl m, Monoid w) - => MonadShControl (RWS.RWST r w s m) where - newtype ShM (RWS.RWST r w s m) a = RWSTShM (ShM m (a, s ,w)) - liftShWith f = RWS.RWST $ \r s -> - liftM (\x -> (x,s,mempty :: w)) $ liftShWith $ \runInSh -> f $ \k -> - liftM RWSTShM $ runInSh $ RWS.runRWST k r s - restoreSh (RWSTShM m) = RWS.RWST . const . const . restoreSh $ m - {-# INLINE liftShWith #-} - {-# INLINE restoreSh #-} - -instance (MonadShControl m, Monoid w) - => MonadShControl (Strict.RWST r w s m) where - newtype ShM (Strict.RWST r w s m) a = StRWSTShM (ShM m (a, s, w)) - liftShWith f = Strict.RWST $ \r s -> - liftM (\x -> (x,s,mempty :: w)) $ liftShWith $ \runInSh -> f $ \k -> - liftM StRWSTShM $ runInSh $ Strict.runRWST k r s - restoreSh (StRWSTShM m) = Strict.RWST . const . const . restoreSh $ m - {-# INLINE liftShWith #-} - {-# INLINE restoreSh #-} - -controlSh :: MonadShControl m => ((forall x. 
m x -> Sh (ShM m x)) -> Sh (ShM m a)) -> m a -controlSh = liftShWith >=> restoreSh -{-# INLINE controlSh #-} - -tag :: (MonadShControl m, MonadSh m) => m a -> Text -> m a -tag action msg = controlSh $ \runInSh -> S.tag (runInSh action) msg - -chdir :: MonadShControl m => FilePath -> m a -> m a -chdir dir action = controlSh $ \runInSh -> S.chdir dir (runInSh action) - -chdir_p :: MonadShControl m => FilePath -> m a -> m a -chdir_p dir action = controlSh $ \runInSh -> S.chdir_p dir (runInSh action) - -silently :: MonadShControl m => m a -> m a -silently a = controlSh $ \runInSh -> S.silently (runInSh a) - -verbosely :: MonadShControl m => m a -> m a -verbosely a = controlSh $ \runInSh -> S.verbosely (runInSh a) - -log_stdout_with :: MonadShControl m => (Text -> IO ()) -> m a -> m a -log_stdout_with logger a = controlSh $ \runInSh -> S.log_stdout_with logger (runInSh a) - -log_stderr_with :: MonadShControl m => (Text -> IO ()) -> m a -> m a -log_stderr_with logger a = controlSh $ \runInSh -> S.log_stderr_with logger (runInSh a) - -print_stdout :: MonadShControl m => Bool -> m a -> m a -print_stdout shouldPrint a = controlSh $ \runInSh -> S.print_stdout shouldPrint (runInSh a) - -print_stderr :: MonadShControl m => Bool -> m a -> m a -print_stderr shouldPrint a = controlSh $ \runInSh -> S.print_stderr shouldPrint (runInSh a) - -print_commands :: MonadShControl m => Bool -> m a -> m a -print_commands shouldPrint a = controlSh $ \runInSh -> S.print_commands shouldPrint (runInSh a) - -sub :: MonadShControl m => m a -> m a -sub a = controlSh $ \runInSh -> S.sub (runInSh a) - -trace :: MonadSh m => Text -> m () -trace = liftSh . S.trace - -tracing :: MonadShControl m => Bool -> m a -> m a -tracing shouldTrace action = controlSh $ \runInSh -> S.tracing shouldTrace (runInSh action) - -escaping :: MonadShControl m => Bool -> m a -> m a -escaping shouldEscape action = controlSh $ \runInSh -> S.escaping shouldEscape (runInSh action) - -errExit :: MonadShControl m => Bool -> m a -> m a -errExit shouldExit action = controlSh $ \runInSh -> S.errExit shouldExit (runInSh action) - -followSymlink :: MonadShControl m => Bool -> m a -> m a -followSymlink enableFollowSymlink action = controlSh $ \runInSh -> S.followSymlink enableFollowSymlink (runInSh action) - -(-|-) :: (MonadShControl m, MonadSh m) => m Text -> m b -> m b -one -|- two = controlSh $ \runInSh -> do - x <- runInSh one - runInSh $ restoreSh x >>= \x' -> - controlSh $ \runInSh' -> return x' S.-|- runInSh' two - -withTmpDir :: MonadShControl m => (FilePath -> m a) -> m a -withTmpDir action = controlSh $ \runInSh -> S.withTmpDir (fmap runInSh action) - -time :: MonadShControl m => m a -> m (Double, a) -time what = controlSh $ \runInSh -> do - (d, a) <- S.time (runInSh what) - runInSh $ restoreSh a >>= \x -> return (d, x) - -toTextWarn :: MonadSh m => FilePath -> m Text -toTextWarn = liftSh . toTextWarn - -transferLinesAndCombine :: MonadIO m => Handle -> (Text -> IO ()) -> m Text -transferLinesAndCombine = (liftIO .) . S.transferLinesAndCombine - -get :: MonadSh m => m S.State -get = liftSh S.get - -put :: MonadSh m => S.State -> m () -put = liftSh . 
S.put - -catch_sh :: (Exception e) => Sh a -> (e -> Sh a) -> Sh a -catch_sh = Control.Exception.Lifted.catch -{-# DEPRECATED catch_sh "use Control.Exception.Lifted.catch instead" #-} - -handle_sh :: (Exception e) => (e -> Sh a) -> Sh a -> Sh a -handle_sh = handle -{-# DEPRECATED handle_sh "use Control.Exception.Lifted.handle instead" #-} - -finally_sh :: Sh a -> Sh b -> Sh a -finally_sh = finally -{-# DEPRECATED finally_sh "use Control.Exception.Lifted.finally instead" #-} - -bracket_sh :: Sh a -> (a -> Sh b) -> (a -> Sh c) -> Sh c -bracket_sh = bracket -{-# DEPRECATED bracket_sh "use Control.Exception.Lifted.bracket instead" #-} - -catches_sh :: Sh a -> [Handler Sh a] -> Sh a -catches_sh = catches -{-# DEPRECATED catches_sh "use Control.Exception.Lifted.catches instead" #-} - -catchany_sh :: Sh a -> (SomeException -> Sh a) -> Sh a -catchany_sh = catchAny -{-# DEPRECATED catchany_sh "use Control.Exception.Enclosed.catchAny instead" #-} - -handleany_sh :: (SomeException -> Sh a) -> Sh a -> Sh a -handleany_sh = handleAny -{-# DEPRECATED handleany_sh "use Control.Exception.Enclosed.handleAny instead" #-} - -cd :: MonadSh m => FilePath -> m () -cd = liftSh . S.cd - -mv :: MonadSh m => FilePath -> FilePath -> m () -mv = (liftSh .) . S.mv - -lsT :: MonadSh m => FilePath -> m [Text] -lsT = liftSh . S.lsT - -pwd :: MonadSh m => m FilePath -pwd = liftSh S.pwd - -exit :: MonadSh m => Int -> m a -exit = liftSh . S.exit - -errorExit :: MonadSh m => Text -> m a -errorExit = liftSh . S.errorExit - -quietExit :: MonadSh m => Int -> m a -quietExit = liftSh . S.quietExit - -terror :: MonadSh m => Text -> m a -terror = liftSh . S.terror - -mkdir :: MonadSh m => FilePath -> m () -mkdir = liftSh . S.mkdir - -mkdir_p :: MonadSh m => FilePath -> m () -mkdir_p = liftSh . S.mkdir_p - -mkdirTree :: MonadSh m => Tree FilePath -> m () -mkdirTree = liftSh . S.mkdirTree - -which :: MonadSh m => FilePath -> m (Maybe FilePath) -which = liftSh . S.which - -test_e :: MonadSh m => FilePath -> m Bool -test_e = liftSh . S.test_e - -test_f :: MonadSh m => FilePath -> m Bool -test_f = liftSh . S.test_f - -test_px :: MonadSh m => FilePath -> m Bool -test_px = liftSh . S.test_px - -rm_rf :: MonadSh m => FilePath -> m () -rm_rf = liftSh . S.rm_rf - -rm_f :: MonadSh m => FilePath -> m () -rm_f = liftSh . S.rm_f - -rm :: MonadSh m => FilePath -> m () -rm = liftSh . S.rm - -setenv :: MonadSh m => Text -> Text -> m () -setenv = (liftSh .) . S.setenv - -appendToPath :: MonadSh m => FilePath -> m () -appendToPath = liftSh . S.appendToPath - -prependToPath :: MonadSh m => FilePath -> m () -prependToPath = liftSh . S.prependToPath - -get_env_all :: MonadSh m => m [(String, String)] -get_env_all = liftSh S.get_env_all - -get_env :: MonadSh m => Text -> m (Maybe Text) -get_env = liftSh . S.get_env - -get_env_text :: MonadSh m => Text -> m Text -get_env_text = liftSh . S.get_env_text - -sshPairs_ :: MonadSh m => Text -> [(FilePath, [Text])] -> m () -sshPairs_ = (liftSh .) . S.sshPairs_ - -sshPairs :: MonadSh m => Text -> [(FilePath, [Text])] -> m Text -sshPairs = (liftSh .) . S.sshPairs - -run :: MonadSh m => FilePath -> [Text] -> m Text -run = (liftSh .) . 
S.run - -command :: MonadSh m => FilePath -> [Text] -> [Text] -> m Text -command com args more_args = - liftSh $ S.command com args more_args - -command_ :: MonadSh m => FilePath -> [Text] -> [Text] -> m () -command_ com args more_args = - liftSh $ S.command_ com args more_args - -command1 :: MonadSh m => FilePath -> [Text] -> Text -> [Text] -> m Text -command1 com args one_arg more_args = - liftSh $ S.command1 com args one_arg more_args - -command1_ :: MonadSh m => FilePath -> [Text] -> Text -> [Text] -> m () -command1_ com args one_arg more_args = - liftSh $ S.command1_ com args one_arg more_args - -run_ :: MonadSh m => FilePath -> [Text] -> m () -run_ = (liftSh .) . S.run_ - -runHandle :: MonadShControl m => FilePath -- ^ command - -> [Text] -- ^ arguments - -> (Handle -> m a) -- ^ stdout handle - -> m a -runHandle exe args withHandle = - controlSh $ \runInSh -> S.runHandle exe args (fmap runInSh withHandle) - -runHandles :: MonadShControl m => FilePath -- ^ command - -> [Text] -- ^ arguments - -> [S.StdHandle] -- ^ optionally connect process i/o handles to existing handles - -> (Handle -> Handle -> Handle -> m a) -- ^ stdin, stdout and stderr - -> m a -runHandles exe args reusedHandles withHandles = - controlSh $ \runInSh -> - S.runHandles exe args reusedHandles (fmap (fmap (fmap runInSh)) withHandles) - -runFoldLines :: MonadSh m => a -> S.FoldCallback a -> FilePath -> [Text] -> m a -runFoldLines start cb exe args = liftSh $ S.runFoldLines start cb exe args - -lastStderr :: MonadSh m => m Text -lastStderr = liftSh S.lastStderr - -lastExitCode :: MonadSh m => m Int -lastExitCode = liftSh S.lastExitCode - -setStdin :: MonadSh m => Text -> m () -setStdin = liftSh . S.setStdin - -cp_r :: MonadSh m => FilePath -> FilePath -> m () -cp_r = (liftSh .) . S.cp_r - -cp :: MonadSh m => FilePath -> FilePath -> m () -cp = (liftSh .) . S.cp - -writefile :: MonadSh m => FilePath -> Text -> m () -writefile = (liftSh .) . S.writefile - -touchfile :: MonadSh m => FilePath -> m () -touchfile = liftSh . S.touchfile - -appendfile :: MonadSh m => FilePath -> Text -> m () -appendfile = (liftSh .) . S.appendfile - -readfile :: MonadSh m => FilePath -> m Text -readfile = liftSh . S.readfile - -readBinary :: MonadSh m => FilePath -> m ByteString -readBinary = liftSh . S.readBinary - -sleep :: MonadSh m => Int -> m () -sleep = liftSh . S.sleep - -echo, echo_n, echo_err, echo_n_err :: MonadSh m => Text -> m () -echo = liftSh . S.echo -echo_n = liftSh . S.echo_n -echo_err = liftSh . S.echo_err -echo_n_err = liftSh . S.echo_n_err - -relPath :: MonadSh m => FilePath -> m FilePath -relPath = liftSh . S.relPath - -relativeTo :: MonadSh m => FilePath -- ^ anchor path, the prefix - -> FilePath -- ^ make this relative to anchor path - -> m FilePath -relativeTo = (liftSh .) . S.relativeTo - -canonic :: MonadSh m => FilePath -> m FilePath -canonic = liftSh . canonic - --- | Obtain a (reasonably) canonic file path to a filesystem object. Based on --- "canonicalizePath" in system-fileio. -canonicalize :: MonadSh m => FilePath -> m FilePath -canonicalize = liftSh . S.canonicalize - -absPath :: MonadSh m => FilePath -> m FilePath -absPath = liftSh . S.absPath - -test_d :: MonadSh m => FilePath -> m Bool -test_d = liftSh . S.test_d - -test_s :: MonadSh m => FilePath -> m Bool -test_s = liftSh . S.test_s - -ls :: MonadSh m => FilePath -> m [FilePath] -ls = liftSh . S.ls - -inspect :: (Show s, MonadSh m) => s -> m () -inspect = liftSh . S.inspect - -inspect_err :: (Show s, MonadSh m) => s -> m () -inspect_err = liftSh . 
S.inspect_err - -catchany :: MonadBaseControl IO m => m a -> (SomeException -> m a) -> m a -catchany = Control.Exception.Lifted.catch diff --git a/shelly/src/Shelly/Pipe.hs b/shelly/src/Shelly/Pipe.hs deleted file mode 100644 index 2924509d..00000000 --- a/shelly/src/Shelly/Pipe.hs +++ /dev/null @@ -1,643 +0,0 @@ -{-# LANGUAGE FlexibleInstances, TypeSynonymInstances, - TypeFamilies, ExistentialQuantification #-} --- | This module is a wrapper for the module "Shelly". --- The only difference is a main type 'Sh'. In this module --- 'Sh' contains a list of results. Actual definition of the type 'Sh' is: --- --- > import qualified Shelly as S --- > --- > newtype Sh a = Sh { unSh :: S.Sh [a] } --- --- This definition can simplify some filesystem commands. --- A monad bind operator becomes a pipe operator and we can write --- --- > findExt ext = findWhen (pure . hasExt ext) --- > --- > main :: IO () --- > main = shs $ do --- > mkdir "new" --- > findExt "hs" "." >>= flip cp "new" --- > findExt "cpp" "." >>= rm_f --- > liftIO $ putStrLn "done" --- --- Monad methods "return" and ">>=" behave like methods for --- @ListT Shelly.Sh@, but ">>" forgets the number of --- the empty effects. So the last line prints @\"done\"@ only once. --- --- Documentation in this module mostly just reference documentation from --- the main "Shelly" module. --- --- > {-# LANGUAGE OverloadedStrings #-} --- > {-# LANGUAGE ExtendedDefaultRules #-} --- > {-# OPTIONS_GHC -fno-warn-type-defaults #-} --- > import Shelly --- > import Data.Text as T --- > default (T.Text) -module Shelly.Pipe - ( - -- * Entering Sh. - Sh, shs, shelly, shellyFailDir, shsFailDir, sub, silently, verbosely, escaping, print_stdout, print_commands, tracing, errExit, log_stdout_with, log_stderr_with - -- * List functions - , roll, unroll, liftSh - -- * Running external commands. - , FoldCallback - , run, run_, runFoldLines, cmd - , (-|-), lastStderr, setStdin, lastExitCode - , command, command_, command1, command1_ - , sshPairs, sshPairs_ - - -- * Modifying and querying environment. - , setenv, get_env, get_env_text, get_env_def, appendToPath, prependToPath - - -- * Environment directory - , cd, chdir, pwd - - -- * Printing - , echo, echo_n, echo_err, echo_n_err, inspect, inspect_err - , tag, trace, show_command - - -- * Querying filesystem. - , ls, lsT, test_e, test_f, test_d, test_s, which - - -- * Filename helpers - , absPath, (), (<.>), canonic, canonicalize, relPath, relativeTo - , hasExt - - -- * Manipulating filesystem. - , mv, rm, rm_f, rm_rf, cp, cp_r, mkdir, mkdir_p, mkdirTree - - -- * reading/writing Files - , readfile, readBinary, writefile, appendfile, touchfile, withTmpDir - - -- * exiting the program - , exit, errorExit, quietExit, terror - - -- * Exceptions - , catchany, catch_sh, finally_sh - , ShellyHandler(..), catches_sh - , catchany_sh - - -- * convert between Text and FilePath - , toTextIgnore, toTextWarn, fromText - - -- * Utilities. 
- , (<$>), whenM, unlessM, time - - -- * Re-exported for your convenience - , liftIO, when, unless, FilePath - - -- * internal functions for writing extensions - , get, put - - -- * find functions - , find, findWhen, findFold - , findDirFilter, findDirFilterWhen, findFoldDirFilter - , followSymlink - ) where - -import Prelude hiding (FilePath) - -import Control.Applicative -import Control.Monad -import Control.Monad.Trans -import Control.Exception hiding (handle) - -import Filesystem.Path(FilePath) - -import qualified Shelly as S - -import Shelly( - (), (<.>), hasExt - , whenM, unlessM, toTextIgnore - , fromText, catchany - , FoldCallback) - -import Data.Maybe(fromMaybe) -import Shelly.Base(State) -import Data.ByteString (ByteString) - -import Data.Tree(Tree) - -import Data.Text as T hiding (concat, all, find, cons) - - --- | This type is a simple wrapper for a type @Shelly.Sh@. --- 'Sh' contains a list of results. -newtype Sh a = Sh { unSh :: S.Sh [a] } - -instance Functor Sh where - fmap f = Sh . fmap (fmap f) . unSh - -instance Monad Sh where - return = Sh . return . return - a >>= f = Sh $ fmap concat $ mapM (unSh . f) =<< unSh a - a >> b = Sh $ unSh a >> unSh b - -instance Applicative Sh where - pure = return - (<*>) = ap - -instance Alternative Sh where - empty = mzero - (<|>) = mplus - -instance MonadPlus Sh where - mzero = Sh $ return [] - mplus a b = Sh $ liftA2 (++) (unSh a) (unSh b) - -instance MonadIO Sh where - liftIO = sh1 liftIO - -------------------------------------------------------- --- converters - -sh0 :: S.Sh a -> Sh a -sh0 = Sh . fmap return - -sh1 :: (a -> S.Sh b) -> (a -> Sh b) -sh1 f = \a -> sh0 (f a) - -sh2 :: (a1 -> a2 -> S.Sh b) -> (a1 -> a2 -> Sh b) -sh2 f = \a b -> sh0 (f a b) - -sh3 :: (a1 -> a2 -> a3 -> S.Sh b) -> (a1 -> a2 -> a3 -> Sh b) -sh3 f = \a b c -> sh0 (f a b c) - -sh4 :: (a1 -> a2 -> a3 -> a4 -> S.Sh b) -> (a1 -> a2 -> a3 -> a4 -> Sh b) -sh4 f = \a b c d -> sh0 (f a b c d) - -sh0s :: S.Sh [a] -> Sh a -sh0s = Sh - -sh1s :: (a -> S.Sh [b]) -> (a -> Sh b) -sh1s f = \a -> sh0s (f a) - -{- Just in case ... -sh2s :: (a1 -> a2 -> S.Sh [b]) -> (a1 -> a2 -> Sh b) -sh2s f = \a b -> sh0s (f a b) - -sh3s :: (a1 -> a2 -> a3 -> S.Sh [b]) -> (a1 -> a2 -> a3 -> Sh b) -sh3s f = \a b c -> sh0s (f a b c) --} - -lift1 :: (S.Sh a -> S.Sh b) -> (Sh a -> Sh b) -lift1 f = Sh . (mapM (f . return) =<< ) . unSh - -lift2 :: (S.Sh a -> S.Sh b -> S.Sh c) -> (Sh a -> Sh b -> Sh c) -lift2 f a b = Sh $ join $ liftA2 (mapM2 f') (unSh a) (unSh b) - where f' = \x y -> f (return x) (return y) - -mapM2 :: Monad m => (a -> b -> m c)-> [a] -> [b] -> m [c] -mapM2 f as bs = sequence $ liftA2 f as bs - ------------------------------------------------------------ - --- | Unpack list of results. -unroll :: Sh a -> Sh [a] -unroll = Sh . fmap return . unSh - --- | Pack list of results. It performs @concat@ inside 'Sh'. -roll :: Sh [a] -> Sh a -roll = Sh . fmap concat . unSh - --- | Transform result as list. It can be useful for filtering. -liftSh :: ([a] -> [b]) -> Sh a -> Sh b -liftSh f = Sh . fmap f . unSh - ------------------------------------------------------------------- --- Entering Sh - --- | see 'S.shelly' -shelly :: MonadIO m => Sh a -> m [a] -shelly = S.shelly . unSh - --- | Performs 'shelly' and then an empty action @return ()@. -shs :: MonadIO m => Sh () -> m () -shs x = shelly x >> return () - --- | see 'S.shellyFailDir' -shellyFailDir :: MonadIO m => Sh a -> m [a] -shellyFailDir = S.shellyFailDir . unSh - --- | Performs 'shellyFailDir' and then an empty action @return ()@. 
-shsFailDir :: MonadIO m => Sh () -> m () -shsFailDir x = shellyFailDir x >> return () - --- | see 'S.sub' -sub :: Sh a -> Sh a -sub = lift1 S.sub - --- See 'S.siliently' -silently :: Sh a -> Sh a -silently = lift1 S.silently - --- See 'S.verbosely -verbosely :: Sh a -> Sh a -verbosely = lift1 S.verbosely - --- | see 'S.escaping' -escaping :: Bool -> Sh a -> Sh a -escaping b = lift1 (S.escaping b) - --- | see 'S.log_stdout_with' -log_stdout_with :: (Text -> IO ()) -> Sh a -> Sh a -log_stdout_with logger = lift1 (S.log_stdout_with logger) - --- | see 'S.log_stderr_with' -log_stderr_with :: (Text -> IO ()) -> Sh a -> Sh a -log_stderr_with logger = lift1 (S.log_stdout_with logger) - --- | see 'S.print_stdout' -print_stdout :: Bool -> Sh a -> Sh a -print_stdout b = lift1 (S.print_stdout b) - --- | see 'S.print_commands -print_commands :: Bool -> Sh a -> Sh a -print_commands b = lift1 (S.print_commands b) - --- | see 'S.tracing' -tracing :: Bool -> Sh a -> Sh a -tracing b = lift1 (S.tracing b) - --- | see 'S.errExit' -errExit :: Bool -> Sh a -> Sh a -errExit b = lift1 (S.errExit b) - --- | see 'S.followSymlink' -followSymlink :: Bool -> Sh a -> Sh a -followSymlink b = lift1 (S.followSymlink b) - --- | see 'S.run' -run :: FilePath -> [Text] -> Sh Text -run a b = sh0 $ S.run a b - --- | see 'S.run_' -run_ :: FilePath -> [Text] -> Sh () -run_ a b = sh0 $ S.run_ a b - --- | see 'S.runFoldLines' -runFoldLines :: a -> FoldCallback a -> FilePath -> [Text] -> Sh a -runFoldLines a cb fp ts = sh0 $ S.runFoldLines a cb fp ts - --- | see 'S.-|-' -(-|-) :: Sh Text -> Sh b -> Sh b -(-|-) = lift2 (S.-|-) - --- | see 'S.lastStderr' -lastStderr :: Sh Text -lastStderr = sh0 S.lastStderr - --- | see 'S.setStdin' -setStdin :: Text -> Sh () -setStdin = sh1 S.setStdin - --- | see 'S.lastExitCode' -lastExitCode :: Sh Int -lastExitCode = sh0 S.lastExitCode - --- | see 'S.command' -command :: FilePath -> [Text] -> [Text] -> Sh Text -command = sh3 S.command - --- | see 'S.command_' -command_ :: FilePath -> [Text] -> [Text] -> Sh () -command_ = sh3 S.command_ - - --- | see 'S.command1' -command1 :: FilePath -> [Text] -> Text -> [Text] -> Sh Text -command1 = sh4 S.command1 - --- | see 'S.command1_' -command1_ :: FilePath -> [Text] -> Text -> [Text] -> Sh () -command1_ = sh4 S.command1_ - --- | see 'S.sshPairs' -sshPairs :: Text -> [(FilePath, [Text])] -> Sh Text -sshPairs = sh2 S.sshPairs - --- | see 'S.sshPairs_' -sshPairs_ :: Text -> [(FilePath, [Text])] -> Sh () -sshPairs_ = sh2 S.sshPairs_ - --- | see 'S.setenv' -setenv :: Text -> Text -> Sh () -setenv = sh2 S.setenv - --- | see 'S.get_env' -get_env :: Text -> Sh (Maybe Text) -get_env = sh1 S.get_env - --- | see 'S.get_env_text' -get_env_text :: Text -> Sh Text -get_env_text = sh1 S.get_env_text - --- | see 'S.get_env_def' -get_env_def :: Text -> Text -> Sh Text -get_env_def a d = sh0 $ fmap (fromMaybe d) $ S.get_env a -{-# DEPRECATED get_env_def "use fromMaybe DEFAULT get_env" #-} - --- | see 'S.appendToPath' -appendToPath :: FilePath -> Sh () -appendToPath = sh1 S.appendToPath - --- | see 'S.prependToPath' -prependToPath :: FilePath -> Sh () -prependToPath = sh1 S.prependToPath - --- | see 'S.cd' -cd :: FilePath -> Sh () -cd = sh1 S.cd - --- | see 'S.chdir' -chdir :: FilePath -> Sh a -> Sh a -chdir p = lift1 (S.chdir p) - --- | see 'S.pwd' -pwd :: Sh FilePath -pwd = sh0 S.pwd - ------------------------------------------------------------------ --- Printing - --- | Echo text to standard (error, when using _err variants) output. 
The _n --- variants do not print a final newline. -echo, echo_n_err, echo_err, echo_n :: Text -> Sh () - -echo = sh1 S.echo -echo_n_err = sh1 S.echo_n_err -echo_err = sh1 S.echo_err -echo_n = sh1 S.echo_n - --- | see 'S.inspect' -inspect :: Show s => s -> Sh () -inspect = sh1 S.inspect - --- | see 'S.inspect_err' -inspect_err :: Show s => s -> Sh () -inspect_err = sh1 S.inspect_err - --- | see 'S.tag' -tag :: Sh a -> Text -> Sh a -tag a t = lift1 (flip S.tag t) a - --- | see 'S.trace' -trace :: Text -> Sh () -trace = sh1 S.trace - --- | see 'S.show_command' -show_command :: FilePath -> [Text] -> Text -show_command = S.show_command - ------------------------------------------------------------------- --- Querying filesystem - --- | see 'S.ls' -ls :: FilePath -> Sh FilePath -ls = sh1s S.ls - --- | see 'S.lsT' -lsT :: FilePath -> Sh Text -lsT = sh1s S.lsT - --- | see 'S.test_e' -test_e :: FilePath -> Sh Bool -test_e = sh1 S.test_e - --- | see 'S.test_f' -test_f :: FilePath -> Sh Bool -test_f = sh1 S.test_f - --- | see 'S.test_d' -test_d :: FilePath -> Sh Bool -test_d = sh1 S.test_d - --- | see 'S.test_s' -test_s :: FilePath -> Sh Bool -test_s = sh1 S.test_s - --- | see 'S.which -which :: FilePath -> Sh (Maybe FilePath) -which = sh1 S.which - ---------------------------------------------------------------------- --- Filename helpers - --- | see 'S.absPath' -absPath :: FilePath -> Sh FilePath -absPath = sh1 S.absPath - --- | see 'S.canonic' -canonic :: FilePath -> Sh FilePath -canonic = sh1 S.canonic - --- | see 'S.canonicalize' -canonicalize :: FilePath -> Sh FilePath -canonicalize = sh1 S.canonicalize - --- | see 'S.relPath' -relPath :: FilePath -> Sh FilePath -relPath = sh1 S.relPath - --- | see 'S.relativeTo' -relativeTo :: FilePath -- ^ anchor path, the prefix - -> FilePath -- ^ make this relative to anchor path - -> Sh FilePath -relativeTo = sh2 S.relativeTo - -------------------------------------------------------------- --- Manipulating filesystem - --- | see 'S.mv' -mv :: FilePath -> FilePath -> Sh () -mv = sh2 S.mv - --- | see 'S.rm' -rm :: FilePath -> Sh () -rm = sh1 S.rm - --- | see 'S.rm_f' -rm_f :: FilePath -> Sh () -rm_f = sh1 S.rm_f - --- | see 'S.rm_rf' -rm_rf :: FilePath -> Sh () -rm_rf = sh1 S.rm_rf - --- | see 'S.cp' -cp :: FilePath -> FilePath -> Sh () -cp = sh2 S.cp - --- | see 'S.cp_r' -cp_r :: FilePath -> FilePath -> Sh () -cp_r = sh2 S.cp_r - --- | see 'S.mkdir' -mkdir :: FilePath -> Sh () -mkdir = sh1 S.mkdir - --- | see 'S.mkdir_p' -mkdir_p :: FilePath -> Sh () -mkdir_p = sh1 S.mkdir_p - --- | see 'S.mkdirTree' -mkdirTree :: Tree FilePath -> Sh () -mkdirTree = sh1 S.mkdirTree - --- | see 'S.readFile' -readfile :: FilePath -> Sh Text -readfile = sh1 S.readfile - --- | see 'S.readBinary' -readBinary :: FilePath -> Sh ByteString -readBinary = sh1 S.readBinary - --- | see 'S.writeFile' -writefile :: FilePath -> Text -> Sh () -writefile = sh2 S.writefile - --- | see 'S.touchFile' -touchfile :: FilePath -> Sh () -touchfile = sh1 S.touchfile - --- | see 'S.appendFile' -appendfile :: FilePath -> Text -> Sh () -appendfile = sh2 S.appendfile - --- | see 'S.withTmpDir' -withTmpDir :: (FilePath -> Sh a) -> Sh a -withTmpDir f = Sh $ S.withTmpDir (unSh . f) - ------------------------------------------------------------------ --- find - --- | see 'S.find' -find :: FilePath -> Sh FilePath -find = sh1s S.find - --- | see 'S.findWhen' -findWhen :: (FilePath -> Sh Bool) -> FilePath -> Sh FilePath -findWhen p a = Sh $ S.findWhen (fmap and . unSh . 
p) a - --- | see 'S.findFold' -findFold :: (a -> FilePath -> Sh a) -> a -> FilePath -> Sh a -findFold cons nil a = Sh $ S.findFold cons' nil' a - where nil' = return nil - cons' as dir = unSh $ roll $ mapM (flip cons dir) as - --- | see 'S.findDirFilter' -findDirFilter :: (FilePath -> Sh Bool) -> FilePath -> Sh FilePath -findDirFilter p a = Sh $ S.findDirFilter (fmap and . unSh . p) a - --- | see 'S.findDirFilterWhen' -findDirFilterWhen :: (FilePath -> Sh Bool) -- ^ directory filter - -> (FilePath -> Sh Bool) -- ^ file filter - -> FilePath -- ^ directory - -> Sh FilePath -findDirFilterWhen dirPred filePred a = - Sh $ S.findDirFilterWhen - (fmap and . unSh . dirPred) - (fmap and . unSh . filePred) - a - - --- | see 'S.findFoldDirFilterWhen' -findFoldDirFilter :: (a -> FilePath -> Sh a) -> a -> (FilePath -> Sh Bool) -> FilePath -> Sh a -findFoldDirFilter cons nil p a = Sh $ S.findFoldDirFilter cons' nil' p' a - where p' = fmap and . unSh . p - nil' = return nil - cons' as dir = unSh $ roll $ mapM (flip cons dir) as - ------------------------------------------------------------ --- exiting the program - --- | see 'S.exit' -exit :: Int -> Sh () -exit = sh1 S.exit - --- | see 'S.errorExit' -errorExit :: Text -> Sh () -errorExit = sh1 S.errorExit - --- | see 'S.quietExit' -quietExit :: Int -> Sh () -quietExit = sh1 S.quietExit - --- | see 'S.terror' -terror :: Text -> Sh a -terror = sh1 S.terror - ------------------------------------------------------------- --- Utilities - --- | see 'S.catch_sh' -catch_sh :: (Exception e) => Sh a -> (e -> Sh a) -> Sh a -catch_sh a f = Sh $ S.catch_sh (unSh a) (unSh . f) - --- | see 'S.catchany_sh' -catchany_sh :: Sh a -> (SomeException -> Sh a) -> Sh a -catchany_sh = catch_sh - - --- | see 'S.finally_sh' -finally_sh :: Sh a -> Sh b -> Sh a -finally_sh = lift2 S.finally_sh - --- | see 'S.time' -time :: Sh a -> Sh (Double, a) -time = lift1 S.time - --- | see 'S.ShellyHandler' -data ShellyHandler a = forall e . Exception e => ShellyHandler (e -> Sh a) - --- | see 'S.catches_sh' -catches_sh :: Sh a -> [ShellyHandler a] -> Sh a -catches_sh a hs = Sh $ S.catches_sh (unSh a) (fmap convert hs) - where convert :: ShellyHandler a -> S.ShellyHandler [a] - convert (ShellyHandler f) = S.ShellyHandler (unSh . f) - ------------------------------------------------------------- --- convert between Text and FilePath - --- | see 'S.toTextWarn' -toTextWarn :: FilePath -> Sh Text -toTextWarn = sh1 S.toTextWarn - -------------------------------------------------------------- --- internal functions for writing extension - -get :: Sh State -get = sh0 S.get - -put :: State -> Sh () -put = sh1 S.put - --------------------------------------------------------- --- polyvariadic vodoo - --- | Converter for the variadic argument version of 'run' called 'cmd'. 
-class ShellArg a where toTextArg :: a -> Text -instance ShellArg Text where toTextArg = id -instance ShellArg FilePath where toTextArg = toTextIgnore - - --- Voodoo to create the variadic function 'cmd' -class ShellCommand t where - cmdAll :: FilePath -> [Text] -> t - -instance ShellCommand (Sh Text) where - cmdAll fp args = run fp args - -instance (s ~ Text, Show s) => ShellCommand (Sh s) where - cmdAll fp args = run fp args - --- note that Sh () actually doesn't work for its case (_<- cmd) when there is no type signature -instance ShellCommand (Sh ()) where - cmdAll fp args = run_ fp args - -instance (ShellArg arg, ShellCommand result) => ShellCommand (arg -> result) where - cmdAll fp acc = \x -> cmdAll fp (acc ++ [toTextArg x]) - --- | see 'S.cmd' -cmd :: (ShellCommand result) => FilePath -> result -cmd fp = cmdAll fp [] diff --git a/shelly/src/Shelly/Unix.hs b/shelly/src/Shelly/Unix.hs deleted file mode 100644 index d83ae9aa..00000000 --- a/shelly/src/Shelly/Unix.hs +++ /dev/null @@ -1,12 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - --- | commands that only work on Unix -module Shelly.Unix - ( kill - ) where - -import Shelly -import qualified Data.Text as T - -kill :: Int -> Sh () -kill pid = run_ "kill" ["-15", T.pack $ show pid] diff --git a/shelly/test/data/nonascii.txt b/shelly/test/data/nonascii.txt deleted file mode 100644 index 1682351a..00000000 --- a/shelly/test/data/nonascii.txt +++ /dev/null @@ -1 +0,0 @@ -Selbstverständlich überraschend diff --git a/shelly/test/data/symlinked_dir/hoge_file b/shelly/test/data/symlinked_dir/hoge_file deleted file mode 100644 index e69de29b..00000000 diff --git a/shelly/test/data/zshrc b/shelly/test/data/zshrc deleted file mode 100644 index 326f866b..00000000 --- a/shelly/test/data/zshrc +++ /dev/null @@ -1,2795 +0,0 @@ -# Filename: /etc/zsh/zshrc -# Purpose: config file for zsh (z shell) -# Authors: grml-team (grml.org), (c) Michael Prokop -# Bug-Reports: see http://grml.org/bugs/ -# License: This file is licensed under the GPL v2. -################################################################################ -# This file is sourced only for interactive shells. It -# should contain commands to set up aliases, functions, -# options, key bindings, etc. -# -# Global Order: zshenv, zprofile, zshrc, zlogin -################################################################################ - -# USAGE -# If you are using this file as your ~/.zshrc file, please use ~/.zshrc.pre -# and ~/.zshrc.local for your own customisations. The former file is read -# before ~/.zshrc, the latter is read after it. Also, consider reading the -# refcard and the reference manual for this setup, both available from: -# - -# Contributing: -# If you want to help to improve grml's zsh setup, clone the grml-etc-core -# repository from git.grml.org: -# git clone git://git.grml.org/grml-etc-core.git -# -# Make your changes, commit them; use 'git format-patch' to create a series -# of patches and send those to the following address via 'git send-email': -# grml-etc-core@grml.org -# -# Doing so makes sure the right people get your patches for review and -# possibly inclusion. - -# zsh-refcard-tag documentation: -# You may notice strange looking comments in this file. -# These are there for a purpose. grml's zsh-refcard can now be -# automatically generated from the contents of the actual configuration -# file. However, we need a little extra information on which comments -# and what lines of code to take into account (and for what purpose). 
-# -# Here is what they mean: -# -# List of tags (comment types) used: -# #a# Next line contains an important alias, that should -# be included in the grml-zsh-refcard. -# (placement tag: @@INSERT-aliases@@) -# #f# Next line contains the beginning of an important function. -# (placement tag: @@INSERT-functions@@) -# #v# Next line contains an important variable. -# (placement tag: @@INSERT-variables@@) -# #k# Next line contains an important keybinding. -# (placement tag: @@INSERT-keybindings@@) -# #d# Hashed directories list generation: -# start denotes the start of a list of 'hash -d' -# definitions. -# end denotes its end. -# (placement tag: @@INSERT-hasheddirs@@) -# #A# Abbreviation expansion list generation: -# start denotes the beginning of abbreviations. -# end denotes their end. -# Lines within this section that end in '#d .*' provide -# extra documentation to be included in the refcard. -# (placement tag: @@INSERT-abbrev@@) -# #m# This tag allows you to manually generate refcard entries -# for code lines that are hard/impossible to parse. -# Example: -# #m# k ESC-h Call the run-help function -# That would add a refcard entry in the keybindings table -# for 'ESC-h' with the given comment. -# So the syntax is: #m#
-# #o# This tag lets you insert entries to the 'other' hash. -# Generally, this should not be used. It is there for -# things that cannot be done easily in another way. -# (placement tag: @@INSERT-other-foobar@@) -# -# All of these tags (except for m and o) take two arguments, the first -# within the tag, the other after the tag: -# -# #
<tag><section># <comment> -# -# Where <section>
is really just a number, which are defined by the -# @secmap array on top of 'genrefcard.pl'. The reason for numbers -# instead of names is, that for the reader, the tag should not differ -# much from a regular comment. For zsh, it is a regular comment indeed. -# The numbers have got the following meanings: -# 0 -> "default" -# 1 -> "system" -# 2 -> "user" -# 3 -> "debian" -# 4 -> "search" -# 5 -> "shortcuts" -# 6 -> "services" -# -# So, the following will add an entry to the 'functions' table in the -# 'system' section, with a (hopefully) descriptive comment: -# #f1# Edit an alias via zle -# edalias() { -# -# It will then show up in the @@INSERT-aliases-system@@ replacement tag -# that can be found in 'grml-zsh-refcard.tex.in'. -# If the section number is omitted, the 'default' section is assumed. -# Furthermore, in 'grml-zsh-refcard.tex.in' @@INSERT-aliases@@ is -# exactly the same as @@INSERT-aliases-default@@. If you want a list of -# *all* aliases, for example, use @@INSERT-aliases-all@@. - -# zsh profiling -# just execute 'ZSH_PROFILE_RC=1 zsh' and run 'zprof' to get the details -if [[ $ZSH_PROFILE_RC -gt 0 ]] ; then - zmodload zsh/zprof -fi - -# load .zshrc.pre to give the user the chance to overwrite the defaults -[[ -r ${HOME}/.zshrc.pre ]] && source ${HOME}/.zshrc.pre - -# check for version/system -# check for versions (compatibility reasons) -is4(){ - [[ $ZSH_VERSION == <4->* ]] && return 0 - return 1 -} - -is41(){ - [[ $ZSH_VERSION == 4.<1->* || $ZSH_VERSION == <5->* ]] && return 0 - return 1 -} - -is42(){ - [[ $ZSH_VERSION == 4.<2->* || $ZSH_VERSION == <5->* ]] && return 0 - return 1 -} - -is425(){ - [[ $ZSH_VERSION == 4.2.<5->* || $ZSH_VERSION == 4.<3->* || $ZSH_VERSION == <5->* ]] && return 0 - return 1 -} - -is43(){ - [[ $ZSH_VERSION == 4.<3->* || $ZSH_VERSION == <5->* ]] && return 0 - return 1 -} - -is433(){ - [[ $ZSH_VERSION == 4.3.<3->* || $ZSH_VERSION == 4.<4->* \ - || $ZSH_VERSION == <5->* ]] && return 0 - return 1 -} - -is439(){ - [[ $ZSH_VERSION == 4.3.<9->* || $ZSH_VERSION == 4.<4->* \ - || $ZSH_VERSION == <5->* ]] && return 0 - return 1 -} - -#f1# Checks whether or not you're running grml -isgrml(){ - [[ -f /etc/grml_version ]] && return 0 - return 1 -} - -#f1# Checks whether or not you're running a grml cd -isgrmlcd(){ - [[ -f /etc/grml_cd ]] && return 0 - return 1 -} - -if isgrml ; then -#f1# Checks whether or not you're running grml-small - isgrmlsmall() { - if [[ ${${${(f)"$( autologin -# Thanks go to Bart Schaefer! -isgrml && checkhome() { - if [[ -z "$ALREADY_DID_CD_HOME" ]] ; then - export ALREADY_DID_CD_HOME=$HOME - cd - fi -} - -# check for zsh v3.1.7+ - -if ! 
[[ ${ZSH_VERSION} == 3.1.<7->* \ - || ${ZSH_VERSION} == 3.<2->.<->* \ - || ${ZSH_VERSION} == <4->.<->* ]] ; then - - printf '-!-\n' - printf '-!- In this configuration we try to make use of features, that only\n' - printf '-!- require version 3.1.7 of the shell; That way this setup can be\n' - printf '-!- used with a wide range of zsh versions, while using fairly\n' - printf '-!- advanced features in all supported versions.\n' - printf '-!-\n' - printf '-!- However, you are running zsh version %s.\n' "$ZSH_VERSION" - printf '-!-\n' - printf '-!- While this *may* work, it might as well fail.\n' - printf '-!- Please consider updating to at least version 3.1.7 of zsh.\n' - printf '-!-\n' - printf '-!- DO NOT EXPECT THIS TO WORK FLAWLESSLY!\n' - printf '-!- If it does today, you'\''ve been lucky.\n' - printf '-!-\n' - printf '-!- Ye been warned!\n' - printf '-!-\n' - - function zstyle() { : } -fi - -# autoload wrapper - use this one instead of autoload directly -# We need to define this function as early as this, because autoloading -# 'is-at-least()' needs it. -function zrcautoload() { - emulate -L zsh - setopt extended_glob - local fdir ffile - local -i ffound - - ffile=$1 - (( found = 0 )) - for fdir in ${fpath} ; do - [[ -e ${fdir}/${ffile} ]] && (( ffound = 1 )) - done - - (( ffound == 0 )) && return 1 - if [[ $ZSH_VERSION == 3.1.<6-> || $ZSH_VERSION == <4->* ]] ; then - autoload -U ${ffile} || return 1 - else - autoload ${ffile} || return 1 - fi - return 0 -} - -# Load is-at-least() for more precise version checks Note that this test will -# *always* fail, if the is-at-least function could not be marked for -# autoloading. -zrcautoload is-at-least || is-at-least() { return 1 } - -# set some important options (as early as possible) - -# append history list to the history file; this is the default but we make sure -# because it's required for share_history. -setopt append_history - -# import new commands from the history file also in other zsh-session -is4 && setopt share_history - -# save each command's beginning timestamp and the duration to the history file -setopt extended_history - -# If a new command line being added to the history list duplicates an older -# one, the older command is removed from the list -is4 && setopt histignorealldups - -# remove command lines from the history list when the first character on the -# line is a space -setopt histignorespace - -# if a command is issued that can't be executed as a normal command, and the -# command is the name of a directory, perform the cd command to that directory. -setopt auto_cd - -# in order to use #, ~ and ^ for filename generation grep word -# *~(*.gz|*.bz|*.bz2|*.zip|*.Z) -> searches for word not in compressed files -# don't forget to quote '^', '~' and '#'! -setopt extended_glob - -# display PID when suspending processes as well -setopt longlistjobs - -# try to avoid the 'zsh: no matches found...' -setopt nonomatch - -# report the status of backgrounds jobs immediately -setopt notify - -# whenever a command completion is attempted, make sure the entire command path -# is hashed first. -setopt hash_list_all - -# not just at the end -setopt completeinword - -# Don't send SIGHUP to background processes when the shell exits. -setopt nohup - -# make cd push the old directory onto the directory stack. -setopt auto_pushd - -# avoid "beep"ing -setopt nobeep - -# don't push the same dir twice. -setopt pushd_ignore_dups - -# * shouldn't match dotfiles. ever. 
-setopt noglobdots - -# use zsh style word splitting -setopt noshwordsplit - -# don't error out when unset parameters are used -setopt unset - -# setting some default values -NOCOR=${NOCOR:-0} -NOMENU=${NOMENU:-0} -NOPRECMD=${NOPRECMD:-0} -COMMAND_NOT_FOUND=${COMMAND_NOT_FOUND:-0} -GRML_ZSH_CNF_HANDLER=${GRML_ZSH_CNF_HANDLER:-/usr/share/command-not-found/command-not-found} -BATTERY=${BATTERY:-0} -GRMLSMALL_SPECIFIC=${GRMLSMALL_SPECIFIC:-1} -ZSH_NO_DEFAULT_LOCALE=${ZSH_NO_DEFAULT_LOCALE:-0} - -typeset -ga ls_options -typeset -ga grep_options -if ls --help 2> /dev/null | grep -q GNU; then - ls_options=( --color=auto ) -elif [[ $OSTYPE == freebsd* ]]; then - ls_options=( -G ) -fi -if grep --help 2> /dev/null | grep -q GNU || \ - [[ $OSTYPE == freebsd* ]]; then - grep_options=( --color=auto ) -fi - -# utility functions -# this function checks if a command exists and returns either true -# or false. This avoids using 'which' and 'whence', which will -# avoid problems with aliases for which on certain weird systems. :-) -# Usage: check_com [-c|-g] word -# -c only checks for external commands -# -g does the usual tests and also checks for global aliases -check_com() { - emulate -L zsh - local -i comonly gatoo - - if [[ $1 == '-c' ]] ; then - (( comonly = 1 )) - shift - elif [[ $1 == '-g' ]] ; then - (( gatoo = 1 )) - else - (( comonly = 0 )) - (( gatoo = 0 )) - fi - - if (( ${#argv} != 1 )) ; then - printf 'usage: check_com [-c] \n' >&2 - return 1 - fi - - if (( comonly > 0 )) ; then - [[ -n ${commands[$1]} ]] && return 0 - return 1 - fi - - if [[ -n ${commands[$1]} ]] \ - || [[ -n ${functions[$1]} ]] \ - || [[ -n ${aliases[$1]} ]] \ - || [[ -n ${reswords[(r)$1]} ]] ; then - - return 0 - fi - - if (( gatoo > 0 )) && [[ -n ${galiases[$1]} ]] ; then - return 0 - fi - - return 1 -} - -# creates an alias and precedes the command with -# sudo if $EUID is not zero. -salias() { - emulate -L zsh - local only=0 ; local multi=0 - while [[ $1 == -* ]] ; do - case $1 in - (-o) only=1 ;; - (-a) multi=1 ;; - (--) shift ; break ;; - (-h) - printf 'usage: salias [-h|-o|-a] \n' - printf ' -h shows this help text.\n' - printf ' -a replace '\'' ; '\'' sequences with '\'' ; sudo '\''.\n' - printf ' be careful using this option.\n' - printf ' -o only sets an alias if a preceding sudo would be needed.\n' - return 0 - ;; - (*) printf "unkown option: '%s'\n" "$1" ; return 1 ;; - esac - shift - done - - if (( ${#argv} > 1 )) ; then - printf 'Too many arguments %s\n' "${#argv}" - return 1 - fi - - key="${1%%\=*}" ; val="${1#*\=}" - if (( EUID == 0 )) && (( only == 0 )); then - alias -- "${key}=${val}" - elif (( EUID > 0 )) ; then - (( multi > 0 )) && val="${val// ; / ; sudo }" - alias -- "${key}=sudo ${val}" - fi - - return 0 -} - -# a "print -l ${(u)foo}"-workaround for pre-4.2.0 shells -# usage: uprint foo -# Where foo is the *name* of the parameter you want printed. -# Note that foo is no typo; $foo would be wrong here! -if ! is42 ; then - uprint () { - emulate -L zsh - local -a u - local w - local parameter=$1 - - if [[ -z ${parameter} ]] ; then - printf 'usage: uprint \n' - return 1 - fi - - for w in ${(P)parameter} ; do - [[ -z ${(M)u:#$w} ]] && u=( $u $w ) - done - - builtin print -l $u - } -fi - -# Check if we can read given files and source those we can. -xsource() { - if (( ${#argv} < 1 )) ; then - printf 'usage: xsource FILE(s)...\n' >&2 - return 1 - fi - - while (( ${#argv} > 0 )) ; do - [[ -r "$1" ]] && source "$1" - shift - done - return 0 -} - -# Check if we can read a given file and 'cat(1)' it. 
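# A minimal usage sketch for the check_com and salias helpers defined above;
# the 'htop' command and the 'update' alias are made-up examples and not part
# of the original setup:
if check_com -c htop ; then
    # define the alias only when the external command actually exists
    alias top='htop'
fi
# for non-root users this becomes "sudo apt-get update"; for root it stays plain
salias update='apt-get update'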
-xcat() { - emulate -L zsh - if (( ${#argv} != 1 )) ; then - printf 'usage: xcat FILE\n' >&2 - return 1 - fi - - [[ -r $1 ]] && cat $1 - return 0 -} - -# Remove these functions again, they are of use only in these -# setup files. This should be called at the end of .zshrc. -xunfunction() { - emulate -L zsh - local -a funcs - funcs=(salias xcat xsource xunfunction zrcautoload) - - for func in $funcs ; do - [[ -n ${functions[$func]} ]] \ - && unfunction $func - done - return 0 -} - -# this allows us to stay in sync with grml's zshrc and put own -# modifications in ~/.zshrc.local -zrclocal() { - xsource "/etc/zsh/zshrc.local" - xsource "${HOME}/.zshrc.local" - return 0 -} - -# locale setup -if (( ZSH_NO_DEFAULT_LOCALE == 0 )); then - xsource "/etc/default/locale" -fi - -for var in LANG LC_ALL LC_MESSAGES ; do - [[ -n ${(P)var} ]] && export $var -done - -xsource "/etc/sysconfig/keyboard" - -TZ=$(xcat /etc/timezone) - -# set some variables -if check_com -c vim ; then -#v# - export EDITOR=${EDITOR:-vim} -else - export EDITOR=${EDITOR:-vi} -fi - -#v# -export PAGER=${PAGER:-less} - -#v# -export MAIL=${MAIL:-/var/mail/$USER} - -# if we don't set $SHELL then aterm, rxvt,.. will use /bin/sh or /bin/bash :-/ -export SHELL='/bin/zsh' - -# color setup for ls: -check_com -c dircolors && eval $(dircolors -b) -# color setup for ls on OS X: -isdarwin && export CLICOLOR=1 - -# do MacPorts setup on darwin -if isdarwin && [[ -d /opt/local ]]; then - # Note: PATH gets set in /etc/zprofile on Darwin, so this can't go into - # zshenv. - PATH="/opt/local/bin:/opt/local/sbin:$PATH" - MANPATH="/opt/local/share/man:$MANPATH" -fi -# do Fink setup on darwin -isdarwin && xsource /sw/bin/init.sh - -# load our function and completion directories -for fdir in /usr/share/grml/zsh/completion /usr/share/grml/zsh/functions; do - fpath=( ${fdir} ${fdir}/**/*(/N) ${fpath} ) - if [[ ${fpath} == '/usr/share/grml/zsh/functions' ]] ; then - for func in ${fdir}/**/[^_]*[^~](N.) 
; do - zrcautoload ${func:t} - done - fi -done -unset fdir func - -# support colors in less -export LESS_TERMCAP_mb=$'\E[01;31m' -export LESS_TERMCAP_md=$'\E[01;31m' -export LESS_TERMCAP_me=$'\E[0m' -export LESS_TERMCAP_se=$'\E[0m' -export LESS_TERMCAP_so=$'\E[01;44;33m' -export LESS_TERMCAP_ue=$'\E[0m' -export LESS_TERMCAP_us=$'\E[01;32m' - -# mailchecks -MAILCHECK=30 - -# report about cpu-/system-/user-time of command if running longer than -# 5 seconds -REPORTTIME=5 - -# watch for everyone but me and root -watch=(notme root) - -# automatically remove duplicates from these arrays -typeset -U path cdpath fpath manpath - -# keybindings -if [[ "$TERM" != emacs ]] ; then - [[ -z "$terminfo[kdch1]" ]] || bindkey -M emacs "$terminfo[kdch1]" delete-char - [[ -z "$terminfo[khome]" ]] || bindkey -M emacs "$terminfo[khome]" beginning-of-line - [[ -z "$terminfo[kend]" ]] || bindkey -M emacs "$terminfo[kend]" end-of-line - [[ -z "$terminfo[kdch1]" ]] || bindkey -M vicmd "$terminfo[kdch1]" vi-delete-char - [[ -z "$terminfo[khome]" ]] || bindkey -M vicmd "$terminfo[khome]" vi-beginning-of-line - [[ -z "$terminfo[kend]" ]] || bindkey -M vicmd "$terminfo[kend]" vi-end-of-line - [[ -z "$terminfo[cuu1]" ]] || bindkey -M viins "$terminfo[cuu1]" vi-up-line-or-history - [[ -z "$terminfo[cuf1]" ]] || bindkey -M viins "$terminfo[cuf1]" vi-forward-char - [[ -z "$terminfo[kcuu1]" ]] || bindkey -M viins "$terminfo[kcuu1]" vi-up-line-or-history - [[ -z "$terminfo[kcud1]" ]] || bindkey -M viins "$terminfo[kcud1]" vi-down-line-or-history - [[ -z "$terminfo[kcuf1]" ]] || bindkey -M viins "$terminfo[kcuf1]" vi-forward-char - [[ -z "$terminfo[kcub1]" ]] || bindkey -M viins "$terminfo[kcub1]" vi-backward-char - # ncurses stuff: - [[ "$terminfo[kcuu1]" == $'\eO'* ]] && bindkey -M viins "${terminfo[kcuu1]/O/[}" vi-up-line-or-history - [[ "$terminfo[kcud1]" == $'\eO'* ]] && bindkey -M viins "${terminfo[kcud1]/O/[}" vi-down-line-or-history - [[ "$terminfo[kcuf1]" == $'\eO'* ]] && bindkey -M viins "${terminfo[kcuf1]/O/[}" vi-forward-char - [[ "$terminfo[kcub1]" == $'\eO'* ]] && bindkey -M viins "${terminfo[kcub1]/O/[}" vi-backward-char - [[ "$terminfo[khome]" == $'\eO'* ]] && bindkey -M viins "${terminfo[khome]/O/[}" beginning-of-line - [[ "$terminfo[kend]" == $'\eO'* ]] && bindkey -M viins "${terminfo[kend]/O/[}" end-of-line - [[ "$terminfo[khome]" == $'\eO'* ]] && bindkey -M emacs "${terminfo[khome]/O/[}" beginning-of-line - [[ "$terminfo[kend]" == $'\eO'* ]] && bindkey -M emacs "${terminfo[kend]/O/[}" end-of-line -fi - -## keybindings (run 'bindkeys' for details, more details via man zshzle) -# use emacs style per default: -bindkey -e -# use vi style: -# bindkey -v - -## beginning-of-line OR beginning-of-buffer OR beginning of history -## by: Bart Schaefer , Bernhard Tittelbach -beginning-or-end-of-somewhere() { - local hno=$HISTNO - if [[ ( "${LBUFFER[-1]}" == $'\n' && "${WIDGET}" == beginning-of* ) || \ - ( "${RBUFFER[1]}" == $'\n' && "${WIDGET}" == end-of* ) ]]; then - zle .${WIDGET:s/somewhere/buffer-or-history/} "$@" - else - zle .${WIDGET:s/somewhere/line-hist/} "$@" - if (( HISTNO != hno )); then - zle .${WIDGET:s/somewhere/buffer-or-history/} "$@" - fi - fi -} -zle -N beginning-of-somewhere beginning-or-end-of-somewhere -zle -N end-of-somewhere beginning-or-end-of-somewhere - - -#if [[ "$TERM" == screen ]] ; then - -## with HOME/END, move to beginning/end of line (on multiline) on first keypress -## to beginning/end of buffer on second keypress -## and to beginning/end of history on (at most) the third keypress -# 
terminator & non-debian xterm -bindkey '\eOH' beginning-of-somewhere # home -bindkey '\eOF' end-of-somewhere # end -# freebsd console -bindkey '\e[H' beginning-of-somewhere # home -bindkey '\e[F' end-of-somewhere # end -# xterm,gnome-terminal,quake,etc -bindkey '^[[1~' beginning-of-somewhere # home -bindkey '^[[4~' end-of-somewhere # end -# if terminal type is set to 'rxvt': -bindkey '\e[7~' beginning-of-somewhere # home -bindkey '\e[8~' end-of-somewhere # end -#fi - -bindkey '\e[A' up-line-or-search # cursor up -bindkey '\e[B' down-line-or-search # - - -## use Ctrl-left-arrow and Ctrl-right-arrow for jumping to word-beginnings on the CL -bindkey "\e[5C" forward-word -bindkey "\e[5D" backward-word -bindkey "\e[1;5C" forward-word -bindkey "\e[1;5D" backward-word -## the same for alt-left-arrow and alt-right-arrow -bindkey '^[[1;3C' forward-word -bindkey '^[[1;3D' backward-word - -# Search backward in the history for a line beginning with the current -# line up to the cursor and move the cursor to the end of the line then -zle -N history-beginning-search-backward-end history-search-end -zle -N history-beginning-search-forward-end history-search-end -#k# search history backward for entry beginning with typed text -bindkey '^xp' history-beginning-search-backward-end -#k# search history forward for entry beginning with typed text -bindkey '^xP' history-beginning-search-forward-end -#k# search history backward for entry beginning with typed text -bindkey "\e[5~" history-beginning-search-backward-end # PageUp -#k# search history forward for entry beginning with typed text -bindkey "\e[6~" history-beginning-search-forward-end # PageDown - -# bindkey -s '^l' "|less\n" # ctrl-L pipes to less -# bindkey -s '^b' " &\n" # ctrl-B runs it in the background - -# insert unicode character -# usage example: 'ctrl-x i' 00A7 'ctrl-x i' will give you an § -# See for example http://unicode.org/charts/ for unicode characters code -zrcautoload insert-unicode-char -zle -N insert-unicode-char -#k# Insert Unicode character -bindkey '^xi' insert-unicode-char - -#m# k Shift-tab Perform backwards menu completion -if [[ -n "$terminfo[kcbt]" ]]; then - bindkey "$terminfo[kcbt]" reverse-menu-complete -elif [[ -n "$terminfo[cbt]" ]]; then # required for GNU screen - bindkey "$terminfo[cbt]" reverse-menu-complete -fi - -## toggle the ,. abbreviation feature on/off -# NOABBREVIATION: default abbreviation-state -# 0 - enabled (default) -# 1 - disabled -NOABBREVIATION=${NOABBREVIATION:-0} - -grml_toggle_abbrev() { - if (( ${NOABBREVIATION} > 0 )) ; then - NOABBREVIATION=0 - else - NOABBREVIATION=1 - fi -} - -#k# Toggle abbreviation expansion on/off -zle -N grml_toggle_abbrev -bindkey '^xA' grml_toggle_abbrev - -# add a command line to the shells history without executing it -commit-to-history() { - print -s ${(z)BUFFER} - zle send-break -} -zle -N commit-to-history -bindkey "^x^h" commit-to-history - -# only slash should be considered as a word separator: -slash-backward-kill-word() { - local WORDCHARS="${WORDCHARS:s@/@}" - # zle backward-word - zle backward-kill-word -} -zle -N slash-backward-kill-word - -#k# Kill left-side word or everything up to next slash -bindkey '\ev' slash-backward-kill-word -#k# Kill left-side word or everything up to next slash -bindkey '\e^h' slash-backward-kill-word -#k# Kill left-side word or everything up to next slash -bindkey '\e^?' 
slash-backward-kill-word - -# use the new *-pattern-* widgets for incremental history search -if is439 ; then - bindkey '^r' history-incremental-pattern-search-backward - bindkey '^s' history-incremental-pattern-search-forward -fi - -# a generic accept-line wrapper - -# This widget can prevent unwanted autocorrections from command-name -# to _command-name, rehash automatically on enter and call any number -# of builtin and user-defined widgets in different contexts. -# -# For a broader description, see: -# -# -# The code is imported from the file 'zsh/functions/accept-line' from -# , which -# distributed under the same terms as zsh itself. - -# A newly added command will may not be found or will cause false -# correction attempts, if you got auto-correction set. By setting the -# following style, we force accept-line() to rehash, if it cannot -# find the first word on the command line in the $command[] hash. -zstyle ':acceptline:*' rehash true - -function Accept-Line() { - setopt localoptions noksharrays - local -a subs - local -xi aldone - local sub - local alcontext=${1:-$alcontext} - - zstyle -a ":acceptline:${alcontext}" actions subs - - (( ${#subs} < 1 )) && return 0 - - (( aldone = 0 )) - for sub in ${subs} ; do - [[ ${sub} == 'accept-line' ]] && sub='.accept-line' - zle ${sub} - - (( aldone > 0 )) && break - done -} - -function Accept-Line-getdefault() { - emulate -L zsh - local default_action - - zstyle -s ":acceptline:${alcontext}" default_action default_action - case ${default_action} in - ((accept-line|)) - printf ".accept-line" - ;; - (*) - printf ${default_action} - ;; - esac -} - -function Accept-Line-HandleContext() { - zle Accept-Line - - default_action=$(Accept-Line-getdefault) - zstyle -T ":acceptline:${alcontext}" call_default \ - && zle ${default_action} -} - -function accept-line() { - setopt localoptions noksharrays - local -ax cmdline - local -x alcontext - local buf com fname format msg default_action - - alcontext='default' - buf="${BUFFER}" - cmdline=(${(z)BUFFER}) - com="${cmdline[1]}" - fname="_${com}" - - Accept-Line 'preprocess' - - zstyle -t ":acceptline:${alcontext}" rehash \ - && [[ -z ${commands[$com]} ]] \ - && rehash - - if [[ -n ${com} ]] \ - && [[ -n ${reswords[(r)$com]} ]] \ - || [[ -n ${aliases[$com]} ]] \ - || [[ -n ${functions[$com]} ]] \ - || [[ -n ${builtins[$com]} ]] \ - || [[ -n ${commands[$com]} ]] ; then - - # there is something sensible to execute, just do it. - alcontext='normal' - Accept-Line-HandleContext - - return - fi - - if [[ -o correct ]] \ - || [[ -o correctall ]] \ - && [[ -n ${functions[$fname]} ]] ; then - - # nothing there to execute but there is a function called - # _command_name; a completion widget. Makes no sense to - # call it on the commandline, but the correct{,all} options - # will ask for it nevertheless, so warn the user. - if [[ ${LASTWIDGET} == 'accept-line' ]] ; then - # Okay, we warned the user before, he called us again, - # so have it his way. - alcontext='force' - Accept-Line-HandleContext - - return - fi - - if zstyle -t ":acceptline:${alcontext}" nocompwarn ; then - alcontext='normal' - Accept-Line-HandleContext - else - # prepare warning message for the user, configurable via zstyle. - zstyle -s ":acceptline:${alcontext}" compwarnfmt msg - - if [[ -z ${msg} ]] ; then - msg="%c will not execute and completion %f exists." 
- fi - - zformat -f msg "${msg}" "c:${com}" "f:${fname}" - - zle -M -- "${msg}" - fi - return - elif [[ -n ${buf//[$' \t\n']##/} ]] ; then - # If we are here, the commandline contains something that is not - # executable, which is neither subject to _command_name correction - # and is not empty. might be a variable assignment - alcontext='misc' - Accept-Line-HandleContext - - return - fi - - # If we got this far, the commandline only contains whitespace, or is empty. - alcontext='empty' - Accept-Line-HandleContext -} - -zle -N accept-line -zle -N Accept-Line -zle -N Accept-Line-HandleContext - -# power completion - abbreviation expansion -# power completion / abbreviation expansion / buffer expansion -# see http://zshwiki.org/home/examples/zleiab for details -# less risky than the global aliases but powerful as well -# just type the abbreviation key and afterwards ',.' to expand it -declare -A abk -setopt extendedglob -setopt interactivecomments -abk=( -# key # value (#d additional doc string) -#A# start - '...' '../..' - '....' '../../..' - 'BG' '& exit' - 'C' '| wc -l' - 'G' '|& grep '${grep_options:+"${grep_options[*]} "} - 'H' '| head' - 'Hl' ' --help |& less -r' #d (Display help in pager) - 'L' '| less' - 'LL' '|& less -r' - 'M' '| most' - 'N' '&>/dev/null' #d (No Output) - 'R' '| tr A-z N-za-m' #d (ROT13) - 'SL' '| sort | less' - 'S' '| sort -u' - 'T' '| tail' - 'V' '|& vim -' -#A# end - 'co' './configure && make && sudo make install' -) - -zleiab() { - emulate -L zsh - setopt extendedglob - local MATCH - - if (( NOABBREVIATION > 0 )) ; then - LBUFFER="${LBUFFER},." - return 0 - fi - - matched_chars='[.-|_a-zA-Z0-9]#' - LBUFFER=${LBUFFER%%(#m)[.-|_a-zA-Z0-9]#} - LBUFFER+=${abk[$MATCH]:-$MATCH} -} - -zle -N zleiab && bindkey ",." zleiab - -#f# display contents of assoc array $abk -help-show-abk() -{ - zle -M "$(print "Type ,. after these abbreviations to expand them:"; print -a -C 2 ${(kv)abk})" -} -#k# Display list of abbreviations that expand when followed by ,. -zle -N help-show-abk && bindkey '^xb' help-show-abk - -# autoloading -zrcautoload zmv # who needs mmv or rename? -zrcautoload history-search-end - -# we don't want to quote/espace URLs on our own... 
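# A minimal sketch (e.g. for ~/.zshrc.local) of adding a personal entry to the
# ,. abbreviation table defined above; the 'J' key and the jq pipeline are
# made-up examples:
abk[J]='| jq .'
# typing J followed by ,. on the command line then expands to "| jq ."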
-# if autoload -U url-quote-magic ; then -# zle -N self-insert url-quote-magic -# zstyle ':url-quote-magic:*' url-metas '*?[]^()~#{}=' -# else -# print 'Notice: no url-quote-magic available :(' -# fi -alias url-quote='autoload -U url-quote-magic ; zle -N self-insert url-quote-magic' - -#m# k ESC-h Call \kbd{run-help} for the 1st word on the command line -alias run-help >&/dev/null && unalias run-help -for rh in run-help{,-git,-svk,-svn}; do - zrcautoload $rh -done; unset rh - -# completion system -if zrcautoload compinit ; then - compinit || print 'Notice: no compinit available :(' -else - print 'Notice: no compinit available :(' - function zstyle { } - function compdef { } -fi - -is4 && zrcautoload zed # use ZLE editor to edit a file or function - -is4 && \ -for mod in complist deltochar mathfunc ; do - zmodload -i zsh/${mod} 2>/dev/null || print "Notice: no ${mod} available :(" -done - -# autoload zsh modules when they are referenced -if is4 ; then - zmodload -a zsh/stat zstat - zmodload -a zsh/zpty zpty - zmodload -ap zsh/mapfile mapfile -fi - -if is4 && zrcautoload insert-files && zle -N insert-files ; then - #k# Insert files and test globbing - bindkey "^xf" insert-files # C-x-f -fi - -bindkey ' ' magic-space # also do history expansion on space -#k# Trigger menu-complete -bindkey '\ei' menu-complete # menu completion via esc-i - -# press esc-e for editing command line in $EDITOR or $VISUAL -if is4 && zrcautoload edit-command-line && zle -N edit-command-line ; then - #k# Edit the current line in \kbd{\$EDITOR} - bindkey '\ee' edit-command-line -fi - -if is4 && [[ -n ${(k)modules[zsh/complist]} ]] ; then - #k# menu selection: pick item but stay in the menu - bindkey -M menuselect '\e^M' accept-and-menu-complete - # also use + and INSERT since it's easier to press repeatedly - bindkey -M menuselect "+" accept-and-menu-complete - bindkey -M menuselect "^[[2~" accept-and-menu-complete - - # accept a completion and try to complete again by using menu - # completion; very useful with completing directories - # by using 'undo' one's got a simple file browser - bindkey -M menuselect '^o' accept-and-infer-next-history -fi - -# press "ctrl-e d" to insert the actual date in the form yyyy-mm-dd -insert-datestamp() { LBUFFER+=${(%):-'%D{%Y-%m-%d}'}; } -zle -N insert-datestamp - -#k# Insert a timestamp on the command line (yyyy-mm-dd) -bindkey '^ed' insert-datestamp - -# press esc-m for inserting last typed word again (thanks to caphuso!) -insert-last-typed-word() { zle insert-last-word -- 0 -1 }; -zle -N insert-last-typed-word; - -#k# Insert last typed word -bindkey "\em" insert-last-typed-word - -function grml-zsh-fg() { - if (( ${#jobstates} )); then - zle .push-input - [[ -o hist_ignore_space ]] && BUFFER=' ' || BUFFER='' - BUFFER="${BUFFER}fg" - zle .accept-line - else - zle -M 'No background jobs. Doing nothing.' - fi -} -zle -N grml-zsh-fg -#k# A smart shortcut for \kbd{fg} -bindkey '^z' grml-zsh-fg - -# run command line as user root via sudo: -sudo-command-line() { - [[ -z $BUFFER ]] && zle up-history - if [[ $BUFFER != sudo\ * ]]; then - BUFFER="sudo $BUFFER" - CURSOR=$(( CURSOR+5 )) - fi -} -zle -N sudo-command-line - -#k# prepend the current command with "sudo" -bindkey "^os" sudo-command-line - -### jump behind the first word on the cmdline. -### useful to add options. 
-function jump_after_first_word() { - local words - words=(${(z)BUFFER}) - - if (( ${#words} <= 1 )) ; then - CURSOR=${#BUFFER} - else - CURSOR=${#${words[1]}} - fi -} -zle -N jump_after_first_word -#k# jump to after first word (for adding options) -bindkey '^x1' jump_after_first_word - -# complete word from history with menu (from Book: ZSH, OpenSource-Press) -zle -C hist-complete complete-word _generic -zstyle ':completion:hist-complete:*' completer _history -#k# complete word from history with menu -bindkey "^x^x" hist-complete - -## complete word from currently visible Screen or Tmux buffer. -if check_com -c screen || check_com -c tmux; then - _complete_screen_display() { - [[ "$TERM" != "screen" ]] && return 1 - - local TMPFILE=$(mktemp) - local -U -a _screen_display_wordlist - trap "rm -f $TMPFILE" EXIT - - # fill array with contents from screen hardcopy - if ((${+TMUX})); then - #works, but crashes tmux below version 1.4 - #luckily tmux -V option to ask for version, was also added in 1.4 - tmux -V &>/dev/null || return - tmux -q capture-pane \; save-buffer -b 0 $TMPFILE \; delete-buffer -b 0 - else - screen -X hardcopy $TMPFILE - # screen sucks, it dumps in latin1, apparently always. so recode it - # to system charset - check_com recode && recode latin1 $TMPFILE - fi - _screen_display_wordlist=( ${(QQ)$(<$TMPFILE)} ) - # remove PREFIX to be completed from that array - _screen_display_wordlist[${_screen_display_wordlist[(i)$PREFIX]}]="" - compadd -a _screen_display_wordlist - } - #k# complete word from currently visible GNU screen buffer - bindkey -r "^xS" - compdef -k _complete_screen_display complete-word '^xS' -fi - -# history - -ZSHDIR=$HOME/.zsh - -#v# -HISTFILE=$HOME/.zsh_history -isgrmlcd && HISTSIZE=500 || HISTSIZE=5000 -isgrmlcd && SAVEHIST=1000 || SAVEHIST=10000 # useful for setopt append_history - -# dirstack handling - -DIRSTACKSIZE=${DIRSTACKSIZE:-20} -DIRSTACKFILE=${DIRSTACKFILE:-${HOME}/.zdirs} - -if [[ -f ${DIRSTACKFILE} ]] && [[ ${#dirstack[*]} -eq 0 ]] ; then - dirstack=( ${(f)"$(< $DIRSTACKFILE)"} ) - # "cd -" won't work after login by just setting $OLDPWD, so - [[ -d $dirstack[1] ]] && cd $dirstack[1] && cd $OLDPWD -fi - -chpwd() { - local -ax my_stack - my_stack=( ${PWD} ${dirstack} ) - if is42 ; then - builtin print -l ${(u)my_stack} >! ${DIRSTACKFILE} - else - uprint my_stack >! ${DIRSTACKFILE} - fi -} - -# directory based profiles - -if is433 ; then - -CHPWD_PROFILE='default' -function chpwd_profiles() { - # Say you want certain settings to be active in certain directories. - # This is what you want. - # - # zstyle ':chpwd:profiles:/usr/src/grml(|/|/*)' profile grml - # zstyle ':chpwd:profiles:/usr/src/debian(|/|/*)' profile debian - # - # When that's done and you enter a directory that matches the pattern - # in the third part of the context, a function called chpwd_profile_grml, - # for example, is called (if it exists). - # - # If no pattern matches (read: no profile is detected) the profile is - # set to 'default', which means chpwd_profile_default is attempted to - # be called. - # - # A word about the context (the ':chpwd:profiles:*' stuff in the zstyle - # command) which is used: The third part in the context is matched against - # ${PWD}. That's why using a pattern such as /foo/bar(|/|/*) makes sense. 
- # Because that way the profile is detected for all these values of ${PWD}: - # /foo/bar - # /foo/bar/ - # /foo/bar/baz - # So, if you want to make double damn sure a profile works in /foo/bar - # and everywhere deeper in that tree, just use (|/|/*) and be happy. - # - # The name of the detected profile will be available in a variable called - # 'profile' in your functions. You don't need to do anything, it'll just - # be there. - # - # Then there is the parameter $CHPWD_PROFILE is set to the profile, that - # was is currently active. That way you can avoid running code for a - # profile that is already active, by running code such as the following - # at the start of your function: - # - # function chpwd_profile_grml() { - # [[ ${profile} == ${CHPWD_PROFILE} ]] && return 1 - # ... - # } - # - # The initial value for $CHPWD_PROFILE is 'default'. - # - # Version requirement: - # This feature requires zsh 4.3.3 or newer. - # If you use this feature and need to know whether it is active in your - # current shell, there are several ways to do that. Here are two simple - # ways: - # - # a) If knowing if the profiles feature is active when zsh starts is - # good enough for you, you can put the following snippet into your - # .zshrc.local: - # - # (( ${+functions[chpwd_profiles]} )) && print "directory profiles active" - # - # b) If that is not good enough, and you would prefer to be notified - # whenever a profile changes, you can solve that by making sure you - # start *every* profile function you create like this: - # - # function chpwd_profile_myprofilename() { - # [[ ${profile} == ${CHPWD_PROFILE} ]] && return 1 - # print "chpwd(): Switching to profile: $profile" - # ... - # } - # - # That makes sure you only get notified if a profile is *changed*, - # not everytime you change directory, which would probably piss - # you off fairly quickly. :-) - # - # There you go. Now have fun with that. - local -x profile - - zstyle -s ":chpwd:profiles:${PWD}" profile profile || profile='default' - if (( ${+functions[chpwd_profile_$profile]} )) ; then - chpwd_profile_${profile} - fi - - CHPWD_PROFILE="${profile}" - return 0 -} -chpwd_functions=( ${chpwd_functions} chpwd_profiles ) - -fi # is433 - -# display battery status on right side of prompt via running 'BATTERY=1 zsh' -if [[ $BATTERY -gt 0 ]] ; then - if ! check_com -c acpi ; then - BATTERY=0 - fi -fi - -battery() { -if [[ $BATTERY -gt 0 ]] ; then - PERCENT="${${"$(acpi 2>/dev/null)"}/(#b)[[:space:]]#Battery <->: [^0-9]##, (<->)%*/${match[1]}}" - if [[ -z "$PERCENT" ]] ; then - PERCENT='acpi not present' - else - if [[ "$PERCENT" -lt 20 ]] ; then - PERCENT="warning: ${PERCENT}%%" - else - PERCENT="${PERCENT}%%" - fi - fi -fi -} -# set colors for use in prompts -if zrcautoload colors && colors 2>/dev/null ; then - BLUE="%{${fg[blue]}%}" - RED="%{${fg_bold[red]}%}" - GREEN="%{${fg[green]}%}" - CYAN="%{${fg[cyan]}%}" - MAGENTA="%{${fg[magenta]}%}" - YELLOW="%{${fg[yellow]}%}" - WHITE="%{${fg[white]}%}" - NO_COLOUR="%{${reset_color}%}" -else - BLUE=$'%{\e[1;34m%}' - RED=$'%{\e[1;31m%}' - GREEN=$'%{\e[1;32m%}' - CYAN=$'%{\e[1;36m%}' - WHITE=$'%{\e[1;37m%}' - MAGENTA=$'%{\e[1;35m%}' - YELLOW=$'%{\e[1;33m%}' - NO_COLOUR=$'%{\e[0m%}' -fi - -# gather version control information for inclusion in a prompt - -if zrcautoload vcs_info; then - # `vcs_info' in zsh versions 4.3.10 and below have a broken `_realpath' - # function, which can cause a lot of trouble with our directory-based - # profiles. 
So: - if [[ ${ZSH_VERSION} == 4.3.<-10> ]] ; then - function VCS_INFO_realpath () { - setopt localoptions NO_shwordsplit chaselinks - ( builtin cd -q $1 2> /dev/null && pwd; ) - } - fi - - zstyle ':vcs_info:*' max-exports 2 - - if [[ -o restricted ]]; then - zstyle ':vcs_info:*' enable NONE - fi -fi - -# Change vcs_info formats for the grml prompt. The 2nd format sets up -# $vcs_info_msg_1_ to contain "zsh: repo-name" used to set our screen title. -# TODO: The included vcs_info() version still uses $VCS_INFO_message_N_. -# That needs to be the use of $VCS_INFO_message_N_ needs to be changed -# to $vcs_info_msg_N_ as soon as we use the included version. -if [[ "$TERM" == dumb ]] ; then - zstyle ':vcs_info:*' actionformats "(%s%)-[%b|%a] " "zsh: %r" - zstyle ':vcs_info:*' formats "(%s%)-[%b] " "zsh: %r" -else - # these are the same, just with a lot of colours: - zstyle ':vcs_info:*' actionformats "${MAGENTA}(${NO_COLOUR}%s${MAGENTA})${YELLOW}-${MAGENTA}[${GREEN}%b${YELLOW}|${RED}%a${MAGENTA}]${NO_COLOUR} " \ - "zsh: %r" - zstyle ':vcs_info:*' formats "${MAGENTA}(${NO_COLOUR}%s${MAGENTA})${YELLOW}-${MAGENTA}[${GREEN}%b${MAGENTA}]${NO_COLOUR}%} " \ - "zsh: %r" - zstyle ':vcs_info:(sv[nk]|bzr):*' branchformat "%b${RED}:${YELLOW}%r" -fi - -# command not found handling - -(( ${COMMAND_NOT_FOUND} == 1 )) && -function command_not_found_handler() { - emulate -L zsh - if [[ -x ${GRML_ZSH_CNF_HANDLER} ]] ; then - ${GRML_ZSH_CNF_HANDLER} $1 - fi - return 1 -} - -# set prompt -if zrcautoload promptinit && promptinit 2>/dev/null ; then - promptinit # people should be able to use their favourite prompt -else - print 'Notice: no promptinit available :(' -fi - -setopt prompt_subst - -# make sure to use right prompt only when not running a command -is41 && setopt transient_rprompt - - -function ESC_print () { - info_print $'\ek' $'\e\\' "$@" -} -function set_title () { - info_print $'\e]0;' $'\a' "$@" -} - -function info_print () { - local esc_begin esc_end - esc_begin="$1" - esc_end="$2" - shift 2 - printf '%s' ${esc_begin} - printf '%s' "$*" - printf '%s' "${esc_end}" -} - -# TODO: revise all these NO* variables and especially their documentation -# in zsh-help() below. 
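# A minimal sketch of the directory-based profiles mechanism documented further
# above; the directory, the profile name and the exported variable are made-up
# examples:
zstyle ':chpwd:profiles:/home/user/work(|/|/*)' profile work
function chpwd_profile_work() {
    [[ ${profile} == ${CHPWD_PROFILE} ]] && return 1
    print "chpwd(): Switching to profile: $profile"
    export EMAIL='user@work.example.org'
}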
-is4 && [[ $NOPRECMD -eq 0 ]] && precmd () { - [[ $NOPRECMD -gt 0 ]] && return 0 - # update VCS information - (( ${+functions[vcs_info]} )) && vcs_info - - if [[ $TERM == screen* ]] ; then - if [[ -n ${vcs_info_msg_1_} ]] ; then - ESC_print ${vcs_info_msg_1_} - else - ESC_print "zsh" - fi - fi - # just use DONTSETRPROMPT=1 to be able to overwrite RPROMPT - if [[ ${DONTSETRPROMPT:-} -eq 0 ]] ; then - if [[ $BATTERY -gt 0 ]] ; then - # update battery (dropped into $PERCENT) information - battery - RPROMPT="%(?..:() ${PERCENT}" - else - RPROMPT="%(?..:() " - fi - fi - # adjust title of xterm - # see http://www.faqs.org/docs/Linux-mini/Xterm-Title.html - [[ ${NOTITLE:-} -gt 0 ]] && return 0 - case $TERM in - (xterm*|rxvt*) - set_title ${(%):-"%n@%m: %~"} - ;; - esac -} - -# preexec() => a function running before every command -is4 && [[ $NOPRECMD -eq 0 ]] && \ -preexec () { - [[ $NOPRECMD -gt 0 ]] && return 0 -# set hostname if not running on host with name 'grml' - if [[ -n "$HOSTNAME" ]] && [[ "$HOSTNAME" != $(hostname) ]] ; then - NAME="@$HOSTNAME" - fi -# get the name of the program currently running and hostname of local machine -# set screen window title if running in a screen - if [[ "$TERM" == screen* ]] ; then - # local CMD=${1[(wr)^(*=*|sudo|ssh|-*)]} # don't use hostname - local CMD="${1[(wr)^(*=*|sudo|ssh|-*)]}$NAME" # use hostname - ESC_print ${CMD} - fi -# adjust title of xterm - [[ ${NOTITLE} -gt 0 ]] && return 0 - case $TERM in - (xterm*|rxvt*) - set_title "${(%):-"%n@%m:"}" "$1" - ;; - esac -} - -EXITCODE="%(?..%?%1v )" -# secondary prompt, printed when the shell needs more information to complete a -# command. -PS2='\`%_> ' -# selection prompt used within a select loop. -PS3='?# ' -# the execution trace prompt (setopt xtrace). default: '+%N:%i>' -PS4='+%N:%i:%_> ' - -# set variable debian_chroot if running in a chroot with /etc/debian_chroot -if [[ -z "$debian_chroot" ]] && [[ -r /etc/debian_chroot ]] ; then - debian_chroot=$(cat /etc/debian_chroot) -fi - -# don't use colors on dumb terminals (like emacs): -if [[ "$TERM" == dumb ]] ; then - PROMPT="${EXITCODE}${debian_chroot:+($debian_chroot)}%n@%m %40<...<%B%~%b%<< " -else - # only if $GRMLPROMPT is set (e.g. via 'GRMLPROMPT=1 zsh') use the extended - # prompt set variable identifying the chroot you work in (used in the - # prompt below) - if [[ $GRMLPROMPT -gt 0 ]] ; then - PROMPT="${RED}${EXITCODE}${CYAN}[%j running job(s)] ${GREEN}{history#%!} ${RED}%(3L.+.) 
${BLUE}%* %D -${BLUE}%n${NO_COLOUR}@%m %40<...<%B%~%b%<< " - else - # This assembles the primary prompt string - if (( EUID != 0 )); then - #PROMPT="${RED}${EXITCODE}${WHITE}${debian_chroot:+($debian_chroot)}${BLUE}%n${NO_COLOUR}@%m %40<...<%B%~%b%<< " - PROMPT="${RED}${EXITCODE}${WHITE}${debian_chroot:+($debian_chroot)}${GREEN}%n@%m${BLUE} %40<...<%B%~%b%<< " - else - PROMPT="${BLUE}${EXITCODE}${WHITE}${debian_chroot:+($debian_chroot)}${RED}%n${NO_COLOUR}@%m %40<...<%B%~%b%<< " - fi - fi -fi - -PROMPT="${PROMPT}"'${vcs_info_msg_0_}'"%# " - -# if we are inside a grml-chroot set a specific prompt theme -if [[ -n "$GRML_CHROOT" ]] ; then - PROMPT="%{$fg[red]%}(CHROOT) %{$fg_bold[red]%}%n%{$fg_no_bold[white]%}@%m %40<...<%B%~%b%<< %\# " -fi - -# 'hash' some often used directories -#d# start -hash -d deb=/var/cache/apt/archives -hash -d doc=/usr/share/doc -hash -d linux=/lib/modules/$(command uname -r)/build/ -hash -d log=/var/log -hash -d slog=/var/log/syslog -hash -d src=/usr/src -hash -d templ=/usr/share/doc/grml-templates -hash -d tt=/usr/share/doc/texttools-doc -hash -d www=/var/www -#d# end - -# some aliases -if check_com -c screen ; then - if [[ $UID -eq 0 ]] ; then - if [[ -r /etc/grml/screenrc ]]; then - alias screen="${commands[screen]} -c /etc/grml/screenrc" - fi - elif [[ -r $HOME/.screenrc ]] ; then - alias screen="${commands[screen]} -c $HOME/.screenrc" - else - if [[ -r /etc/grml/screenrc_grml ]]; then - alias screen="${commands[screen]} -c /etc/grml/screenrc_grml" - else - if [[ -r /etc/grml/screenrc ]]; then - alias screen="${commands[screen]} -c /etc/grml/screenrc" - fi - fi - fi -fi - -# do we have GNU ls with color-support? -if [[ "$TERM" != dumb ]]; then - #a1# execute \kbd{@a@}:\quad ls with colors - alias ls='ls -b -CF '${ls_options:+"${ls_options[*]} "} - #a1# execute \kbd{@a@}:\quad list all files, with colors - alias la='ls -la '${ls_options:+"${ls_options[*]} "} - #a1# long colored list, without dotfiles (@a@) - alias ll='ls -l '${ls_options:+"${ls_options[*]} "} - #a1# long colored list, human readable sizes (@a@) - alias lh='ls -hAl '${ls_options:+"${ls_options[*]} "} - #a1# List files, append qualifier to filenames \\&\quad(\kbd{/} for directories, \kbd{@} for symlinks ...) - alias l='ls -lF '${ls_options:+"${ls_options[*]} "} -else - alias ls='ls -b -CF' - alias la='ls -la' - alias ll='ls -l' - alias lh='ls -hAl' - alias l='ls -lF' -fi - -alias mdstat='cat /proc/mdstat' -alias ...='cd ../../' - -# generate alias named "$KERNELVERSION-reboot" so you can use boot with kexec: -if [[ -x /sbin/kexec ]] && [[ -r /proc/cmdline ]] ; then - alias "$(uname -r)-reboot"="kexec -l --initrd=/boot/initrd.img-"$(uname -r)" --command-line=\"$(cat /proc/cmdline)\" /boot/vmlinuz-"$(uname -r)"" -fi - -# see http://www.cl.cam.ac.uk/~mgk25/unicode.html#term for details -alias term2iso="echo 'Setting terminal to iso mode' ; print -n '\e%@'" -alias term2utf="echo 'Setting terminal to utf-8 mode'; print -n '\e%G'" - -# make sure it is not assigned yet -[[ -n ${aliases[utf2iso]} ]] && unalias utf2iso -utf2iso() { - if isutfenv ; then - for ENV in $(env | command grep -i '.utf') ; do - eval export "$(echo $ENV | sed 's/UTF-8/iso885915/ ; s/utf8/iso885915/')" - done - fi -} - -# make sure it is not assigned yet -[[ -n ${aliases[iso2utf]} ]] && unalias iso2utf -iso2utf() { - if ! isutfenv ; then - for ENV in $(env | command grep -i '\.iso') ; do - eval export "$(echo $ENV | sed 's/iso.*/UTF-8/ ; s/ISO.*/UTF-8/')" - done - fi -} - -# especially for roadwarriors using GNU screen and ssh: -if ! 
check_com asc &>/dev/null ; then - asc() { autossh -t "$@" 'screen -RdU' } - compdef asc=ssh -fi - -#f1# Hints for the use of zsh on grml -zsh-help() { - print "$bg[white]$fg[black] -zsh-help - hints for use of zsh on grml -=======================================$reset_color" - - print ' -Main configuration of zsh happens in /etc/zsh/zshrc. -That file is part of the package grml-etc-core, if you want to -use them on a non-grml-system just get the tar.gz from -http://deb.grml.org/ or (preferably) get it from the git repository: - - http://git.grml.org/f/grml-etc-core/etc/zsh/zshrc - -This version of grml'\''s zsh setup does not use skel/.zshrc anymore. -The file is still there, but it is empty for backwards compatibility. - -For your own changes use these two files: - $HOME/.zshrc.pre - $HOME/.zshrc.local - -The former is sourced very early in our zshrc, the latter is sourced -very lately. - -System wide configuration without touching configuration files of grml -can take place in /etc/zsh/zshrc.local. - -For information regarding zsh start at http://grml.org/zsh/ - -Take a look at grml'\''s zsh refcard: -% xpdf =(zcat /usr/share/doc/grml-docs/zsh/grml-zsh-refcard.pdf.gz) - -Check out the main zsh refcard: -% '$BROWSER' http://www.bash2zsh.com/zsh_refcard/refcard.pdf - -And of course visit the zsh-lovers: -% man zsh-lovers - -You can adjust some options through environment variables when -invoking zsh without having to edit configuration files. -Basically meant for bash users who are not used to the power of -the zsh yet. :) - - "NOCOR=1 zsh" => deactivate automatic correction - "NOMENU=1 zsh" => do not use auto menu completion - (note: use ctrl-d for completion instead!) - "NOPRECMD=1 zsh" => disable the precmd + preexec commands (set GNU screen title) - "NOTITLE=1 zsh" => disable setting the title of xterms without disabling - preexec() and precmd() completely - "BATTERY=1 zsh" => activate battery status (via acpi) on right side of prompt - "COMMAND_NOT_FOUND=1 zsh" - => Enable a handler if an external command was not found - The command called in the handler can be altered by setting - the GRML_ZSH_CNF_HANDLER variable, the default is: - "/usr/share/command-not-found/command-not-found" - -A value greater than 0 is enables a feature; a value equal to zero -disables it. If you like one or the other of these settings, you can -add them to ~/.zshrc.pre to ensure they are set when sourcing grml'\''s -zshrc.' 
- - print " -$bg[white]$fg[black] -Please report wishes + bugs to the grml-team: http://grml.org/bugs/ -Enjoy your grml system with the zsh!$reset_color" -} - -# debian stuff -if [[ -r /etc/debian_version ]] ; then - #a3# Execute \kbd{apt-cache search} - alias acs='apt-cache search' - #a3# Execute \kbd{apt-cache show} - alias acsh='apt-cache show' - #a3# Execute \kbd{apt-cache policy} - alias acp='apt-cache policy' - #a3# Execute \kbd{apt-get dist-upgrade} - salias adg="apt-get dist-upgrade" - #a3# Execute \kbd{apt-get install} - salias agi="apt-get install" - #a3# Execute \kbd{aptitude install} - salias ati="aptitude install" - #a3# Execute \kbd{apt-get upgrade} - salias ag="apt-get upgrade" - #a3# Execute \kbd{apt-get update} - salias au="apt-get update" - #a3# Execute \kbd{aptitude update ; aptitude safe-upgrade} - salias -a up="aptitude update ; aptitude safe-upgrade" - #a3# Execute \kbd{dpkg-buildpackage} - alias dbp='dpkg-buildpackage' - #a3# Execute \kbd{grep-excuses} - alias ge='grep-excuses' - - # get a root shell as normal user in live-cd mode: - if isgrmlcd && [[ $UID -ne 0 ]] ; then - alias su="sudo su" - fi - - #a1# Take a look at the syslog: \kbd{\$PAGER /var/log/syslog} - salias llog="$PAGER /var/log/syslog" # take a look at the syslog - #a1# Take a look at the syslog: \kbd{tail -f /var/log/syslog} - salias tlog="tail -f /var/log/syslog" # follow the syslog -fi - -# sort installed Debian-packages by size -if check_com -c dpkg-query ; then - #a3# List installed Debian-packages sorted by size - alias debs-by-size="dpkg-query -Wf 'x \${Installed-Size} \${Package} \${Status}\n' | sed -ne '/^x /d' -e '/^x \(.*\) install ok installed$/s//\1/p' | sort -nr" -fi - -# if cdrecord is a symlink (to wodim) or isn't present at all warn: -if [[ -L /usr/bin/cdrecord ]] || ! check_com -c cdrecord; then - if check_com -c wodim; then - cdrecord() { - cat <' and 'cd -' with menu - # zstyle ':completion:*:*:cd:*:directory-stack' menu yes select - - # insert all expansions for expand completer - zstyle ':completion:*:expand:*' tag-order all-expansions - zstyle ':completion:*:history-words' list false - - # activate menu - zstyle ':completion:*:history-words' menu yes - - # ignore duplicate entries - zstyle ':completion:*:history-words' remove-all-dups yes - zstyle ':completion:*:history-words' stop yes - - # match uppercase from lowercase - zstyle ':completion:*' matcher-list 'm:{a-z}={A-Z}' - - # separate matches into groups - zstyle ':completion:*:matches' group 'yes' - zstyle ':completion:*' group-name '' - - if [[ "$NOMENU" -eq 0 ]] ; then - # if there are more than 5 options allow selecting from a menu - zstyle ':completion:*' menu select=5 - else - # don't use any menus at all - setopt no_auto_menu - fi - - zstyle ':completion:*:messages' format '%d' - zstyle ':completion:*:options' auto-description '%d' - - # describe options in full - zstyle ':completion:*:options' description 'yes' - - # on processes completion complete all user processes - zstyle ':completion:*:processes' command 'ps -au$USER' - - # offer indexes before parameters in subscripts - zstyle ':completion:*:*:-subscript-:*' tag-order indexes parameters - - # provide verbose completion information - zstyle ':completion:*' verbose true - - # recent (as of Dec 2007) zsh versions are able to provide descriptions - # for commands (read: 1st word in the line) that it will list for the user - # to choose from. The following disables that, because it's not exactly fast. 
- zstyle ':completion:*:-command-:*:' verbose false - - # set format for warnings - zstyle ':completion:*:warnings' format $'%{\e[0;31m%}No matches for:%{\e[0m%} %d' - - # define files to ignore for zcompile - zstyle ':completion:*:*:zcompile:*' ignored-patterns '(*~|*.zwc)' - zstyle ':completion:correct:' prompt 'correct to: %e' - - # Ignore completion functions for commands you don't have: - zstyle ':completion::(^approximate*):*:functions' ignored-patterns '_*' - - # Provide more processes in completion of programs like killall: - zstyle ':completion:*:processes-names' command 'ps c -u ${USER} -o command | uniq' - - # complete manual by their section - zstyle ':completion:*:manuals' separate-sections true - zstyle ':completion:*:manuals.*' insert-sections true - zstyle ':completion:*:man:*' menu yes select - - # provide .. as a completion - zstyle ':completion:*' special-dirs .. - - # run rehash on completion so new installed program are found automatically: - _force_rehash() { - (( CURRENT == 1 )) && rehash - return 1 - } - - ## correction - # some people don't like the automatic correction - so run 'NOCOR=1 zsh' to deactivate it - if [[ "$NOCOR" -gt 0 ]] ; then - zstyle ':completion:*' completer _oldlist _expand _force_rehash _complete _files _ignored - setopt nocorrect - else - # try to be smart about when to use what completer... - setopt correct - zstyle -e ':completion:*' completer ' - if [[ $_last_try != "$HISTNO$BUFFER$CURSOR" ]] ; then - _last_try="$HISTNO$BUFFER$CURSOR" - reply=(_complete _match _ignored _prefix _files) - else - if [[ $words[1] == (rm|mv) ]] ; then - reply=(_complete _files) - else - reply=(_oldlist _expand _force_rehash _complete _ignored _correct _approximate _files) - fi - fi' - fi - - # command for process lists, the local web server details and host completion - zstyle ':completion:*:urls' local 'www' '/var/www/' 'public_html' - - # caching - [[ -d $ZSHDIR/cache ]] && zstyle ':completion:*' use-cache yes && \ - zstyle ':completion::complete:*' cache-path $ZSHDIR/cache/ - - # host completion - if is42 ; then - [[ -r ~/.ssh/known_hosts ]] && _ssh_hosts=(${${${${(f)"$(<$HOME/.ssh/known_hosts)"}:#[\|]*}%%\ *}%%,*}) || _ssh_hosts=() - [[ -r /etc/hosts ]] && : ${(A)_etc_hosts:=${(s: :)${(ps:\t:)${${(f)~~"$(\n' "$0" && return 1 - for file in "$@" ; do - while [[ -h "$file" ]] ; do - ls -l $file - file=$(readlink "$file") - done - done -} - -# TODO: Is it supported to use pager settings like this? -# PAGER='less -Mr' - If so, the use of $PAGER here needs fixing -# with respect to wordsplitting. (ie. ${=PAGER}) -if check_com -c $PAGER ; then - #f1# View Debian's changelog of a given package - dchange() { - emulate -L zsh - if [[ -r /usr/share/doc/$1/changelog.Debian.gz ]] ; then - $PAGER /usr/share/doc/$1/changelog.Debian.gz - elif [[ -r /usr/share/doc/$1/changelog.gz ]] ; then - $PAGER /usr/share/doc/$1/changelog.gz - else - if check_com -c aptitude ; then - echo "No changelog for package $1 found, using aptitude to retrieve it." - if isgrml ; then - aptitude -t unstable changelog $1 - else - aptitude changelog $1 - fi - else - echo "No changelog for package $1 found, sorry." 
- return 1 - fi - fi - } - _dchange() { _files -W /usr/share/doc -/ } - compdef _dchange dchange - - #f1# View Debian's NEWS of a given package - dnews() { - emulate -L zsh - if [[ -r /usr/share/doc/$1/NEWS.Debian.gz ]] ; then - $PAGER /usr/share/doc/$1/NEWS.Debian.gz - else - if [[ -r /usr/share/doc/$1/NEWS.gz ]] ; then - $PAGER /usr/share/doc/$1/NEWS.gz - else - echo "No NEWS file for package $1 found, sorry." - return 1 - fi - fi - } - _dnews() { _files -W /usr/share/doc -/ } - compdef _dnews dnews - - #f1# View upstream's changelog of a given package - uchange() { - emulate -L zsh - if [[ -r /usr/share/doc/$1/changelog.gz ]] ; then - $PAGER /usr/share/doc/$1/changelog.gz - else - echo "No changelog for package $1 found, sorry." - return 1 - fi - } - _uchange() { _files -W /usr/share/doc -/ } - compdef _uchange uchange -fi - -# zsh profiling -profile() { - ZSH_PROFILE_RC=1 $SHELL "$@" -} - -#f1# Edit an alias via zle -edalias() { - [[ -z "$1" ]] && { echo "Usage: edalias " ; return 1 } || vared aliases'[$1]' ; -} -compdef _aliases edalias - -#f1# Edit a function via zle -edfunc() { - [[ -z "$1" ]] && { echo "Usage: edfunc " ; return 1 } || zed -f "$1" ; -} -compdef _functions edfunc - -# use it e.g. via 'Restart apache2' -#m# f6 Start() \kbd{/etc/init.d/\em{process}}\quad\kbd{start} -#m# f6 Restart() \kbd{/etc/init.d/\em{process}}\quad\kbd{restart} -#m# f6 Stop() \kbd{/etc/init.d/\em{process}}\quad\kbd{stop} -#m# f6 Reload() \kbd{/etc/init.d/\em{process}}\quad\kbd{reload} -#m# f6 Force-Reload() \kbd{/etc/init.d/\em{process}}\quad\kbd{force-reload} -if [[ -d /etc/init.d || -d /etc/service ]] ; then - __start_stop() { - local action_="${1:l}" # e.g Start/Stop/Restart - local service_="$2" - local param_="$3" - - local service_target_="$(readlink /etc/init.d/$service_)" - if [[ $service_target_ == "/usr/bin/sv" ]]; then - # runit - case "${action_}" in - start) if [[ ! -e /etc/service/$service_ ]]; then - $SUDO ln -s "/etc/sv/$service_" "/etc/service/" - else - $SUDO "/etc/init.d/$service_" "${action_}" "$param_" - fi ;; - # there is no reload in runits sysv emulation - reload) $SUDO "/etc/init.d/$service_" "force-reload" "$param_" ;; - *) $SUDO "/etc/init.d/$service_" "${action_}" "$param_" ;; - esac - else - # sysvinit - $SUDO "/etc/init.d/$service_" "${action_}" "$param_" - fi - } - - _grmlinitd() { - local -a scripts - scripts=( /etc/init.d/*(x:t) ) - _describe "service startup script" scripts - } - - for i in Start Restart Stop Force-Reload Reload ; do - eval "$i() { __start_stop $i \"\$1\" \"\$2\" ; }" - compdef _grmlinitd $i - done -fi - -#f1# Provides useful information on globbing -H-Glob() { - echo -e " - / directories - . 
plain files - @ symbolic links - = sockets - p named pipes (FIFOs) - * executable plain files (0100) - % device files (character or block special) - %b block special files - %c character special files - r owner-readable files (0400) - w owner-writable files (0200) - x owner-executable files (0100) - A group-readable files (0040) - I group-writable files (0020) - E group-executable files (0010) - R world-readable files (0004) - W world-writable files (0002) - X world-executable files (0001) - s setuid files (04000) - S setgid files (02000) - t files with the sticky bit (01000) - - print *(m-1) # Files modified up to a day ago - print *(a1) # Files accessed a day ago - print *(@) # Just symlinks - print *(Lk+50) # Files bigger than 50 kilobytes - print *(Lk-50) # Files smaller than 50 kilobytes - print **/*.c # All *.c files recursively starting in \$PWD - print **/*.c~file.c # Same as above, but excluding 'file.c' - print (foo|bar).* # Files starting with 'foo' or 'bar' - print *~*.* # All Files that do not contain a dot - chmod 644 *(.^x) # make all plain non-executable files publically readable - print -l *(.c|.h) # Lists *.c and *.h - print **/*(g:users:) # Recursively match all files that are owned by group 'users' - echo /proc/*/cwd(:h:t:s/self//) # Analogous to >ps ax | awk '{print $1}'<" -} -alias help-zshglob=H-Glob - -#v1# set number of lines to display per page -HELP_LINES_PER_PAGE=20 -#v1# set location of help-zle cache file -HELP_ZLE_CACHE_FILE=~/.cache/zsh_help_zle_lines.zsh -#f1# helper function for help-zle, actually generates the help text -help_zle_parse_keybindings() -{ - emulate -L zsh - setopt extendedglob - unsetopt ksharrays #indexing starts at 1 - - #v1# choose files that help-zle will parse for keybindings - ((${+HELPZLE_KEYBINDING_FILES})) || HELPZLE_KEYBINDING_FILES=( /etc/zsh/zshrc ~/.zshrc.pre ~/.zshrc ~/.zshrc.local ) - - if [[ -r $HELP_ZLE_CACHE_FILE ]]; then - local load_cache=0 - for f ($HELPZLE_KEYBINDING_FILES) [[ $f -nt $HELP_ZLE_CACHE_FILE ]] && load_cache=1 - [[ $load_cache -eq 0 ]] && . $HELP_ZLE_CACHE_FILE && return - fi - - #fill with default keybindings, possibly to be overwriten in a file later - #Note that due to zsh inconsistency on escaping assoc array keys, we encase the key in '' which we will remove later - local -A help_zle_keybindings - help_zle_keybindings['@']="set MARK" - help_zle_keybindings['xj']="vi-join lines" - help_zle_keybindings['xb']="jump to matching brace" - help_zle_keybindings['xu']="undo" - help_zle_keybindings['_']="undo" - help_zle_keybindings['xf']="find in cmdline" - help_zle_keybindings['a']="goto beginning of line" - help_zle_keybindings['e']="goto end of line" - help_zle_keybindings['t']="transpose charaters" - help_zle_keybindings['t']="transpose words" - help_zle_keybindings['s']="spellcheck word" - help_zle_keybindings['k']="backward kill buffer" - help_zle_keybindings['u']="forward kill buffer" - help_zle_keybindings['y']="insert previously killed word/string" - help_zle_keybindings["'"]="quote line" - help_zle_keybindings['"']="quote from mark to cursor" - help_zle_keybindings['']="repeat next cmd/char times (-10a -> -10 times 'a')" - help_zle_keybindings['u']="make next word Uppercase" - help_zle_keybindings['l']="make next word lowercase" - help_zle_keybindings['xd']="preview expansion under cursor" - help_zle_keybindings['q']="push current CL into background, freeing it. 
Restore on next CL" - help_zle_keybindings['.']="insert (and interate through) last word from prev CLs" - help_zle_keybindings[',']="complete word from newer history (consecutive hits)" - help_zle_keybindings['m']="repeat last typed word on current CL" - help_zle_keybindings['v']="insert next keypress symbol literally (e.g. for bindkey)" - help_zle_keybindings['!!:n*']="insert last n arguments of last command" - help_zle_keybindings['!!:n-']="insert arguments n..N-2 of last command (e.g. mv s s d)" - help_zle_keybindings['h']="show help/manpage for current command" - - #init global variables - unset help_zle_lines help_zle_sln - typeset -g -a help_zle_lines - typeset -g help_zle_sln=1 - - local k v - local lastkeybind_desc contents #last description starting with #k# that we found - local num_lines_elapsed=0 #number of lines between last description and keybinding - #search config files in the order they a called (and thus the order in which they overwrite keybindings) - for f in $HELPZLE_KEYBINDING_FILES; do - [[ -r "$f" ]] || continue #not readable ? skip it - contents="$(<$f)" - for cline in "${(f)contents}"; do - #zsh pattern: matches lines like: #k# .............. - if [[ "$cline" == (#s)[[:space:]]#\#k\#[[:space:]]##(#b)(*)[[:space:]]#(#e) ]]; then - lastkeybind_desc="$match[*]" - num_lines_elapsed=0 - #zsh pattern: matches lines that set a keybinding using bindkey or compdef -k - # ignores lines that are commentend out - # grabs first in '' or "" enclosed string with length between 1 and 6 characters - elif [[ "$cline" == [^#]#(bindkey|compdef -k)[[:space:]](*)(#b)(\"((?)(#c1,6))\"|\'((?)(#c1,6))\')(#B)(*) ]]; then - #description prevously found ? description not more than 2 lines away ? keybinding not empty ? - if [[ -n $lastkeybind_desc && $num_lines_elapsed -lt 2 && -n $match[1] ]]; then - #substitute keybinding string with something readable - k=${${${${${${${match[1]/\\e\^h/}/\\e\^\?/}/\\e\[5~/}/\\e\[6~/}//(\\e|\^\[)/}//\^/}/3~/} - #put keybinding in assoc array, possibly overwriting defaults or stuff found in earlier files - #Note that we are extracting the keybinding-string including the quotes (see Note at beginning) - help_zle_keybindings[${k}]=$lastkeybind_desc - fi - lastkeybind_desc="" - else - ((num_lines_elapsed++)) - fi - done - done - unset contents - #calculate length of keybinding column - local kstrlen=0 - for k (${(k)help_zle_keybindings[@]}) ((kstrlen < ${#k})) && kstrlen=${#k} - #convert the assoc array into preformated lines, which we are able to sort - for k v in ${(kv)help_zle_keybindings[@]}; do - #pad keybinding-string to kstrlen chars and remove outermost characters (i.e. the quotes) - help_zle_lines+=("${(r:kstrlen:)k[2,-2]}${v}") - done - #sort lines alphabetically - help_zle_lines=("${(i)help_zle_lines[@]}") - [[ -d ${HELP_ZLE_CACHE_FILE:h} ]] || mkdir -p "${HELP_ZLE_CACHE_FILE:h}" - echo "help_zle_lines=(${(q)help_zle_lines[@]})" >| $HELP_ZLE_CACHE_FILE - zcompile $HELP_ZLE_CACHE_FILE -} -typeset -g help_zle_sln -typeset -g -a help_zle_lines - -#f1# Provides (partially autogenerated) help on keybindings and the zsh line editor -help-zle() -{ - emulate -L zsh - unsetopt ksharrays #indexing starts at 1 - #help lines already generated ? no ? then do it - [[ ${+functions[help_zle_parse_keybindings]} -eq 1 ]] && {help_zle_parse_keybindings && unfunction help_zle_parse_keybindings} - #already displayed all lines ? 
go back to the start - [[ $help_zle_sln -gt ${#help_zle_lines} ]] && help_zle_sln=1 - local sln=$help_zle_sln - #note that help_zle_sln is a global var, meaning we remember the last page we viewed - help_zle_sln=$((help_zle_sln + HELP_LINES_PER_PAGE)) - zle -M "${(F)help_zle_lines[sln,help_zle_sln-1]}" -} -#k# display help for keybindings and ZLE (cycle pages with consecutive use) -zle -N help-zle && bindkey '^xz' help-zle - -# grep for running process, like: 'any vim' -any() { - emulate -L zsh - unsetopt KSH_ARRAYS - if [[ -z "$1" ]] ; then - echo "any - grep for process(es) by keyword" >&2 - echo "Usage: any " >&2 ; return 1 - else - ps xauwww | grep -i "${grep_options[@]}" "[${1[1]}]${1[2,-1]}" - fi -} - - -# After resuming from suspend, system is paging heavily, leading to very bad interactivity. -# taken from $LINUX-KERNELSOURCE/Documentation/power/swsusp.txt -[[ -r /proc/1/maps ]] && \ -deswap() { - print 'Reading /proc/[0-9]*/maps and sending output to /dev/null, this might take a while.' - cat $(sed -ne 's:.* /:/:p' /proc/[0-9]*/maps | sort -u | grep -v '^/dev/') > /dev/null - print 'Finished, running "swapoff -a; swapon -a" may also be useful.' -} - -# a wrapper for vim, that deals with title setting -# VIM_OPTIONS -# set this array to a set of options to vim you always want -# to have set when calling vim (in .zshrc.local), like: -# VIM_OPTIONS=( -p ) -# This will cause vim to send every file given on the -# commandline to be send to it's own tab (needs vim7). -vim() { - VIM_PLEASE_SET_TITLE='yes' command vim ${VIM_OPTIONS} "$@" -} - -# make a backup of a file -bk() { - cp -a "$1" "${1}_$(date --iso-8601=seconds)" -} - -ssl_hashes=( sha512 sha256 sha1 md5 ) - -for sh in ${ssl_hashes}; do - eval 'ssl-cert-'${sh}'() { - emulate -L zsh - if [[ -z $1 ]] ; then - printf '\''usage: %s \n'\'' "ssh-cert-'${sh}'" - return 1 - fi - openssl x509 -noout -fingerprint -'${sh}' -in $1 - }' -done; unset sh - -ssl-cert-fingerprints() { - emulate -L zsh - local i - if [[ -z $1 ]] ; then - printf 'usage: ssl-cert-fingerprints \n' - return 1 - fi - for i in ${ssl_hashes} - do ssl-cert-$i $1; - done -} - -ssl-cert-info() { - emulate -L zsh - if [[ -z $1 ]] ; then - printf 'usage: ssl-cert-info \n' - return 1 - fi - openssl x509 -noout -text -in $1 - ssl-cert-fingerprints $1 -} - -# make sure our environment is clean regarding colors -for color in BLUE RED GREEN CYAN YELLOW MAGENTA WHITE ; unset $color - -# "persistent history" -# just write important commands you always need to ~/.important_commands -if [[ -r ~/.important_commands ]] ; then - fc -R ~/.important_commands -fi - -# load the lookup subsystem if it's available on the system -zrcautoload lookupinit && lookupinit - -# variables - -# set terminal property (used e.g. 
by msgid-chooser) -export COLORTERM="yes" - -# aliases - -# general -#a2# Execute \kbd{du -sch} -alias da='du -sch' -#a2# Execute \kbd{jobs -l} -alias j='jobs -l' - -# listing stuff -#a2# Execute \kbd{ls -lSrah} -alias dir="ls -lSrah" -#a2# Only show dot-directories -alias lad='ls -d .*(/)' # only show dot-directories -#a2# Only show dot-files -alias lsa='ls -a .*(.)' # only show dot-files -#a2# Only files with setgid/setuid/sticky flag -alias lss='ls -l *(s,S,t)' # only files with setgid/setuid/sticky flag -#a2# Only show 1st ten symlinks -alias lsl='ls -l *(@)' # only symlinks -#a2# Display only executables -alias lsx='ls -l *(*)' # only executables -#a2# Display world-{readable,writable,executable} files -alias lsw='ls -ld *(R,W,X.^ND/)' # world-{readable,writable,executable} files -#a2# Display the ten biggest files -alias lsbig="ls -flh *(.OL[1,10])" # display the biggest files -#a2# Only show directories -alias lsd='ls -d *(/)' # only show directories -#a2# Only show empty directories -alias lse='ls -d *(/^F)' # only show empty directories -#a2# Display the ten newest files -alias lsnew="ls -rtlh *(D.om[1,10])" # display the newest files -#a2# Display the ten oldest files -alias lsold="ls -rtlh *(D.Om[1,10])" # display the oldest files -#a2# Display the ten smallest files -alias lssmall="ls -Srl *(.oL[1,10])" # display the smallest files -#a2# Display the ten newest directories and ten newest .directories -alias lsnewdir="ls -rthdl *(/om[1,10]) .*(D/om[1,10])" -#a2# Display the ten oldest directories and ten oldest .directories -alias lsolddir="ls -rthdl *(/Om[1,10]) .*(D/Om[1,10])" - -# some useful aliases -#a2# Remove current empty directory. Execute \kbd{cd ..; rmdir $OLDCWD} -alias rmcdir='cd ..; rmdir $OLDPWD || cd $OLDPWD' - -#a2# ssh with StrictHostKeyChecking=no \\&\quad and UserKnownHostsFile unset -alias insecssh='ssh -o "StrictHostKeyChecking=no" -o "UserKnownHostsFile=/dev/null"' -alias insecscp='scp -o "StrictHostKeyChecking=no" -o "UserKnownHostsFile=/dev/null"' - -# simple webserver -check_com -c python && alias http="python -m SimpleHTTPServer" - -# work around non utf8 capable software in utf environment via $LANG and luit -if check_com isutfenv && check_com luit ; then - if check_com -c mrxvt ; then - isutfenv && [[ -n "$LANG" ]] && \ - alias mrxvt="LANG=${LANG/(#b)(*)[.@]*/$match[1].iso885915} luit mrxvt" - fi - - if check_com -c aterm ; then - isutfenv && [[ -n "$LANG" ]] && \ - alias aterm="LANG=${LANG/(#b)(*)[.@]*/$match[1].iso885915} luit aterm" - fi - - if check_com -c centericq ; then - isutfenv && [[ -n "$LANG" ]] && \ - alias centericq="LANG=${LANG/(#b)(*)[.@]*/$match[1].iso885915} luit centericq" - fi -fi - -# useful functions - -#f5# Backup \kbd{file {\rm to} file\_timestamp} -bk() { - emulate -L zsh - cp -b $1 $1_`date --iso-8601=m` -} - -#f5# cd to directoy and list files -cl() { - emulate -L zsh - cd $1 && ls -a -} - -# smart cd function, allows switching to /etc when running 'cd /etc/fstab' -cd() { - if (( ${#argv} == 1 )) && [[ -f ${1} ]]; then - [[ ! -e ${1:h} ]] && return 1 - print "Correcting ${1} to ${1:h}" - builtin cd ${1:h} - else - builtin cd "$@" - fi -} - -#f5# Create Directoy and \kbd{cd} to it -mkcd() { - mkdir -p "$@" && cd "$@" -} - -#f5# Create temporary directory and \kbd{cd} to it -cdt() { - local t - t=$(mktemp -d) - echo "$t" - builtin cd "$t" -} - -#f5# Create directory under cursor or the selected area -# Press ctrl-xM to create the directory under the cursor or the selected area. 
-# To select an area press ctrl-@ or ctrl-space and use the cursor. -# Use case: you type "mv abc ~/testa/testb/testc/" and remember that the -# directory does not exist yet -> press ctrl-XM and problem solved -inplaceMkDirs() { - local PATHTOMKDIR - if ((REGION_ACTIVE==1)); then - local F=$MARK T=$CURSOR - if [[ $F -gt $T ]]; then - F=${CURSOR} - T=${MARK} - fi - # get marked area from buffer and eliminate whitespace - PATHTOMKDIR=${BUFFER[F+1,T]%%[[:space:]]##} - PATHTOMKDIR=${PATHTOMKDIR##[[:space:]]##} - else - local bufwords iword - bufwords=(${(z)LBUFFER}) - iword=${#bufwords} - bufwords=(${(z)BUFFER}) - PATHTOMKDIR="${(Q)bufwords[iword]}" - fi - [[ -z "${PATHTOMKDIR}" ]] && return 1 - if [[ -e "${PATHTOMKDIR}" ]]; then - zle -M " path already exists, doing nothing" - else - zle -M "$(mkdir -p -v "${PATHTOMKDIR}")" - zle end-of-line - fi -} -#k# mkdir -p from string under cursor or marked area -zle -N inplaceMkDirs && bindkey '^xM' inplaceMkDirs - -#f5# List files which have been accessed within the last {\it n} days, {\it n} defaults to 1 -accessed() { - emulate -L zsh - print -l -- *(a-${1:-1}) -} - -#f5# List files which have been changed within the last {\it n} days, {\it n} defaults to 1 -changed() { - emulate -L zsh - print -l -- *(c-${1:-1}) -} - -#f5# List files which have been modified within the last {\it n} days, {\it n} defaults to 1 -modified() { - emulate -L zsh - print -l -- *(m-${1:-1}) -} -# modified() was named new() in earlier versions, add an alias for backwards compatibility -check_com new || alias new=modified - -# use colors when GNU grep with color-support -#a2# Execute \kbd{grep -{}-color=auto} -(( $#grep_options > 0 )) && alias grep='grep '${grep_options:+"${grep_options[*]} "} - -# Translate DE<=>EN -# 'translate' looks up fot a word in a file with language-to-language -# translations (field separator should be " : "). A typical wordlist looks -# like at follows: -# | english-word : german-transmission -# It's also only possible to translate english to german but not reciprocal. -# Use the following oneliner to turn back the sort order: -# $ awk -F ':' '{ print $2" : "$1" "$3 }' \ -# /usr/local/lib/words/en-de.ISO-8859-1.vok > ~/.translate/de-en.ISO-8859-1.vok -#f5# Translates a word -trans() { - emulate -L zsh - case "$1" in - -[dD]*) - translate -l de-en $2 - ;; - -[eE]*) - translate -l en-de $2 - ;; - *) - echo "Usage: $0 { -D | -E }" - echo " -D == German to English" - echo " -E == English to German" - esac -} - -# Usage: simple-extract -# Using option -d deletes the original archive file. 
-#f5# Smart archive extractor -simple-extract() { - emulate -L zsh - setopt extended_glob noclobber - local DELETE_ORIGINAL DECOMP_CMD USES_STDIN USES_STDOUT GZTARGET WGET_CMD - local RC=0 - zparseopts -D -E "d=DELETE_ORIGINAL" - for ARCHIVE in "${@}"; do - case $ARCHIVE in - *.(tar.bz2|tbz2|tbz)) - DECOMP_CMD="tar -xvjf -" - USES_STDIN=true - USES_STDOUT=false - ;; - *.(tar.gz|tgz)) - DECOMP_CMD="tar -xvzf -" - USES_STDIN=true - USES_STDOUT=false - ;; - *.(tar.xz|txz|tar.lzma)) - DECOMP_CMD="tar -xvJf -" - USES_STDIN=true - USES_STDOUT=false - ;; - *.tar) - DECOMP_CMD="tar -xvf -" - USES_STDIN=true - USES_STDOUT=false - ;; - *.rar) - DECOMP_CMD="unrar x" - USES_STDIN=false - USES_STDOUT=false - ;; - *.lzh) - DECOMP_CMD="lha x" - USES_STDIN=false - USES_STDOUT=false - ;; - *.7z) - DECOMP_CMD="7z x" - USES_STDIN=false - USES_STDOUT=false - ;; - *.(zip|jar)) - DECOMP_CMD="unzip" - USES_STDIN=false - USES_STDOUT=false - ;; - *.deb) - DECOMP_CMD="ar -x" - USES_STDIN=false - USES_STDOUT=false - ;; - *.bz2) - DECOMP_CMD="bzip2 -d -c -" - USES_STDIN=true - USES_STDOUT=true - ;; - *.(gz|Z)) - DECOMP_CMD="gzip -d -c -" - USES_STDIN=true - USES_STDOUT=true - ;; - *.(xz|lzma)) - DECOMP_CMD="xz -d -c -" - USES_STDIN=true - USES_STDOUT=true - ;; - *) - print "ERROR: '$ARCHIVE' has unrecognized archive type." >&2 - RC=$((RC+1)) - continue - ;; - esac - - if ! check_com ${DECOMP_CMD[(w)1]}; then - echo "ERROR: ${DECOMP_CMD[(w)1]} not installed." >&2 - RC=$((RC+2)) - continue - fi - - GZTARGET="${ARCHIVE:t:r}" - if [[ -f $ARCHIVE ]] ; then - - print "Extracting '$ARCHIVE' ..." - if $USES_STDIN; then - if $USES_STDOUT; then - ${=DECOMP_CMD} < "$ARCHIVE" > $GZTARGET - else - ${=DECOMP_CMD} < "$ARCHIVE" - fi - else - if $USES_STDOUT; then - ${=DECOMP_CMD} "$ARCHIVE" > $GZTARGET - else - ${=DECOMP_CMD} "$ARCHIVE" - fi - fi - [[ $? -eq 0 && -n "$DELETE_ORIGINAL" ]] && rm -f "$ARCHIVE" - - elif [[ "$ARCHIVE" == (#s)(https|http|ftp)://* ]] ; then - if check_com curl; then - WGET_CMD="curl -L -k -s -o -" - elif check_com wget; then - WGET_CMD="wget -q -O - --no-check-certificate" - else - print "ERROR: neither wget nor curl is installed" >&2 - RC=$((RC+4)) - continue - fi - print "Downloading and Extracting '$ARCHIVE' ..." - if $USES_STDIN; then - if $USES_STDOUT; then - ${=WGET_CMD} "$ARCHIVE" | ${=DECOMP_CMD} > $GZTARGET - RC=$((RC+$?)) - else - ${=WGET_CMD} "$ARCHIVE" | ${=DECOMP_CMD} - RC=$((RC+$?)) - fi - else - if $USES_STDOUT; then - ${=DECOMP_CMD} =(${=WGET_CMD} "$ARCHIVE") > $GZTARGET - else - ${=DECOMP_CMD} =(${=WGET_CMD} "$ARCHIVE") - fi - fi - - else - print "ERROR: '$ARCHIVE' is neither a valid file nor a supported URI." 
>&2 - RC=$((RC+8)) - fi - done - return $RC -} - -__archive_or_uri() -{ - _alternative \ - 'files:Archives:_files -g "*.(#l)(tar.bz2|tbz2|tbz|tar.gz|tgz|tar.xz|txz|tar.lzma|tar|rar|lzh|7z|zip|jar|deb|bz2|gz|Z|xz|lzma)"' \ - '_urls:Remote Archives:_urls' -} - -_simple_extract() -{ - _arguments \ - '-d[delete original archivefile after extraction]' \ - '*:Archive Or Uri:__archive_or_uri' -} -compdef _simple_extract simple-extract -alias se=simple-extract - -#f5# Set all ulimit parameters to \kbd{unlimited} -allulimit() { - ulimit -c unlimited - ulimit -d unlimited - ulimit -f unlimited - ulimit -l unlimited - ulimit -n unlimited - ulimit -s unlimited - ulimit -t unlimited -} - -#f5# Change the xterm title from within GNU-screen -xtrename() { - emulate -L zsh - if [[ $1 != "-f" ]] ; then - if [[ -z ${DISPLAY} ]] ; then - printf 'xtrename only makes sense in X11.\n' - return 1 - fi - else - shift - fi - if [[ -z $1 ]] ; then - printf 'usage: xtrename [-f] "title for xterm"\n' - printf ' renames the title of xterm from _within_ screen.\n' - printf ' also works without screen.\n' - printf ' will not work if DISPLAY is unset, use -f to override.\n' - return 0 - fi - print -n "\eP\e]0;${1}\C-G\e\\" - return 0 -} - -# TODO: -# Rewrite this by either using tinyurl.com's API -# or using another shortening service to comply with -# tinyurl.com's policy. -# -# Create small urls via http://tinyurl.com using wget(1). -#function zurl() { -# emulate -L zsh -# [[ -z $1 ]] && { print "USAGE: zurl " ; return 1 } -# -# local PN url tiny grabber search result preview -# PN=$0 -# url=$1 -## Check existence of given URL with the help of ping(1). -## N.B. ping(1) only works without an eventual given protocol. -# ping -c 1 ${${url#(ftp|http)://}%%/*} >& /dev/null || \ -# read -q "?Given host ${${url#http://*/}%/*} is not reachable by pinging. Proceed anyway? [y|n] " -# -# if (( $? == 0 )) ; then -## Prepend 'http://' to given URL where necessary for later output. -# [[ ${url} != http(s|)://* ]] && url='http://'${url} -# tiny='http://tinyurl.com/create.php?url=' -# if check_com -c wget ; then -# grabber='wget -O- -o/dev/null' -# else -# print "wget is not available, but mandatory for ${PN}. Aborting." -# fi -## Looking for i.e.`copy('http://tinyurl.com/7efkze')' in TinyURL's HTML code. -# search='copy\(?http://tinyurl.com/[[:alnum:]]##*' -# result=${(M)${${${(f)"$(${=grabber} ${tiny}${url})"}[(fr)${search}*]}//[()\';]/}%%http:*} -## TinyURL provides the rather new feature preview for more confidence. -# preview='http://preview.'${result#http://} -# -# printf '%s\n\n' "${PN} - Shrinking long URLs via webservice TinyURL ." -# printf '%s\t%s\n\n' 'Given URL:' ${url} -# printf '%s\t%s\n\t\t%s\n' 'TinyURL:' ${result} ${preview} -# else -# return 1 -# fi -#} - -#f2# Find history events by search pattern and list them by date. -whatwhen() { - emulate -L zsh - local usage help ident format_l format_s first_char remain first last - usage='USAGE: whatwhen [options] ' - help='Use `whatwhen -h'\'' for further explanations.' - ident=${(l,${#${:-Usage: }},, ,)} - format_l="${ident}%s\t\t\t%s\n" - format_s="${format_l//(\\t)##/\\t}" - # Make the first char of the word to search for case - # insensitive; e.g. [aA] - first_char=[${(L)1[1]}${(U)1[1]}] - remain=${1[2,-1]} - # Default search range is `-100'. - first=${2:-\-100} - # Optional, just used for ` ' given. - last=$3 - case $1 in - ("") - printf '%s\n\n' 'ERROR: No search string specified. Aborting.' 
- printf '%s\n%s\n\n' ${usage} ${help} && return 1 - ;; - (-h) - printf '%s\n\n' ${usage} - print 'OPTIONS:' - printf $format_l '-h' 'show help text' - print '\f' - print 'SEARCH RANGE:' - printf $format_l "'0'" 'the whole history,' - printf $format_l '-' 'offset to the current history number; (default: -100)' - printf $format_s '<[-]first> []' 'just searching within a give range' - printf '\n%s\n' 'EXAMPLES:' - printf ${format_l/(\\t)/} 'whatwhen grml' '# Range is set to -100 by default.' - printf $format_l 'whatwhen zsh -250' - printf $format_l 'whatwhen foo 1 99' - ;; - (\?) - printf '%s\n%s\n\n' ${usage} ${help} && return 1 - ;; - (*) - # -l list results on stout rather than invoking $EDITOR. - # -i Print dates as in YYYY-MM-DD. - # -m Search for a - quoted - pattern within the history. - fc -li -m "*${first_char}${remain}*" $first $last - ;; - esac -} - -# mercurial related stuff -if check_com -c hg ; then - # gnu like diff for mercurial - # http://www.selenic.com/mercurial/wiki/index.cgi/TipsAndTricks - #f5# GNU like diff for mercurial - hgdi() { - emulate -L zsh - for i in $(hg status -marn "$@") ; diff -ubwd <(hg cat "$i") "$i" - } - - # build debian package - #a2# Alias for \kbd{hg-buildpackage} - alias hbp='hg-buildpackage' - - # execute commands on the versioned patch-queue from the current repos - alias mq='hg -R $(readlink -f $(hg root)/.hg/patches)' - - # diffstat for specific version of a mercurial repository - # hgstat => display diffstat between last revision and tip - # hgstat 1234 => display diffstat between revision 1234 and tip - #f5# Diffstat for specific version of a mercurial repos - hgstat() { - emulate -L zsh - [[ -n "$1" ]] && hg diff -r $1 -r tip | diffstat || hg export tip | diffstat - } - -fi # end of check whether we have the 'hg'-executable - -# grml-small cleanups - -# The following is used to remove zsh-config-items that do not work -# in grml-small by default. -# If you do not want these adjustments (for whatever reason), set -# $GRMLSMALL_SPECIFIC to 0 in your .zshrc.pre file (which this configuration -# sources if it is there). 
- -if (( GRMLSMALL_SPECIFIC > 0 )) && isgrmlsmall ; then - - unset abk[V] - unalias 'V' &> /dev/null - unfunction vman &> /dev/null - unfunction viless &> /dev/null - unfunction 2html &> /dev/null - - # manpages are not in grmlsmall - unfunction manzsh &> /dev/null - unfunction man2 &> /dev/null - -fi - -zrclocal - -## genrefcard.pl settings - -### doc strings for external functions from files -#m# f5 grml-wallpaper() Sets a wallpaper (try completion for possible values) - -### example: split functions-search 8,16,24,32 -#@# split functions-search 8 - -for file in $HOME/.zsh/*; do - source $file -done - -## END OF FILE ################################################################# -# vim:filetype=zsh foldmethod=marker autoindent expandtab shiftwidth=4 -# Local variables: -# mode: sh -# End: diff --git a/shelly/test/examples/color.hs b/shelly/test/examples/color.hs deleted file mode 100644 index aadb5def..00000000 --- a/shelly/test/examples/color.hs +++ /dev/null @@ -1,12 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ExtendedDefaultRules #-} -import Shelly -import System.Process (rawSystem) -import Control.Monad (void) -import Data.Text (Text) - -default (Text) - -main = shelly $ do - void $ liftIO $ rawSystem "ls" ["--color=auto", "../dist"] - run_ "ls" ["--color=auto", "../dist"] diff --git a/shelly/test/examples/drain.hs b/shelly/test/examples/drain.hs deleted file mode 100644 index 05fa9e6d..00000000 --- a/shelly/test/examples/drain.hs +++ /dev/null @@ -1,19 +0,0 @@ -{-# Language OverloadedStrings, ExtendedDefaultRules #-} -import Prelude hiding (FilePath) -import Shelly -import Control.Monad (void) -import Data.Text (Text) - -default (Text) - -main :: IO () -main = do - let exDir = "./examples" - void $ shelly $ do - let strs = ["a", "b"] :: [String] - let texts = ["a", "b"] :: [Text] - let inferred = ["a", "b"] - res <- cmd (exDir "drain.sh") strs texts inferred - echo "haskell done" - echo res - cmd $ exDir "printer.sh" diff --git a/shelly/test/examples/drain.sh b/shelly/test/examples/drain.sh deleted file mode 100755 index 00a7c3b6..00000000 --- a/shelly/test/examples/drain.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/sh -echo "starting" -sleep 2 -echo "finished" diff --git a/shelly/test/examples/printer.sh b/shelly/test/examples/printer.sh deleted file mode 100755 index aa6bac09..00000000 --- a/shelly/test/examples/printer.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh - -while true; do - echo "hello" - sleep 1 -done diff --git a/shelly/test/examples/run-handles.hs b/shelly/test/examples/run-handles.hs deleted file mode 100644 index b1b4215e..00000000 --- a/shelly/test/examples/run-handles.hs +++ /dev/null @@ -1,8 +0,0 @@ -{-# Language OverloadedStrings, ExtendedDefaultRules #-} -import Shelly --- This test runs, but causes this error to show up: --- Exception: cannot access an inherited pipe -main = shelly $ - runHandles "bash" ["examples/test.sh"] handles doNothing - where handles = [InHandle Inherit, OutHandle Inherit, ErrorHandle Inherit] - doNothing _ _ _ = return "" diff --git a/shelly/test/examples/test.sh b/shelly/test/examples/test.sh deleted file mode 100755 index 68d8222f..00000000 --- a/shelly/test/examples/test.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/bash -echo hi diff --git a/shelly/test/src/CopySpec.hs b/shelly/test/src/CopySpec.hs deleted file mode 100644 index c601c22b..00000000 --- a/shelly/test/src/CopySpec.hs +++ /dev/null @@ -1,85 +0,0 @@ -{-# Language CPP #-} -module CopySpec ( copySpec ) where - -import TestInit - -#if defined(__GLASGOW_HASKELL__) && 
__GLASGOW_HASKELL__ < 706 -import Prelude hiding ( FilePath, catch) -#else -import Prelude hiding ( FilePath) -#endif -import Control.Monad (forM_) -import System.IO.Error -import Help - -copySpec :: Spec -copySpec = do - let b = "b" - let c = "c" - describe "cp file" $ do - it "cp to same dir" $ - forM_ [cp, cp_r] $ \copier -> do - res <- shelly $ - within_dir "test/a" $ do - writefile b "testing" - copier b c - readfile c - res @?= "testing" - - it "cp to other dir" $ - forM_ [cp, cp_r] $ \copier -> do - res <- shelly $ - within_dir "test/a" $ do - writefile b "testing" - mkdir c - copier b c - readfile "c/b" - res @?= "testing" - - describe "cp dir" $ do - it "to dir does not exist: create the to dir" $ do - res <- shelly $ - within_dir "test/a" $ do - mkdir b - writefile "b/d" "" - cp_r b c - cIsDir <- test_d c - liftIO $ assert $ cIsDir - test_f "c/d" - assert res - - it "to dir exists: creates a nested directory, full to path given" $ do - res <- shelly $ - within_dir "test/a" $ do - mkdir b - mkdir c - writefile "b/d" "" - cp_r b $ cb - cIsDir <- test_d c - liftIO $ assert $ cIsDir - bIsDir <- test_d $ cb - liftIO $ assert $ bIsDir - test_f "c/b/d" - assert res - - it "to dir exists: creates a nested directory, partial to path given" $ do - res <- shelly $ - within_dir "test/a" $ do - mkdir b - mkdir c - writefile "b/d" "" - cp_r b $ c - cIsDir <- test_d c - liftIO $ assert $ cIsDir - bIsDir <- test_d $ cb - liftIO $ assert $ bIsDir - test_f "c/b/d" - assert res - - it "copies the same dir" $ do - shelly $ - within_dir "test/a" $ do - mkdir b - writefile "b/d" "" - cp_r b b `catch_sh` (\e -> liftIO $ assert $ isUserError e) - assert True diff --git a/shelly/test/src/EnvSpec.hs b/shelly/test/src/EnvSpec.hs deleted file mode 100644 index c42ae06c..00000000 --- a/shelly/test/src/EnvSpec.hs +++ /dev/null @@ -1,36 +0,0 @@ -{-# LANGUAGE CPP #-} -module EnvSpec ( envSpec ) where - -import TestInit -#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 706 -import Prelude hiding ( FilePath, catch) -#else -import Prelude hiding ( FilePath) -#endif -import Data.Maybe - -envSpec :: Spec -envSpec = do - describe "getting unset env variables" $ do - it "get_env" $ do - res <- shelly $ get_env "FOOBARSHELLY" - assert $ isNothing res - - it "get_env_text" $ do - res <- shelly $ get_env_text "FOOBARSHELLY" - assert $ res == "" - - describe "with SHELLY var set" $ do - it "get_env" $ do - res <- shelly $ do - setenv "SHELLY" "test" - get_env "SHELLY" - assert $ res == Just "test" - - it "get_env_text" $ do - res <- shelly $ do - setenv "SHELLY" "test" - get_env_text "SHELLY" - assert $ res == "test" - - diff --git a/shelly/test/src/FailureSpec.hs b/shelly/test/src/FailureSpec.hs deleted file mode 100644 index 329585c6..00000000 --- a/shelly/test/src/FailureSpec.hs +++ /dev/null @@ -1,29 +0,0 @@ -module FailureSpec ( failureSpec ) where - -import TestInit - -failureSpec :: Spec -failureSpec = do - let discardException action = shellyFailDir $ catchany_sh action (\_ -> return ()) - - describe "failure set to stderr" $ - it "writes a failure message to stderr" $ do - shellyFailDir $ discardException $ - liftIO $ shelly $ do - test_d ".shelly" >>= liftIO . assert . not - echo "testing" - error "bam!" - assert . not =<< shellyFailDir (test_d ".shelly") - - describe "failure set to directory" $ - it "writes a failure message to a .shelly directory" $ do - shellyFailDir $ discardException $ - shellyFailDir $ do - test_d ".shelly" >>= liftIO . assert . not - echo "testing" - error "bam!" 
- assert =<< shellyFailDir ( do - exists <- test_d ".shelly" - rm_rf ".shelly" - return exists - ) diff --git a/shelly/test/src/FindSpec.hs b/shelly/test/src/FindSpec.hs deleted file mode 100644 index 8734696f..00000000 --- a/shelly/test/src/FindSpec.hs +++ /dev/null @@ -1,115 +0,0 @@ -module FindSpec ( findSpec ) where - -import TestInit -import Data.List (sort) -import System.Directory (createDirectoryIfMissing) -import System.PosixCompat.Files (createSymbolicLink, fileExist) -import qualified System.FilePath as SF - -createSymlinkForTest :: IO () -createSymlinkForTest = do - createDirectoryIfMissing False symDir - fexist <- fileExist (symDir SF. "symlinked_dir") - if fexist - then return () - else createSymbolicLink - (".." SF. "symlinked_dir") - (symDir SF. "symlinked_dir") - where - rootDir = "test" SF. "data" - symDir = rootDir SF. "dir" - -findSpec :: Spec -findSpec = do - describe "relativeTo" $ do - it "relative to non-existent dir" $ do - res <- shelly $ relativeTo "rel/" "rel/foo" - res @?= "foo" - res2 <- shelly $ relativeTo "rel" "rel/foo" - res2 @?= "foo" - - it "relative to existing dir" $ do - res <- shelly $ relativeTo "test/" "test/drain.hs" - res @?= "drain.hs" - res2 <- shelly $ relativeTo "test" "test/drain.hs" - res2 @?= "drain.hs" - - it "abs path relative to existing dir" $ do - res <- shelly $ do - d <- pwd - relativeTo "test/" $ d "test/drain.hs" - res @?= "drain.hs" - - describe "relative listing" $ do - it "lists relative files" $ do - res <- shelly $ cd "test/src" >> ls "." - sort res @?= ["./CopySpec.hs", "./EnvSpec.hs", "./FailureSpec.hs", - "./FindSpec.hs", "./Help.hs", "./LiftedSpec.hs", "./LogWithSpec.hs", "./MoveSpec.hs", - "./ReadFileSpec.hs", "./RmSpec.hs", "./RunSpec.hs", "./SshSpec.hs", - "./TestInit.hs", "./TestMain.hs", - "./WhichSpec.hs", "./WriteSpec.hs", "./sleep.hs"] - - it "finds relative files" $ do - res <- shelly $ cd "test/src" >> find "." 
- sort res @?= ["./CopySpec.hs", "./EnvSpec.hs", "./FailureSpec.hs", - "./FindSpec.hs", "./Help.hs", "./LiftedSpec.hs", "./LogWithSpec.hs", "./MoveSpec.hs", - "./ReadFileSpec.hs", "./RmSpec.hs", "./RunSpec.hs", "./SshSpec.hs", - "./TestInit.hs", "./TestMain.hs", - "./WhichSpec.hs", "./WriteSpec.hs", "./sleep.hs"] - - describe "find" $ do - it "empty list for empty dir" $ do - let d = "deleteme" - res <- shelly $ do - mkdir_p d - res <- find d - rm_rf d - return res - res @?= [] - - it "lists relative files" $ do - res <- shelly $ find "test/src" - sort res @?= ["test/src/CopySpec.hs", "test/src/EnvSpec.hs", "test/src/FailureSpec.hs", - "test/src/FindSpec.hs", "test/src/Help.hs", "test/src/LiftedSpec.hs", - "test/src/LogWithSpec.hs", "test/src/MoveSpec.hs", "test/src/ReadFileSpec.hs", - "test/src/RmSpec.hs", "test/src/RunSpec.hs", "test/src/SshSpec.hs", - "test/src/TestInit.hs", "test/src/TestMain.hs", "test/src/WhichSpec.hs", "test/src/WriteSpec.hs", - "test/src/sleep.hs"] - - it "lists absolute files" $ do - res <- shelly $ relPath "test/src" >>= find >>= mapM (relativeTo "test/src") - sort res @?= ["CopySpec.hs", "EnvSpec.hs", "FailureSpec.hs", "FindSpec.hs", - "Help.hs", "LiftedSpec.hs", "LogWithSpec.hs", "MoveSpec.hs", - "ReadFileSpec.hs", "RmSpec.hs", "RunSpec.hs", "SshSpec.hs", - "TestInit.hs", "TestMain.hs", - "WhichSpec.hs", "WriteSpec.hs", "sleep.hs"] - - before createSymlinkForTest $ do - it "follow symlinks" $ - do res <- - shelly $ - followSymlink True $ - relPath "test/data" >>= find >>= mapM (relativeTo "test/data") - sort res @?= - [ "dir" - , "nonascii.txt" - , "symlinked_dir" - , "zshrc" - , "dir/symlinked_dir" - , "dir/symlinked_dir/hoge_file" - , "symlinked_dir/hoge_file" - ] - it "not follow symlinks" $ - do res <- - shelly $ - followSymlink False $ - relPath "test/data" >>= find >>= mapM (relativeTo "test/data") - sort res @?= - [ "dir" - , "nonascii.txt" - , "symlinked_dir" - , "zshrc" - , "dir/symlinked_dir" - , "symlinked_dir/hoge_file" - ] - diff --git a/shelly/test/src/Help.hs b/shelly/test/src/Help.hs deleted file mode 100644 index 3e98d8cc..00000000 --- a/shelly/test/src/Help.hs +++ /dev/null @@ -1,24 +0,0 @@ -module Help ( - with_dir, within_dir, - (@==) -) where - -import Shelly -#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 706 -import Prelude hiding ( catch, FilePath ) -#else -import Prelude hiding ( FilePath ) -#endif -import Test.HUnit -import Control.Monad.Trans ( MonadIO ) - -(@==) :: (Eq a, Show a, MonadIO m) => a -> a -> m () -(@==) a b = liftIO (a @?= b) - -with_dir :: FilePath -> Sh a -> Sh a -with_dir d action = - mkdir_p d >> (action `finally_sh` rm_rf d) - -within_dir :: FilePath -> Sh a -> Sh a -within_dir d action = - with_dir d $ chdir d action diff --git a/shelly/test/src/LiftedSpec.hs b/shelly/test/src/LiftedSpec.hs deleted file mode 100644 index 6abb71eb..00000000 --- a/shelly/test/src/LiftedSpec.hs +++ /dev/null @@ -1,23 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - -module LiftedSpec ( liftedSpec ) where - -import Test.HUnit hiding (path) -import Test.Hspec -import Shelly.Lifted -import Control.Concurrent.Async.Lifted -import Control.Monad.Trans.Maybe -import Test.Hspec.HUnit () - -liftedSpec :: Spec -liftedSpec = - describe "basic actions" $ - it "lifted sub" $ do - xs <- shelly $ - runMaybeT $ do - echo "Hello!" 
- sub $ withTmpDir $ \p -> wait =<< (async $ do - writefile (p "test.txt") "hello" - readfile (p "test.txt") - ) - xs @?= Just "hello" diff --git a/shelly/test/src/LogWithSpec.hs b/shelly/test/src/LogWithSpec.hs deleted file mode 100644 index dbb99d76..00000000 --- a/shelly/test/src/LogWithSpec.hs +++ /dev/null @@ -1,19 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -module LogWithSpec ( logWithSpec ) where - -import TestInit -import Prelude hiding (FilePath) - -import Control.Concurrent (newEmptyMVar, takeMVar, putMVar) -import Data.Text (Text) -default (Text) - -logWithSpec :: Spec -logWithSpec = - describe "withOutputWriter" $ - it "calls writer function with handler and stdout output" $ do - outputVar <- newEmptyMVar - shelly $ log_stdout_with (putMVar outputVar) - $ run_ "echo" ["single line output"] - result <- takeMVar outputVar - assertEqual "expecting output" "single line output" result diff --git a/shelly/test/src/MoveSpec.hs b/shelly/test/src/MoveSpec.hs deleted file mode 100644 index 606006cb..00000000 --- a/shelly/test/src/MoveSpec.hs +++ /dev/null @@ -1,76 +0,0 @@ -module MoveSpec (moveSpec) where - -import TestInit -import Help - -moveSpec :: Spec -moveSpec = do - let b = "b" - let c = "c" - describe "mv file" $ do - it "to same dir" $ do - res <- shelly $ - within_dir "test/a" $ do - writefile b "testing" - mv b c - readfile c - res @?= "testing" - - it "to other dir" $ do - res <- shelly $ - within_dir "test/a" $ do - writefile b "testing" - mkdir c - mv b c - readfile "c/b" - res @?= "testing" - - describe "mv dir" $ do - it "to dir does not exist: create the to dir" $ do - res <- shelly $ - within_dir "test/a" $ do - mkdir b - writefile "b/d" "" - mv b c - cIsDir <- test_d c - liftIO $ assert cIsDir - test_f "c/d" - assert res - - it "to dir exists: creates a nested directory, full to path given" $ do - res <- shelly $ - within_dir "test/a" $ do - mkdir b - mkdir c - writefile "b/d" "" - mv b $ cb - cIsDir <- test_d c - liftIO $ assert cIsDir - bIsDir <- test_d $ cb - liftIO $ assert bIsDir - test_f "c/b/d" - assert res - - it "to dir exists: creates a nested directory, partial to path given" $ do - res <- shelly $ - within_dir "test/a" $ do - mkdir b - mkdir c - writefile "b/d" "" - mv b $ c - cIsDir <- test_d c - liftIO $ assert cIsDir - bIsDir <- test_d $ cb - liftIO $ assert bIsDir - test_f "c/b/d" - assert res - - {- - it "mv the same dir" $ do - shelly $ do - within_dir "test/a" $ do - mkdir b - writefile "b/d" "" - mv b b `catch_sh` (\e -> liftIO $ assert $ isUserError e) - assert True - -} diff --git a/shelly/test/src/ReadFileSpec.hs b/shelly/test/src/ReadFileSpec.hs deleted file mode 100644 index 0a7b4bba..00000000 --- a/shelly/test/src/ReadFileSpec.hs +++ /dev/null @@ -1,23 +0,0 @@ -{-# LANGUAGE CPP #-} - -module ReadFileSpec (readFileSpec) where - -import TestInit -#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 706 -import Prelude hiding ( FilePath, catch) -#else -import Prelude hiding ( FilePath) -#endif -import qualified Data.ByteString as BS -import qualified Data.Text as T - -readFileSpec :: Spec -readFileSpec = describe "file with invalid encoding" $ do - it "readBinary" $ do - res <- shelly $ readBinary "test/data/zshrc" - assert (BS.length res > 0) - - it "readfile" $ do - res <- shelly $ readfile "test/data/zshrc" - assert (T.length res > 0) - diff --git a/shelly/test/src/RmSpec.hs b/shelly/test/src/RmSpec.hs deleted file mode 100644 index b2e8bdfb..00000000 --- a/shelly/test/src/RmSpec.hs +++ /dev/null @@ -1,82 +0,0 @@ -module RmSpec (rmSpec) 
where - -import TestInit -import Help - -rmSpec :: Spec -rmSpec = do - let b = "b" - let d = "dir" - describe "rm file" $ do - it "rm" $ do - res <- shelly $ do - writefile b "testing" - (True @==) =<< test_f b - rm b - test_f b - assert (not res) - - it "rm_r" $ do - res <- shelly $ do - writefile b "testing" - (True @==) =<< test_f b - rm b - test_f b - assert $ not res - - it "rm_f" $ do - res <- shelly $ do - (False @==) =<< test_f b - rm_f b - test_f b - assert $ not res - - describe "rm_rf dir" $ do - it "empty dir" $ do - res <- shelly $ do - mkdir d - rm_rf d - test_d d - assert $ not res - - it "dir with file" $ do - res <- shelly $ do - mkdir d - rm d `catchany_sh` (\_ -> return ()) - (True @==) =<< test_d d - writefile (d b) "testing" - rm d `catchany_sh` (\_ -> return ()) - (True @==) =<< test_d d - rm_rf d - test_d d - assert $ not res - - describe "rm symlink" $ do - let l = "l" - it "rm" $ do - res <- shelly $ do - writefile b "b" - cmd "ln" "-s" b l - rm l - test_f b - assert res - shelly $ rm b - - it "rm_f" $ do - res <- shelly $ do - writefile b "b" - cmd "ln" "-s" b l - rm_f l - test_f b - assert res - shelly $ rm_f b - - it "rm_rf" $ do - res <- shelly $ do - mkdir d - writefile (db) "b" - cmd "ln" "-s" (db) l - rm_rf l - test_f (db) - assert res - shelly $ rm_rf d diff --git a/shelly/test/src/RunSpec.hs b/shelly/test/src/RunSpec.hs deleted file mode 100644 index 7ad64f8c..00000000 --- a/shelly/test/src/RunSpec.hs +++ /dev/null @@ -1,58 +0,0 @@ -module RunSpec ( runSpec ) where - -import TestInit - -import qualified Data.Text as T -import System.IO - -runSpec :: Spec -runSpec = do - describe "run" $ do - it "simple command" $ do - res <- shelly $ run "echo" [ "wibble" ] - res @?= "wibble\n" - - it "with escaping" $ do - res <- shelly $ run "echo" [ "*" ] - res @?= "*\n" - - it "without escaping" $ do - res <- shelly $ escaping False $ run "echo" [ "*" ] - assert $ "README.md" `elem` T.words res - - it "with binary handle mode" $ do - res <- shelly $ onCommandHandles (initOutputHandles (flip hSetBinaryMode True)) - $ run "cat" [ "test/data/nonascii.txt" ] - res @?= "Selbstverst\228ndlich \252berraschend\n" - - -- Bash-related commands - describe "bash" $ do - it "simple command" $ do - res <- shelly $ bash "echo" [ "wibble" ] - res @?= "wibble\n" - - it "without escaping" $ do - res <- shelly $ escaping False $ bash "echo" [ "*" ] - assert $ "README.md" `elem` T.words res - - it "with binary handle mode" $ do - res <- shelly $ onCommandHandles (initOutputHandles (flip hSetBinaryMode True)) - $ bash "cat" [ "test/data/nonascii.txt" ] - res @?= "Selbstverst\228ndlich \252berraschend\n" - - {- This throws spurious errors on some systems - it "can detect failing commands in pipes" $ do - eCode <- shelly $ escaping False $ errExit False $ do - bashPipeFail - bash_ "echo" ["'foo'", "|", "ls", "\"eoueouoe\"", "2>/dev/null", "|", "echo", "'bar'" ] - lastExitCode - eCode `shouldSatisfy` (/= 0) - -} - - it "preserve pipe behaviour" $ do - (eCode, res) <- shelly $ escaping False $ errExit False $ do - res <- bash "echo" [ "'foo'", "|", "echo", "'bar'" ] - eCode <- lastExitCode - return (eCode, res) - res @?= "bar\n" - eCode @?= 0 diff --git a/shelly/test/src/SshSpec.hs b/shelly/test/src/SshSpec.hs deleted file mode 100644 index 9c47e873..00000000 --- a/shelly/test/src/SshSpec.hs +++ /dev/null @@ -1,39 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -module SshSpec ( sshSpec ) where - -import TestInit -import qualified Data.Text as T - -sshSpec :: Spec -sshSpec = do - let q = "'" -- a 
single quote - let qq = "'\\''" -- quote of a single quote - let qqq = T.concat [qq, "\\", qq, qq] -- quote of qq - describe "sshCommandText" $ do - it "simple command" $ do - let res = sshCommandText [("wibble", [])] SeqSsh - res @?= T.concat [q, qq, "wibble", qq, q] - - it "space command" $ do - let res = sshCommandText [("to", ["outer space"])] SeqSsh - res @?= T.concat [q, qq, "to", qq, " ", qq, "outer space", qq ,q] - - it "multiple space commands" $ do - let res = sshCommandText [("to", ["outer space"]), ("and", ["back again"])] SeqSsh - res @?= T.concat - [ q, qq, "to", qq, " ", qq, "outer space", qq - , " && " - , qq, "and", qq, " ", qq, "back again", qq, q - ] - - it "commands with quotes and spaces" $ do - let res = sshCommandText [ ("echo", ["Godfater's brother, Tom says: \"huh??\""]) - , ("foo", ["--dir", "Tom's father/"])] SeqSsh - res @?= T.concat - [ q, qq, "echo", qq, " " - , qq, "Godfater", qqq, "s brother, Tom says: \"huh??\"", qq - , " && " - , qq, "foo", qq, " " - , qq, "--dir", qq, " " - , qq, "Tom", qqq, "s father/", qq, q - ] diff --git a/shelly/test/src/TestInit.hs b/shelly/test/src/TestInit.hs deleted file mode 100644 index 3c189134..00000000 --- a/shelly/test/src/TestInit.hs +++ /dev/null @@ -1,10 +0,0 @@ -module TestInit (module Export) where - -import Test.HUnit as Export hiding (path) -import Test.Hspec as Export -#ifdef LIFTED -import Shelly.Lifted as Export -#else -import Shelly as Export -#endif -import Test.Hspec.HUnit () diff --git a/shelly/test/src/TestMain.hs b/shelly/test/src/TestMain.hs deleted file mode 100644 index 206423a8..00000000 --- a/shelly/test/src/TestMain.hs +++ /dev/null @@ -1,32 +0,0 @@ - -module Main where - -import ReadFileSpec -import WhichSpec -import WriteSpec -import MoveSpec -import RmSpec -import FindSpec -import EnvSpec -import FailureSpec -import CopySpec -import LiftedSpec -import RunSpec -import SshSpec - -import Test.Hspec - -main :: IO () -main = hspec $ do - readFileSpec - whichSpec - writeSpec - moveSpec - rmSpec - findSpec - envSpec - failureSpec - copySpec - liftedSpec - runSpec - sshSpec diff --git a/shelly/test/src/WhichSpec.hs b/shelly/test/src/WhichSpec.hs deleted file mode 100644 index f1ea279f..00000000 --- a/shelly/test/src/WhichSpec.hs +++ /dev/null @@ -1,17 +0,0 @@ -module WhichSpec (whichSpec) where - -import TestInit - -whichSpec :: Spec -whichSpec = describe "which" $ do - it "gives full path to cabal" $ do - Just _ <- shelly $ which "find" - assert True - - it "recognizes cabal as a path executable" $ do - res <- shelly $ test_px "find" - True @?= res - - it "cannot find missing exe" $ do - Nothing <- shelly $ which "alskjdf;ashlva;ousnva;nj" - assert True diff --git a/shelly/test/src/WriteSpec.hs b/shelly/test/src/WriteSpec.hs deleted file mode 100644 index f8de972d..00000000 --- a/shelly/test/src/WriteSpec.hs +++ /dev/null @@ -1,41 +0,0 @@ -module WriteSpec ( writeSpec ) where - -import TestInit -import Prelude hiding (FilePath) - -import Data.Text (Text) -default (Text) - -createsFile :: FilePath -> (FilePath -> IO ()) -> IO () -createsFile f action = do - exists <- shelly $ test_e f - when exists $ error "cleanup after yourself!" - action f - shelly $ rm f - return () - - -writeSpec :: Spec -writeSpec = do - describe "writefile" $ - it "creates and overwrites a file" $ createsFile "foo" $ \f -> do - assert . (== "a") =<< (shelly $ writefile f "a" >> readfile f) - assert . 
(== "b") =<< (shelly $ writefile f "b" >> readfile f) - - describe "writeBinary" $ - it "creates and overwrites a file" $ createsFile "foo" $ \f -> do - assert . (== "a") =<< (shelly $ writeBinary f "a" >> readBinary f) - assert . (== "b") =<< (shelly $ writeBinary f "b" >> readBinary f) - - describe "appendfile" $ - it "creates and appends a file" $ createsFile "foo" $ \f -> do - assert . (== "a") =<< (shelly $ appendfile f "a" >> readfile f) - assert . (== "ab") =<< (shelly $ appendfile f "b" >> readfile f) - - describe "touchfile" $ - it "creates and updates a file" $ createsFile "foo" $ \f -> do - assert . (== "") =<< (shelly $ touchfile f >> readfile f) - assert . (== "") =<< (shelly $ touchfile f >> readfile f) - - assert . (== "a") =<< (shelly $ - writefile f "a" >> touchfile f >> readfile f) diff --git a/shelly/test/src/sleep.hs b/shelly/test/src/sleep.hs deleted file mode 100644 index ea6ce396..00000000 --- a/shelly/test/src/sleep.hs +++ /dev/null @@ -1,9 +0,0 @@ -{-# Language OverloadedStrings #-} -import Shelly - -main :: IO () -main = - shelly $ do - echo "sleeping" - run "sleep" ["5"] - echo "all done" diff --git a/shelly/test/testall b/shelly/test/testall deleted file mode 100755 index 3ee91a78..00000000 --- a/shelly/test/testall +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/sh - -set -e - -if [ -z "$DEBUG" ]; then - export DEBUG=shelly-testsuite -fi - -SUITE=./dist/build/shelly-testsuite/shelly-testsuite - -rm -f shelly-testsuite.tix -cabal build - -if [ ! -f $SUITE ]; then - cat </dev/null 2>&1 - -cat < *) -> * +class Apply (p :: Type -> Type -> Type) where + type ApplyState p :: (Type -> Type) -> Type apply :: ApplyMonad (ApplyState p) m => p wX wY -> m () unapply :: ApplyMonad (ApplyState p) m => p wX wY -> m () default unapply :: (ApplyMonad (ApplyState p) m, Invert p) => p wX wY -> m () diff --git a/src/Darcs/Patch/ApplyMonad.hs b/src/Darcs/Patch/ApplyMonad.hs index f9f72ec6..fb920b4c 100644 --- a/src/Darcs/Patch/ApplyMonad.hs +++ b/src/Darcs/Patch/ApplyMonad.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE UndecidableInstances #-} @@ -48,7 +48,7 @@ import GHC.Exts ( Constraint ) class (Monad m, ApplyMonad state (ApplyMonadOver state m)) => ApplyMonadTrans state m where - type ApplyMonadOver state m :: * -> * + type ApplyMonadOver state m :: Type -> Type runApplyMonad :: (ApplyMonadOver state m) x -> state m -> m (x, state m) instance MonadThrow m => ApplyMonadTrans Tree m where @@ -59,7 +59,7 @@ evalApplyMonad :: ApplyMonadTrans state m => ApplyMonadOver state m a -> state m -> m a evalApplyMonad action st = fst <$> runApplyMonad action st -type family ApplyMonadOperations (state :: (* -> *) -> *) :: (* -> *) -> Constraint +type family ApplyMonadOperations (state :: (Type -> Type) -> Type) :: (Type -> Type) -> Constraint class MonadThrow m => ApplyMonadTree m where -- a semantic, Tree-based interface for patch application @@ -80,7 +80,7 @@ type instance ApplyMonadOperations Tree = ApplyMonadTree class ( Monad m , ApplyMonadOperations state m ) - => ApplyMonad (state :: (* -> *) -> *) m | m -> state where + => ApplyMonad (state :: (Type -> Type) -> Type) m | m -> state where readFilePS :: ObjectIdOf state -> m B.ByteString diff --git a/src/Darcs/Patch/Conflict.hs b/src/Darcs/Patch/Conflict.hs index f1ec3948..e4c0c684 100644 --- a/src/Darcs/Patch/Conflict.hs +++ b/src/Darcs/Patch/Conflict.hs @@ -7,6 +7,7 @@ module Darcs.Patch.Conflict , 
mangleOrFail , combineConflicts , findConflicting + , isConflicted ) where import Darcs.Prelude @@ -37,8 +38,11 @@ mangleOrFail parts = conflictParts = parts } +isConflicted :: Conflict p => p wX wY -> Bool +isConflicted p = numConflicts p > 0 + class Conflict p where - isConflicted :: p wX wY -> Bool + numConflicts :: p wX wY -> Int -- | The first parameter is a context containing all patches -- preceding the ones for which we want to calculate the conflict -- resolution, which is the second parameter. diff --git a/src/Darcs/Patch/Effect.hs b/src/Darcs/Patch/Effect.hs index 352d6d22..0b2eae0f 100644 --- a/src/Darcs/Patch/Effect.hs +++ b/src/Darcs/Patch/Effect.hs @@ -1,4 +1,3 @@ -{-# OPTIONS_GHC -fno-warn-unrecognised-pragmas #-} module Darcs.Patch.Effect ( Effect(..) ) where import Darcs.Prelude diff --git a/src/Darcs/Patch/Format.hs b/src/Darcs/Patch/Format.hs index 4972f58f..991d29a1 100644 --- a/src/Darcs/Patch/Format.hs +++ b/src/Darcs/Patch/Format.hs @@ -21,7 +21,7 @@ class PatchListFormat p where -- | This type is used to tweak the way that lists of 'p' are shown for a given -- 'Patch' type 'p'. It is needed to maintain backwards compatibility for V1 -- and V2 patches. -data ListFormat (p :: (* -> * -> *)) +data ListFormat (p :: Type -> Type -> Type) = ListFormatDefault -- ^ Show and read lists without braces. | ListFormatV1 -- ^ Show lists with a single layer of braces around -- the outside, except for singletons which have no diff --git a/src/Darcs/Patch/FromPrim.hs b/src/Darcs/Patch/FromPrim.hs index 886f67ed..261e03b4 100644 --- a/src/Darcs/Patch/FromPrim.hs +++ b/src/Darcs/Patch/FromPrim.hs @@ -13,7 +13,7 @@ import Darcs.Patch.Ident ( PatchId ) import Darcs.Patch.Info ( PatchInfo ) class PrimPatch (PrimOf p) => PrimPatchBase p where - type PrimOf (p :: (* -> * -> *)) :: (* -> * -> *) + type PrimOf (p :: Type -> Type -> Type) :: Type -> Type -> Type instance PrimPatchBase p => PrimPatchBase (FL p) where type PrimOf (FL p) = PrimOf p diff --git a/src/Darcs/Patch/Ident.hs b/src/Darcs/Patch/Ident.hs index 66a37939..5d73972f 100644 --- a/src/Darcs/Patch/Ident.hs +++ b/src/Darcs/Patch/Ident.hs @@ -50,7 +50,7 @@ import Darcs.Util.Printer ( Doc ) -- | The reason this is not associated to class 'Ident' is that for technical -- reasons we want to be able to define type instances for patches that don't -- have an identity and therefore cannot be lawful members of class 'Ident'. -type family PatchId (p :: * -> * -> *) +type family PatchId (p :: Type -> Type -> Type) {- | Class of patches that have an identity/name. diff --git a/src/Darcs/Patch/Info.hs b/src/Darcs/Patch/Info.hs index 5f3834fe..8a56ecc2 100644 --- a/src/Darcs/Patch/Info.hs +++ b/src/Darcs/Patch/Info.hs @@ -57,7 +57,6 @@ import Control.Monad ( when, unless, void ) import Darcs.Util.ByteString ( decodeLocale , packStringToUTF8 - , unlinesPS , unpackPSFromUTF8 ) import qualified Darcs.Util.Parser as RM ( take ) @@ -67,18 +66,18 @@ import Darcs.Util.Parser as RM ( skipSpace, char, takeTillChar, linesStartingWithEndingWith) import Darcs.Patch.Show ( ShowPatchFor(..) ) -import qualified Data.ByteString as B (length, splitAt, null - ,isPrefixOf, tail, concat - ,empty, head, cons, append +import qualified Data.ByteString as B (length, splitAt + ,isPrefixOf, concat ,ByteString ) import qualified Data.ByteString.Char8 as BC - ( index, head, notElem, all, unpack, pack ) + ( index, notElem, all, unpack, pack ) import Data.List( isPrefixOf ) import Data.List.NonEmpty ( NonEmpty(..) 
) import qualified Data.List.NonEmpty as NE +import qualified Text.XML.Light as XML import Darcs.Util.Printer ( Doc, packedString, - empty, ($$), (<+>), vcat, text, cyanText, blueText, prefix ) + empty, ($$), (<+>), vcat, text, cyanText, blueText ) import Darcs.Util.IsoDate ( readUTCDate ) import System.Time ( CalendarTime, calendarTimeToString, toClockTime, toCalendarTime ) @@ -317,35 +316,33 @@ friendlyD d = unsafePerformIO $ do ct <- toCalendarTime $ toClockTime $ readPatchDate d return $ calendarTimeToString ct -toXml :: PatchInfo -> Doc +toXml :: PatchInfo -> XML.Element toXml = toXml' True -toXmlShort :: PatchInfo -> Doc +toXmlShort :: PatchInfo -> XML.Element toXmlShort = toXml' False -toXml' :: Bool -> PatchInfo -> Doc +toXml' :: Bool -> PatchInfo -> XML.Element toXml' includeComments pi = - text " text "author='" <> escapeXMLByteString (_piAuthor pi) <> text "'" - <+> text "date='" <> escapeXMLByteString (_piDate pi) <> text "'" - <+> text "local_date='" <> escapeXML (friendlyD $ _piDate pi) <> text "'" - <+> text "inverted='" <> text (show $ _piLegacyIsInverted pi) <> text "'" - <+> text "hash='" <> text (show $ makePatchname pi) <> text "'>" - $$ indent abstract - $$ text "" - where - indent = prefix " " - name = text "" <> escapeXMLByteString (_piName pi) <> text "" - abstract | includeComments = name $$ commentsAsXml (_piLog pi) - | otherwise = name - -commentsAsXml :: [B.ByteString] -> Doc -commentsAsXml comments - | B.length comments' > 0 = text "" - <> escapeXMLByteString comments' - <> text "" - | otherwise = empty - where comments' = unlinesPS comments + -- We do NOT use the high-level accessors piName, piAuthor, etc because + -- metadataToString does not necessarily produce valid unicode Strings. This + -- is important because most programs that take XML as input for further + -- processing simply fail otherwise. + XML.unode "patch" + ( [ XML.Attr (XML.unqual "author") (unpackPSFromUTF8 (_piAuthor pi)) + , XML.Attr (XML.unqual "date") (piDateString pi) + , XML.Attr (XML.unqual "local_date") (friendlyD $ _piDate pi) + , XML.Attr (XML.unqual "inverted") (show $ _piLegacyIsInverted pi) + , XML.Attr (XML.unqual "hash") (show $ makePatchname pi) + ] + , [ XML.unode "name" $ unpackPSFromUTF8 (_piName pi) ] ++ comments + ) + where + -- note that this is supposed to list junk as well, which is why piLog is not + -- appropriate here + comments + | includeComments = map (XML.unode "comment") (map unpackPSFromUTF8 $ _piLog pi) + | otherwise = [] -- escapeXML is duplicated in Patch.lhs and Annotate.lhs -- It should probably be refactored to exist in one place. @@ -353,29 +350,12 @@ escapeXML :: String -> Doc escapeXML = text . strReplace '\'' "'" . strReplace '"' """ . strReplace '>' ">" . strReplace '<' "<" . strReplace '&' "&" --- Escape XML characters in a UTF-8 encoded ByteString, and turn it into a Doc. --- The data will be in the Doc as a bytestring. -escapeXMLByteString :: B.ByteString -> Doc -escapeXMLByteString = packedString . bstrReplace '\'' "'" - . bstrReplace '"' """ - . bstrReplace '>' ">" - . bstrReplace '<' "<" - . 
bstrReplace '&' "&" - strReplace :: Char -> String -> String -> String strReplace _ _ [] = [] strReplace x y (z:zs) | x == z = y ++ strReplace x y zs | otherwise = z : strReplace x y zs -bstrReplace :: Char -> String -> B.ByteString -> B.ByteString -bstrReplace c s bs | B.null bs = B.empty - | otherwise = if BC.head bs == c - then B.append (BC.pack s) - (bstrReplace c s (B.tail bs)) - else B.cons (B.head bs) - (bstrReplace c s (B.tail bs)) - -- | Hash on patch metadata (patch name, author, date, log, and the legacy -- \"inverted\" flag. -- Robust against context changes but does not guarantee patch contents. diff --git a/src/Darcs/Patch/Match.hs b/src/Darcs/Patch/Match.hs index 6519044e..c3172beb 100644 --- a/src/Darcs/Patch/Match.hs +++ b/src/Darcs/Patch/Match.hs @@ -53,12 +53,13 @@ module Darcs.Patch.Match , firstMatch , secondMatch , haveNonrangeMatch - , PatchSetMatch(..) + , PatchSetMatch , patchSetMatch , checkMatchSyntax , hasIndexRange , getMatchingTag , matchAPatchset + , matchOnePatchset , MatchFlag(..) , matchingHead , Matchable @@ -99,8 +100,9 @@ import Data.List ( isPrefixOf, intercalate ) import Data.Char ( toLower ) import Data.Typeable ( Typeable ) -import Darcs.Util.Path ( AbsolutePath ) +import Darcs.Util.Path ( AbsolutePath, toFilePath ) import Darcs.Patch ( hunkMatches, listTouchedFiles ) +import Darcs.Patch.Bundle ( readContextFile ) import Darcs.Patch.Info ( justName, justAuthor, justLog, makePatchname, piDate, piTag ) @@ -194,67 +196,70 @@ parseMatch pattern = "'.\n"++ unlines (map (" "++) $ lines $ show err) -- indent Right m -> Right (makeMatcher pattern m) -matchPattern :: String -> Matcher -matchPattern pattern = +patternmatch :: String -> Matcher +patternmatch pattern = case parseMatch pattern of Left err -> error err Right m -> m matchParser :: CharParser st MatchFun -matchParser = submatcher helpfulErrorMsg +matchParser = (option matchAnyPatch expr <* eof) helpfulErrorMsg where - submatcher = do - m <- option matchAnyPatch submatch - eof - return m - -- When using , Parsec prepends "expecting " to the given error message, -- so the phrasing below makes sense. - helpfulErrorMsg = "valid expressions over: " - ++ intercalate ", " (map (\(name, _, _, _, _) -> name) ps) - ++ "\nfor more help, see `darcs help patterns`." - - ps = primitiveMatchers + helpfulErrorMsg = + "valid expressions over: " + ++ intercalate ", " (map (\(name, _, _, _, _) -> name) primitiveMatchers) + ++ "\nfor more help, see `darcs help patterns`." - -- matchAnyPatch is returned if submatch fails without consuming any - -- input, i.e. if we pass --match '', we want to match anything. + -- matchAnyPatch is returned if expr fails without consuming any + -- input, i.e. if we pass --match '', we want to match anything matchAnyPatch = MatchFun (const True) -submatch :: CharParser st MatchFun -submatch = buildExpressionParser table match - -table :: OperatorTable Char st MatchFun -table = [ [prefix "not" negate_match, - prefix "!" 
negate_match ] - , [binary "||" or_match, - binary "or" or_match, - binary "&&" and_match, - binary "and" and_match ] - ] - where binary name fun = Infix (tryNameAndUseFun name fun) AssocLeft - prefix name fun = Prefix $ tryNameAndUseFun name fun - tryNameAndUseFun name fun = do _ <- trystring name - spaces - return fun - negate_match (MatchFun m) = MatchFun $ \p -> not (m p) - or_match (MatchFun m1) (MatchFun m2) = MatchFun $ \p -> m1 p || m2 p - and_match (MatchFun m1) (MatchFun m2) = MatchFun $ \p -> m1 p && m2 p - -trystring :: String -> CharParser st String -trystring s = try $ string s - -match :: CharParser st MatchFun -match = between spaces spaces (parens submatch <|> choice matchers_) - where - matchers_ = map createMatchHelper primitiveMatchers - -createMatchHelper :: (String, String, String, [String], String -> MatchFun) - -> CharParser st MatchFun -createMatchHelper (key,_,_,_,matcher) = - do _ <- trystring key - spaces - q <- quoted - return $ matcher q + -- parse a non-empty full match expression + expr :: CharParser st MatchFun + expr = buildExpressionParser table term + + table :: OperatorTable Char st MatchFun + table = + [ [ prefix "not" negate_match, prefix "!" negate_match ] + , [ binary "||" or_match + , binary "or" or_match + , binary "&&" and_match + , binary "and" and_match + ] + ] + where + binary name result = Infix (operator name result) AssocLeft + prefix name result = Prefix $ operator name result + operator name result = try (string name) >> spaces >> return result + negate_match (MatchFun m) = MatchFun $ \p -> not (m p) + or_match (MatchFun m1) (MatchFun m2) = MatchFun $ \p -> m1 p || m2 p + and_match (MatchFun m1) (MatchFun m2) = MatchFun $ \p -> m1 p && m2 p + + -- parse a term, i.e. anything we can combine with operators: + -- an expression in parentheses or a primitive match expression + term :: CharParser st MatchFun + term = between spaces spaces (parens expr <|> choice prims) + where + -- the primitive match expression parsers + prims = map prim primitiveMatchers + -- a primitive match expression is a keyword followed by an argument; + -- the result is the passed matcher applied to the argument + prim (key, _, _, _, matcher) = + fmap matcher $ try (string key) >> spaces >> argument + -- an argument in a primitive match expression + argument :: CharParser st String + argument = quoted <|> unquoted "string" + -- quoted string + quoted = + between (char '"') (char '"') (many $ try escaped <|> noneOf "\"") + -- bare (unquoted) string + unquoted = between spaces spaces (many $ noneOf " ()") + -- backslash escaped double quote or backslash + escaped = char '\\' >> oneOf "\\\"" + -- any expression in parentheses + parens = between (string "(") (string ")") -- | The string that is emitted when the user runs @darcs help patterns@. 
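As an aside on the shape of the rewritten matchParser above: it is the standard parsec expression-parser pattern, an operator table applied over a term parser, where a term is either a parenthesised sub-expression or a primitive matcher. A minimal standalone sketch of that pattern, using the classic Text.ParserCombinators.Parsec API with a toy boolean grammar standing in for the real primitive matchers (illustration only, not part of this patch):

    import Text.ParserCombinators.Parsec
    import Text.ParserCombinators.Parsec.Expr

    -- Toy grammar: 'not', 'and', 'or' and parentheses over the literals
    -- 'true' and 'false', mirroring the table/term split used above.
    expr :: Parser Bool
    expr = buildExpressionParser table term
      where
        table =
          [ [Prefix (keyword "not" >> return not)]
          , [ Infix (keyword "or"  >> return (||)) AssocLeft
            , Infix (keyword "and" >> return (&&)) AssocLeft
            ]
          ]
        term = between spaces spaces
                 (between (string "(") (string ")") expr
                  <|> (keyword "true"  >> return True)
                  <|> (keyword "false" >> return False))
        keyword s = try (string s) <* spaces

    main :: IO ()
    main = print $ parse (expr <* eof) "" "not (true and false) or false"
    -- expected output: Right True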
helpOnMatchers :: [String] @@ -321,19 +326,6 @@ primitiveMatchers = , ["src/foo.c", "src/", "\"src/*.(c|h)\""] , touchmatch ) ] -parens :: CharParser st MatchFun - -> CharParser st MatchFun -parens = between (string "(") (string ")") - -quoted :: CharParser st String -quoted = between (char '"') (char '"') - (many $ do { _ <- char '\\' -- allow escapes - ; try (oneOf "\\\"") <|> return '\\' - } - <|> noneOf "\"") - <|> between spaces spaces (many $ noneOf " ()") - "string" - datematch, hashmatch, authormatch, exactmatch, namematch, logmatch, hunkmatch, touchmatch :: String -> MatchFun @@ -388,7 +380,7 @@ data PatchSetMatch patchSetMatch :: [MatchFlag] -> Maybe PatchSetMatch patchSetMatch [] = Nothing patchSetMatch (OneTag t:_) = strictJust $ TagMatch $ tagmatch t -patchSetMatch (OnePattern m:_) = strictJust $ PatchMatch $ matchPattern m +patchSetMatch (OnePattern m:_) = strictJust $ PatchMatch $ patternmatch m patchSetMatch (OnePatch p:_) = strictJust $ PatchMatch $ patchmatch p patchSetMatch (OneHash h:_) = strictJust $ PatchMatch $ hashmatch' h patchSetMatch (OneIndex n:_) = strictJust $ IndexMatch n @@ -455,11 +447,11 @@ strictJust x = Just $! x -- @--tag@ options are passed (or their plural variants). nonrangeMatcher :: [MatchFlag] -> Maybe Matcher nonrangeMatcher [] = Nothing -nonrangeMatcher (OnePattern m:_) = strictJust $ matchPattern m +nonrangeMatcher (OnePattern m:_) = strictJust $ patternmatch m nonrangeMatcher (OneTag t:_) = strictJust $ tagmatch t nonrangeMatcher (OnePatch p:_) = strictJust $ patchmatch p nonrangeMatcher (OneHash h:_) = strictJust $ hashmatch' h -nonrangeMatcher (SeveralPattern m:_) = strictJust $ matchPattern m +nonrangeMatcher (SeveralPattern m:_) = strictJust $ patternmatch m nonrangeMatcher (SeveralTag t:_) = strictJust $ tagmatch t nonrangeMatcher (SeveralPatch p:_) = strictJust $ patchmatch p nonrangeMatcher (_:fs) = nonrangeMatcher fs @@ -470,8 +462,8 @@ nonrangeMatcher (_:fs) = nonrangeMatcher fs -- returns @Nothing@. firstMatcher :: [MatchFlag] -> Maybe Matcher firstMatcher [] = Nothing -firstMatcher (OnePattern m:_) = strictJust $ matchPattern m -firstMatcher (AfterPattern m:_) = strictJust $ matchPattern m +firstMatcher (OnePattern m:_) = strictJust $ patternmatch m +firstMatcher (AfterPattern m:_) = strictJust $ patternmatch m firstMatcher (AfterTag t:_) = strictJust $ tagmatch t firstMatcher (OnePatch p:_) = strictJust $ patchmatch p firstMatcher (AfterPatch p:_) = strictJust $ patchmatch p @@ -486,8 +478,8 @@ firstMatcherIsTag (_:fs) = firstMatcherIsTag fs secondMatcher :: [MatchFlag] -> Maybe Matcher secondMatcher [] = Nothing -secondMatcher (OnePattern m:_) = strictJust $ matchPattern m -secondMatcher (UpToPattern m:_) = strictJust $ matchPattern m +secondMatcher (OnePattern m:_) = strictJust $ patternmatch m +secondMatcher (UpToPattern m:_) = strictJust $ patternmatch m secondMatcher (OnePatch p:_) = strictJust $ patchmatch p secondMatcher (UpToPatch p:_) = strictJust $ patchmatch p secondMatcher (OneHash h:_) = strictJust $ hashmatch' h @@ -645,6 +637,17 @@ getMatchingTag m ps = PatchSet NilRL _ -> throw $ userError $ "Couldn't find a tag matching " ++ show m PatchSet ps' _ -> seal $ PatchSet ps' NilRL +-- | Return the patches in a 'PatchSet' up to the given 'PatchSetMatch'. 
+matchOnePatchset + :: MatchableRP p + => PatchSet p Origin wR + -> PatchSetMatch + -> IO (SealedPatchSet p Origin) +matchOnePatchset ps (IndexMatch n ) = return $ patchSetDrop (n - 1) ps +matchOnePatchset ps (PatchMatch m ) = return $ matchAPatchset m ps +matchOnePatchset ps (TagMatch m ) = return $ getMatchingTag m ps +matchOnePatchset ps (ContextMatch path) = readContextFile ps (toFilePath path) + -- | Rollback (i.e. apply the inverse) of what remains of a 'PatchSet' after we -- extract a 'PatchSetMatch'. This is the counterpart of 'getOnePatchset' and -- is used to create a matching state. In particular, if the match is --index=n diff --git a/src/Darcs/Patch/Named.hs b/src/Darcs/Patch/Named.hs index 43e66f05..ecab4192 100644 --- a/src/Darcs/Patch/Named.hs +++ b/src/Darcs/Patch/Named.hs @@ -49,7 +49,7 @@ import Data.List.Ordered ( nubSort ) import qualified Data.Set as S import Darcs.Patch.CommuteFn ( MergeFn, commuterIdFL, mergerIdFL ) -import Darcs.Patch.Conflict ( Conflict(..), findConflicting ) +import Darcs.Patch.Conflict ( Conflict(..), findConflicting, isConflicted ) import Darcs.Patch.Debug ( PatchDebug(..) ) import Darcs.Patch.Effect ( Effect(effect) ) import Darcs.Patch.FileHunk ( IsHunk(..) ) @@ -85,8 +85,8 @@ import Darcs.Patch.Viewing () -- for ShowPatch FL instances import Darcs.Patch.Witnesses.Eq ( Eq2(..) ) import Darcs.Patch.Witnesses.Ordered ( (:>)(..), (:\/:)(..), (:/\:)(..) - , FL(..), RL(..), mapFL, mapRL, mapFL_FL, mapRL_RL - , (+<+), (+>+), concatRLFL, reverseFL + , FL(..), RL(..), mapFL, mapFL_FL, mapRL_RL + , (+<+), (+>+), concatRLFL, reverseFL, reverseRL , (+<<+), (+>>+), concatFL ) import Darcs.Patch.Witnesses.Sealed ( Sealed, mapSeal ) @@ -239,31 +239,32 @@ This principle extends to explicit dependencies between 'Named' patches. In particular, recording a tag has the effect of resolving any as yet unresolved conflicts in a repo. -In general a 'Named' patch contains multiple changes ( a "changeset"). -Consider the named patches - -@ - Named A [] a - Named B [] (b1;b2) - Named C [] c - Named D [A,B] _ -@ - -where, at the RepoPatch level, @a@ conflicts with @b1@, and @c@ with @b2@. -@D@ depends explicitly on both @A@ and @B@, so it fully covers the conflict -between @a@ and @b1@ and thus we would be justified to consider that -particular conflict as resolved. Unfortunately we cannot detect this at the -Named patch level because RepoPatchV1 and V2 have no notion of patch -identities. Thus, at the Named level the two underlying conflicts appear as -a single large conflict between the three named patches @A@, @B@, and @C@, -and this means that patch @D@ does /not/ count as a (partial) resolution -(even though it arguably should). - -When we decide that a set of conflicting Named patches is resolved, we move -the RepoPatches contained in them to the context of the resolution. For all -other named patches, we must commute as much of their contents as possible -past the ones marked as resolved, using commutation at the RepoPatch level -(i.e. ignoring explicit dependencies). -} +To implement this here in a generic way without touching existing instances +for the underlying RepoPatch type @p@, we use the following trick: we move +the contents of patches which we regard as resolved at the Named patch layer +from the "interesting" trailing sequence to the "uninteresting" context, +before passing both on to the lower level 'resolveConflicts'. 
The challenge +here is to define and then compute the patches "resolved at this (Named) +layer" in such a way that it does not depend on patch order. + +The algorithm is roughly as follows: + +In a first pass (function 'prepare') we accumulate (direct) conflicts +between Named patches. This is done for all conflicted patches in the +interesting trailing sequence, as well as for any patch that conflicts with +one of them. In the same pass we calculate transitive explicit dependency +sets. This terminates when we have exhausted both the trailing patch +sequence plus any additional patches we add along the way and which we pass +along in the first argument (@todo@). + +In a second pass we actually move (the contents of) patches from the +trailing to the context sequence. + +Implementation note: I think it would be possible to fuse the two passes +into one, incrementally extending both transitive dependencies and +conflicts. I fear, however, that this will make it much harder to understand +what's going on. And the efficiency gain is probably minimal and in any case +at most an improvement by a constant factor. -} instance ( Commute p , Conflict p @@ -273,38 +274,29 @@ instance ( Commute p , ShowPatch p ) => Conflict (Named p) where - isConflicted (NamedP _ _ ps) = or (mapFL isConflicted ps) + numConflicts (NamedP _ _ ps) = sum (mapFL numConflicts ps) resolveConflicts context patches = - case separate S.empty [] context patches NilFL NilFL of + case separate patches NilFL NilFL of resolved :> unresolved -> resolveConflicts (patchcontentsRL context +<<+ resolved) (reverseFL unresolved) where - -- Separate the patch contents of an 'RL' of 'Named' patches into those - -- we regard as resolved due to explicit dependencies and any others. - -- Implicit dependencies are kept with the resolved patches. The first - -- parameter accumulates the PatchInfo of patches which we consider - -- resolved; the second one accumulates direct and indirect explicit - -- dependencies for the patches we have traversed. The third parameter - -- is the context, which is only needed as input to 'findConflicting'. + -- This partitions the patch contents into 'resolved' (by explicit + -- dependencies) and 'unresolved'. The 'resolved' part contains the + -- contents of all patches for which all direct conflicts it is involved + -- in are transitively covered via explicit dependencies by a single + -- patch. For all other patches we commute as much as we can out to the + -- 'unresolved' part. separate - :: S.Set PatchInfo -- names of resolved Named patches so far - -> [S.Set PatchInfo] -- transitive explicit dependencies so far - -> RL (Named p) w0 w1 -- context for Named patches - -> RL (Named p) w1 w2 -- Named patches under consideration + :: RL (Named p) w1 w2 -- Named patches under consideration -> FL p w2 w3 -- result: resolved at RepoPatch layer so far -> FL p w3 w4 -- result: unresolved at RepoPatch layer so far -> (FL p :> FL p) w1 w4 - separate acc_res acc_deps ctx (ps :<: p@(NamedP name deps contents)) resolved unresolved - | name `S.member` acc_res || isConflicted p - , _ :> _ :> conflicting <- findConflicting (ctx +<+ ps) p - , let conflict_ids = S.fromList $ name : mapRL ident conflicting - , any (conflict_ids `S.isSubsetOf`) acc_deps = - -- Either we already determined that p is considered resolved, - -- or p is conflicted and all patches involved in the conflict are - -- transitively explicitly depended upon by a single patch. - -- The action is to regard everything in 'contents' as resolved. 
- separate (acc_res `S.union` conflict_ids) (extend name deps acc_deps) - ctx ps (contents +>+ resolved) unresolved + separate (ps :<: NamedP name _ contents) resolved unresolved + | -- any direct conflict that we are part of + css <- S.filter (name `S.member`) final_conflicts + -- ... needs to be fully covered (transitively) by a single patch + , all (\cs -> any (cs `S.isSubsetOf`) final_depends) css = + separate ps (contents +>+ resolved) unresolved | otherwise = -- Commute as much as we can of our patch 'contents' past 'resolved', -- without dragging dependencies along. @@ -314,9 +306,55 @@ instance ( Commute p case genCommuteWhatWeCanRL (commuterIdFL commute) (reverseFL contents :> resolved) of dragged :> resolved' :> more_unresolved -> - separate acc_res (extend name deps acc_deps) ctx ps + separate ps (dragged +>>+ resolved') (more_unresolved +>>+ unresolved) - separate _ _ _ NilRL resolved unresolved = resolved :> unresolved + separate NilRL resolved unresolved = resolved :> unresolved + + (final_conflicts, final_depends) = prepare S.empty S.empty [] context patches + + -- Calculate direct conflicts and transitive explicit dependencies. This + -- needs to (potentially) look at the complete history, but as we do for + -- RepoPatchV3 resolution we terminate early when the set of interesting + -- patches ('todo') becomes exhausted. + -- Accumulating parameters: + -- * todo: patch names to consider, namely all participants in conflicts + -- we encounter on the way. + -- invariant: never contains name of a patch we already traversed + -- * conflicts: set of direct conflicts constructed so far; note that + -- each element is a pair i.e. two-element set + -- * depends: list of transitive explicit dependency sets so far + prepare + :: S.Set PatchInfo -- todo + -> S.Set(S.Set PatchInfo) -- direct conflicts so far + -> [S.Set PatchInfo] -- transitive explicit dependencies so far + -> RL (Named p) wA wB -- context + -> RL (Named p) wB wC -- patches under consideration + -> (S.Set (S.Set PatchInfo), [S.Set PatchInfo]) + prepare todo conflicts depends ctx (ps :<: p) + | isConflicted p || ident p `S.member` todo = + prepare (updTodo p cs todo) (updConflicts p cs conflicts) + (updDepends p depends) ctx ps + | otherwise = -- not part of any conflict + prepare (updTodo p cs todo) conflicts (updDepends p depends) ctx ps + where cs = conflictingNames (ctx +<+ ps) p + prepare todo conflicts depends _ NilRL + | S.null todo = (conflicts, depends) + prepare todo conflicts depends (ctx :<: p) NilRL + | ident p `S.member` todo || any (`S.member` todo) cs = + prepare (updTodo p cs todo) (updConflicts p cs conflicts) + (updDepends p depends) ctx NilRL + | otherwise = + -- may be part of a conflict but not with any interesting + -- patch, so we can and should ignore its conflicts + prepare (updTodo p S.empty todo) conflicts + (updDepends p depends) ctx NilRL + where cs = conflictingNames ctx p + prepare _ _ _ NilRL NilRL = error "autsch, hit the bottom" + + updTodo (NamedP name _ _) cs todo = cs <> (name `S.delete` todo) + updConflicts (NamedP name _ _) our_cs all_cs = + S.map (`S.insert` S.singleton name) our_cs <> all_cs + updDepends (NamedP n ds _) = extendDeps n ds -- Extend a list of sets of dependencies by adding the new list of -- dependencies to each set that contains the given 'name'. If 'name' @@ -325,15 +363,45 @@ instance ( Commute p -- Since we have to track whether 'name' was found in any of the input -- sets, this is not a straight-forward fold, so we use explicit -- recursion. 
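As a concrete illustration of the dependency bookkeeping described in the comment above, the following self-contained sketch (a copy named extendDepsDemo so as not to clash with the real helper defined below; for illustration only) shows how a patch name and its explicit dependencies extend the accumulated sets:

import qualified Data.Set as S

-- Adding a patch "D" with explicit dependencies ["A","B"] extends every
-- accumulated set that already contains "D"; if none does, a new set is started.
extendDepsDemo :: Ord a => a -> [a] -> [S.Set a] -> [S.Set a]
extendDepsDemo _ [] = id
extendDepsDemo name new_deps = go False (S.fromList new_deps)
  where
    go False new [] = [new]
    go True _ [] = []
    go found new (ds:dss)
      | name `S.member` ds = ds `S.union` new : go True new dss
      | otherwise = ds : go found new dss

-- extendDepsDemo "D" ["A","B"] [S.fromList ["C","D"], S.fromList ["X"]]
--   ==> [fromList ["A","B","C","D"], fromList ["X"]]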
- extend :: Ord a => a -> [a] -> [S.Set a] -> [S.Set a] - extend _ [] acc_deps = acc_deps - extend name deps acc_deps = go False (S.fromList deps) acc_deps where + extendDeps :: Ord a => a -> [a] -> [S.Set a] -> [S.Set a] + extendDeps _ [] = id + extendDeps name new_deps = go False (S.fromList new_deps) where go False new [] = [new] go True _ [] = [] go found new (ds:dss) | name `S.member` ds = ds `S.union` new : go True new dss | otherwise = ds : go found new dss + -- The set of (direct) conflicts we can read off a patch (in context). + -- This is slightly more involved than just calling 'findConflicting' due + -- to the fact that the latter also commutes out any patch that + -- explicitly depends on the ones we actually conflict with. + conflictingNames ctx p = + case findConflicting ctx p of + _ :> p' :> ps -> onlyRealConflicts p' (reverseRL ps) S.empty + + -- This filters out patches that 'findConflicting' finds + -- that are /only/ there because they explicitly depend on + -- patches that are actually in conflict. + onlyRealConflicts + :: Named p wB wC + -> FL (Named p) wC wD + -> S.Set (PatchInfo) + -> S.Set (PatchInfo) + onlyRealConflicts _ NilFL r = r + onlyRealConflicts p (q :>: qs) r = + case commute (p :> q) of + Just (_ :> p') + | numConflicts p /= numConflicts p' -> + onlyRealConflicts p' qs (patch2patchinfo q `S.insert` r) + | otherwise -> onlyRealConflicts p' qs r + Nothing -> + -- This should be 'error "impossible"' but due to commutation + -- bugs in V1 and V2 we would run into those errors quite a lot. + -- So we act as if the rest (qs) are real conflicts. Which is + -- wrong but better than crashing darcs for those legacy formats. + S.fromList (mapFL patch2patchinfo qs) `S.union` r + instance (PrimPatchBase p, Unwind p) => Unwind (Named p) where fullUnwind (NamedP _ _ ps) = squashUnwound (mapFL_FL fullUnwind ps) @@ -418,4 +486,3 @@ instance Show2 p => Show1 (Named p wX) instance Show2 p => Show2 (Named p) instance PatchDebug p => PatchDebug (Named p) - diff --git a/src/Darcs/Patch/Object.hs b/src/Darcs/Patch/Object.hs index ce595012..ba4d8071 100644 --- a/src/Darcs/Patch/Object.hs +++ b/src/Darcs/Patch/Object.hs @@ -10,10 +10,10 @@ import Darcs.Util.Path ( AnchoredPath, encodeWhite, anchorPath ) import Darcs.Util.Printer ( Doc, text, packedString ) import Darcs.Util.Tree ( Tree ) --- | Given a state type (parameterized over a monad m :: * -> *), this gives us +-- | Given a state type (parameterized over a monad m :: Type -> Type), this gives us -- the type of the key with which we can lookup an item (or object) in the -- state. -type family ObjectIdOf (state :: (* -> *) -> *) +type family ObjectIdOf (state :: (Type -> Type) -> Type) -- | We require from such a key (an 'ObjectId') that it has a canonical way -- to format itself to a 'Doc'. For historical reasons, this takes a parameter diff --git a/src/Darcs/Patch/PatchInfoAnd.hs b/src/Darcs/Patch/PatchInfoAnd.hs index f41b927b..16760bf2 100644 --- a/src/Darcs/Patch/PatchInfoAnd.hs +++ b/src/Darcs/Patch/PatchInfoAnd.hs @@ -300,7 +300,7 @@ instance IsHunk (PatchInfoAndG p) where instance PatchDebug p => PatchDebug (PatchInfoAndG p) instance (Commute p, Conflict p, Summary p, PrimPatchBase p, PatchListFormat p, ShowPatch p) => Conflict (PatchInfoAnd p) where - isConflicted = isConflicted . hopefully + numConflicts = numConflicts . 
hopefully -- Note: this relies on the laziness of 'hopefully' for efficiency -- and correctness in the face of lazy repositories resolveConflicts context patches = diff --git a/src/Darcs/Patch/Permutations.hs b/src/Darcs/Patch/Permutations.hs index a6ad15ba..ba921be5 100644 --- a/src/Darcs/Patch/Permutations.hs +++ b/src/Darcs/Patch/Permutations.hs @@ -16,7 +16,7 @@ -- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, -- Boston, MA 02110-1301, USA. -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.Permutations ( removeFL diff --git a/src/Darcs/Patch/Prim.hs b/src/Darcs/Patch/Prim.hs index b088364c..92944837 100644 --- a/src/Darcs/Patch/Prim.hs +++ b/src/Darcs/Patch/Prim.hs @@ -1,4 +1,3 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} module Darcs.Patch.Prim ( PrimApply(..) , PrimCoalesce(..) diff --git a/src/Darcs/Patch/Prim/FileUUID/Apply.hs b/src/Darcs/Patch/Prim/FileUUID/Apply.hs index 39c49130..16f62025 100644 --- a/src/Darcs/Patch/Prim/FileUUID/Apply.hs +++ b/src/Darcs/Patch/Prim/FileUUID/Apply.hs @@ -1,5 +1,5 @@ {-# LANGUAGE MultiParamTypeClasses, OverloadedStrings, UndecidableInstances #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.Prim.FileUUID.Apply ( hunkEdit, ObjectMap(..) ) where import Darcs.Prelude diff --git a/src/Darcs/Patch/Prim/FileUUID/Commute.hs b/src/Darcs/Patch/Prim/FileUUID/Commute.hs index 8613519a..a337a9f7 100644 --- a/src/Darcs/Patch/Prim/FileUUID/Commute.hs +++ b/src/Darcs/Patch/Prim/FileUUID/Commute.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.Prim.FileUUID.Commute () where import Darcs.Prelude diff --git a/src/Darcs/Patch/Prim/FileUUID/Details.hs b/src/Darcs/Patch/Prim/FileUUID/Details.hs index 13f85188..04bcdf8f 100644 --- a/src/Darcs/Patch/Prim/FileUUID/Details.hs +++ b/src/Darcs/Patch/Prim/FileUUID/Details.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.Prim.FileUUID.Details () where diff --git a/src/Darcs/Patch/Prim/FileUUID/ObjectMap.hs b/src/Darcs/Patch/Prim/FileUUID/ObjectMap.hs index e89e8f56..8ea768ab 100644 --- a/src/Darcs/Patch/Prim/FileUUID/ObjectMap.hs +++ b/src/Darcs/Patch/Prim/FileUUID/ObjectMap.hs @@ -47,7 +47,7 @@ data Location = L !UUID !Name -- TODO use HashMap instead? 
type DirContent = M.Map Name UUID -data Object (m :: * -> *) +data Object (m :: Type -> Type) = Directory DirContent | Blob (m FileContent) !(Maybe Hash) @@ -59,7 +59,7 @@ isDirectory :: Object m -> Bool isDirectory Directory{} = True isDirectory Blob{} = False -data ObjectMap (m :: * -> *) = ObjectMap +data ObjectMap (m :: Type -> Type) = ObjectMap { getObject :: UUID -> m (Maybe (Object m)) , putObject :: UUID -> Object m -> m (ObjectMap m) , listObjects :: m [UUID] diff --git a/src/Darcs/Patch/Prim/FileUUID/Read.hs b/src/Darcs/Patch/Prim/FileUUID/Read.hs index 78b04b44..9ac236ca 100644 --- a/src/Darcs/Patch/Prim/FileUUID/Read.hs +++ b/src/Darcs/Patch/Prim/FileUUID/Read.hs @@ -1,5 +1,5 @@ {-# LANGUAGE ViewPatterns, OverloadedStrings #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.Prim.FileUUID.Read () where import Darcs.Prelude hiding ( take ) diff --git a/src/Darcs/Patch/Prim/FileUUID/Show.hs b/src/Darcs/Patch/Prim/FileUUID/Show.hs index 8564bf13..af3dee44 100644 --- a/src/Darcs/Patch/Prim/FileUUID/Show.hs +++ b/src/Darcs/Patch/Prim/FileUUID/Show.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} {-# LANGUAGE OverloadedStrings, UndecidableInstances #-} module Darcs.Patch.Prim.FileUUID.Show ( displayHunk ) diff --git a/src/Darcs/Patch/Prim/V1/Apply.hs b/src/Darcs/Patch/Prim/V1/Apply.hs index 67332803..9beaf1f7 100644 --- a/src/Darcs/Patch/Prim/V1/Apply.hs +++ b/src/Darcs/Patch/Prim/V1/Apply.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} {-# LANGUAGE MultiWayIf #-} module Darcs.Patch.Prim.V1.Apply () where diff --git a/src/Darcs/Patch/Prim/V1/Coalesce.hs b/src/Darcs/Patch/Prim/V1/Coalesce.hs index 14e84c35..cc6c6991 100644 --- a/src/Darcs/Patch/Prim/V1/Coalesce.hs +++ b/src/Darcs/Patch/Prim/V1/Coalesce.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} {-# LANGUAGE TupleSections #-} module Darcs.Patch.Prim.V1.Coalesce diff --git a/src/Darcs/Patch/Prim/V1/Commute.hs b/src/Darcs/Patch/Prim/V1/Commute.hs index 1b893a12..b825985c 100644 --- a/src/Darcs/Patch/Prim/V1/Commute.hs +++ b/src/Darcs/Patch/Prim/V1/Commute.hs @@ -1,15 +1,12 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.Prim.V1.Commute () where import Darcs.Prelude -import Control.Monad ( MonadPlus, msum, mzero, mplus ) -import Control.Applicative ( Alternative(..) ) - import qualified Data.ByteString as B ( ByteString ) import qualified Data.ByteString.Char8 as BC ( pack ) -import Darcs.Util.Path ( AnchoredPath, movedirfilename, isPrefix ) +import Darcs.Util.Path ( movedirfilename, isPrefix ) import Darcs.Patch.Witnesses.Unsafe ( unsafeCoerceP ) import Darcs.Patch.Witnesses.Ordered ( (:>)(..) ) import Darcs.Patch.Prim.V1.Core @@ -17,183 +14,94 @@ import Darcs.Patch.Prim.V1.Core import Darcs.Patch.Invert ( Invert(..) ) import Darcs.Patch.Merge ( CleanMerge(..) ) import Darcs.Patch.Commute ( Commute(..) ) +import Darcs.Patch.CommuteFn ( CommuteFn ) import Darcs.Patch.Permutations () -- for Invert instance of FL import Darcs.Patch.Prim.Class ( primCleanMerge ) import Darcs.Patch.TokenReplace ( tryTokReplace ) -isSuperdir :: AnchoredPath -> AnchoredPath -> Bool -isSuperdir d1 d2 = isPrefix d1 d2 && d1 /= d2 - -{- -This is the original definition. 
-Note that it explicitly excludes equality: - -isSuperdir d1 d2 = isd (fn2fp d1) (fn2fp d2) - where - isd s1 s2 = - length s2 >= length s1 + 1 && take (length s1 + 1) s2 == s1 ++ "/" --} - -isInDirectory :: AnchoredPath -> AnchoredPath -> Bool -isInDirectory = isPrefix -{- -Again, here is the orginial definition: -isInDirectory d f = iid (fn2fp d) (fn2fp f) - where iid (cd:cds) (cf:cfs) - | cd /= cf = False - | otherwise = iid cds cfs - iid [] ('/':_) = True - iid [] [] = True -- Count directory itself as being in directory... - iid _ _ = False --} - -data Perhaps a = Unknown | Failed | Succeeded a - -instance Functor Perhaps where - fmap _ Unknown = Unknown - fmap _ Failed = Failed - fmap f (Succeeded x) = Succeeded (f x) - -instance Applicative Perhaps where - pure = Succeeded - _ <*> Failed = Failed - _ <*> Unknown = Unknown - Failed <*> _ = Failed - Unknown <*> _ = Unknown - Succeeded f <*> Succeeded x = Succeeded (f x) - -instance Monad Perhaps where - (Succeeded x) >>= k = k x - Failed >>= _ = Failed - Unknown >>= _ = Unknown - return = pure - -instance Alternative Perhaps where - empty = Unknown - Unknown <|> ys = ys - Failed <|> _ = Failed - (Succeeded x) <|> _ = Succeeded x - -instance MonadPlus Perhaps where - mzero = Unknown - mplus = (<|>) - -toMaybe :: Perhaps a -> Maybe a -toMaybe (Succeeded x) = Just x -toMaybe _ = Nothing - -cleverCommute :: CommuteFunction -> CommuteFunction -cleverCommute c (p1:>p2) = - case c (p1 :> p2) of - Succeeded x -> Succeeded x - Failed -> Failed - Unknown -> case c (invert p2 :> invert p1) of - Succeeded (p1' :> p2') -> Succeeded (invert p2' :> invert p1') - Failed -> Failed - Unknown -> Unknown ---cleverCommute c (p1,p2) = c (p1,p2) `mplus` --- (case c (invert p2,invert p1) of --- Succeeded (p1', p2') -> Succeeded (invert p2', invert p1') --- Failed -> Failed --- Unknown -> Unknown) +failed :: Maybe a +failed = Nothing -speedyCommute :: CommuteFunction -- Deal with common cases quickly! - -- Two file-patches modifying different files trivially commute. -speedyCommute (p1@(FP f1 _) :> p2@(FP f2 _)) - | f1 /= f2 = Succeeded (unsafeCoerceP p2 :> unsafeCoerceP p1) -speedyCommute _other = Unknown +type CommuteFunction p = CommuteFn p p -everythingElseCommute :: CommuteFunction -everythingElseCommute = eec - where - eec :: CommuteFunction - eec (p1 :> ChangePref p f t) = Succeeded (ChangePref p f t :> unsafeCoerceP p1) - eec (ChangePref p f t :> p2) = Succeeded (unsafeCoerceP p2 :> ChangePref p f t) - eec xx = cleverCommute commuteFiledir xx - -{- -Note that it must be true that - -commutex (A^-1 A, P) = Just (P, A'^-1 A') - -and - -if commutex (A, B) == Just (B', A') -then commutex (B^-1, A^-1) == Just (A'^-1, B'^-1) --} +-- | Use the invert-commute law. 
+invertCommute :: Invert p => CommuteFunction p -> CommuteFunction p +invertCommute c (p1:>p2) = do + ip1' :> ip2' <- c (invert p2 :> invert p1) + return (invert ip2' :> invert ip1') instance Commute Prim where - commute x = toMaybe $ msum [speedyCommute x, - everythingElseCommute x - ] - -commuteFiledir :: CommuteFunction -commuteFiledir (FP f1 p1 :> FP f2 p2) = - if f1 /= f2 then Succeeded ( FP f2 (unsafeCoerceP p2) :> FP f1 (unsafeCoerceP p1) ) - else commuteFP f1 (p1 :> p2) -commuteFiledir (DP d1 p1 :> DP d2 p2) = - if not (isInDirectory d1 d2 || isInDirectory d2 d1) && d1 /= d2 - then Succeeded ( DP d2 (unsafeCoerceP p2) :> DP d1 (unsafeCoerceP p1) ) - else Failed -commuteFiledir (FP f fp :> DP d dp) = - if not $ isInDirectory d f - then Succeeded (DP d (unsafeCoerceP dp) :> FP f (unsafeCoerceP fp)) - else Failed - --- FIXME using isSuperdir here makes no sense, should use just isPrefix - + commute = commuteFiledir + +commuteFiledir :: CommuteFunction Prim +commuteFiledir (FP f1 p1 :> FP f2 p2) + | f1 == f2 = do + p2' :> p1' <- commuteFP (p1 :> p2) + return (FP f2 p2' :> FP f1 p1') + | otherwise = return (FP f2 (unsafeCoerceP p2) :> FP f1 (unsafeCoerceP p1)) +commuteFiledir (DP d1 p1 :> DP d2 p2) + | isPrefix d1 d2 || isPrefix d2 d1 = failed + | otherwise = return (DP d2 (unsafeCoerceP p2) :> DP d1 (unsafeCoerceP p1)) +commuteFiledir (FP f fp :> DP d dp) + | isPrefix d f = failed + | otherwise = return (DP d (unsafeCoerceP dp) :> FP f (unsafeCoerceP fp)) +commuteFiledir pair@(DP _ _ :> FP _ _) = invertCommute commuteFiledir pair commuteFiledir (FP f1 p1 :> Move d d') - | f1 == d' = Failed - | (p1 == AddFile || p1 == RmFile) && d == f1 = Failed - | otherwise = Succeeded (Move d d' :> FP (movedirfilename d d' f1) (unsafeCoerceP p1)) + | f1 == d' = failed + | (p1 == AddFile || p1 == RmFile) && d == f1 = failed + | otherwise = + return (Move d d' :> FP (movedirfilename d d' f1) (unsafeCoerceP p1)) +commuteFiledir pair@(Move _ _ :> FP _ _) = invertCommute commuteFiledir pair commuteFiledir (DP d1 p1 :> Move d d') - | isSuperdir d1 d' || isSuperdir d1 d = Failed - | d == d1 = Failed -- The exact guard is p1 == AddDir && d == d1 - -- but note d == d1 suffices because we know p1 != RmDir - -- (and hence p1 == AddDir) since patches must be sequential. - | d1 == d' = Failed - | otherwise = Succeeded (Move d d' :> DP (movedirfilename d d' d1) (unsafeCoerceP p1)) + | isPrefix d1 d' || isPrefix d1 d = failed + | otherwise = + return (Move d d' :> DP (movedirfilename d d' d1) (unsafeCoerceP p1)) +commuteFiledir pair@(Move _ _ :> DP _ _) = invertCommute commuteFiledir pair commuteFiledir (Move f f' :> Move d d') - | f == d' || f' == d = Failed - | f == d || f' == d' = Failed - | d `isSuperdir` f && f' `isSuperdir` d' = Failed + | f == d' || f' == d = failed + | f == d || f' == d' = failed + | d `isPrefix` f && f' `isPrefix` d' = failed | otherwise = - Succeeded (Move (movedirfilename f' f d) (movedirfilename f' f d') :> - Move (movedirfilename d d' f) (movedirfilename d d' f')) - -commuteFiledir _ = Unknown - -type CommuteFunction = forall wX wY . 
(Prim :> Prim) wX wY -> Perhaps ((Prim :> Prim) wX wY) - -commuteFP :: AnchoredPath -> (FilePatchType :> FilePatchType) wX wY - -> Perhaps ((Prim :> Prim) wX wY) -commuteFP f (p1 :> Hunk line1 [] []) = - Succeeded (FP f (Hunk line1 [] []) :> FP f (unsafeCoerceP p1)) -commuteFP f (Hunk line1 [] [] :> p2) = - Succeeded (FP f (unsafeCoerceP p2) :> FP f (Hunk line1 [] [])) -commuteFP f (Hunk line1 old1 new1 :> Hunk line2 old2 new2) = + return + (Move (movedirfilename f' f d) (movedirfilename f' f d') :> + Move (movedirfilename d d' f) (movedirfilename d d' f')) +commuteFiledir (p1 :> ChangePref p f t) = + return (ChangePref p f t :> unsafeCoerceP p1) +commuteFiledir (ChangePref p f t :> p2) = + return (unsafeCoerceP p2 :> ChangePref p f t) + +commuteFP :: CommuteFunction FilePatchType +commuteFP (p1 :> Hunk line1 [] []) = + return (Hunk line1 [] [] :> unsafeCoerceP p1) +commuteFP (Hunk line1 [] [] :> p2) = + return (unsafeCoerceP p2 :> Hunk line1 [] []) +commuteFP (Hunk line1 old1 new1 :> Hunk line2 old2 new2) = case commuteHunkLines line1 (length old1) (length new1) line2 (length old2) (length new2) of Just (line2', line1') -> - Succeeded (FP f (Hunk line2' old2 new2) :> FP f (Hunk line1' old1 new1)) - Nothing -> Failed -commuteFP f (Hunk line1 old1 new1 :> TokReplace t o n) = + return (Hunk line2' old2 new2 :> Hunk line1' old1 new1) + Nothing -> failed +commuteFP (Hunk line1 old1 new1 :> TokReplace t o n) = let po = BC.pack o; pn = BC.pack n in case tryTokReplaces t po pn old1 of - Nothing -> Failed + Nothing -> failed Just old1' -> case tryTokReplaces t po pn new1 of - Nothing -> Failed - Just new1' -> Succeeded (FP f (TokReplace t o n) :> - FP f (Hunk line1 old1' new1')) -commuteFP f (TokReplace t1 o1 n1 :> TokReplace t2 o2 n2) - | t1 /= t2 = Failed - | o1 == o2 = Failed - | n1 == o2 = Failed - | o1 == n2 = Failed - | n1 == n2 = Failed - | otherwise = Succeeded (FP f (TokReplace t2 o2 n2) :> - FP f (TokReplace t1 o1 n1)) -commuteFP _ _ = Unknown + Nothing -> failed + Just new1' -> return (TokReplace t o n :> Hunk line1 old1' new1') +commuteFP pair@(TokReplace {} :> Hunk {}) = invertCommute commuteFP pair +commuteFP (TokReplace t1 o1 n1 :> TokReplace t2 o2 n2) + | t1 /= t2 = failed + | o1 == o2 = failed + | n1 == o2 = failed + | o1 == n2 = failed + | n1 == n2 = failed + | otherwise = return (TokReplace t2 o2 n2 :> TokReplace t1 o1 n1) +commuteFP (AddFile :> _) = failed +commuteFP (RmFile :> _) = failed +commuteFP (Binary {} :> _) = failed +commuteFP (_ :> AddFile) = failed +commuteFP (_ :> RmFile) = failed +commuteFP (_ :> Binary {}) = failed commuteHunkLines :: Int -> Int -> Int -> Int -> Int -> Int -> Maybe (Int, Int) diff --git a/src/Darcs/Patch/Prim/V1/Details.hs b/src/Darcs/Patch/Prim/V1/Details.hs index 599baef2..7e8fa176 100644 --- a/src/Darcs/Patch/Prim/V1/Details.hs +++ b/src/Darcs/Patch/Prim/V1/Details.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.Prim.V1.Details () where diff --git a/src/Darcs/Patch/Prim/V1/Mangle.hs b/src/Darcs/Patch/Prim/V1/Mangle.hs index 7f5dc12d..2320b8da 100644 --- a/src/Darcs/Patch/Prim/V1/Mangle.hs +++ b/src/Darcs/Patch/Prim/V1/Mangle.hs @@ -1,5 +1,5 @@ {-# LANGUAGE ViewPatterns #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.Prim.V1.Mangle () where import Darcs.Prelude diff --git a/src/Darcs/Patch/Prim/V1/Read.hs b/src/Darcs/Patch/Prim/V1/Read.hs index f5859257..78990c08 100644 --- a/src/Darcs/Patch/Prim/V1/Read.hs +++ 
b/src/Darcs/Patch/Prim/V1/Read.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.Prim.V1.Read () where import Darcs.Prelude diff --git a/src/Darcs/Patch/Prim/V1/Show.hs b/src/Darcs/Patch/Prim/V1/Show.hs index 11d2fc84..d65832a8 100644 --- a/src/Darcs/Patch/Prim/V1/Show.hs +++ b/src/Darcs/Patch/Prim/V1/Show.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} {-# LANGUAGE ViewPatterns, UndecidableInstances #-} module Darcs.Patch.Prim.V1.Show ( showHunk ) diff --git a/src/Darcs/Patch/RepoPatch.hs b/src/Darcs/Patch/RepoPatch.hs index d58931fb..6c186162 100644 --- a/src/Darcs/Patch/RepoPatch.hs +++ b/src/Darcs/Patch/RepoPatch.hs @@ -23,6 +23,8 @@ module Darcs.Patch.RepoPatch , Unwind(..) ) where +import Darcs.Prelude + import Darcs.Patch.Annotate ( AnnotateRP ) import Darcs.Patch.Apply ( Apply(..) ) import Darcs.Patch.Commute ( Commute(..) ) @@ -30,7 +32,7 @@ import Darcs.Patch.Conflict ( Conflict(..) ) import Darcs.Patch.Effect ( Effect(..) ) import Darcs.Patch.FileHunk ( IsHunk(..) ) import Darcs.Patch.Format ( PatchListFormat(..) ) -import Darcs.Patch.FromPrim ( PrimPatchBase(..), PrimOf(..), FromPrim(..), ToPrim(..) ) +import Darcs.Patch.FromPrim ( PrimPatchBase(..), PrimOf, FromPrim(..), ToPrim(..) ) import Darcs.Patch.Inspect ( PatchInspect(..) ) import Darcs.Patch.Merge ( Merge(..) ) import Darcs.Patch.Read ( ReadPatch(..) ) diff --git a/src/Darcs/Patch/Set.hs b/src/Darcs/Patch/Set.hs index 494100f2..39249cf6 100644 --- a/src/Darcs/Patch/Set.hs +++ b/src/Darcs/Patch/Set.hs @@ -31,6 +31,7 @@ module Darcs.Patch.Set , patchSetSnoc , patchSetSplit , patchSetDrop + , tagsCovering ) where import Darcs.Prelude @@ -39,7 +40,8 @@ import qualified Data.Set as S import Darcs.Patch.Ident ( Ident(..), PatchId ) import Darcs.Patch.Info ( PatchInfo, piTag ) -import Darcs.Patch.PatchInfoAnd ( PatchInfoAnd, info ) +import Darcs.Patch.Named ( getdeps ) +import Darcs.Patch.PatchInfoAnd ( PatchInfoAnd, hopefully, info ) import Darcs.Patch.Witnesses.Sealed ( Sealed(..) ) import Darcs.Patch.Witnesses.Ordered ( FL, RL(..), (+<+), (+<<+), (:>)(..), reverseRL, @@ -139,6 +141,38 @@ patchSetInventoryHashes (PatchSet ts _) = mapRL (\(Tagged _ _ mh) -> mh) ts patchSetTags :: PatchSet p wX wY -> [String] patchSetTags = catMaybes . mapRL (piTag . info) . patchSet2RL +-- Find all tags that cover the latest patch matching the given matcher. +-- +-- The algorithm: Go back until a matching patch is found. On the way, collect +-- information about tags. For each tag remember name and explicit +-- dependencies. Then go forward, and add every tag to the result that covers +-- the patch or covers one of the tags found so far. As a special optimization, +-- known clean tags always depend everything before them, so we don't have to +-- check their explicit dependencies. +tagsCovering + :: forall p wO wX + . (forall wA wB. PatchInfoAnd p wA wB -> Bool) + -> PatchSet p wO wX + -> Maybe [String] +tagsCovering matcher = fmap (catMaybes . fmap piTag) . 
go [] + where + go :: [(PatchInfo, Maybe[PatchInfo])] -> PatchSet p wO wY -> Maybe [PatchInfo] + go _ (PatchSet NilRL NilRL) = Nothing + go tags (PatchSet (ts :<: Tagged ps t _) NilRL) + | matcher t = Just $ checkCovered (info t) tags + | otherwise = go ((info t, Nothing) : tags) (PatchSet ts ps) + go tags (PatchSet ts (ps :<: p)) + | matcher p = Just $ checkCovered (info p) tags + | Just _ <- piTag (info p) = + go ((info p, Just (getdeps (hopefully p))) : tags) (PatchSet ts ps) + | otherwise = go tags (PatchSet ts ps) + + checkCovered i ((t,Nothing):ts) = t : checkCovered i ts + checkCovered i ((t,Just is):ts) + | i `elem` is = t : checkCovered i ts + | otherwise = checkCovered i ts + checkCovered _ [] = [] + inOrderTags :: PatchSet p wS wX -> [PatchInfo] inOrderTags (PatchSet ts _) = go ts where go :: RL(Tagged t1) wT wY -> [PatchInfo] diff --git a/src/Darcs/Patch/Split.hs b/src/Darcs/Patch/Split.hs index b0b7bdba..fb3544de 100644 --- a/src/Darcs/Patch/Split.hs +++ b/src/Darcs/Patch/Split.hs @@ -1,5 +1,5 @@ {-# LANGUAGE ViewPatterns #-} -{-# OPTIONS_GHC -fno-warn-unused-imports #-} +{-# OPTIONS_GHC -Wno-unused-imports #-} -- Copyright (C) 2009 Ganesh Sittampalam -- -- Permission is hereby granted, free of charge, to any person diff --git a/src/Darcs/Patch/Summary.hs b/src/Darcs/Patch/Summary.hs index aff82126..d78a4f31 100644 --- a/src/Darcs/Patch/Summary.hs +++ b/src/Darcs/Patch/Summary.hs @@ -14,6 +14,7 @@ import Darcs.Prelude import Data.List.Ordered ( nubSort ) import Data.Maybe ( catMaybes ) +import qualified Text.XML.Light as XML import Darcs.Patch.Format ( FileNameFormat(FileNameFormatDisplay) ) import Darcs.Patch.FromPrim ( PrimPatchBase(..) ) @@ -30,8 +31,6 @@ import Darcs.Util.Printer , ($$) , (<+>) , empty - , minus - , plus , text , vcat ) @@ -75,22 +74,8 @@ plainSummary = vcat . map (summChunkToLine False) . genSummary . conflictedEffec plainSummaryFL :: (Summary e, PrimDetails (PrimOf e)) => FL e wX wY -> Doc plainSummaryFL = vcat . map (summChunkToLine False) . genSummary . concat . mapFL conflictedEffect -xmlSummary :: (Summary p, PrimDetails (PrimOf p)) => p wX wY -> Doc -xmlSummary p = text "<summary>" - $$ (vcat . map summChunkToXML . genSummary . conflictedEffect $ p) - $$ text "</summary>" - --- Yuck duplicated code below... -escapeXML :: String -> Doc -escapeXML = text . strReplace '\'' "&apos;" . strReplace '"' "&quot;" . - strReplace '>' "&gt;" . strReplace '<' "&lt;" . strReplace '&' "&amp;" - -strReplace :: Char -> String -> String -> String -strReplace _ _ [] = [] -strReplace x y (z:zs) - | x == z = y ++ strReplace x y zs - | otherwise = z : strReplace x y zs --- end yuck duplicated code. +xmlSummary :: (Summary p, PrimDetails (PrimOf p)) => p wX wY -> XML.Element +xmlSummary p = XML.unode "summary" (catMaybes . map summChunkToXML . genSummary .
conflictedEffect $ p) -- | High-level representation of a piece of patch summary data SummChunk = SummChunk SummDetail ConflictState @@ -128,37 +113,47 @@ genSummary p combineOp _ SummRm = Just SummRm combineOp SummMod SummMod = Just SummMod -summChunkToXML :: SummChunk -> Doc +summChunkToXML :: SummChunk -> Maybe XML.Element summChunkToXML (SummChunk detail c) = - case detail of - SummRmDir f -> xconf c "remove_directory" (xfn f) - SummAddDir f -> xconf c "add_directory" (xfn f) - SummFile SummRm f _ _ _ -> xconf c "remove_file" (xfn f) - SummFile SummAdd f _ _ _ -> xconf c "add_file" (xfn f) - SummFile SummMod f r a x -> xconf c "modify_file" $ xfn f <> xrm r <> xad a <> xrp x - SummMv f1 f2 -> text "<move from=\"" <> xfn f1 - <> text "\" to=\"" <> xfn f2 <> text"\"/>" - SummNone -> empty - where - xconf Okay t x = text ('<':t++">") $$ x $$ text ("</"++t++">") - xconf Conflicted t x = text ('<':t++" conflict='true'>") $$ x $$ text ("</"++t++">") - xconf Duplicated t x = text ('<':t++" duplicate='true'>") $$ x $$ text ("</"++t++">") - xfn = escapeXML . anchorPath "" - -- - xad 0 = empty - xad a = text "<added_lines num='" <> text (show a) <> text "'/>" - xrm 0 = empty - xrm a = text "<removed_lines num='" <> text (show a) <> text "'/>" - xrp 0 = empty - xrp a = text "<replaced_tokens num='" <> text (show a) <> text "'/>" + case detail of + SummRmDir f -> Just $ xconf c "remove_directory" [] [cdata (xfn f)] + SummAddDir f -> Just $ xconf c "add_directory" [] [cdata (xfn f)] + SummFile SummRm f _ _ _ -> Just $ xconf c "remove_file" [] [cdata (xfn f)] + SummFile SummAdd f _ _ _ -> Just $ xconf c "add_file" [] [cdata (xfn f)] + SummFile SummMod f r a x -> + Just $ xconf c "modify_file" [] ([cdata (xfn f)] <> xrm r <> xad a <> xrp x) + SummMv f1 f2 -> + Just $ + xconf c "move" + [XML.Attr (XML.unqual "from") (xfn f1), XML.Attr (XML.unqual "to") (xfn f2)] [] + SummNone -> Nothing + where + xconf :: ConflictState -> String -> [XML.Attr] -> [XML.Content] -> XML.Element + xconf Okay t as cs = XML.unode t (as, cs) + xconf Conflicted t as cs = + XML.unode t (XML.Attr (XML.unqual "conflict") "true":as, cs) + xconf Duplicated t as cs = + XML.unode t (XML.Attr (XML.unqual "duplicate") "true":as, cs) + xfn = anchorPath "" + cdata s = XML.Text (XML.blank_cdata {XML.cdData = s}) + xad 0 = [] + xad a = [XML.Elem $ XML.unode "added_lines" (XML.Attr (XML.unqual "num") (show a))] + xrm 0 = [] + xrm a = [XML.Elem $ XML.unode "removed_lines" (XML.Attr (XML.unqual "num") (show a))] + xrp 0 = [] + xrp a = [XML.Elem $ XML.unode "replaced_tokens" (XML.Attr (XML.unqual "num") (show a))] summChunkToLine :: Bool -> SummChunk -> Doc summChunkToLine machineReadable (SummChunk detail c) = case detail of SummRmDir f -> lconf c "R" $ formatFileName FileNameFormatDisplay f <> text "/" SummAddDir f -> lconf c "A" $ formatFileName FileNameFormatDisplay f <> text "/" - SummFile SummRm f _ _ _ -> lconf c "R" $ formatFileName FileNameFormatDisplay f - SummFile SummAdd f _ _ _ -> lconf c "A" $ formatFileName FileNameFormatDisplay f + SummFile SummRm f r a x + | machineReadable -> lconf c "R" $ formatFileName FileNameFormatDisplay f + | otherwise -> lconf c "R" $ formatFileName FileNameFormatDisplay f <+> rm r <+> ad a <+> rp x + SummFile SummAdd f r a x + | machineReadable -> lconf c "A" $ formatFileName FileNameFormatDisplay f + | otherwise -> lconf c "A" $ formatFileName FileNameFormatDisplay f <+> rm r <+> ad a <+> rp x SummFile SummMod f r a x | machineReadable -> lconf c "M" $ formatFileName FileNameFormatDisplay f | otherwise -> lconf c "M" $ formatFileName FileNameFormatDisplay f <+> rm r <+> ad a <+> rp x @@ -178,8 +173,8 @@ summChunkToLine machineReadable (SummChunk detail c) = | otherwise = text t <+> x <+> text
"duplicate" -- ad 0 = empty - ad a = plus <> text (show a) + ad a = text "+" <> text (show a) rm 0 = empty - rm a = minus <> text (show a) + rm a = text "-" <> text (show a) rp 0 = empty rp a = text "r" <> text (show a) diff --git a/src/Darcs/Patch/V1/Apply.hs b/src/Darcs/Patch/V1/Apply.hs index 45bd49eb..9c74515a 100644 --- a/src/Darcs/Patch/V1/Apply.hs +++ b/src/Darcs/Patch/V1/Apply.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.V1.Apply () where import Darcs.Prelude diff --git a/src/Darcs/Patch/V1/Commute.hs b/src/Darcs/Patch/V1/Commute.hs index cd5920dc..b5e415b4 100644 --- a/src/Darcs/Patch/V1/Commute.hs +++ b/src/Darcs/Patch/V1/Commute.hs @@ -15,7 +15,7 @@ -- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, -- Boston, MA 02110-1301, USA. -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} {-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} module Darcs.Patch.V1.Commute @@ -331,8 +331,9 @@ instance PrimPatch prim => CommuteNoConflicts (RepoPatchV1 prim) where commuteNoConflicts x = toMaybe $ everythingElseCommute x instance PrimPatch prim => Conflict (RepoPatchV1 prim) where - isConflicted (PP _) = False - isConflicted _ = True + numConflicts (PP _) = 0 + numConflicts (Merger _ _ q p) = 1 + numConflicts q + numConflicts p + numConflicts (Regrem _ _ q p) = 1 + numConflicts q + numConflicts p resolveConflicts _ = map mangleOrFail . combineConflicts resolveOne where resolveOne p | isMerger p = [publicUnravel p] diff --git a/src/Darcs/Patch/V1/Read.hs b/src/Darcs/Patch/V1/Read.hs index 1bf85888..3ec7a7a4 100644 --- a/src/Darcs/Patch/V1/Read.hs +++ b/src/Darcs/Patch/V1/Read.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.V1.Read () where import Darcs.Prelude diff --git a/src/Darcs/Patch/V1/Show.hs b/src/Darcs/Patch/V1/Show.hs index b08330e9..843c2f61 100644 --- a/src/Darcs/Patch/V1/Show.hs +++ b/src/Darcs/Patch/V1/Show.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.V1.Show ( showPatch_ ) where import Darcs.Prelude diff --git a/src/Darcs/Patch/V1/Viewing.hs b/src/Darcs/Patch/V1/Viewing.hs index ae4b3a35..0500847c 100644 --- a/src/Darcs/Patch/V1/Viewing.hs +++ b/src/Darcs/Patch/V1/Viewing.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.V1.Viewing () where import Darcs.Prelude diff --git a/src/Darcs/Patch/V2/Non.hs b/src/Darcs/Patch/V2/Non.hs index c738aa70..39e8113d 100644 --- a/src/Darcs/Patch/V2/Non.hs +++ b/src/Darcs/Patch/V2/Non.hs @@ -15,7 +15,8 @@ -- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, -- Boston, MA 02110-1301, USA. -{-# OPTIONS_GHC -fno-warn-orphans -fno-warn-name-shadowing #-} +{-# OPTIONS_GHC -Wno-orphans #-} +{-# OPTIONS_GHC -Wno-name-shadowing #-} {-# LANGUAGE UndecidableInstances #-} diff --git a/src/Darcs/Patch/V2/RepoPatch.hs b/src/Darcs/Patch/V2/RepoPatch.hs index 62b83691..f387b216 100644 --- a/src/Darcs/Patch/V2/RepoPatch.hs +++ b/src/Darcs/Patch/V2/RepoPatch.hs @@ -15,8 +15,7 @@ -- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, -- Boston, MA 02110-1301, USA. 
-{-# OPTIONS_GHC -fno-warn-orphans #-} -{-# OPTIONS_GHC -fno-warn-name-shadowing #-} +{-# OPTIONS_GHC -Wno-name-shadowing #-} {-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} module Darcs.Patch.V2.RepoPatch @@ -364,9 +363,9 @@ instance Summary (RepoPatchV2 prim) where conflictedEffect (Normal x) = [IsC Okay x] instance PrimPatch prim => Conflict (RepoPatchV2 prim) where - isConflicted (Conflictor {}) = True - isConflicted (InvConflictor {}) = True - isConflicted _ = False + numConflicts (Conflictor ix xx _) = length ix + lengthFL xx + numConflicts (InvConflictor ix xx _) = length ix + lengthFL xx + numConflicts _ = 0 resolveConflicts _ = map mangleOrFail . combineConflicts resolveOne where resolveOne :: RepoPatchV2 prim wX wY -> [[Sealed (FL prim wY)]] diff --git a/src/Darcs/Patch/V3.hs b/src/Darcs/Patch/V3.hs index 02696183..29065c64 100644 --- a/src/Darcs/Patch/V3.hs +++ b/src/Darcs/Patch/V3.hs @@ -1,4 +1,4 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.V3 ( RepoPatchV3 ) where import Darcs.Prelude diff --git a/src/Darcs/Patch/V3/Resolution.hs b/src/Darcs/Patch/V3/Resolution.hs index ef4ab06c..049e6b7d 100644 --- a/src/Darcs/Patch/V3/Resolution.hs +++ b/src/Darcs/Patch/V3/Resolution.hs @@ -1,5 +1,5 @@ {- | Conflict resolution for 'RepoPatchV3' -} -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} module Darcs.Patch.V3.Resolution () where import qualified Data.Set as S @@ -8,7 +8,7 @@ import Darcs.Prelude import Data.List ( partition, sort ) import Darcs.Patch.Commute ( commuteFL ) -import Darcs.Patch.Conflict ( Conflict(..), mangleOrFail ) +import Darcs.Patch.Conflict ( Conflict(..), isConflicted, mangleOrFail ) import Darcs.Patch.Ident ( Ident(..), SignedId(..), StorableId(..) ) import Darcs.Patch.Prim ( PrimPatch ) import Darcs.Patch.Prim.WithName ( PrimWithName, wnPatch ) @@ -67,8 +67,8 @@ typically with fewer alternatives, it has some disadvantages in practice: instance (SignedId name, StorableId name, PrimPatch prim) => Conflict (RepoPatchV3 name prim) where - isConflicted Conflictor{} = True - isConflicted Prim{} = False + numConflicts (Conflictor _ x _) = S.size x + numConflicts Prim{} = 0 resolveConflicts context = map resolveOne . conflictingAlternatives context where @@ -106,9 +106,8 @@ the following state: @res@ - A list of sets of @name@s, initially empty, with the @name@s of patches - involved in conflicts that are (partially) resolved. Used to post - process the result (see below). + A list of two-element sets of @name@s, representing resolved direct + conflicts. Used to post process the result (see below). We inspect any conflictor in the trailing 'RL', as well as any patch whose @name@ is in @todo@ throughout the history, terminating early if the @@ -131,10 +130,10 @@ transitive conflict and not all patches involved may have been fully resolved. (Remember that the commute rules for V3 are such that a patch depends on a conflictor if it depends on /any/ of the patches involved in the conflict.) To make sure that the result is independent of the order of -patches, we need to remember the set of directly conflicting patches (by -adding it to @res@). When the traversal terminates, we use this information -to join any components connected by these sets into larger components. See -the discussion below for details. +patches, we need to remember all direct conflicts that the patch is part of +(by adding them to @res@). 
When the traversal terminates, we use this +information to join any components connected by these sets into larger +components. See the discussion below for details. In both cases, if the patch is conflicted, we insert any patch that the candidate conflicts with into @todo@ (and remove the patch itself). Note @@ -181,22 +180,35 @@ findComponents context patches = go S.empty [] [] context patches NilFL where go todo done res cs (ps :<: p) passedby | isConflicted p || ident p `S.member` todo , Just (_ :> p') <- commuteFL (p :> passedby) = - go (updTodo p todo) (updDone p' done) res cs ps (p :>: passedby) + go todo' (updDone p' done) res cs ps (p :>: passedby) | otherwise = - go (updTodo p todo) done (updRes p res) cs ps (p :>: passedby) + go todo' done (updRes p res) cs ps (p :>: passedby) + where + todo' = S.map ctxId (conflicts p) <> (ident p -| todo) go todo done res _ NilRL _ | S.null todo = sort $ map purgeDeps $ foldr joinOverlapping done res go todo done res (cs :<: p) NilRL passedby | ident p `S.member` todo , Just (_ :> p') <- commuteFL (p :> passedby) = - go (updTodo p todo) (updDone p' done) res cs NilRL (p :>: passedby) + go todo' (updDone p' done) res cs NilRL (p :>: passedby) | otherwise = - go (updTodo p todo) done (updRes p res) cs NilRL (p :>: passedby) + go todo' done (updRes p res) cs NilRL (p :>: passedby) + where + todo' = ident p -| todo go _ _ _ NilRL NilRL _ = error "autsch, hit the bottom" - updTodo p todo = S.map ctxId (conflicts p) <> (ident p -| todo) updDone p' done = joinOrAddNew (allConflicts p') done - updRes p res = S.map ctxId (allConflicts p) : res + + -- Update the list of resolved direct conflicts, to be used in the last step + -- to join unresolved transitive conflict sets. Note that this list contains + -- only pairs i.e. two-element sets: for each conflicted patch p, we pair it + -- with any patch it conflicts with. See the discussion of the algorithm + -- above and tests/issue2727-resolutions-order-independent10.sh for an + -- example case, found by QuickCheck. + updRes p res = map (ident p `pair`) (conflictIds p) ++ res + where + pair a b = S.fromList [a, b] + conflictIds = S.toList . S.map ctxId . conflicts conflicts (Conflictor _ x _) = x conflicts _ = S.empty diff --git a/src/Darcs/Patch/Viewing.hs b/src/Darcs/Patch/Viewing.hs index 81ab6d15..f5789298 100644 --- a/src/Darcs/Patch/Viewing.hs +++ b/src/Darcs/Patch/Viewing.hs @@ -15,7 +15,7 @@ -- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, -- Boston, MA 02110-1301, USA. -{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -Wno-orphans #-} {-# LANGUAGE UndecidableInstances #-} module Darcs.Patch.Viewing ( showContextHunk diff --git a/src/Darcs/Prelude.hs b/src/Darcs/Prelude.hs index 1a82a895..10de5be0 100644 --- a/src/Darcs/Prelude.hs +++ b/src/Darcs/Prelude.hs @@ -3,8 +3,9 @@ {- This module abstracts over the differences in the Haskell Prelude over -multiple GHC versions, and also hides some symbols that are exported by the -Prelude but clash with common names in the Darcs code. +multiple GHC versions, hides some symbols that are exported by the +Prelude but clash with common names in the Darcs code, and re-exports +occasional functions from non-Prelude models that are used widely in Darcs. Broadly it exports everything that the latest Prelude supports, minus the things we explicitly exclude. Since we now use the NoImplicitPrelude extension, @@ -22,10 +23,11 @@ If something is needed from the Prelude that's hidden by default, then add it to the Prelude import. 
-} -{-# LANGUAGE CPP #-} module Darcs.Prelude ( module Prelude , module Control.Applicative + , module Data.Kind + , module Data.List , module Data.Monoid , Semigroup(..) , module Data.Traversable @@ -42,11 +44,9 @@ import Prelude hiding , -- because it's in the new Prelude but only in Data.Monoid in older GHCs Monoid(..) -#if MIN_VERSION_base(4,11,0) , -- because it's in the new Prelude but only in Data.Semigroup in older GHCs Semigroup(..) -#endif , -- because it's in the new Prelude but only in Data.Traversable in older GHCs traverse @@ -62,6 +62,8 @@ import Prelude hiding ) import Control.Applicative ( Applicative(..), (<$>), (<*>) ) +import Data.List ( foldl' ) +import Data.Kind ( Type ) import Data.Monoid ( Monoid(..) ) import Data.Semigroup ( Semigroup(..) ) import Data.Traversable ( traverse ) diff --git a/src/Darcs/Repository.hs b/src/Darcs/Repository.hs index 25069ad0..fc1e29fc 100644 --- a/src/Darcs/Repository.hs +++ b/src/Darcs/Repository.hs @@ -23,6 +23,7 @@ module Darcs.Repository , repoFormat , repoPristineType , repoCache + , RepoFormat(..) , PristineType(..) , HashedDir(..) , Cache @@ -171,6 +172,7 @@ import Darcs.Repository.Create , EmptyRepository(..) ) import Darcs.Repository.Flags ( UpdatePending(..) ) +import Darcs.Repository.Format ( RepoFormat(..) ) import Darcs.Patch.Set ( PatchSet, SealedPatchSet ) import Darcs.Patch.PatchInfoAnd ( PatchInfoAnd ) diff --git a/src/Darcs/Repository/ApplyPatches.hs b/src/Darcs/Repository/ApplyPatches.hs index 46739ede..5a311361 100644 --- a/src/Darcs/Repository/ApplyPatches.hs +++ b/src/Darcs/Repository/ApplyPatches.hs @@ -15,7 +15,7 @@ -- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, -- Boston, MA 02110-1301, USA. -{-# OPTIONS_GHC -fno-warn-missing-methods #-} +{-# OPTIONS_GHC -Wno-missing-methods #-} {-# LANGUAGE MultiParamTypeClasses #-} module Darcs.Repository.ApplyPatches diff --git a/src/Darcs/Repository/Clone.hs b/src/Darcs/Repository/Clone.hs index 9eacd590..4e539c62 100644 --- a/src/Darcs/Repository/Clone.hs +++ b/src/Darcs/Repository/Clone.hs @@ -24,7 +24,6 @@ import Darcs.Repository.Identify ( identifyRepositoryFor, ReadingOrWriting(..) ) import Darcs.Repository.Pristine ( applyToTentativePristine , createPristineDirectoryTree - , writePristine ) import Darcs.Repository.Hashed ( copyHashedInventory @@ -67,7 +66,6 @@ import Darcs.Repository.Format , formatHas ) import Darcs.Repository.Prefs ( addRepoSource, deleteSources ) -import Darcs.Repository.Match ( getOnePatchset ) import Darcs.Util.File ( copyFileOrUrl , Cachable(..) @@ -121,7 +119,7 @@ import Darcs.Patch.Set , patchSetInventoryHashes , progressPatchSet ) -import Darcs.Patch.Match ( MatchFlag(..), patchSetMatch ) +import Darcs.Patch.Match ( MatchFlag(..), patchSetMatch, matchOnePatchset ) import Darcs.Patch.Progress ( progressRLShowTags, progressFL ) import Darcs.Patch.Apply ( Apply(..) ) import Darcs.Patch.Witnesses.Sealed ( Sealed(..) ) @@ -136,7 +134,7 @@ import Darcs.Patch.Witnesses.Ordered ) import Darcs.Patch.PatchInfoAnd ( PatchInfoAnd, extractHash ) -import Darcs.Util.Tree( Tree, emptyTree ) +import Darcs.Util.Tree( Tree ) import Darcs.Util.Exception ( catchall ) import Darcs.Util.English ( englishNum, Noun(..) 
) @@ -229,7 +227,7 @@ cloneRepository repourl mysimplename v useCache cloneKind um rdarcs sse -- the following is necessary to be able to read _toRepo's patches _toRepo <- revertRepositoryChanges _toRepo patches <- readPatches _toRepo - Sealed context <- getOnePatchset _toRepo psm + Sealed context <- matchOnePatchset patches psm to_remove :\/: only_in_context <- return $ findUncommon patches context case only_in_context of NilFL -> do @@ -252,7 +250,11 @@ cloneRepository repourl mysimplename v useCache cloneKind um rdarcs sse $ text "Missing patches from context:" $$ description only_in_context when (forget == YesForgetParent) deleteSources - -- check for unresolved conflicts + -- TODO Checking for unresolved conflicts means we have to download + -- at least all the patches referenced by hashed_inventory, even if + -- --lazy is in effect. This can take a long time, and in extreme + -- cases can even result in --lazy being slower than --complete. + putVerbose v $ text "Checking for unresolved conflicts..." patches <- readPatches _toRepo let conflicts = patchsetConflictResolutions patches _ <- announceConflicts "clone" (YesAllowConflicts MarkConflicts) conflicts @@ -389,7 +391,6 @@ copyRepoOldFashioned :: forall p wU wR. (RepoPatch p, ApplyState p ~ Tree) -> IO () copyRepoOldFashioned fromRepo _toRepo verb withWorkingDir = do _toRepo <- revertRepositoryChanges _toRepo - _ <- writePristine _toRepo emptyTree patches <- readPatches fromRepo let k = "Copying patch" beginTedious k diff --git a/src/Darcs/Repository/Hashed.hs b/src/Darcs/Repository/Hashed.hs index fa78c251..aaccec94 100644 --- a/src/Darcs/Repository/Hashed.hs +++ b/src/Darcs/Repository/Hashed.hs @@ -38,7 +38,6 @@ module Darcs.Repository.Hashed import Darcs.Prelude import Control.Monad ( unless, when ) -import Data.List ( foldl' ) import System.Directory ( copyFile , createDirectoryIfMissing diff --git a/src/Darcs/Repository/InternalTypes.hs b/src/Darcs/Repository/InternalTypes.hs index eaa8204d..e47865fc 100644 --- a/src/Darcs/Repository/InternalTypes.hs +++ b/src/Darcs/Repository/InternalTypes.hs @@ -66,7 +66,7 @@ data SAccessType (rt :: AccessType) where -- -- * the recorded state when outside a transaction, or -- * the tentative state when inside a transaction. -data Repository (rt :: AccessType) (p :: * -> * -> *) wU wR = +data Repository (rt :: AccessType) (p :: Type -> Type -> Type) wU wR = Repo !String !RepoFormat !PristineType Cache (SAccessType rt) type role Repository nominal nominal nominal nominal diff --git a/src/Darcs/Repository/Inventory.hs b/src/Darcs/Repository/Inventory.hs index 15d372d2..36187ed5 100644 --- a/src/Darcs/Repository/Inventory.hs +++ b/src/Darcs/Repository/Inventory.hs @@ -64,6 +64,10 @@ readPatchesFromInventoryFile invPath repo = do -- | Read a complete 'PatchSet' from a 'Cache', by following the chain of -- 'Inventory's, starting with the given one. +-- Note that we read inventories and patches lazily, explicitly using +-- 'unsafeInterleaveIO' to delay IO actions until the value is demanded. This +-- is justified by the fact that inventories and patches are stored in hashed +-- format, which implies that the files we read are never mutated. readPatchesFromInventory :: (PatchListFormat p, ReadPatch p) => Cache -> Inventory diff --git a/src/Darcs/Repository/Match.hs b/src/Darcs/Repository/Match.hs index 25ecda5a..73dfa877 100644 --- a/src/Darcs/Repository/Match.hs +++ b/src/Darcs/Repository/Match.hs @@ -16,34 +16,22 @@ -- Boston, MA 02110-1301, USA. 
module Darcs.Repository.Match - ( - getPristineUpToMatch - , getOnePatchset + ( getPristineUpToMatch ) where import Darcs.Prelude -import Darcs.Patch.Match - ( rollbackToPatchSetMatch - , PatchSetMatch(..) - , getMatchingTag - , matchAPatchset - ) - -import Darcs.Patch.Bundle ( readContextFile ) -import Darcs.Patch.Apply( ApplyState ) import Darcs.Patch ( RepoPatch ) -import Darcs.Patch.Set ( Origin, SealedPatchSet, patchSetDrop ) +import Darcs.Patch.Apply ( ApplyState ) +import Darcs.Patch.Match ( PatchSetMatch, rollbackToPatchSetMatch ) -import Darcs.Repository.InternalTypes ( Repository ) import Darcs.Repository.Hashed ( readPatches ) +import Darcs.Repository.InternalTypes ( Repository ) import Darcs.Repository.Pristine ( readPristine ) import Darcs.Util.Tree ( Tree ) import Darcs.Util.Tree.Monad ( virtualTreeIO ) -import Darcs.Util.Path ( toFilePath ) - -- | Return the pristine tree up to the given 'PatchSetMatch'. -- In the typical case where the match is closer to the end of the repo than -- its beginning, this is (a lot) more efficient than applying the result of @@ -56,17 +44,3 @@ getPristineUpToMatch r psm = do ps <- readPatches r tree <- readPristine r snd <$> virtualTreeIO (rollbackToPatchSetMatch psm ps) tree - --- | Return the patches up to the given 'PatchSetMatch'. -getOnePatchset :: RepoPatch p - => Repository rt p wU wR - -> PatchSetMatch - -> IO (SealedPatchSet p Origin) -getOnePatchset repository pm = - case pm of - IndexMatch n -> patchSetDrop (n-1) <$> readPatches repository - PatchMatch m -> matchAPatchset m <$> readPatches repository - TagMatch m -> getMatchingTag m <$> readPatches repository - ContextMatch path -> do - ref <- readPatches repository - readContextFile ref (toFilePath path) diff --git a/src/Darcs/Repository/PatchIndex.hs b/src/Darcs/Repository/PatchIndex.hs index ea70693f..755dd242 100644 --- a/src/Darcs/Repository/PatchIndex.hs +++ b/src/Darcs/Repository/PatchIndex.hs @@ -37,7 +37,7 @@ module Darcs.Repository.PatchIndex import Darcs.Prelude -import Control.Exception ( catch ) +import Control.Exception ( catch, throw ) import Control.Monad ( forM_, unless, when, (>=>) ) import Control.Monad.State.Strict ( evalState, execState, State, gets, modify ) @@ -278,7 +278,14 @@ lookupFid :: AnchoredPath -> PIM FileId lookupFid fn = do maybeFid <- lookupFid' fn case maybeFid of - Nothing -> error $ "couldn't find " ++ displayPath fn ++ " in patch index" + Nothing -> + -- This error may be caused by broken patches, rather than a bug in + -- the patch index itself, for instance, broken move patches with + -- non-existing source or target. Thus we should not use 'error' here. + throw $ userError $ + "Couldn't find " ++ displayPath fn ++ " in patch index.\n\ + \Please run `darcs check`. If that reports issues mentioning " ++ displayPath fn ++ " \n\ + \then running `darcs repair` probably fixes the problem." Just fid -> return fid -- | lookup current fid of filepatch, returning a Maybe to allow failure diff --git a/src/Darcs/Repository/Paths.hs b/src/Darcs/Repository/Paths.hs index 952958bd..5f6c0a13 100644 --- a/src/Darcs/Repository/Paths.hs +++ b/src/Darcs/Repository/Paths.hs @@ -14,23 +14,22 @@ makeDarcsdirPath name = darcsdir name lockPath = makeDarcsdirPath "lock" -- | Location of the prefs directory. -prefsDir = "prefs" -prefsDirPath = makeDarcsdirPath prefsDir +prefsDirPath = makeDarcsdirPath "prefs" -- | Location of the (one and only) head inventory. 
-hashedInventory = "hashed_inventory" -hashedInventoryPath = makeDarcsdirPath hashedInventory +hashedInventoryPath = makeDarcsdirPath "hashed_inventory" -- | Location of the (one and only) tentative head inventory. -tentativeHashedInventory = "tentative_hashed_inventory" -tentativeHashedInventoryPath = makeDarcsdirPath tentativeHashedInventory +tentativeHashedInventoryPath = makeDarcsdirPath "tentative_hashed_inventory" -- | Location of parent inventories. inventoriesDir = hashedDir HashedInventoriesDir inventoriesDirPath = makeDarcsdirPath inventoriesDir --- | Location of pristine trees. +-- | Location of the (one and only) tentative pristine root tentativePristinePath = makeDarcsdirPath "tentative_pristine" + +-- | Location of pristine trees. pristineDir = hashedDir HashedPristineDir pristineDirPath = makeDarcsdirPath pristineDir @@ -52,7 +51,6 @@ formatPath = makeDarcsdirPath "format" -- | Location of pending files pendingPath = patchesDirPath "pending" tentativePendingPath = patchesDirPath "pending.tentative" -newPendingPath = patchesDirPath "pending.new" -- | Location of unrevert bundle. unrevertPath = patchesDirPath "unrevert" diff --git a/src/Darcs/Repository/Prefs.hs b/src/Darcs/Repository/Prefs.hs index b5f3eee2..711ebd60 100644 --- a/src/Darcs/Repository/Prefs.hs +++ b/src/Darcs/Repository/Prefs.hs @@ -301,9 +301,14 @@ xdgCacheDir = do `catchall` return Nothing globalCacheDir :: IO (Maybe FilePath) -globalCacheDir | windows = (( "cache2") `fmap`) `fmap` globalPrefsDir - | osx = (( "darcs") `fmap`) `fmap` osxCacheDir - | otherwise = (( "darcs") `fmap`) `fmap` xdgCacheDir +globalCacheDir = do + env <- getEnvironment + case lookup "DARCS_CACHE_DIR" env of + Nothing + | windows -> (( "cache2") `fmap`) `fmap` globalPrefsDir + | osx -> (( "darcs") `fmap`) `fmap` osxCacheDir + | otherwise -> (( "darcs") `fmap`) `fmap` xdgCacheDir + d -> return d -- |tryMakeBoringRegexp attempts to create a Regex from a given String. The -- evaluation is forced, to ensure any malformed exceptions are thrown here, @@ -765,7 +770,8 @@ prefsFilesHelp = , "" , "A global cache is enabled by default in your home directory under" , "`.cache/darcs` (older versions of darcs used `.darcs/cache` for this)," - , "or `$XDG_CACHE_HOME/darcs` if the environment variable is set, see" + , "or `$DARCS_CACHE_DIR` if the environment variable is set," + , "or else `$XDG_CACHE_HOME/darcs` if the environment variable is set, see" , "https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html." 
, "The cache allows darcs to avoid re-downloading patches (for example, when" , "doing a second darcs clone of the same repository), and also allows darcs" diff --git a/src/Darcs/Repository/Traverse.hs b/src/Darcs/Repository/Traverse.hs index befde75a..04b66fc2 100644 --- a/src/Darcs/Repository/Traverse.hs +++ b/src/Darcs/Repository/Traverse.hs @@ -2,14 +2,11 @@ module Darcs.Repository.Traverse ( cleanRepository , cleanPristineDir , listInventories - , listInventoriesRepoDir - , listPatchesLocalBucketed , specialPatches ) where import Darcs.Prelude -import Data.Maybe ( fromJust ) import qualified Data.ByteString.Char8 as BC ( unpack, pack ) import qualified Data.Set as Set @@ -33,24 +30,21 @@ import Darcs.Repository.InternalTypes , withRepoDir ) import Darcs.Repository.Paths - ( tentativeHashedInventory + ( tentativeHashedInventoryPath , tentativePristinePath - , inventoriesDir , inventoriesDirPath , patchesDirPath , pristineDirPath ) -import Darcs.Repository.Prefs ( globalCacheDir ) import Darcs.Util.ByteString ( gzReadFilePS ) import Darcs.Util.Cache ( Cache , HashedDir(HashedPristineDir) - , bucketFolder , cleanCachesWithHint ) import Darcs.Util.Exception ( ifDoesNotExistError ) -import Darcs.Util.Global ( darcsdir, debugMessage ) +import Darcs.Util.Global ( debugMessage ) import Darcs.Util.Lock ( removeFileMayNotExist ) import Darcs.Util.Tree.Hashed ( followPristineHashes ) @@ -58,14 +52,6 @@ import Darcs.Util.Tree.Hashed ( followPristineHashes ) cleanRepository :: Repository 'RW p wU wR -> IO () cleanRepository r = cleanPristine r >> cleanInventories r >> cleanPatches r --- | The way patchfiles, inventories, and pristine trees are stored. --- 'PlainLayout' means all files are in the same directory. 'BucketedLayout' --- means we create a second level of subdirectories, such that all files whose --- hash starts with the same two letters are in the same directory. --- Currently, only the global cache uses 'BucketedLayout' while repositories --- use the 'PlainLayout'. -data DirLayout = PlainLayout | BucketedLayout - -- | Remove unreferenced entries in the pristine cache. cleanPristine :: Repository 'RW p wU wR -> IO () cleanPristine r = withRepoDir r $ do @@ -116,97 +102,54 @@ specialPatches = ["unrevert", "pending", "pending.tentative"] cleanPatches :: Repository 'RW p wU wR -> IO () cleanPatches _ = do debugMessage "Cleaning out patches..." - hs <- (specialPatches ++) <$> listPatchesLocal PlainLayout darcsdir darcsdir + hs <- (specialPatches ++) <$> listPatchesLocal fs <- ifDoesNotExistError [] (listDirectory patchesDirPath) mapM_ (removeFileMayNotExist . (patchesDirPath )) (diffHashLists fs hs) -- | Return a list of the inventories hashes. --- The first argument can be readInventory or readInventoryLocal. --- The second argument specifies whether the files are expected --- to be stored in plain or in bucketed format. --- The third argument is the directory of the parent inventory files. --- The fourth argument is the directory of the head inventory file. -listInventoriesWith - :: (FilePath -> IO Inventory) - -> DirLayout - -> String -> String -> IO [String] -listInventoriesWith readInv dirformat baseDir startDir = do - mbStartingWithInv <- getStartingWithHash startDir tentativeHashedInventory - followStartingWiths mbStartingWithInv +-- The argument can be 'readInventory' or 'readInventoryLocal'. 
+listInventoriesWith :: (FilePath -> IO Inventory) -> IO [String] +listInventoriesWith readInv = do + mbNextInv <- getParent tentativeHashedInventoryPath + withCurrentDirectory inventoriesDirPath (follow mbNextInv) where - getStartingWithHash dir file = inventoryParent <$> readInv (dir </> file) - - invDir = baseDir </> inventoriesDir - nextDir dir = case dirformat of - BucketedLayout -> invDir </> bucketFolder dir - PlainLayout -> invDir - - followStartingWiths Nothing = return [] - followStartingWiths (Just hash) = do - let startingWith = encodeValidHash hash - mbNextInv <- getStartingWithHash (nextDir startingWith) startingWith - (startingWith :) <$> followStartingWiths mbNextInv + getParent path = inventoryParent <$> readInv path + follow Nothing = return [] + follow (Just hash) = do + let parentFileName = encodeValidHash hash + mbNextInv <- getParent parentFileName + (parentFileName :) <$> follow mbNextInv -- | Return a list of the inventories hashes. -- This function attempts to retrieve missing inventory files from the cache. listInventories :: IO [String] -listInventories = - listInventoriesWith readInventory PlainLayout darcsdir darcsdir +listInventories = listInventoriesWith readInventory -- | Return inventories hashes by following the head inventory. -- This function does not attempt to retrieve missing inventory files. listInventoriesLocal :: IO [String] -listInventoriesLocal = - listInventoriesWith readInventoryLocal PlainLayout darcsdir darcsdir - --- | Return a list of the inventories hashes. --- The argument @repoDir@ is the directory of the repository from which --- we are going to read the head inventory file. --- The rest of hashed files are read from the global cache. -listInventoriesRepoDir :: String -> IO [String] -listInventoriesRepoDir repoDir = do - gCacheDir' <- globalCacheDir - let gCacheInvDir = fromJust gCacheDir' - listInventoriesWith - readInventoryLocal - BucketedLayout - gCacheInvDir - (repoDir </> darcsdir) +listInventoriesLocal = listInventoriesWith readInventoryLocal -- | Return a list of the patch filenames, extracted from inventory -- files, by starting with the head inventory and then following the -- chain of parent inventories. -- -- This function does not attempt to download missing inventory files. --- --- * The first argument specifies whether the files are expected --- to be stored in plain or in bucketed format. --- * The second argument is the directory of the parent inventory. --- * The third argument is the directory of the head inventory. -listPatchesLocal :: DirLayout -> String -> String -> IO [String] -listPatchesLocal dirformat baseDir startDir = do - inventory <- readInventory (startDir </> tentativeHashedInventory) +listPatchesLocal :: IO [String] +listPatchesLocal = do + inventory <- readInventory tentativeHashedInventoryPath followStartingWiths (inventoryParent inventory) (inventoryPatchNames inventory) where - invDir = baseDir </> inventoriesDir - nextDir dir = - case dirformat of - BucketedLayout -> invDir </> bucketFolder dir - PlainLayout -> invDir + invDir = inventoriesDirPath followStartingWiths Nothing patches = return patches followStartingWiths (Just hash) patches = do let startingWith = encodeValidHash hash - inv <- readInventoryLocal (nextDir startingWith </> startingWith) + inv <- readInventoryLocal (invDir </> startingWith) (patches ++) <$> followStartingWiths (inventoryParent inv) (inventoryPatchNames inv) --- |listPatchesLocalBucketed is similar to listPatchesLocal, but --- it read the inventory directory under @darcsDir@ in bucketed format.
-listPatchesLocalBucketed :: String -> String -> IO [String] -listPatchesLocalBucketed = listPatchesLocal BucketedLayout - -- | Read the given inventory file if it exist, otherwise return an empty -- inventory. Used when we expect that some inventory files may be missing. -- Still fails with an error message if file cannot be parsed. diff --git a/src/Darcs/UI/ApplyPatches.hs b/src/Darcs/UI/ApplyPatches.hs index f0d34dc5..2074c5c4 100644 --- a/src/Darcs/UI/ApplyPatches.hs +++ b/src/Darcs/UI/ApplyPatches.hs @@ -48,7 +48,7 @@ import Darcs.Util.English ( presentParticiple ) import Darcs.Util.Printer ( vcat, text ) import Darcs.Util.Tree( Tree ) -data PatchProxy (p :: * -> * -> *) = PatchProxy +data PatchProxy (p :: Type -> Type -> Type) = PatchProxy -- |This class is a hack to abstract over pull/apply and rebase pull/apply. class PatchApplier pa where diff --git a/src/Darcs/UI/Commands.hs b/src/Darcs/UI/Commands.hs index afa5520e..054d26ad 100644 --- a/src/Darcs/UI/Commands.hs +++ b/src/Darcs/UI/Commands.hs @@ -49,6 +49,7 @@ module Darcs.UI.Commands , amInRepository , amNotInRepository , findRepository + , noPrereq ) where import Control.Monad ( when, unless ) @@ -58,6 +59,7 @@ import System.Console.GetOpt ( OptDescr ) import System.IO ( stderr ) import System.IO.Error ( catchIOError ) import System.Environment ( setEnv ) +import qualified Text.XML.Light as XML import Darcs.Prelude @@ -97,7 +99,7 @@ import Darcs.UI.PrintPatch ( showWithSummary ) import Darcs.Util.ByteString ( decodeLocale, packStringToUTF8 ) import Darcs.Util.Path ( AbsolutePath, anchorPath ) import Darcs.Util.Printer - ( Doc, text, (<+>), ($$), ($+$), hsep, vcat + ( Doc, text, (<+>), ($+$), hsep , putDocLnWith, hPutDocLn, renderString ) import Darcs.Util.Printer.Color ( fancyPrinters, ePutDocLn ) @@ -142,7 +144,7 @@ data DarcsCommand = -- second one is the path where darcs was executed. (AbsolutePath, AbsolutePath) -> [DarcsFlag] -> [String] -> IO () - , commandPrereq :: [DarcsFlag] -> IO (Either String ()) + , commandPrereq :: CommandPrereq , commandCompleteArgs :: (AbsolutePath, AbsolutePath) -> [DarcsFlag] -> [String] -> IO [String] , commandArgdefaults :: [DarcsFlag] -> AbsolutePath -> [String] @@ -154,10 +156,12 @@ data DarcsCommand = , commandName :: String , commandHelp :: Doc , commandDescription :: String - , commandPrereq :: [DarcsFlag] -> IO (Either String ()) + , commandPrereq :: CommandPrereq , commandSubCommands :: [CommandControl] } +type CommandPrereq = [DarcsFlag] -> IO (Either String ()) + data CommandOptions = CommandOptions { coBasicOptions :: [DarcsOptDescr DarcsFlag] , coAdvancedOptions :: [DarcsOptDescr DarcsFlag] @@ -312,10 +316,9 @@ setEnvDarcsPatches ps = do finishedOneIO k "DARCS_PATCHES" setEnvCautiously "DARCS_PATCHES" (renderString $ showWithSummary ps) finishedOneIO k "DARCS_PATCHES_XML" - setEnvCautiously "DARCS_PATCHES_XML" . renderString $ - text "" $$ - vcat (mapFL (toXml . info) ps) $$ - text "" + setEnvCautiously + "DARCS_PATCHES_XML" $ + XML.ppElement $ XML.unode "patches" $ mapFL (toXml . 
info) ps finishedOneIO k "DARCS_FILES" setEnvCautiously "DARCS_FILES" $ unlines filepaths endTedious k @@ -348,15 +351,18 @@ defaultRepo :: [DarcsFlag] -> AbsolutePath -> [String] -> IO [String] defaultRepo _ _ [] = maybeToList <$> getDefaultRepo defaultRepo _ _ args = return args -amInHashedRepository :: [DarcsFlag] -> IO (Either String ()) +amInHashedRepository :: CommandPrereq amInHashedRepository fs = R.amInHashedRepository (workRepo fs) -amInRepository :: [DarcsFlag] -> IO (Either String ()) +amInRepository :: CommandPrereq amInRepository fs = R.amInRepository (workRepo fs) -amNotInRepository :: [DarcsFlag] -> IO (Either String ()) +amNotInRepository :: CommandPrereq amNotInRepository fs = R.amNotInRepository (maybe WorkRepoCurrentDir WorkRepoDir (newRepo ? fs)) -findRepository :: [DarcsFlag] -> IO (Either String ()) +findRepository :: CommandPrereq findRepository fs = R.findRepository (workRepo fs) + +noPrereq :: CommandPrereq +noPrereq _ = return $ Right () diff --git a/src/Darcs/UI/Commands/Amend.hs b/src/Darcs/UI/Commands/Amend.hs index 3e89db0d..5b7272b4 100644 --- a/src/Darcs/UI/Commands/Amend.hs +++ b/src/Darcs/UI/Commands/Amend.hs @@ -185,6 +185,7 @@ amend = DarcsCommand advancedOpts = O.umask ^ O.setScriptsExecutable + ^ O.canonize allOpts = withStdOpts basicOpts advancedOpts amendCmd fps flags args = pathSetFromArgs fps args >>= doAmend flags @@ -204,13 +205,14 @@ doAmend cfg files = readPendingAndWorking (diffingOpts cfg) repository files -- auxiliary function needed because the witness types differ for the -- isTag case + let da = O.diffAlgorithm ? cfg let go :: FL (PrimOf p) wR wU1 -> IO () go NilFL | not (hasEditMetadata cfg) = putInfo cfg "No changes!" go ch = do let selection_config = selectionConfigPrim First "record" (patchSelOpts cfg) - (Just (primSplitter (O.diffAlgorithm ? cfg))) + (Just (primSplitter da)) files (chosenPatches :> _) <- runInvertibleSelection ch selection_config addChangesToPatch cfg repository kept oldp chosenPatches pending working @@ -221,14 +223,15 @@ doAmend cfg files = then do let selection_config = selectionConfigPrim Last "unrecord" (patchSelOpts cfg) - (Just (primSplitter (O.diffAlgorithm ? cfg))) + (Just (primSplitter da)) files (_ :> chosenPrims) <- runInvertibleSelection (effect oldp) selection_config let invPrims = reverseRL (invertFL chosenPrims) addChangesToPatch cfg repository kept oldp invPrims pending working - else - go (canonizeFL (O.diffAlgorithm ? cfg) (pending +>+ working)) + else do + let maybeCanonize = if O.canonize ? cfg then canonizeFL da else id + go (maybeCanonize (pending +>+ working)) -- amending a tag else if hasEditMetadata cfg && isNothing files diff --git a/src/Darcs/UI/Commands/Annotate.hs b/src/Darcs/UI/Commands/Annotate.hs index e92f8b98..c72c8c17 100644 --- a/src/Darcs/UI/Commands/Annotate.hs +++ b/src/Darcs/UI/Commands/Annotate.hs @@ -42,7 +42,7 @@ import qualified Data.ByteString.Char8 as BC ( pack, concat, intercalate ) import Data.ByteString.Lazy ( toChunks ) import Darcs.Patch.ApplyMonad( withFileNames ) import Darcs.Patch.Match ( patchSetMatch, rollbackToPatchSetMatch ) -import Darcs.Repository.Match ( getOnePatchset ) +import Darcs.Patch.Match ( matchOnePatchset ) import Darcs.Repository.PatchIndex ( getRelevantSubsequence, canUsePatchIndex ) import Darcs.Patch.Witnesses.Sealed ( Sealed(..), seal ) import qualified Darcs.Patch.Annotate as A @@ -104,7 +104,7 @@ annotateCmd' opts fixed_path = withRepository (useCache ? 
opts) $ RepoJob $ \rep (patches, initial, path) <- case patchSetMatch matchFlags of Just psm -> do - Sealed x <- getOnePatchset repository psm + Sealed x <- matchOnePatchset r psm case withFileNames Nothing [fixed_path] (rollbackToPatchSetMatch psm r) of (_, [path'], _) -> do initial <- snd `fmap` virtualTreeIO (rollbackToPatchSetMatch psm r) recorded diff --git a/src/Darcs/UI/Commands/Clone.hs b/src/Darcs/UI/Commands/Clone.hs index a0f5adb5..7390c8ec 100644 --- a/src/Darcs/UI/Commands/Clone.hs +++ b/src/Darcs/UI/Commands/Clone.hs @@ -32,13 +32,16 @@ import System.Exit ( ExitCode(..) ) import System.FilePath.Posix ( joinPath, splitDirectories ) import Control.Monad ( when, unless ) -import Darcs.UI.Commands ( DarcsCommand(..), withStdOpts - , nodefaults - , commandStub - , commandAlias - , putInfo - , putFinished - ) +import Darcs.UI.Commands + ( DarcsCommand(..) + , commandAlias + , commandStub + , noPrereq + , nodefaults + , putFinished + , putInfo + , withStdOpts + ) import Darcs.UI.Completion ( noArgs ) import Darcs.UI.Flags ( DarcsFlag @@ -133,7 +136,7 @@ clone = DarcsCommand , commandExtraArgs = -1 , commandExtraArgHelp = ["", "[]"] , commandCommand = cloneCmd - , commandPrereq = \_ -> return $ Right () + , commandPrereq = noPrereq , commandCompleteArgs = noArgs , commandArgdefaults = nodefaults , commandOptions = cloneOpts diff --git a/src/Darcs/UI/Commands/Convert/Darcs2.hs b/src/Darcs/UI/Commands/Convert/Darcs2.hs index b9a99380..e803023e 100644 --- a/src/Darcs/UI/Commands/Convert/Darcs2.hs +++ b/src/Darcs/UI/Commands/Convert/Darcs2.hs @@ -75,7 +75,13 @@ import Darcs.Repository.Format import Darcs.Repository.Hashed ( UpdatePristine(..), tentativelyAddPatch_ ) import Darcs.Repository.Prefs ( showMotd, prefsFilePath ) -import Darcs.UI.Commands ( DarcsCommand(..), nodefaults, putFinished, withStdOpts ) +import Darcs.UI.Commands + ( DarcsCommand(..) + , noPrereq + , nodefaults + , putFinished + , withStdOpts + ) import Darcs.UI.Commands.Convert.Util ( updatePending ) import Darcs.UI.Commands.Util ( commonHelpWithPrefsTemplates ) import Darcs.UI.Completion ( noArgs ) @@ -134,7 +140,7 @@ convertDarcs2 = DarcsCommand , commandExtraArgs = -1 , commandExtraArgHelp = ["", "[]"] , commandCommand = toDarcs2 - , commandPrereq = \_ -> return $ Right () + , commandPrereq = noPrereq , commandCompleteArgs = noArgs , commandArgdefaults = nodefaults , commandOptions = opts diff --git a/src/Darcs/UI/Commands/Convert/Import.hs b/src/Darcs/UI/Commands/Convert/Import.hs index 2e4c0878..b39d1366 100644 --- a/src/Darcs/UI/Commands/Convert/Import.hs +++ b/src/Darcs/UI/Commands/Convert/Import.hs @@ -80,6 +80,7 @@ import Darcs.Repository.State (readPristine) import Darcs.UI.Commands ( DarcsCommand(..) 
, nodefaults + , noPrereq , withStdOpts ) import Darcs.UI.Commands.Convert.Util @@ -162,7 +163,7 @@ convertImport = DarcsCommand , commandExtraArgs = -1 , commandExtraArgHelp = ["[]"] , commandCommand = fastImport - , commandPrereq = \_ -> return $ Right () + , commandPrereq = noPrereq , commandCompleteArgs = noArgs , commandArgdefaults = nodefaults , commandOptions = opts diff --git a/src/Darcs/UI/Commands/Diff.hs b/src/Darcs/UI/Commands/Diff.hs index 71f6cd85..54f0aea2 100644 --- a/src/Darcs/UI/Commands/Diff.hs +++ b/src/Darcs/UI/Commands/Diff.hs @@ -62,7 +62,7 @@ import Darcs.Util.Lock ( withTempDir ) import Darcs.Util.Path ( AbsolutePath, AnchoredPath, isPrefix, toFilePath ) import Darcs.Util.Printer ( Doc, putDocLn, text, vcat ) import Darcs.Util.Prompt ( askEnter ) -import Darcs.Util.Tree.Hashed ( hashedTreeIO, writeDarcsHashed ) +import Darcs.Util.Tree.Hashed ( hashedTreeIO ) import Darcs.Util.Tree.Plain ( writePlainTree ) import Darcs.Util.Workaround ( getCurrentDirectory ) @@ -199,8 +199,6 @@ doDiff opts mpaths = withRepository (useCache ? opts) $ RepoJob $ \repository -> -- during the 'apply' and 'unapply' operations below. let cache = mkDirCache tdir pristine <- readPristine repository - -- fill our temporary cache - _ <- writeDarcsHashed pristine cache -- @base@ will be like our working tree, /except/ that it contains only -- the unrecorded changes that affect the given file paths, see comment diff --git a/src/Darcs/UI/Commands/Help.hs b/src/Darcs/UI/Commands/Help.hs index e81a9ad0..34c8358d 100644 --- a/src/Darcs/UI/Commands/Help.hs +++ b/src/Darcs/UI/Commands/Help.hs @@ -46,6 +46,7 @@ import Darcs.UI.Commands , disambiguateCommands , extractCommands , getSubcommands + , noPrereq , nodefaults , normalCommand , withStdOpts @@ -127,7 +128,7 @@ help = DarcsCommand , commandExtraArgs = -1 , commandExtraArgHelp = ["[ [DARCS_SUBCOMMAND]] "] , commandCommand = \ x y z -> helpCmd x y z >> exitSuccess - , commandPrereq = \_ -> return $ Right () + , commandPrereq = noPrereq , commandCompleteArgs = \_ _ -> return . 
completeArgs , commandArgdefaults = nodefaults , commandOptions = withStdOpts oid oid diff --git a/src/Darcs/UI/Commands/Init.hs b/src/Darcs/UI/Commands/Init.hs index b1e56952..5271e89f 100644 --- a/src/Darcs/UI/Commands/Init.hs +++ b/src/Darcs/UI/Commands/Init.hs @@ -30,6 +30,7 @@ import Darcs.UI.Commands , putFinished , withStdOpts , putWarning + , noPrereq ) import Darcs.UI.Commands.Util ( commonHelpWithPrefsTemplates ) import Darcs.UI.Completion ( noArgs ) @@ -91,7 +92,7 @@ initialize = DarcsCommand , commandDescription = initializeDescription , commandExtraArgs = -1 , commandExtraArgHelp = ["[]"] - , commandPrereq = \_ -> return $ Right () + , commandPrereq = noPrereq , commandCommand = initializeCmd , commandCompleteArgs = noArgs , commandArgdefaults = nodefaults diff --git a/src/Darcs/UI/Commands/Log.hs b/src/Darcs/UI/Commands/Log.hs index b971a6dc..56a1383f 100644 --- a/src/Darcs/UI/Commands/Log.hs +++ b/src/Darcs/UI/Commands/Log.hs @@ -32,6 +32,7 @@ import Control.Arrow ( second ) import Control.Exception ( catch, IOException ) import Control.Monad ( when, unless ) import Control.Monad.State.Strict ( evalState, get, gets, modify ) +import qualified Text.XML.Light as XML import Darcs.UI.PrintPatch ( showFriendly ) import Darcs.Patch.PatchInfoAnd ( PatchInfoAndG, fmapFLPIAP, hopefullyM, info ) @@ -62,7 +63,7 @@ import Darcs.Repository ( PatchInfoAnd, import Darcs.Util.Lock ( withTempDir ) import Darcs.Patch.Set ( PatchSet, patchSet2RL, Origin ) import Darcs.Patch.Format ( PatchListFormat ) -import Darcs.Patch.Info ( toXml, toXmlShort, showPatchInfo, displayPatchInfo, escapeXML, PatchInfo ) +import Darcs.Patch.Info ( toXml, toXmlShort, showPatchInfo, displayPatchInfo, PatchInfo ) import Darcs.Patch.Ident ( PatchId ) import Darcs.Patch.Invertible ( mkInvertible ) import Darcs.Patch.Depends ( contextPatches ) @@ -92,12 +93,11 @@ import Darcs.Util.Printer , ($$) , (<+>) , formatWords + , fromXml , hsep - , insertBeforeLastline , prefix , simplePrinters , text - , vcat , vsep ) import Darcs.Util.Printer.Color ( fancyPrinters ) @@ -373,7 +373,7 @@ changelog :: forall p wStart wX changelog opts patches li | O.changesFormat ? opts == Just O.CountPatches = text $ show $ length $ liPatches li - | hasXmlOutput opts = xml_changelog + | hasXmlOutput opts = fromXml xml_changelog | O.yes (O.withSummary ? opts) || verbose opts = vsep (map (number_patch change_with_summary) ps) $$ mbErr | otherwise = vsep (map (number_patch description') ps) $$ mbErr @@ -387,33 +387,28 @@ changelog opts patches li else showFriendly (verbosity ? opts) (O.withSummary ? opts) p | otherwise = description hp $$ indent (text "[this patch is unavailable]") - xml_changelog = vcat - [ text "" - , vcat xml_created_as - , vcat xml_changes - , text "" - ] + xml_changelog = XML.unode "changelog" (xml_created_as ++ xml_changes) - xml_with_summary :: Sealed2 (PatchInfoAndG p) -> Doc + xml_with_summary :: Sealed2 (PatchInfoAndG p) -> XML.Element xml_with_summary (Sealed2 hp) | Just p <- hopefullyM hp = - let - deps = getdeps p - xmlDependencies = - text "" - $$ vcat (map (indent . 
toXmlShort) deps) - $$ text "" - summary | deps == [] = indent $ xmlSummary p - | otherwise = indent $ xmlDependencies $$ xmlSummary p - in - insertBeforeLastline (toXml $ info hp) summary + let + deps = getdeps p + xmlDeps = XML.unode "explicit_dependencies" (map toXmlShort deps) + summary | null deps = [xmlSummary p] + | otherwise = [xmlDeps] ++ [xmlSummary p] + xml_info = toXml $ info hp + in + xml_info { XML.elContent = XML.elContent xml_info ++ map XML.Elem summary } xml_with_summary (Sealed2 hp) = toXml (info hp) indent = prefix " " + xml_changes :: [XML.Element] xml_changes = case O.withSummary ? opts of O.YesSummary -> map xml_with_summary ps O.NoSummary -> map (toXml . unseal2 info) ps + xml_created_as :: [XML.Element] xml_created_as = map create (liRenames li) where - create :: (AnchoredPath, AnchoredPath) -> Doc + create :: (AnchoredPath, AnchoredPath) -> XML.Element create rename@(_, as) = createdAsXml (first_change_of as) rename -- We need to reorder the patches when they haven't been reversed -- already, so that we find the *first* patch that modifies a given @@ -457,15 +452,14 @@ logContext opts = do changes :: DarcsCommand changes = commandAlias "changes" Nothing log -createdAsXml :: PatchInfo -> (AnchoredPath, AnchoredPath) -> Doc +createdAsXml :: PatchInfo -> (AnchoredPath, AnchoredPath) -> XML.Element createdAsXml pinfo (current, createdAs) = - text "" - $$ toXml pinfo - $$ text "" + XML.unode "created_as" + ( [ XML.Attr (XML.unqual "current_name") (displayPath current) + , XML.Attr (XML.unqual "original_name") (displayPath createdAs) + ] + , toXml pinfo + ) logPatchSelOpts :: [DarcsFlag] -> S.PatchSelectionOptions logPatchSelOpts flags = S.PatchSelectionOptions diff --git a/src/Darcs/UI/Commands/Optimize.hs b/src/Darcs/UI/Commands/Optimize.hs index fb0ae557..9c6e6dae 100644 --- a/src/Darcs/UI/Commands/Optimize.hs +++ b/src/Darcs/UI/Commands/Optimize.hs @@ -32,7 +32,7 @@ import System.Directory , removeDirectoryRecursive , withCurrentDirectory ) -import Darcs.UI.Commands ( DarcsCommand(..), nodefaults +import Darcs.UI.Commands ( DarcsCommand(..), nodefaults, noPrereq , amInHashedRepository, amInRepository, putInfo , normalCommand, withStdOpts ) import Darcs.UI.Completion ( noArgs ) @@ -547,7 +547,7 @@ optimizeGlobalCache = common , commandHelp = optimizeHelpGlobalCache , commandDescription = "Garbage collect global cache" , commandCommand = optimizeGlobalCacheCmd - , commandPrereq = \_ -> return $ Right () + , commandPrereq = noPrereq } optimizeHelpGlobalCache :: Doc diff --git a/src/Darcs/UI/Commands/Rebase.hs b/src/Darcs/UI/Commands/Rebase.hs index 0f38cbdd..695078ad 100644 --- a/src/Darcs/UI/Commands/Rebase.hs +++ b/src/Darcs/UI/Commands/Rebase.hs @@ -525,7 +525,7 @@ inject = DarcsCommand , commandOptions = injectOpts } where - injectBasicOpts = O.keepDate ^ O.author ^ O.diffAlgorithm + injectBasicOpts = O.keepDate ^ O.author ^ O.diffAlgorithm ^ O.withSummary injectOpts = injectBasicOpts `withStdOpts` O.umask injectDescription = "Merge a change from the fixups of a patch into the patch itself." @@ -600,7 +600,7 @@ obliterate = DarcsCommand , commandOptions = obliterateOpts } where - obliterateBasicOpts = O.diffAlgorithm + obliterateBasicOpts = O.diffAlgorithm ^ O.withSummary obliterateOpts = obliterateBasicOpts `withStdOpts` O.umask obliterateDescription = "Obliterate a patch that is currently suspended." 
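[Editorial sketch, not part of the patch.] The Amend.hs hunk above, and the Record.hs and PatchHeader.hs hunks below, all gate canonization behind the new canonize option with a local helper of the form maybeCanonize = if O.canonize ? cfg then canonizeFL da else id. A minimal, self-contained Haskell illustration of that gating pattern follows; the helper name applyIfWanted is hypothetical and in the real hunks the transformation is canonizeFL applied with the selected diff algorithm.

    -- Hedged sketch of the gating pattern used for the canonize option.
    -- 'applyIfWanted' is a hypothetical name, not part of darcs.
    applyIfWanted :: Bool -> (a -> a) -> a -> a
    applyIfWanted wanted f = if wanted then f else id

    -- In the hunks this appears inline as:
    --   maybeCanonize = if O.canonize ? cfg then canonizeFL da else id
    -- so with --no-canonize the selected prims are recorded verbatim, which
    -- the option help below describes as a testing aid for reconstructing
    -- failing QuickCheck cases for named patches.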
diff --git a/src/Darcs/UI/Commands/Record.hs b/src/Darcs/UI/Commands/Record.hs index 0b375b60..eb078215 100644 --- a/src/Darcs/UI/Commands/Record.hs +++ b/src/Darcs/UI/Commands/Record.hs @@ -206,6 +206,7 @@ record = DarcsCommand = O.logfile ^ O.umask ^ O.setScriptsExecutable + ^ O.canonize allOpts = basicOpts `withStdOpts` advancedOpts -- | commit is an alias for record @@ -271,7 +272,8 @@ doRecord repository cfg files pw@(pending :> working) = do debugMessage "I'm slurping the repository." debugMessage "About to select changes..." let da = O.diffAlgorithm ? cfg - (chs :> _ ) <- runInvertibleSelection (canonizeFL da $ pending +>+ working) $ + maybeCanonize = if O.canonize ? cfg then canonizeFL da else id + (chs :> _ ) <- runInvertibleSelection (maybeCanonize $ pending +>+ working) $ selectionConfigPrim First "record" (patchSelOpts cfg) (Just (primSplitter (O.diffAlgorithm ? cfg))) diff --git a/src/Darcs/UI/Commands/Send.hs b/src/Darcs/UI/Commands/Send.hs index e9a25d30..b29ba4d5 100644 --- a/src/Darcs/UI/Commands/Send.hs +++ b/src/Darcs/UI/Commands/Send.hs @@ -126,7 +126,7 @@ import Darcs.UI.Completion ( prefArgs ) import Darcs.UI.Commands.Util ( getUniqueDPatchName ) import Darcs.Util.Printer ( Doc, formatWords, vsep, text, ($$), (<+>), putDoc, putDocLn - , quoted, renderPS, sentence, vcat + , quoted, renderPS, vcat ) import Darcs.Util.English ( englishNum, Noun(..) ) import Darcs.Util.Exception ( catchall ) @@ -598,7 +598,7 @@ selectionIsNull :: Doc selectionIsNull = text "You don't want to send any patches, and that's fine with me!" emailBackedUp :: String -> Doc -emailBackedUp mf = sentence $ "Email body left in" <+> text mf <> "." +emailBackedUp mf = "Email body left in" <+> text mf <> "." promptCharSetWarning :: String -> String promptCharSetWarning msg = "Warning: " ++ msg ++ " Send anyway?" @@ -613,8 +613,8 @@ aborted :: Doc aborted = "Aborted." success :: String -> String -> Doc -success to cc = sentence $ - "Successfully sent patch bundle to:" <+> text to <+> copies cc +success to cc = + "Successfully sent patch bundle to:" <+> text to <+> copies cc <> "." where copies "" = "" copies x = "and cc'ed" <+> text x @@ -623,7 +623,7 @@ postingPatch :: String -> Doc postingPatch url = "Posting patch to" <+> text url wroteBundle :: FilePathLike a => a -> Doc -wroteBundle a = sentence $ "Wrote patch to" <+> text (toFilePath a) +wroteBundle a = "Wrote patch to" <+> text (toFilePath a) <> "." savedButNotSent :: String -> Doc savedButNotSent to = diff --git a/src/Darcs/UI/Commands/ShowAuthors.hs b/src/Darcs/UI/Commands/ShowAuthors.hs index a1015baa..e1d67452 100644 --- a/src/Darcs/UI/Commands/ShowAuthors.hs +++ b/src/Darcs/UI/Commands/ShowAuthors.hs @@ -15,6 +15,9 @@ -- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, -- Boston, MA 02110-1301, USA. 
+-- to suppress an irrelevant warning GHC 9.10 and 9.12 +-- NE.unzip is shifting to being monomorphic but our usage already is +{-# OPTIONS_GHC -Wno-x-data-list-nonempty-unzip #-} module Darcs.UI.Commands.ShowAuthors ( showAuthors, Spelling, compiledAuthorSpellings, canonizeAuthor, rankAuthors ) where diff --git a/src/Darcs/UI/Commands/ShowRepo.hs b/src/Darcs/UI/Commands/ShowRepo.hs index b8d32d01..99d266d7 100644 --- a/src/Darcs/UI/Commands/ShowRepo.hs +++ b/src/Darcs/UI/Commands/ShowRepo.hs @@ -19,10 +19,10 @@ module Darcs.UI.Commands.ShowRepo ( showRepo ) where import Darcs.Prelude -import Data.Char ( toLower, isSpace ) import Data.List ( intercalate ) -import Control.Monad ( when, unless, liftM ) -import Text.Html ( tag, stringToHtml ) +import Data.Maybe ( catMaybes ) +import qualified Text.XML.Light as XML + import Darcs.Util.Path ( AbsolutePath ) import Darcs.UI.Flags ( DarcsFlag, useCache, hasXmlOutput, enumeratePatches ) import Darcs.UI.Options ( (^), oid, (?) ) @@ -31,6 +31,9 @@ import Darcs.UI.Commands ( DarcsCommand(..), withStdOpts, nodefaults, amInReposi import Darcs.UI.Completion ( noArgs ) import Darcs.Repository ( Repository + , RepoFormat + , PristineType + , Cache , repoFormat , repoLocation , repoPristineType @@ -41,15 +44,16 @@ import Darcs.Repository import Darcs.Repository.Hashed( repoXor ) import Darcs.Repository.PatchIndex ( isPatchIndexDisabled, doesPatchIndexExist ) import Darcs.Repository.Prefs - ( Pref(Author, Defaultrepo, Prefs) + ( Pref(Author, Defaultrepo) , getMotd , getPreflist + , getPrefval ) import Darcs.Patch ( RepoPatch ) import Darcs.Patch.Set ( patchSet2RL ) import Darcs.Patch.Witnesses.Ordered ( lengthRL ) -import qualified Data.ByteString.Char8 as BC (unpack) import Darcs.Patch.Apply( ApplyState ) +import Darcs.Util.ByteString ( decodeLocale ) import Darcs.Util.Printer ( Doc, text ) import Darcs.Util.Tree ( Tree ) @@ -94,89 +98,119 @@ showRepo = DarcsCommand showRepoOpts = showRepoBasicOpts `withStdOpts` oid repoCmd :: (AbsolutePath, AbsolutePath) -> [DarcsFlag] -> [String] -> IO () -repoCmd _ opts _ = - let put_mode = if hasXmlOutput opts then showInfoXML else showInfoUsr - in withRepository (useCache ? opts) $ - RepoJob $ \repository -> - actuallyShowRepo (putInfo put_mode) repository opts - --- Some convenience functions to output a labelled text string or an --- XML tag + value (same API). If no value, output is suppressed --- entirely. Borrow some help from Text.Html to perform XML output. - -type ShowInfo = String -> String -> String - -showInfoXML :: ShowInfo -showInfoXML t i = show $ tag (safeTag t) $ stringToHtml i - -safeTag :: String -> String -safeTag [] = [] -safeTag (' ':cs) = safeTag cs -safeTag ('#':cs) = "num_" ++ safeTag cs -safeTag (c:cs) = toLower c : safeTag cs - --- labelled strings: labels are right-aligned at 15 characters; --- subsequent lines in multi-line output are indented accordingly. -showInfoUsr :: ShowInfo -showInfoUsr t i = replicate (15 - length t) ' ' ++ t ++ ": " ++ - intercalate ('\n' : replicate 17 ' ') (lines i) ++ "\n" - -type PutInfo = String -> String -> IO () -putInfo :: ShowInfo -> PutInfo -putInfo m t i = unless (null i) (putStr $ m t i) - --- Primary show-repo operation. Determines ordering of output for --- sub-displays. The `out' argument is one of the above operations to --- output a labelled text string or an XML tag and contained value. - -actuallyShowRepo +repoCmd _ opts _ = do + withRepository (useCache ? opts) $ + RepoJob $ \r -> + getRepoInfo r opts >>= putStr . 
+ if hasXmlOutput opts then + XML.ppElement . XML.unode "repository" + else + showRepoInfo + +data RepoInfo = RepoInfo + { riFormat :: RepoFormat + , riRoot :: String + , riPristineType :: PristineType + , riCache :: Cache + , riPatchIndex :: String + , riTestPref :: Maybe String + , riBinariesfilePref :: Maybe String + , riBoringfilePref :: Maybe String + , riPredistPref :: Maybe String + , riAuthor :: Maybe String + , riDefaultRemote :: Maybe String + , riNumPatches :: Maybe Int + , riWeakHash :: Maybe String + , riMotd :: Maybe String + } + +getRepoInfo :: (RepoPatch p, ApplyState p ~ Tree) - => PutInfo -> Repository rt p wU wR -> [DarcsFlag] -> IO () -actuallyShowRepo out r opts = do - when (hasXmlOutput opts) (putStr "\n") - out "Format" $ showInOneLine $ repoFormat r - let loc = repoLocation r - out "Root" loc - out "PristineType" $ show $ repoPristineType r - out "Cache" $ showInOneLine $ repoCache r - piExists <- doesPatchIndexExist loc - piDisabled <- isPatchIndexDisabled loc - out "PatchIndex" $ - case (piExists, piDisabled) of - (_, True) -> "disabled" - (True, False) -> "enabled" - (False, False) -> "enabled, but not yet created" - showRepoPrefs out - when (enumeratePatches opts) (do numPatches r >>= (out "Num Patches" . show) - showXor out r) - showRepoMOTD out r - when (hasXmlOutput opts) (putStr "\n") - -showXor :: (RepoPatch p, ApplyState p ~ Tree) - => PutInfo -> Repository rt p wU wR -> IO () -showXor out repo = do - theXor <- repoXor repo - out "Weak Hash" (show theXor) - --- Most of the actual elements being displayed are part of the Show --- class; that's fine for a Haskeller, but not for the common user, so --- the routines below work to provide more human-readable information --- regarding the repository elements. + => Repository rt p wU wR -> [DarcsFlag] -> IO RepoInfo +getRepoInfo r opts = do + let riFormat = repoFormat r + let riRoot = repoLocation r + let riPristineType = repoPristineType r + let riCache = repoCache r + piExists <- doesPatchIndexExist riRoot + piDisabled <- isPatchIndexDisabled riRoot + let riPatchIndex = showPatchIndexInfo (piExists, piDisabled) + riBinariesfilePref <- getPrefval "binariesfile" + riBoringfilePref <- getPrefval "boringfile" + riPredistPref <- getPrefval "predist" + riTestPref <- getPrefval "test" + let unlessnull x = if null x then Nothing else Just x + riAuthor <- showPrefList <$> getPreflist Author + riDefaultRemote <- showPrefList <$> getPreflist Defaultrepo + riNumPatches <- + if enumeratePatches opts then + Just . lengthRL . patchSet2RL <$> readPatches r + else + return Nothing + riWeakHash <- + if enumeratePatches opts then Just . show <$> repoXor r else return Nothing + riMotd <- unlessnull . decodeLocale <$> getMotd riRoot + return $ RepoInfo {..} + +instance XML.Node RepoInfo where + node qn RepoInfo {..} = + XML.node qn $ + [ XML.unode "format" $ showInOneLine riFormat + , XML.unode "root" riRoot + , XML.unode "pristinetype" (show riPristineType) + , XML.unode "cache" (showInOneLine riCache) + , XML.unode "patchindex" riPatchIndex + ] + ++ + catMaybes + [ XML.unode "testpref" <$> riTestPref + , XML.unode "binariesfilepref" <$> riBinariesfilePref + , XML.unode "boringfilepref" <$> riBoringfilePref + , XML.unode "predistpref" <$> riPredistPref + , XML.unode "author" <$> riAuthor + , XML.unode "defaultremote" <$> riDefaultRemote + , XML.unode "numpatches" . 
show <$> riNumPatches + , XML.unode "weakhash" <$> riWeakHash + , XML.unode "motd" <$> riMotd + ] + +showRepoInfo :: RepoInfo -> String +showRepoInfo RepoInfo{..} = + unlines $ + [ out "Format" $ showInOneLine riFormat + , out "Root" riRoot + , out "PristineType" $ show riPristineType + , out "Cache" $ showInOneLine $ riCache + , out "PatchIndex" $ riPatchIndex + ] + ++ catMaybes + [ out "test Pref" <$> riTestPref + , out "binariesfile Pref" <$> riBinariesfilePref + , out "boringfile Pref" <$> riBoringfilePref + , out "predist Pref" <$> riPredistPref + , out "Author" <$> riAuthor + , out "Default Remote" <$> riDefaultRemote + , out "Num Patches" . show <$> riNumPatches + , out "Weak Hash" <$> riWeakHash + , out "MOTD" <$> riMotd + ] + where + -- labelled strings: labels are right-aligned at 15 characters; + -- subsequent lines in multi-line output are indented accordingly. + out t i = + replicate (15 - length t) ' ' ++ t ++ ": " ++ + intercalate ('\n' : replicate 17 ' ') (lines i) + +showPatchIndexInfo :: (Bool, Bool) -> String +showPatchIndexInfo pi = + case pi of + (_, True) -> "disabled" + (True, False) -> "enabled" + (False, False) -> "enabled, but not yet created" showInOneLine :: Show a => a -> String showInOneLine = intercalate ", " . lines . show -showRepoPrefs :: PutInfo -> IO () -showRepoPrefs out = do - getPreflist Prefs >>= mapM_ prefOut - getPreflist Author >>= out "Author" . unlines - getPreflist Defaultrepo >>= out "Default Remote" . unlines - where prefOut = uncurry out . (\(p,v) -> (p++" Pref", dropWhile isSpace v)) . break isSpace - -showRepoMOTD :: PutInfo -> Repository rt p wU wR -> IO () -showRepoMOTD out repo = getMotd (repoLocation repo) >>= out "MOTD" . BC.unpack - --- Support routines to provide information used by the PutInfo operations above. - -numPatches :: (RepoPatch p, ApplyState p ~ Tree) => Repository rt p wU wR -> IO Int -numPatches r = (lengthRL . patchSet2RL) `liftM` readPatches r +showPrefList :: [String] -> Maybe String +showPrefList [] = Nothing +showPrefList ss = Just $ intercalate ", " ss diff --git a/src/Darcs/UI/Commands/ShowTags.hs b/src/Darcs/UI/Commands/ShowTags.hs index 0d4148a1..76863534 100644 --- a/src/Darcs/UI/Commands/ShowTags.hs +++ b/src/Darcs/UI/Commands/ShowTags.hs @@ -22,15 +22,21 @@ module Darcs.UI.Commands.ShowTags import Darcs.Prelude import Control.Monad ( unless ) -import Data.Maybe ( fromMaybe ) +import Data.Maybe ( fromMaybe, maybeToList ) import System.IO ( stderr, hPutStrLn ) -import Darcs.Patch.Set ( PatchSet, patchSetTags ) +import Darcs.Patch.Match + ( MatchFlag(OnePattern) + , MatchableRP + , checkMatchSyntax + , matchAPatch + ) +import Darcs.Patch.Set ( PatchSet(..), patchSetTags, tagsCovering ) import Darcs.Repository ( readPatches, withRepositoryLocation, RepoJob(..) ) import Darcs.UI.Commands ( DarcsCommand(..), withStdOpts, nodefaults, findRepository ) import Darcs.UI.Completion ( noArgs ) import Darcs.UI.Flags ( DarcsFlag, useCache, getRepourl ) -import Darcs.UI.Options ( oid, (?) 
) +import Darcs.UI.Options ( oid, (?), (^) ) import qualified Darcs.UI.Options.All as O import Darcs.Util.Path ( AbsolutePath ) import Darcs.Util.Printer ( Doc, formatText ) @@ -62,7 +68,7 @@ showTags = DarcsCommand , commandOptions = showTagsOpts } where - showTagsBasicOpts = O.possiblyRemoteRepo + showTagsBasicOpts = O.possiblyRemoteRepo ^ O.covering showTagsOpts = showTagsBasicOpts `withStdOpts` oid tagsCmd :: (AbsolutePath, AbsolutePath) -> [DarcsFlag] -> [String] -> IO () @@ -70,8 +76,12 @@ tagsCmd _ opts _ = let repodir = fromMaybe "." (getRepourl opts) in withRepositoryLocation (useCache ? opts) repodir $ RepoJob $ \repo -> readPatches repo >>= printTags where - printTags :: PatchSet p wW wZ -> IO () - printTags = mapM_ process . patchSetTags + printTags :: MatchableRP p => PatchSet p wW wZ -> IO () + printTags ps = do + checkMatchSyntax (maybeToList (fmap OnePattern (O.covering ? opts))) + case findTags ps of + Nothing -> fail "No patches matching the pattern have been found." + Just ts -> mapM_ process ts process :: String -> IO () process t = normalize t t False >>= putStrLn normalize :: String -> String -> Bool -> IO String @@ -85,3 +95,8 @@ tagsCmd _ opts _ = let repodir = fromMaybe "." (getRepourl opts) in else do rest <- normalize t xs flag return $ x : rest + findTags :: MatchableRP p => PatchSet p wX wY -> Maybe [String] + findTags = + case O.covering ? opts of + Nothing -> Just . patchSetTags + Just pat -> tagsCovering (matchAPatch [OnePattern pat]) diff --git a/src/Darcs/UI/Commands/Test/Impl.hs b/src/Darcs/UI/Commands/Test/Impl.hs index 202b3c3f..256e171b 100644 --- a/src/Darcs/UI/Commands/Test/Impl.hs +++ b/src/Darcs/UI/Commands/Test/Impl.hs @@ -61,7 +61,7 @@ import Darcs.Repository.ApplyPatches ( DefaultIO, runDefault ) -- |This type is used to track the state of the testing tree. -- For example, 'Testing IO wX wY Int' requires that the testing -- tree start in state 'wX', and leaves it in state 'wY'. -newtype Testing m (wX :: *) (wY :: *) a = Testing { unTesting :: m a } +newtype Testing m (wX :: Type) (wY :: Type) a = Testing { unTesting :: m a } -- |Once we've finished tracking down a test failure, we no longer care -- about tracking the actual state of the testing tree. This witness @@ -111,8 +111,8 @@ liftTesting m = TestingEnv $ ReaderT $ \_ -> m -- the only real implementation, the unit tests for testing are based on -- mock implementations. class Monad m => TestRunner m where - type ApplyPatchReqs m (p :: * -> * -> *) :: Constraint - type DisplayPatchReqs m (p :: * -> * -> *) :: Constraint + type ApplyPatchReqs m (p :: Type -> Type -> Type) :: Constraint + type DisplayPatchReqs m (p :: Type -> Type -> Type) :: Constraint -- |Output a message writeMsg :: String -> m wX wX () @@ -191,12 +191,12 @@ exitCodeToTestResult (ExitFailure 125) = Untestable exitCodeToTestResult (ExitFailure n) = Testable (Failure (TestFailure n)) -- |A 'TestCmd' runs the test on a given repository state. -data TestCmd = TestCmd (forall (wX :: *) . TestingIO wX wX (TestResult wX)) +data TestCmd = TestCmd (forall (wX :: Type) . TestingIO wX wX (TestResult wX)) runTestCmd :: TestCmd -> TestingIO wX wX (TestResult wX) runTestCmd (TestCmd cmd) = cmd -mkTestCmd :: (forall (wX :: *) . IO (TestResult wX)) -> TestCmd +mkTestCmd :: (forall (wX :: Type) . 
IO (TestResult wX)) -> TestCmd mkTestCmd cmd = TestCmd (Testing cmd) -- |'PatchSeq' is a sequence of patches, implemented as a binary tree, @@ -270,7 +270,7 @@ type StrategyResultSealed p = -- what should be done with the final result of the strategy. This for -- example allows a post-processing "minimise blame" pass to be run. -- The witnesses make it hard to wrap this up in a standard abstraction. -data WithResult (m :: * -> * -> * -> *) p a = +data WithResult (m :: Type -> Type -> Type -> Type) p a = WithResult { runWithResult :: forall wSuccess wFailure diff --git a/src/Darcs/UI/Commands/Unrecord.hs b/src/Darcs/UI/Commands/Unrecord.hs index 0a456cfd..f3b213b1 100644 --- a/src/Darcs/UI/Commands/Unrecord.hs +++ b/src/Darcs/UI/Commands/Unrecord.hs @@ -94,7 +94,7 @@ import qualified Darcs.UI.SelectChanges as S ( PatchSelectionOptions(..) ) import Darcs.Util.English ( presentParticiple ) import Darcs.Util.Lock ( writeDocBinFile ) import Darcs.Util.Path ( AbsolutePath, toFilePath, useAbsoluteOrStd ) -import Darcs.Util.Printer ( Doc, formatWords, putDoc, sentence, text, ($+$), (<+>) ) +import Darcs.Util.Printer ( Doc, formatWords, putDoc, text, ($+$), (<+>) ) import Darcs.Util.Progress ( debugMessage ) import Darcs.Util.Prompt ( promptYorn ) import Darcs.Util.SignalHandler ( catchInterrupt, withSignalsBlocked ) @@ -304,7 +304,7 @@ savetoBundle opts removed@(x :>: _) orig = do when exists $ fail $ "Directory or file named '" ++ (show outname) ++ "' already exists." useAbsoluteOrStd writeDocBinFile putDoc outname bundle - putInfo opts $ sentence $ + putInfo opts $ (<> ".") $ useAbsoluteOrStd (("Saved patch bundle" <+>) . text . toFilePath) (text "stdout") diff --git a/src/Darcs/UI/Commands/Util.hs b/src/Darcs/UI/Commands/Util.hs index e38dd725..454c1069 100644 --- a/src/Darcs/UI/Commands/Util.hs +++ b/src/Darcs/UI/Commands/Util.hs @@ -43,6 +43,8 @@ import Data.Maybe ( fromMaybe ) import System.Exit ( ExitCode(..), exitWith, exitSuccess ) import System.Posix.Files ( isDirectory ) +import qualified Text.XML.Light as XML + import Darcs.Patch ( RepoPatch, xmlSummary ) import Darcs.Patch.Apply ( ApplyState ) import Darcs.Patch.Depends @@ -91,8 +93,8 @@ import Darcs.Util.Exception ( clarifyErrors ) import Darcs.Util.File ( getFileStatus ) import Darcs.Util.Path ( AnchoredPath, displayPath, getUniquePathName ) import Darcs.Util.Printer - ( Doc, formatWords, ($+$), text, (<+>), hsep, ($$), vcat, vsep - , putDocLn, insertBeforeLastline, prefix + ( Doc, formatWords, ($+$), text, (<+>), hsep, ($$), vsep + , putDocLn, fromXml , putDocLnWith, pathlist ) import Darcs.Util.Printer.Color ( fancyPrinters ) @@ -147,23 +149,19 @@ printDryRunMessageAndExit action v s d x interactive patches = do putInfoX $ hsep [ "Will", text action, "the following patches:" ] putDocLn put_mode where - put_mode = if x == YesXml - then text "" $$ - vcat (mapFL (indent . xml_info s) patches) $$ - text "" - else vsep $ mapFL (showFriendly v s) patches + put_mode = + if x == YesXml + then + fromXml $ XML.unode "patches" $ mapFL (xml_info s) patches + else vsep $ mapFL (showFriendly v s) patches putInfoX = if x == YesXml then const (return ()) else putDocLn - xml_info YesSummary = xml_with_summary - xml_info NoSummary = toXml . 
info - - xml_with_summary hp - | Just p <- hopefullyM hp = insertBeforeLastline (toXml $ info hp) - (indent $ xmlSummary p) - xml_with_summary hp = toXml (info hp) - - indent = prefix " " + xml_info YesSummary hp + | Just p <- hopefullyM hp = + let el = toXml (info hp) + in el { XML.elContent = XML.elContent el ++ [XML.Elem (xmlSummary p)] } + xml_info _ hp = toXml (info hp) -- | Given a repository and two common command options, classify the given list -- of paths according to whether they exist in the pristine or working tree. diff --git a/src/Darcs/UI/External.hs b/src/Darcs/UI/External.hs index 77028404..03215896 100644 --- a/src/Darcs/UI/External.hs +++ b/src/Darcs/UI/External.hs @@ -415,12 +415,14 @@ getViewer = Just `fmap` (getEnv "DARCS_PAGER" `catchall` getEnv "PAGER") return Nothing pipeDocToPager :: String -> [String] -> Printers -> Doc -> IO ExitCode - pipeDocToPager "" _ pr inp = do hPutDocLnWith pr stdout inp return ExitSuccess - -pipeDocToPager c args pr inp = pipeDocInternal (PipeToOther pr) c args inp +pipeDocToPager c args pr inp = + -- Evaluate pr with the current stdout, not the pipe's write end, + -- so we get colored output with less. Note that we pass it -R so + -- that it doesn't escape color codes. + pipeDocInternal (PipeToOther (const (pr stdout))) c args inp -- | Given two shell commands as arguments, execute the former. The -- latter is then executed if the former failed because the executable diff --git a/src/Darcs/UI/Options/All.hs b/src/Darcs/UI/Options/All.hs index 51c6c337..6c9d7ec5 100644 --- a/src/Darcs/UI/Options/All.hs +++ b/src/Darcs/UI/Options/All.hs @@ -212,6 +212,9 @@ module Darcs.UI.Options.All , ChangesFormat (..) , changesFormat + -- record + , canonize + -- replace , tokens , forceReplace @@ -232,6 +235,9 @@ module Darcs.UI.Options.All , EnumPatches (..) , enumPatches + -- show tags + , covering + -- gzcrcs , GzcrcsAction (..) , gzcrcsActions @@ -1227,6 +1233,15 @@ changesFormat = withDefault Nothing , RawNoArg [] ["number"] F.NumberPatches (Just NumberPatches) "number the changes" , RawNoArg [] ["count"] F.Count (Just CountPatches) "output count of changes" ] +-- ** record, amend + +-- | This one is only for testing. It allows to reconstruct failing QC test cases +-- for named patches. 
+canonize :: PrimDarcsOption Bool +canonize = withDefault True + [ RawNoArg [] ["canonize"] F.Canonize True "canonize changes before recording" + , RawNoArg [] ["no-canonize"] F.NoCanonize False "do not canonize changes before recording" ] + -- ** replace tokens :: PrimDarcsOption (Maybe String) @@ -1292,6 +1307,13 @@ enumPatches = withDefault YesEnumPatches , RawNoArg [] ["no-enum-patches"] F.NoEnumPatches NoEnumPatches "don't include statistics requiring enumeration of patches" ] +-- ** show tags + +covering :: PrimDarcsOption (Maybe String) +covering = singleStrArg [] ["covering"] F.Covering arg "PATTERN" + "show tags that depend on the latest patch that matches PATTERN" + where arg (F.Covering s) = Just s; arg _ = Nothing + -- ** gzcrcs data GzcrcsAction = GzcrcsCheck | GzcrcsRepair deriving (Eq, Show) diff --git a/src/Darcs/UI/Options/Flags.hs b/src/Darcs/UI/Options/Flags.hs index 71f0f216..5d0eaf4f 100644 --- a/src/Darcs/UI/Options/Flags.hs +++ b/src/Darcs/UI/Options/Flags.hs @@ -46,6 +46,7 @@ data DarcsFlag = Version | ExactVersion | ListCommands | Verify AbsolutePath | VerifySSL AbsolutePath | RemoteDarcsOpt String | EditDescription | NoEditDescription + | Canonize | NoCanonize | Toks String | EditLongComment | NoEditLongComment | PromptLongComment | KeepDate | NoKeepDate @@ -98,6 +99,7 @@ data DarcsFlag = Version | ExactVersion | ListCommands | PatchIndexFlag | NoPatchIndexFlag | EnumPatches | NoEnumPatches + | Covering String | WithPrefsTemplates | NoPrefsTemplates | OptimizeDeep | OptimizeShallow deriving ( Eq, Show ) diff --git a/src/Darcs/UI/PatchHeader.hs b/src/Darcs/UI/PatchHeader.hs index fc910dac..75a200ff 100644 --- a/src/Darcs/UI/PatchHeader.hs +++ b/src/Darcs/UI/PatchHeader.hs @@ -41,6 +41,7 @@ import Darcs.UI.External ( editFile ) import Darcs.UI.Flags ( getEasyAuthor, promptAuthor, getDate ) import Darcs.UI.Options ( Config, (?) ) import qualified Darcs.UI.Options.All as O +import Darcs.UI.Prompt ( promptYornorq ) import qualified Darcs.UI.SelectChanges as S ( PatchSelectionOptions(..) ) import Darcs.UI.SelectChanges ( askAboutDepends ) @@ -48,7 +49,7 @@ import qualified Darcs.Util.Diff as D ( DiffAlgorithm ) import Darcs.Util.English ( capitalize ) import Darcs.Util.Global ( darcsLastMessage ) import Darcs.Util.Path ( FilePathLike, toFilePath ) -import Darcs.Util.Prompt ( PromptConfig(..), askUser, promptChar, promptYorn ) +import Darcs.Util.Prompt ( PromptConfig(..), askUser, promptChar ) import Darcs.Util.Printer ( Doc, text, ($+$), vcat, prefixLines, renderString ) import qualified Darcs.Util.Ratified as Ratified ( hGetContents ) @@ -184,11 +185,12 @@ getLog m_name has_pipe log_file ask_long m_old chs = is_badname = isJust . just_a_badname - prompt_long_comment oldname = - do let verb = case m_old of Nothing -> "add a"; Just _ -> "edit the" - y <- promptYorn $ "Do you want to "++verb++" long comment?" - if y then get_log_using_editor oldname - else return (oldname, default_log, Nothing) + prompt_long_comment oldname = do + let verb = case m_old of { Nothing -> "add a"; Just _ -> "edit the" } + edit = get_log_using_editor oldname + no_edit = return (oldname, default_log, Nothing) + prompt = "Do you want to " ++ verb ++ " long comment?" 
+ promptYornorq prompt (verb ++ " long comment") edit no_edit get_log_using_editor p = do let logf = darcsLastMessage @@ -253,6 +255,7 @@ data PatchHeaderConfig = PatchHeaderConfig , author :: Maybe String , patchname :: Maybe String , askLongComment :: Maybe O.AskLongComment + , canonizeChanges :: Bool } patchHeaderConfig :: Config -> PatchHeaderConfig @@ -263,6 +266,7 @@ patchHeaderConfig cfg = PatchHeaderConfig , author = O.author ? cfg , patchname = O.patchname ? cfg , askLongComment = O.askLongComment ? cfg + , canonizeChanges = O.canonize ? cfg } -- | Update the metadata for a patch. @@ -283,7 +287,8 @@ updatePatchHeader :: forall p wX wY wZ . (RepoPatch p, ApplyState p ~ Tree) -> HijackT IO (Maybe String, PatchInfoAnd p wX wZ) updatePatchHeader verb ask_deps pSelOpts PatchHeaderConfig{..} oldp chs = do - let newchs = canonizeFL diffAlgorithm (patchcontents oldp +>+ chs) + let maybeCanonize = if canonizeChanges then canonizeFL diffAlgorithm else id + let newchs = maybeCanonize (patchcontents oldp +>+ chs) let old_pdeps = getdeps oldp newdeps <- diff --git a/src/Darcs/UI/Prompt.hs b/src/Darcs/UI/Prompt.hs index b1370b05..ec7002f3 100644 --- a/src/Darcs/UI/Prompt.hs +++ b/src/Darcs/UI/Prompt.hs @@ -3,10 +3,12 @@ module Darcs.UI.Prompt ( PromptChoice(..) , PromptConfig(..) , runPrompt + , promptYornorq ) where import Darcs.Prelude import Data.List ( find, intercalate ) +import System.Exit ( exitSuccess ) import qualified Darcs.Util.Prompt as P data PromptChoice a = PromptChoice @@ -24,18 +26,23 @@ data PromptConfig a = PromptConfig } -- | Generate the help string from a verb and list of choice groups -helpFor :: String -> [[PromptChoice a]] -> String -helpFor jn choices = +helpFor :: String -> [[PromptChoice a]] -> Maybe Char -> String +helpFor jn choices def = unlines $ [ "How to use " ++ jn ++ ":" ] ++ intercalate [""] (map (map help . filter pcWhen) choices) ++ [ "" , "?: show this help" - , "" - , ": accept the current default (which is capitalized)" - ] + ] ++ defaultHelp where help i = pcKey i : (": " ++ pcHelp i) + defaultHelp = + case def of + Nothing -> [] + Just _ -> + [ "" + , ": accept the current default (which is capitalized)" + ] lookupAction :: Char -> [PromptChoice a] -> Maybe (IO a) lookupAction key choices = pcAction <$> find ((==key).pcKey) choices @@ -48,4 +55,17 @@ runPrompt pcfg@PromptConfig{..} = do P.PromptConfig pPrompt (map pcKey choices) [] Nothing "?h" case lookupAction key choices of Just action -> action - Nothing -> putStrLn (helpFor pVerb pChoices) >> runPrompt pcfg + Nothing -> putStrLn (helpFor pVerb pChoices pDefault) >> runPrompt pcfg + +-- | Prompt the user for a yes or no or cancel +promptYornorq :: String -> String -> IO a -> IO a -> IO a +promptYornorq prompt verb yes no = + runPrompt (PromptConfig prompt verb choices Nothing) + where + quit = putStrLn "Command cancelled." 
>> exitSuccess + choices = + [ [ PromptChoice 'y' True yes ("yes, do " ++ verb) + , PromptChoice 'n' True no ("no, don't " ++ verb) + , PromptChoice 'q' True quit "quit (cancel command)" + ] + ] diff --git a/src/Darcs/UI/SelectChanges.hs b/src/Darcs/UI/SelectChanges.hs index e2c73b53..b6a18975 100644 --- a/src/Darcs/UI/SelectChanges.hs +++ b/src/Darcs/UI/SelectChanges.hs @@ -393,7 +393,7 @@ runInvertibleSelection ps psc = runReaderT (selection ps) psc where {- end of runInvertibleSelection -} -- | The equivalent of 'runSelection' for the @darcs log@ command -viewChanges :: (ShowPatch p, ShowContextPatch p, ApplyState p ~ Tree) +viewChanges :: (ShowPatch p, ShowContextPatch p) => PatchSelectionOptions -> [Sealed2 p] -> IO () viewChanges ps_opts = textView ps_opts Nothing 0 [] @@ -422,7 +422,7 @@ keysFor = concatMap (map kp) -- | The function for selecting a patch to amend record. Read at your own risks. withSelectedPatchFromList - :: (Commute p, Matchable p, ShowPatch p, ShowContextPatch p, ApplyState p ~ Tree) + :: (Commute p, Matchable p, ShowPatch p, ShowContextPatch p) => String -- name of calling command (always "amend" as of now) -> RL p wX wY -> PatchSelectionOptions @@ -446,7 +446,7 @@ data WithSkipped p wX wY = WithSkipped -- patches, including pending and also that the skipped sequences has an -- ending context that matches the recorded state, z, of the repository. wspfr :: forall p wX wY wZ. - (Commute p, Matchable p, ShowPatch p, ShowContextPatch p, ApplyState p ~ Tree) + (Commute p, Matchable p, ShowPatch p, ShowContextPatch p) => String -> (forall wA wB . p wA wB -> Bool) -> RL p wX wY @@ -537,7 +537,7 @@ initialSelectionState lps pcs = -- | The actual interactive selection process. textSelect :: ( Commute p, Invert p, ShowPatch p, ShowContextPatch p - , PatchInspect p, ApplyState p ~ Tree ) + , PatchInspect p ) => FL (LabelledPatch p) wX wY -> PatchChoices p wX wY -> PatchSelectionM p IO (PatchChoices p wX wY) @@ -551,7 +551,7 @@ textSelect lps' pcs = unless (rightmost z) $ textSelect' textSelect' :: ( Commute p, Invert p, ShowPatch p, ShowContextPatch p - , PatchInspect p, ApplyState p ~ Tree ) + , PatchInspect p ) => InteractiveSelectionM p wX wY () textSelect' = do z <- gets lps @@ -904,7 +904,7 @@ printCurrent = do liftIO $ printFriendly (verbosity o) (withSummary o) $ unLabel lp -- | The interactive part of @darcs changes@ -textView :: (ShowPatch p, ShowContextPatch p, ApplyState p ~ Tree) +textView :: (ShowPatch p, ShowContextPatch p) => PatchSelectionOptions -> Maybe Int -> Int -> [Sealed2 p] -> [Sealed2 p] -> IO () diff --git a/src/Darcs/Util/Lock.hs b/src/Darcs/Util/Lock.hs index 850d6b77..3264cf22 100644 --- a/src/Darcs/Util/Lock.hs +++ b/src/Darcs/Util/Lock.hs @@ -26,8 +26,6 @@ module Darcs.Util.Lock , writeBinFile , writeTextFile , writeDocBinFile - , appendBinFile - , appendTextFile , appendDocBinFile , readBinFile , readTextFile @@ -304,12 +302,6 @@ readDocBinFile :: FilePathLike p => p -> IO Doc readDocBinFile fp = do ps <- B.readFile $ toFilePath fp return $ if B.null ps then empty else packedString ps -appendBinFile :: FilePathLike p => p -> B.ByteString -> IO () -appendBinFile f s = appendToFile Binary f $ \h -> B.hPut h s - -appendTextFile :: FilePathLike p => p -> String -> IO () -appendTextFile f s = appendToFile Text f $ \h -> hPutStr h s - appendDocBinFile :: FilePathLike p => p -> Doc -> IO () appendDocBinFile f d = appendToFile Binary f $ \h -> hPutDoc h d diff --git a/src/Darcs/Util/Path.hs b/src/Darcs/Util/Path.hs index e321e421..e6b6df3e 100644 --- 
a/src/Darcs/Util/Path.hs +++ b/src/Darcs/Util/Path.hs @@ -59,7 +59,6 @@ module Darcs.Util.Path -- anchored at a certain root (this is usually the Tree root). They are -- represented by a list of Names (these are just strict bytestrings). , Name - , name2fp , makeName , rawMakeName , eqAnycase @@ -92,8 +91,10 @@ import Darcs.Util.ByteString ( decodeLocale, encodeLocale ) import Data.Binary import qualified Data.ByteString as B import qualified Data.ByteString.Char8 as BC +import qualified Data.ByteString.Short as BS import Data.Char ( chr, isSpace, ord, toLower ) import Data.List ( inits, isPrefixOf, isSuffixOf, stripPrefix ) +import GHC.Base ( unsafeChr ) import GHC.Stack ( HasCallStack ) import qualified System.Directory ( setCurrentDirectory ) import System.Directory ( doesDirectoryExist, doesPathExist ) @@ -124,35 +125,33 @@ displayPath p realPath :: AnchoredPath -> FilePath realPath = anchorPath "" --- | 'encodeWhite' translates whitespace in filenames to a darcs-specific --- format (numerical representation according to 'ord' surrounded by --- backslashes). Note that backslashes are also escaped since they are used --- in the encoding. +-- | Encode whitespace and backslashes in filenames to a darcs-specific +-- format (numerical representation according to 'ord' surrounded by +-- backslashes). -- -- > encodeWhite "hello there" == "hello\32\there" -- > encodeWhite "hello\there" == "hello\92\there" encodeWhite :: FilePath -> String -encodeWhite (c:cs) | isSpace c || c == '\\' = - '\\' : show (ord c) ++ "\\" ++ encodeWhite cs -encodeWhite (c:cs) = c : encodeWhite cs -encodeWhite [] = [] +encodeWhite = foldr encodesWhiteChar [] where + encodesWhiteChar c acc + | isSpace c || c == '\\' = '\\' : show (ord c) ++ '\\' : acc + | otherwise = c : acc --- | 'decodeWhite' interprets the Darcs-specific \"encoded\" filenames --- produced by 'encodeWhite' +-- | Decode filenames from the darcs-specific encoding produced by +-- 'encodeWhite'. -- -- > decodeWhite "hello\32\there" == Right "hello there" -- > decodeWhite "hello\92\there" == Right "hello\there" -- > decodeWhite "hello\there" == Left "malformed filename" decodeWhite :: String -> Either String FilePath -decodeWhite cs_ = go cs_ [] False - where go "" acc True = Right (reverse acc) -- if there was a replace, use new string - go "" _ False = Right cs_ -- if not, use input string - go ('\\':cs) acc _ = - case break (=='\\') cs of - (theord, '\\':rest) -> - go rest (chr (read theord) :acc) True - _ -> Left $ "malformed filename: " ++ cs_ - go (c:cs) acc modified = go cs (c:acc) modified +decodeWhite s = go s where + go [] = return [] + go (c:cs) + | c == '\\' = + case break (== '\\') cs of + (theord, '\\':rest) -> (chr (read theord) :) <$> go rest + _ -> Left $ "malformed filename: " ++ s + | otherwise = (c :) <$> go cs class FilePathOrURL a where toPath :: a -> String @@ -384,7 +383,14 @@ getUniquePathName talkative buildMsg buildName = go (-1) -- AnchoredPath utilities -- -newtype Name = Name { unName :: B.ByteString } deriving (Binary, Eq, Show, Ord) +-- The type of file and directory names that can appear as entries inside a +-- directory. Must not be empty, ".", or "..", and must not contain path +-- separators. Also must be a valid (relative) file path on the native platform +-- (TODO this is currently not checked). +newtype Name = Name BS.ShortByteString deriving (Binary, Eq, Ord, Show) + +fromName :: Name -> B.ByteString +fromName (Name s) = BS.fromShort s -- | This is a type of "sane" file paths. 
These are always canonic in the sense -- that there are no stray slashes, no ".." components and similar. They are @@ -425,13 +431,10 @@ anchorPath :: FilePath -> AnchoredPath -> FilePath anchorPath dir p = dir FilePath. decodeLocale (flatten p) {-# INLINE anchorPath #-} -name2fp :: Name -> FilePath -name2fp (Name ps) = decodeLocale ps - -- FIXME returning "." for the root is wrong flatten :: AnchoredPath -> BC.ByteString flatten (AnchoredPath []) = BC.singleton '.' -flatten (AnchoredPath p) = BC.intercalate (BC.singleton '/') [n | (Name n) <- p] +flatten (AnchoredPath p) = BC.intercalate (BC.singleton '/') (map fromName p) -- | Make a 'Name' from a 'String'. May fail if the input 'String' -- is invalid, that is, "", ".", "..", or contains a '/'. @@ -479,7 +482,7 @@ rawMakeName :: B.ByteString -> Either String Name rawMakeName s | isBadName s = Left $ "'"++decodeLocale s++"' is not a valid AnchoredPath component name" - | otherwise = Right (Name s) + | otherwise = Right (Name (BS.toShort s)) isBadName :: B.ByteString -> Bool isBadName n = hasPathSeparator n || n `elem` forbiddenNames @@ -491,13 +494,16 @@ forbiddenNames :: [B.ByteString] forbiddenNames = [BC.empty, BC.pack ".", BC.pack ".."] hasPathSeparator :: B.ByteString -> Bool -hasPathSeparator = BC.elem '/' +hasPathSeparator x = any (`BC.elem` x) NativeFilePath.pathSeparators eqAnycase :: Name -> Name -> Bool -eqAnycase (Name a) (Name b) = BC.map toLower a == BC.map toLower b +eqAnycase (Name a) (Name b) = BS.map to_lower a == BS.map to_lower b + where + to_lower :: Word8 -> Word8 + to_lower = fromIntegral . ord . toLower . unsafeChr . fromIntegral encodeWhiteName :: Name -> B.ByteString -encodeWhiteName = encodeLocale . encodeWhite . decodeLocale . unName +encodeWhiteName = encodeLocale . encodeWhite . decodeLocale . fromName decodeWhiteName :: B.ByteString -> Either String Name decodeWhiteName = diff --git a/src/Darcs/Util/Printer.hs b/src/Darcs/Util/Printer.hs index ff7f18d4..5ac09ab5 100644 --- a/src/Darcs/Util/Printer.hs +++ b/src/Darcs/Util/Printer.hs @@ -9,9 +9,8 @@ module Darcs.Util.Printer -- * 'Doc' type and structural combinators Doc(Doc,unDoc) , empty, (<>), (), (<+>), ($$), ($+$), vcat, vsep, hcat, hsep - , minus, newline, plus, space, backslash, lparen, rparen - , parens, sentence -- * Constructing 'Doc's + , newline , text , hiddenText , invisibleText @@ -22,9 +21,9 @@ module Darcs.Util.Printer , userchunk, packedString , prefix , hiddenPrefix - , insertBeforeLastline , prefixLines , invisiblePS, userchunkPS + , fromXml -- * Rendering to 'String' , renderString, renderStringWith -- * Rendering to 'ByteString' @@ -37,8 +36,6 @@ module Darcs.Util.Printer , simplePrinters, invisiblePrinter, simplePrinter -- * Printables , Printable(..) - , doc - , printable, invisiblePrintable, hiddenPrintable, userchunkPrintable -- * Constructing colored 'Doc's , Color(..) , blueText, redText, greenText, magentaText, cyanText @@ -60,6 +57,7 @@ import Data.String ( IsString(..) 
) import System.IO ( Handle, stdout ) import qualified Data.ByteString as B ( ByteString, hPut, concat ) import qualified Data.ByteString.Char8 as BC ( singleton ) +import qualified Text.XML.Light as XML import Darcs.Util.ByteString ( linesPS, decodeLocale, encodeLocale, gzWriteHandle ) import Darcs.Util.Global ( debugMessage ) @@ -78,42 +76,10 @@ spaceP = Both " " (BC.singleton ' ') newlineP :: Printable newlineP = S "\n" --- | A 'Doc' representing a space (\" \") -space :: Doc -space = unsafeBoth " " (BC.singleton ' ') - -- | A 'Doc' representing a newline newline :: Doc newline = unsafeChar '\n' --- | A 'Doc' representing a \"-\" -minus :: Doc -minus = unsafeBoth "-" (BC.singleton '-') - --- | A 'Doc' representing a \"+\" -plus :: Doc -plus = unsafeBoth "+" (BC.singleton '+') - --- | A 'Doc' representing a \"\\\" -backslash :: Doc -backslash = unsafeBoth "\\" (BC.singleton '\\') - --- | A 'Doc' that represents @\"(\"@ -lparen :: Doc -lparen = unsafeBoth "(" (BC.singleton '(') - --- | A 'Doc' that represents @\")\"@ -rparen :: Doc -rparen = unsafeBoth ")" (BC.singleton ')') - --- | prop> parens d = lparen <> d <> rparen -parens :: Doc -> Doc -parens d = lparen <> d <> rparen - --- | Turn a 'Doc' into a sentence. This appends a ".". -sentence :: Doc -> Doc -sentence = (<> text ".") - -- | Format a list of 'FilePath's as quoted text. It deliberately refuses to -- use English.andClauses but rather separates the quoted strings only with a -- space, because this makes it usable for copy and paste e.g. as arguments to @@ -274,15 +240,6 @@ prefixLines :: Doc -> Doc -> Doc prefixLines prefixer prefixee = vcat $ map (prefixer <+>) $ map packedString $ linesPS $ renderPS prefixee --- TODO try to find another way to do this, it's rather a violation --- of the Doc abstraction -insertBeforeLastline :: Doc -> Doc -> Doc -insertBeforeLastline a b = - case reverse $ map packedString $ linesPS $ renderPS a of - (ll:ls) -> vcat (reverse ls) $$ b $$ ll - [] -> - error "empty Doc given as first argument of Printer.insert_before_last_line" - lineColor :: Color -> Doc -> Doc lineColor c d = Doc $ \st -> case lineColorT (printers st) c d of Doc d' -> d' st @@ -534,3 +491,6 @@ quoted s = text "\"" <> text (escape s) <> text "\"" escape (c:cs) = if c `elem` ['\\', '"'] then '\\' : c : escape cs else c : escape cs + +fromXml :: XML.Element -> Doc +fromXml = text . XML.ppElement diff --git a/src/Darcs/Util/Ssh.hs b/src/Darcs/Util/Ssh.hs index 9d57fac4..3a6388d8 100644 --- a/src/Darcs/Util/Ssh.hs +++ b/src/Darcs/Util/Ssh.hs @@ -255,12 +255,13 @@ grabSSH src c = do copySSH :: String -> SshFilePath -> FilePath -> IO () copySSH rdarcs src dest = do debugMessage $ "copySSH file: " ++ sshFilePathOf src - -- TODO why do we disable progress reporting here? 
- withoutProgress $ do - mc <- getSshConnection rdarcs src - case mc of - Just v -> withMVar v (grabSSH src >=> B.writeFile dest) - Nothing -> do + mc <- getSshConnection rdarcs src + case mc of + Just v -> withMVar v (grabSSH src >=> B.writeFile dest) + Nothing -> + -- disable progress reporting because child inherits stdout + -- TODO check if we can avoid that + withoutProgress $ do -- remote 'darcs transfer-mode' does not work => use scp let u = escape_dollar $ sshFilePathOf src (scpcmd, args) <- getSSH SCP diff --git a/src/Darcs/Util/Tree/Hashed.hs b/src/Darcs/Util/Tree/Hashed.hs index b1f14fe8..893a398e 100644 --- a/src/Darcs/Util/Tree/Hashed.hs +++ b/src/Darcs/Util/Tree/Hashed.hs @@ -47,7 +47,7 @@ import Darcs.Util.Hash ) import Darcs.Util.Parser import Darcs.Util.Path ( Name, decodeWhiteName, encodeWhiteName ) -import Darcs.Util.Progress ( debugMessage, finishedOneIO, withSizedProgress ) +import Darcs.Util.Progress ( debugMessage ) import Darcs.Util.Tree ( Blob(..) , ItemType(..) @@ -197,14 +197,13 @@ writeDarcsHashed tree' cache = do debugMessage "writeDarcsHashed" t <- darcsUpdateDirHashes <$> expand tree' let items = list t - withSizedProgress "Getting pristine" (length items) $ \k -> do - sequence_ [readAndWriteBlob k b | (_, File b) <- items] - let dirs = darcsFormatDir t : [darcsFormatDir d | (_, SubTree d) <- items] - mapM_ (dump k) dirs + sequence_ [readAndWriteBlob b | (_, File b) <- items] + let dirs = darcsFormatDir t : [darcsFormatDir d | (_, SubTree d) <- items] + mapM_ dump dirs return (fromHash (darcsTreeHash t)) where - readAndWriteBlob k b = readBlob b >>= dump k - dump k x = fsCreateHashedFile cache x >>= finishedOneIO k . encodeValidHash + readAndWriteBlob b = readBlob b >>= dump + dump x = fsCreateHashedFile cache x -- | Create a hashed file from a 'Cache' and file content. In case the file -- exists it is kept untouched and is assumed to have the right content. diff --git a/src/Darcs/Util/Tree/Monad.hs b/src/Darcs/Util/Tree/Monad.hs index b2b33b22..ff44584a 100644 --- a/src/Darcs/Util/Tree/Monad.hs +++ b/src/Darcs/Util/Tree/Monad.hs @@ -39,7 +39,13 @@ module Darcs.Util.Tree.Monad import Darcs.Prelude hiding ( readFile, writeFile ) -import Darcs.Util.Path ( AnchoredPath, anchoredRoot, displayPath, movedirfilename ) +import Darcs.Util.Path + ( AnchoredPath + , anchoredRoot + , displayPath + , isPrefix + , movedirfilename + ) import Darcs.Util.Tree import Data.List( sortBy ) @@ -269,6 +275,9 @@ rename from to = do unless (isNothing found_to) $ throwM $ mkIOError AlreadyExists "rename" Nothing (Just (displayPath to)) + when (isPrefix from to) $ + throwM $ + mkIOError InvalidArgument "rename" Nothing (Just (displayPath to)) modifyItem from Nothing modifyItem to item renameChanged from to diff --git a/tests/clone.sh b/tests/clone.sh index 9544ec3d..f5cde343 100755 --- a/tests/clone.sh +++ b/tests/clone.sh @@ -94,7 +94,7 @@ darcs init temp1 cd temp1 echo first > a darcs record -lam 'first' -firsthash=`darcs log --xml | grep 'hash=' | sed -e "s/.*hash='//" -e "s/'>//"` +firsthash=$(darcs log --xml | grep 'hash=' | sed -E -e 's/.*hash="([^"]+)".*/\1/') echo second > b darcs record -lam 'second' cd .. diff --git a/tests/conflict-fight.sh b/tests/conflict-fight.sh index 9f4c4b95..ed0546b3 100755 --- a/tests/conflict-fight.sh +++ b/tests/conflict-fight.sh @@ -2,6 +2,9 @@ . 
./lib +# this crashes nowadays with darcs-2 in step 9 +skip-formats darcs-2 + # step 1 mkdir temp0 cd temp0 diff --git a/tests/failing-issue1241-rollback-with-file-beyond-tag.sh b/tests/failing-issue1241-rollback-with-file-beyond-tag.sh new file mode 100755 index 00000000..d59afea1 --- /dev/null +++ b/tests/failing-issue1241-rollback-with-file-beyond-tag.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +. lib + +rm -rf R +darcs init R +cd R +echo one > f +darcs record -lam one +darcs tag tag +# this works as expected: +darcs rollback -a --matches 'touch f' >&2 +darcs whatsnew >&2 +darcs revert -a +# but this says 'No patches selected': +darcs rollback -a f >&2 +darcs whatsnew >&2 +cd .. diff --git a/tests/failing-issue1702-optimize-relink-vs-cache.sh b/tests/failing-issue1702-optimize-relink-vs-cache.sh index d184915f..b57cde44 100755 --- a/tests/failing-issue1702-optimize-relink-vs-cache.sh +++ b/tests/failing-issue1702-optimize-relink-vs-cache.sh @@ -60,7 +60,7 @@ fi inR=(R/_darcs/patches/*-*) inS=(S/_darcs/patches/*-*) patch=$(basename $inR) -inC=$(find $HOME/.cache/darcs/patches -name $patch) +inC=$(find $DARCS_CACHE_DIR/patches -name $patch) ## Confirm that all three are hard linked. same_inode $inR $inS diff --git a/tests/failing-issue2729-index-corner-case.sh b/tests/failing-issue2729-index-corner-case.sh new file mode 100644 index 00000000..4f0c7322 --- /dev/null +++ b/tests/failing-issue2729-index-corner-case.sh @@ -0,0 +1,14 @@ +. ./lib + +# the epoch we use below is Unix specific +abort_windows + +rm -rf R +darcs init R +cd R + +# create a file with zero size and timestamp +touch -d "1970-01-01 00:00:00 UTC" f +darcs record -lam 'add f' +# this should not crash darcs: +darcs unrecord -a diff --git a/tests/failing-issue2729-index-corner-case2.sh b/tests/failing-issue2729-index-corner-case2.sh new file mode 100644 index 00000000..7d54979b --- /dev/null +++ b/tests/failing-issue2729-index-corner-case2.sh @@ -0,0 +1,18 @@ +. ./lib + +# the epoch we use below is Unix specific +abort_windows + +rm -rf R +darcs init R +cd R + +touch b +darcs record -lam 'add b' + +# create a file with zero size and timestamp +touch -d "1970-01-01 00:00:00 UTC" f +darcs add f +rm _darcs/index +# this should not crash darcs: +darcs whatsnew -s diff --git a/tests/issue1210-no-global-cache-in-sources.sh b/tests/issue1210-no-global-cache-in-sources.sh index 6339bd6d..094bc592 100755 --- a/tests/issue1210-no-global-cache-in-sources.sh +++ b/tests/issue1210-no-global-cache-in-sources.sh @@ -25,10 +25,8 @@ . ./lib -cacheDir=$HOME/.cache/darcs - rm -rf R S darcs init --repo R darcs get R S -not grep "$cacheDir" S/_darcs/prefs/sources +not grep "$DARCS_CACHE_DIR" S/_darcs/prefs/sources not grep "cache:" S/_darcs/prefs/sources diff --git a/tests/issue2136-log_created_as_for_multiple_files.sh b/tests/issue2136-log_created_as_for_multiple_files.sh index 40f4fd2d..a3ef6d4e 100755 --- a/tests/issue2136-log_created_as_for_multiple_files.sh +++ b/tests/issue2136-log_created_as_for_multiple_files.sh @@ -69,7 +69,7 @@ xmlLogRev=$(darcs log --reverse --xml tldir/f2 f5 tldir/d2 d5 f6) # xmlLog needs to be quoted everywhere, otherwise this hack to retrieve the # 2 following lines won't work. checkRename () { - echo "$1" | grep "" -C2 | tail -1 | grep "$4" + echo "$1" | grep "" -C2 | tail -1 | grep "$4" } checkInXML () { @@ -83,4 +83,4 @@ checkInXML "$xmlLog" checkInXML "$xmlLogRev" # But don't mention unchanged files. 
-echo "$xmlLog" | not grep "]*'\./f6'" +echo "$xmlLog" | not grep ']*"\./f6"' diff --git a/tests/issue2333.sh b/tests/issue2333.sh index b1571213..60b2066d 100755 --- a/tests/issue2333.sh +++ b/tests/issue2333.sh @@ -3,8 +3,6 @@ . lib # Load some portability helpers. -require_ghc 706 - # work around issue2720 (MacOS) if test -x /usr/bin/security; then ln -s /usr/bin/security . diff --git a/tests/issue2378-moving-directory-to-file.sh b/tests/issue2378-moving-directory-to-file.sh old mode 100644 new mode 100755 index 48be97a6..85b35d47 --- a/tests/issue2378-moving-directory-to-file.sh +++ b/tests/issue2378-moving-directory-to-file.sh @@ -1,4 +1,10 @@ -darcs initialize +#!/usr/bin/env bash + +. lib + +rm -rf R +darcs initialize R +cd R mkdir d darcs add d echo sometext > d/f @@ -11,4 +17,5 @@ darcs record -am'removed d' --skip-long-comment darcs move f d darcs record -am'moved f to d' --skip-long-comment darcs obliterate --last=3 --all -darcs whatsnew -l +not darcs whatsnew -l +cd .. diff --git a/tests/issue2380-rename-to-deleted-file.sh b/tests/issue2380-rename-to-deleted-file.sh index 216eb651..35852c00 100755 --- a/tests/issue2380-rename-to-deleted-file.sh +++ b/tests/issue2380-rename-to-deleted-file.sh @@ -22,10 +22,12 @@ ## BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ## ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN ## CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -rm -rf R && darcs init --repo R . lib +rm -rf R +darcs init --repo R + cd R echo foostuff > foo echo otherstuff > other @@ -45,3 +47,4 @@ diff expected actual darcs rev -a [[ -e foo && -e other && $( whoutput2 cat << EOF > expected2 -R ./foo +R ./foo -1 a ./foo/ a ./foo/bar EOF diff --git a/tests/issue2556-apply-fails-for-large-bundle.sh b/tests/issue2556-apply-fails-for-large-bundle.sh new file mode 100755 index 00000000..d5865b93 --- /dev/null +++ b/tests/issue2556-apply-fails-for-large-bundle.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +. lib + +rm -rf R +darcs init R +cd R +dd if=/dev/zero of=large bs=1000000 count=2200 +# check that it does not fail with an IOC exception +# but just because it runs into a parse error +not darcs apply 2>&1 | grep 'not enough input' +cd .. diff --git a/tests/issue2727-resolutions-order-independent1.sh b/tests/issue2727-resolutions-order-independent1.sh new file mode 100755 index 00000000..9fc50d60 --- /dev/null +++ b/tests/issue2727-resolutions-order-independent1.sh @@ -0,0 +1,400 @@ +#!/usr/bin/env bash + +# Manual reconstruction of: +# Named RepoPatchV3: +# using V2.Prim wrapper for Prim.V1: +# resolutions are invariant under reorderings: [Failed] +# *** Failed! (after 67 tests and 30 shrinks): +# [...] +# (used seed -580976364380586561) + +. lib + +# expected output for all reorderings; note that the conflict is fully +# (transitively) covered by the single patch xjdeyyjdyibnmfpszdrp, +# see comment below where we record it +cat >log <./b <log 2>&1 +diff -u ../log log >&2 + +cd .. + +# same patches, different order +# (qgfgwkdlhlwkrlvqomlv comes before xzjgiiwvaanwuomlhnri) +darcs init R2 +cd R2 +darcs pull ../R1 -a -p qgfgwkdlhlwkrlvqomlv +# pull the rest +darcs pull ../R1 -a --allow-conflicts +darcs mark-conflicts >log 2>&1 +diff -u ../log log >&2 +cd .. 
+ +# more reorderings +rm -rf R3 R4 +darcs init R3 +darcs init R4 +cd R3 +darcs pull ../R1 -a -p xzjgiiwvaanwuomlhnri +darcs pull ../R1 -a --allow-conflicts -p kwkmkvebzoazwwreddfi +darcs pull ../R1 -a -p wrszpgidbqmixqwgtkck +darcs pull ../R1 -a -p qgfgwkdlhlwkrlvqomlv +darcs pull ../R1 -a -p xjdeyyjdyibnmfpszdrp +darcs mark-conflicts >log 2>&1 +diff -u ../log log >&2 +cd .. +cd R4 +darcs pull ../R1 -a -p xzjgiiwvaanwuomlhnri +darcs pull ../R1 -a -p wrszpgidbqmixqwgtkck +darcs pull ../R1 -a --allow-conflicts -p kwkmkvebzoazwwreddfi +darcs pull ../R1 -a -p qgfgwkdlhlwkrlvqomlv +darcs pull ../R1 -a -p xjdeyyjdyibnmfpszdrp +darcs mark-conflicts >log 2>&1 +diff -u ../log log >&2 +cd .. + +exit # success + +# Beautified complete output of the failing QC test case + +Named RepoPatchV3: + using V2.Prim wrapper for Prim.V1: + resolutions are invariant under reorderings: [Failed] +*** Failed! (after 67 tests and 30 shrinks): +resolutions differ: r1= + +[ [ Sealed + (Prim + { unPrim = DP (AnchoredPath [ Name { unName = "a" } ]) RmDir } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath + [ Name { unName = "a" } , Name { unName = "HX.txt" } ]) + AddFile + } :>: + NilFL) + ] +] + +r2= + +[] + +for patches + +patch 29cb9bb4b7ddddd6c2d29231af2a24b16d5f7dee +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * kwkmkvebzoazwwreddfi +addfile ./a/HX.txt +hunk ./b 1 ++J m ++b ++z u ++g ++O ++x ++X L ++V ++d y ++j L ++u F ++S ++Q ++x a ++y ++i ++t C ++W ++p +patch bbd7551ea6a300453f31e183e557b8d64862af81 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * qgfgwkdlhlwkrlvqomlv +depend 29cb9bb4b7ddddd6c2d29231af2a24b16d5f7dee + * kwkmkvebzoazwwreddfi +patch 8830f4d5e74044abe99b5293ff8d798d1deefc42 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * xzjgiiwvaanwuomlhnri +conflictor +hash -45 854e976bb830f9a93b65751d86b2a7116204bbee +rmfile ./a/HX.txt +v v v v v v v +hash 45 854e976bb830f9a93b65751d86b2a7116204bbee +addfile ./a/HX.txt +************* +hash 15 2d58936ad26e5da9e486252bf280b638d303b34e +rmdir ./a +^ ^ ^ ^ ^ ^ ^ +patch 2aad7b0472f94fd65f83d37f7802348619251241 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * wrszpgidbqmixqwgtkck +depend 8830f4d5e74044abe99b5293ff8d798d1deefc42 + * xzjgiiwvaanwuomlhnri +patch 6aee31fac33af70f1a90395de0f6785fc91d8ac2 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * xjdeyyjdyibnmfpszdrp +depend bbd7551ea6a300453f31e183e557b8d64862af81 + * qgfgwkdlhlwkrlvqomlv +depend 8830f4d5e74044abe99b5293ff8d798d1deefc42 + * xzjgiiwvaanwuomlhnri + +versus + +patch 29cb9bb4b7ddddd6c2d29231af2a24b16d5f7dee +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * kwkmkvebzoazwwreddfi +addfile ./a/HX.txt +hunk ./b 1 ++J m ++b ++z u ++g ++O ++x ++X L ++V ++d y ++j L ++u F ++S ++Q ++x a ++y ++i ++t C ++W ++p +patch 8830f4d5e74044abe99b5293ff8d798d1deefc42 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * xzjgiiwvaanwuomlhnri +conflictor +hash -45 854e976bb830f9a93b65751d86b2a7116204bbee +rmfile ./a/HX.txt +v v v v v v v +hash 45 854e976bb830f9a93b65751d86b2a7116204bbee +addfile ./a/HX.txt +************* +hash 15 2d58936ad26e5da9e486252bf280b638d303b34e +rmdir ./a +^ ^ ^ ^ ^ ^ ^ +patch bbd7551ea6a300453f31e183e557b8d64862af81 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * qgfgwkdlhlwkrlvqomlv +depend 29cb9bb4b7ddddd6c2d29231af2a24b16d5f7dee + * kwkmkvebzoazwwreddfi +patch 2aad7b0472f94fd65f83d37f7802348619251241 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * wrszpgidbqmixqwgtkck +depend 8830f4d5e74044abe99b5293ff8d798d1deefc42 + * 
xzjgiiwvaanwuomlhnri +patch 6aee31fac33af70f1a90395de0f6785fc91d8ac2 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * xjdeyyjdyibnmfpszdrp +depend bbd7551ea6a300453f31e183e557b8d64862af81 + * qgfgwkdlhlwkrlvqomlv +depend 8830f4d5e74044abe99b5293ff8d798d1deefc42 + * xzjgiiwvaanwuomlhnri + +Sealed2 + (WithStartState2 + (WithNames V1Model [ Dir "a" , File "b" [] ] []) + (SeqMS + (ParMS + (SeqMS + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "kwkmkvebzoazwwreddfi" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 45 854e976bb830f9a93b65751d86b2a7116204bbee) + Prim + { unPrim = + FP + (AnchoredPath + [ Name { unName = "a" } , Name { unName = "HX.txt" } ]) + AddFile + } :>: + (PrimWithName + (PrimPatchId 44 fa5470f2fb48c14457e1f14929bc7913f3487109) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (Hunk + 1 + [] + [ "J m" + , "b" + , "z u" + , "g" + , "O" + , "x" + , "X L" + , "V" + , "d y" + , "j L" + , "u F" + , "S" + , "Q" + , "x a" + , "y" + , "i" + , "t C" + , "W" + , "p" + ]) + } :>: + NilFL)))) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "qgfgwkdlhlwkrlvqomlv" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [ PatchInfo + { _piDate = "20240606010532" + , _piName = "kwkmkvebzoazwwreddfi" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + ] + NilFL)) + (SeqMS + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "xzjgiiwvaanwuomlhnri" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 15 2d58936ad26e5da9e486252bf280b638d303b34e) + Prim + { unPrim = DP (AnchoredPath [ Name { unName = "a" } ]) RmDir } :>: + NilFL))) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "wrszpgidbqmixqwgtkck" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [ PatchInfo + { _piDate = "20240606010532" + , _piName = "xzjgiiwvaanwuomlhnri" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + ] + NilFL))) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "xjdeyyjdyibnmfpszdrp" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [ PatchInfo + { _piDate = "20240606010532" + , _piName = "qgfgwkdlhlwkrlvqomlv" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + , PatchInfo + { _piDate = "20240606010532" + , _piName = "xzjgiiwvaanwuomlhnri" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + ] + NilFL))) + +(used seed -580976364380586561) diff --git a/tests/issue2727-resolutions-order-independent10.sh b/tests/issue2727-resolutions-order-independent10.sh new file mode 100755 index 00000000..80ad21ce --- /dev/null +++ b/tests/issue2727-resolutions-order-independent10.sh @@ -0,0 +1,538 @@ +#!/usr/bin/env bash + +. lib + +# With darcs-1 this test fails due to commutation bugs; it would crash had +# we not made the algorithm more tolerant wrt such bugs. +# With darcs-2 the final diff fails because (1) conflicting alternatives +# aren't sorted and (2) it reports one alternative with an inverse pair of +# hunk 'ztsvuthkcyyrmwfyytjn 1' mixed in. The latter would be eliminated when +# we actually created markup which we can't because of the replace that's +# part of the conflict. 
+skip-formats darcs-1 darcs-2 + +rm -rf B +darcs init B +cd B +cat >a <a <a <a <a <log 2>&1 +grep -i 'no conflicts' log +grep -vi 'cannot mark' log +cd .. + +rm -rf D +darcs clone B D +cd D +darcs replace f M a +darcs record -am vqqtzxvvzjgddgwlezwj +cd .. + +rm -rf R1 +darcs clone B R1 +cd R1 +darcs pull -a --allow-conflicts ../B1 -p ztsvuthkcyyrmwfyytjn +darcs pull -a --allow-conflicts ../B2 -p ntqrzqeyfiehhlogtsuc +darcs pull -a --allow-conflicts ../C -p otkvniorrncosojszsxh +darcs pull -a --allow-conflicts ../D -p vqqtzxvvzjgddgwlezwj +# with darcs-1 format we don't even get here because the previous +# line crashes darcs +darcs mark-conflicts >log 2>&1 +not darcs whatsnew >> log +cd .. + +rm -rf R2 +darcs clone B R2 +cd R2 +darcs pull -a --allow-conflicts ../B2 -p ntqrzqeyfiehhlogtsuc +darcs pull -a --allow-conflicts ../B1 -p ztsvuthkcyyrmwfyytjn +darcs pull -a --allow-conflicts ../C -p otkvniorrncosojszsxh +darcs pull -a --allow-conflicts ../D -p vqqtzxvvzjgddgwlezwj +darcs mark-conflicts >log 2>&1 +not darcs whatsnew >> log +cd .. + +diff -u R1/log R2/log >&2 + +exit + +Named RepoPatchV3: + using V2.Prim wrapper for Prim.V1: + resolutions are invariant under reorderings: [Failed] +*** Failed! (after 53391 tests and 30 shrinks, -q=100000): +resolutions differ: r1= + +[ [ Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "" ] [ "h" , "p M" , "i d" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) (Hunk 2 [ "" ] [ "A T" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (TokReplace "A-Za-z_0-9" "y" "T") + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [] [ "B" , "I" , "c m" , "u" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (TokReplace "A-Za-z_0-9" "f" "M") + } :>: + NilFL) + ] +] + +r2= + +[ [ Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "" ] [ "h" , "p M" , "i d" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [] [ "B" , "I" , "c m" , "u" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (TokReplace "A-Za-z_0-9" "f" "M") + } :>: + NilFL) + ] +, [ Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) (Hunk 2 [ "" ] [ "A T" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (TokReplace "A-Za-z_0-9" "y" "T") + } :>: + NilFL) + ] +] + +for context + +and patches + +patch f7691d00b9c98171d604308f8702123ff03e8b3b +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * ztsvuthkcyyrmwfyytjn +hash 15 845f46746f8daf575a42e4bde5cb9d65274e1d98 +hunk ./a 1 +- ++h ++p M ++i d +hash 19 4627349ce0608c332decd5fd80541a3f026cc576 +hunk ./a 4 +- ++A T +patch 17db37c2d9280b950359870756b09020861ae4c8 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * ntqrzqeyfiehhlogtsuc +conflictor +hash -19 4627349ce0608c332decd5fd80541a3f026cc576 +hunk ./a 4 +-A T ++ +v v v v v v v +hash 19 4627349ce0608c332decd5fd80541a3f026cc576 +hunk ./a 4 +- ++A T +************* +hash 31 80a23b42177d6d019627de006f92129d6f950654 +replace ./a [A-Za-z_0-9] y T +^ ^ ^ ^ ^ ^ ^ +conflictor +hash -15 845f46746f8daf575a42e4bde5cb9d65274e1d98 +hunk ./a 1 +-h +-p M +-i d ++ +v v v v v v v +hash 15 845f46746f8daf575a42e4bde5cb9d65274e1d98 +hunk ./a 1 +- ++h ++p M 
++i d +************* +hash 19 4627349ce0608c332decd5fd80541a3f026cc576 +hunk ./a 2 +- ++A T +************* +hash 66 e9d7ccc116faef824262571d15e15b09dcb68ff9 +hunk ./a 1 +- +- ++Z h ++W i +^ ^ ^ ^ ^ ^ ^ +patch 1123170e60fa9dae9cceb196169022ebaf07fb21 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * otkvniorrncosojszsxh +depend f7691d00b9c98171d604308f8702123ff03e8b3b + * ztsvuthkcyyrmwfyytjn +depend 17db37c2d9280b950359870756b09020861ae4c8 + * ntqrzqeyfiehhlogtsuc +hash 33 ca5c07a3d8e20ca553cb6b83d5c2a2df0bd5e34e +hunk ./a 1 ++B ++I ++c m ++u +patch 2a27683b9b6cc747116eb46c83e98d3a8be44102 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * vqqtzxvvzjgddgwlezwj +conflictor +hash -33 ca5c07a3d8e20ca553cb6b83d5c2a2df0bd5e34e +hunk ./a 1 +-B +-I +-c m +-u +v v v v v v v +hash 15 845f46746f8daf575a42e4bde5cb9d65274e1d98 +hunk ./a 1 +- ++h ++p M ++i d +************* +hash 33 ca5c07a3d8e20ca553cb6b83d5c2a2df0bd5e34e +hunk ./a 1 ++B ++I ++c m ++u +************* +hash 39 7e735b17621d8772e585be32344064c0250455c1 +replace ./a [A-Za-z_0-9] f M +^ ^ ^ ^ ^ ^ ^ + +versus + +for context + +and patches + +patch 17db37c2d9280b950359870756b09020861ae4c8 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * ntqrzqeyfiehhlogtsuc +hash 31 80a23b42177d6d019627de006f92129d6f950654 +replace ./a [A-Za-z_0-9] y T +hash 66 e9d7ccc116faef824262571d15e15b09dcb68ff9 +hunk ./a 1 +- +- ++Z h ++W i +patch f7691d00b9c98171d604308f8702123ff03e8b3b +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * ztsvuthkcyyrmwfyytjn +conflictor +hash -66 e9d7ccc116faef824262571d15e15b09dcb68ff9 +hunk ./a 1 +-Z h +-W i ++ ++ +v v v v v v v +hash 66 e9d7ccc116faef824262571d15e15b09dcb68ff9 +hunk ./a 1 +- +- ++Z h ++W i +************* +hash 15 845f46746f8daf575a42e4bde5cb9d65274e1d98 +hunk ./a 1 +- ++h ++p M ++i d +^ ^ ^ ^ ^ ^ ^ +conflictor +hash -31 80a23b42177d6d019627de006f92129d6f950654 +replace ./a [A-Za-z_0-9] T y +v v v v v v v +hash 31 80a23b42177d6d019627de006f92129d6f950654 +replace ./a [A-Za-z_0-9] y T +************* +hash 66 e9d7ccc116faef824262571d15e15b09dcb68ff9 +hunk ./a 1 +- +- ++Z h ++W i +************* +hash 19 4627349ce0608c332decd5fd80541a3f026cc576 +hunk ./a 2 +- ++A T +^ ^ ^ ^ ^ ^ ^ +patch 1123170e60fa9dae9cceb196169022ebaf07fb21 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * otkvniorrncosojszsxh +depend f7691d00b9c98171d604308f8702123ff03e8b3b + * ztsvuthkcyyrmwfyytjn +depend 17db37c2d9280b950359870756b09020861ae4c8 + * ntqrzqeyfiehhlogtsuc +hash 33 ca5c07a3d8e20ca553cb6b83d5c2a2df0bd5e34e +hunk ./a 1 ++B ++I ++c m ++u +patch 2a27683b9b6cc747116eb46c83e98d3a8be44102 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * vqqtzxvvzjgddgwlezwj +conflictor +hash -33 ca5c07a3d8e20ca553cb6b83d5c2a2df0bd5e34e +hunk ./a 1 +-B +-I +-c m +-u +v v v v v v v +hash 15 845f46746f8daf575a42e4bde5cb9d65274e1d98 +hunk ./a 1 +- ++h ++p M ++i d +************* +hash 33 ca5c07a3d8e20ca553cb6b83d5c2a2df0bd5e34e +hunk ./a 1 ++B ++I ++c m ++u +************* +hash 39 7e735b17621d8772e585be32344064c0250455c1 +replace ./a [A-Za-z_0-9] f M +^ ^ ^ ^ ^ ^ ^ + +Sealed2 + (WithStartState2 + (WithNames V1Model [ File "a" [ "" , "" ] ] []) + (WithSplit + 6 + (ParMS + (SeqMS + (ParMS + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "ztsvuthkcyyrmwfyytjn" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 15 845f46746f8daf575a42e4bde5cb9d65274e1d98) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "" ] 
[ "h" , "p M" , "i d" ]) + } :>: + (PrimWithName + (PrimPatchId 19 4627349ce0608c332decd5fd80541a3f026cc576) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 4 [ "" ] [ "A T" ]) + } :>: + NilFL)))) + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "ntqrzqeyfiehhlogtsuc" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 31 80a23b42177d6d019627de006f92129d6f950654) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (TokReplace "A-Za-z_0-9" "y" "T") + } :>: + (PrimWithName + (PrimPatchId 66 e9d7ccc116faef824262571d15e15b09dcb68ff9) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "" , "" ] [ "Z h" , "W i" ]) + } :>: + NilFL))))) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "otkvniorrncosojszsxh" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [ PatchInfo + { _piDate = "20240606010532" + , _piName = "ztsvuthkcyyrmwfyytjn" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + , PatchInfo + { _piDate = "20240606010532" + , _piName = "ntqrzqeyfiehhlogtsuc" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + ] + (PrimWithName + (PrimPatchId 33 ca5c07a3d8e20ca553cb6b83d5c2a2df0bd5e34e) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [] [ "B" , "I" , "c m" , "u" ]) + } :>: + NilFL))) + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "vqqtzxvvzjgddgwlezwj" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 39 7e735b17621d8772e585be32344064c0250455c1) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (TokReplace "A-Za-z_0-9" "f" "M") + } :>: + NilFL)))))) + +(used seed 1452290259292424930) diff --git a/tests/issue2727-resolutions-order-independent11.sh b/tests/issue2727-resolutions-order-independent11.sh new file mode 100755 index 00000000..c1e63297 --- /dev/null +++ b/tests/issue2727-resolutions-order-independent11.sh @@ -0,0 +1,516 @@ +#!/usr/bin/env bash + +. lib + +rm -rf B +darcs init B +cd B +cat >a <a <a <a <T.txt <&1 | grep -v Backing > log +darcs tag resolved +darcs pull -a --allow-conflicts ../C +darcs mark-conflicts >log 2>&1 +not darcs whatsnew +cd .. + +rm -rf R2 +darcs clone B R2 +cd R2 +# context +#darcs pull -a --allow-conflicts ../C -p gepgjtanefpzuhyxmxcr +darcs pull -a --allow-conflicts ../C -p lgeoexmrdwvgtnlahuxs +darcs pull -a --allow-conflicts ../C -p lizpqmmnboazzdnlbnxf +darcs pull -a --allow-conflicts ../C -p vlstfkqxkukjydejkqgt +# patches +#darcs pull -a --mark-conflicts ../C 2>&1 | grep -v Backing > log +darcs pull ../R1 -a -t resolved +darcs pull -a --allow-conflicts ../C +darcs mark-conflicts >log 2>&1 +not darcs whatsnew +cd .. + +diff -u R1/log R2/log >&2 + +exit + +Named RepoPatchV3: + using V2.Prim wrapper for Prim.V1: + resolutions are invariant under reorderings: [Failed] +*** Failed! 
(after 64289 tests and 11 shrinks): +resolutions differ: r1= + +[] + +r2= + +[ [ Sealed + -- vlstfkqxkukjydejkqgt 2 + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "T.txt" } ]) + (TokReplace "A-Za-z_0-9" "f" "q") + } :>: + NilFL) + , Sealed + -- hgugmytjbqlawubbavvd + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "T.txt" } ]) + (TokReplace "A-Za-z_0-9" "f" "E") + } :>: + NilFL) + ] +] + +for context + +patch 28835563346ea5df54afe26c435563f2d57a4d71 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * vlstfkqxkukjydejkqgt +hash 58 bb9908351ba91c082b051d1e6eb820a742d97fc6 +hunk ./a 2 ++f F +hash 68 196c27fbda15e4f995a60e88a090f249e02c8f15 +replace ./a [A-Za-z_0-9] f E +patch 39fed960a889a4bf748c2722af9982dde0d31488 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * lgeoexmrdwvgtnlahuxs +conflictor +hash -58 bb9908351ba91c082b051d1e6eb820a742d97fc6 +hunk ./a 2 +-E F +v v v v v v v +hash 58 bb9908351ba91c082b051d1e6eb820a742d97fc6 +hunk ./a 2 ++E F +************* +hash 79 6c584e1373537e7ef3dba3a27f9f78ae9be92b42 +hunk ./a 1 +-E ++a ++E R +^ ^ ^ ^ ^ ^ ^ +conflictor +v v v v v v v +hash 58 bb9908351ba91c082b051d1e6eb820a742d97fc6 +hunk ./a 2 ++E F +************* +hash 53 9a12eddf3317e62d87bbae7a31fb17146d50475d +hunk ./a 1 +-E ++U +^ ^ ^ ^ ^ ^ ^ +patch d158c6b4cd13498a4c0af74150dff4f08be7e540 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * gepgjtanefpzuhyxmxcr +patch 54680f79d948878ce7a2407389af93bec5f37a51 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * lizpqmmnboazzdnlbnxf +hash 44 758a11bf60f90707efc5092cf8dca0c220b5253b +move ./a ./T.txt +conflictor +v v v v v v v +hash 53 9a12eddf3317e62d87bbae7a31fb17146d50475d +hunk ./T.txt 1 +-E ++U +************* +hash 79 6c584e1373537e7ef3dba3a27f9f78ae9be92b42 +hunk ./T.txt 1 +-E ++a ++E R +************* +hash 33 6f3797b497b06a9addc64552a8a0a557af4dac4b +hunk ./T.txt 1 ++U ++d ++R A ++X +^ ^ ^ ^ ^ ^ ^ + +and patches + +patch fc493018bee29a7830033ac7b84459beea51696d +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * hgugmytjbqlawubbavvd +conflictor +hash -68 196c27fbda15e4f995a60e88a090f249e02c8f15 +replace ./T.txt [A-Za-z_0-9] E f +v v v v v v v +hash 68 196c27fbda15e4f995a60e88a090f249e02c8f15 +replace ./T.txt [A-Za-z_0-9] f E +************* +hash 61 ab53b2276a54cbbd20e268ec12dd8b0ec02df27c +replace ./T.txt [A-Za-z_0-9] f q +^ ^ ^ ^ ^ ^ ^ +patch 04d74fdab4eb33b56a779a140f1995b01abee292 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * wkotffbwxvocllnnvdvp +depend 39fed960a889a4bf748c2722af9982dde0d31488 + * lgeoexmrdwvgtnlahuxs +depend 28835563346ea5df54afe26c435563f2d57a4d71 + * vlstfkqxkukjydejkqgt +depend fc493018bee29a7830033ac7b84459beea51696d + * hgugmytjbqlawubbavvd + +versus + +for context + +patch d158c6b4cd13498a4c0af74150dff4f08be7e540 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * gepgjtanefpzuhyxmxcr +patch 39fed960a889a4bf748c2722af9982dde0d31488 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * lgeoexmrdwvgtnlahuxs +hash 79 6c584e1373537e7ef3dba3a27f9f78ae9be92b42 +hunk ./a 1 +-f ++a ++f R +hash 53 9a12eddf3317e62d87bbae7a31fb17146d50475d +hunk ./a 1 +-a +-f R ++U +patch 54680f79d948878ce7a2407389af93bec5f37a51 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * lizpqmmnboazzdnlbnxf +hash 44 758a11bf60f90707efc5092cf8dca0c220b5253b +move ./a ./T.txt +conflictor +hash -53 9a12eddf3317e62d87bbae7a31fb17146d50475d +hunk ./T.txt 1 +-U ++a ++f R +hash -79 6c584e1373537e7ef3dba3a27f9f78ae9be92b42 +hunk ./T.txt 1 +-a +-f R ++f +v v v v v v v +hash 53 
9a12eddf3317e62d87bbae7a31fb17146d50475d +hunk ./T.txt 1 +-f ++U +************* +hash 79 6c584e1373537e7ef3dba3a27f9f78ae9be92b42 +hunk ./T.txt 1 +-f ++a ++f R +************* +hash 33 6f3797b497b06a9addc64552a8a0a557af4dac4b +hunk ./T.txt 1 ++U ++d ++R A ++X +^ ^ ^ ^ ^ ^ ^ +patch 28835563346ea5df54afe26c435563f2d57a4d71 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * vlstfkqxkukjydejkqgt +conflictor +v v v v v v v +hash 53 9a12eddf3317e62d87bbae7a31fb17146d50475d +hunk ./T.txt 1 +-f ++U +************* +hash 79 6c584e1373537e7ef3dba3a27f9f78ae9be92b42 +hunk ./T.txt 1 +-f ++a ++f R +************* +hash 58 bb9908351ba91c082b051d1e6eb820a742d97fc6 +hunk ./T.txt 2 ++f F +^ ^ ^ ^ ^ ^ ^ +hash 68 196c27fbda15e4f995a60e88a090f249e02c8f15 +replace ./T.txt [A-Za-z_0-9] f E + +and patches + +patch fc493018bee29a7830033ac7b84459beea51696d +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * hgugmytjbqlawubbavvd +conflictor +hash -68 196c27fbda15e4f995a60e88a090f249e02c8f15 +replace ./T.txt [A-Za-z_0-9] E f +v v v v v v v +hash 68 196c27fbda15e4f995a60e88a090f249e02c8f15 +replace ./T.txt [A-Za-z_0-9] f E +************* +hash 61 ab53b2276a54cbbd20e268ec12dd8b0ec02df27c +replace ./T.txt [A-Za-z_0-9] f q +^ ^ ^ ^ ^ ^ ^ +patch 04d74fdab4eb33b56a779a140f1995b01abee292 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * wkotffbwxvocllnnvdvp +depend 39fed960a889a4bf748c2722af9982dde0d31488 + * lgeoexmrdwvgtnlahuxs +depend 28835563346ea5df54afe26c435563f2d57a4d71 + * vlstfkqxkukjydejkqgt +depend fc493018bee29a7830033ac7b84459beea51696d + * hgugmytjbqlawubbavvd + +Sealed2 + (WithStartState2 + (WithNames V1Model [ File "a" [ "f" ] ] []) + (WithSplit + 2 + (SeqMS + (ParMS + (ParMS + (ParMS + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "gepgjtanefpzuhyxmxcr" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + NilFL)) + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "lgeoexmrdwvgtnlahuxs" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 79 6c584e1373537e7ef3dba3a27f9f78ae9be92b42) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "f" ] [ "a" , "f R" ]) + } :>: + (PrimWithName + (PrimPatchId 53 9a12eddf3317e62d87bbae7a31fb17146d50475d) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "a" , "f R" ] [ "U" ]) + } :>: + NilFL))))) + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "vlstfkqxkukjydejkqgt" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 58 bb9908351ba91c082b051d1e6eb820a742d97fc6) + Prim + { unPrim = + FP (AnchoredPath [ Name { unName = "a" } ]) (Hunk 2 [] [ "f F" ]) + } :>: + (PrimWithName + (PrimPatchId 68 196c27fbda15e4f995a60e88a090f249e02c8f15) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (TokReplace "A-Za-z_0-9" "f" "E") + } :>: + NilFL))))) + (ParMS + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "lizpqmmnboazzdnlbnxf" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 44 758a11bf60f90707efc5092cf8dca0c220b5253b) + Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "a" } ]) + (AnchoredPath [ Name { unName = "T.txt" } ]) + } :>: + (PrimWithName + (PrimPatchId 33 6f3797b497b06a9addc64552a8a0a557af4dac4b) + Prim + { unPrim = + FP + 
(AnchoredPath [ Name { unName = "T.txt" } ]) + (Hunk 1 [] [ "U" , "d" , "R A" , "X" ]) + } :>: + NilFL)))) + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "hgugmytjbqlawubbavvd" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 61 ab53b2276a54cbbd20e268ec12dd8b0ec02df27c) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (TokReplace "A-Za-z_0-9" "f" "q") + } :>: + NilFL))))) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "wkotffbwxvocllnnvdvp" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [ PatchInfo + { _piDate = "20240606010532" + , _piName = "lgeoexmrdwvgtnlahuxs" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + , PatchInfo + { _piDate = "20240606010532" + , _piName = "vlstfkqxkukjydejkqgt" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + , PatchInfo + { _piDate = "20240606010532" + , _piName = "hgugmytjbqlawubbavvd" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + ] + NilFL)))) + +(used seed 1083374694840011397) -q=100000 diff --git a/tests/issue2727-resolutions-order-independent2.sh b/tests/issue2727-resolutions-order-independent2.sh new file mode 100755 index 00000000..0bba14b9 --- /dev/null +++ b/tests/issue2727-resolutions-order-independent2.sh @@ -0,0 +1,315 @@ +#!/usr/bin/env bash + +# Slightly simplified manual reconstruction of: +# Named RepoPatchV3: +# using V2.Prim wrapper for Prim.V1: +# resolutions are invariant under reorderings: [Failed] +# *** Failed! (after 99 tests and 40 shrinks): +# [...] +# (used seed -7870230807607679712) + +. lib + +# expected output for all reorderings +cat >log <a <log 2>&1 +diff -u ../log log >&2 +cd .. + +rm -rf R2 +darcs init R2 +cd R2 +darcs pull -a ../R1 -p oqdpurqjnggqiznmdifn +darcs pull -a --allow-conflicts ../R1 +darcs mark-conflicts >log 2>&1 +diff -u ../log log >&2 +cd .. + +exit # success + +# Beautified complete output of the failing QC test case + +Named RepoPatchV3: + using V2.Prim wrapper for Prim.V1: + resolutions are invariant under reorderings: [Failed] +*** Failed! 
(after 99 tests and 40 shrinks): +resolutions differ: r1= + +[] + +r2= + +[ [ Sealed + (Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "a" } ]) + (AnchoredPath [ Name { unName = "R.txt" } ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "a" } ]) + (AnchoredPath [ Name { unName = "g.txt" } ]) + } :>: + NilFL) + ] +] + +for patches + +patch 8d0f0563f8834e222234f837f4b1bb93c377d3b8 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * xbkikpxxbyoohohyxnvo +move ./a ./R.txt +patch 815947a05dc371855fbaadd16235247d0c794c91 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * gbpkzeypshrtfirwnvhl +hunk ./R.txt 1 ++V Y ++J W ++J +conflictor +hash -17 445e2df79b6442cdc97ecff12c439e5bdd7a7aea +move ./R.txt ./a +v v v v v v v +hash 17 445e2df79b6442cdc97ecff12c439e5bdd7a7aea +move ./a ./R.txt +************* +hash 69 6d10ffc887dcb55b698728e78460525cd9896c39 +move ./a ./g.txt +^ ^ ^ ^ ^ ^ ^ +patch a9f36328f99a8a31a178f6fda4a8b4ddf8df6f96 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * oqdpurqjnggqiznmdifn +depend 8d0f0563f8834e222234f837f4b1bb93c377d3b8 + * xbkikpxxbyoohohyxnvo +patch b02015183418696b10c22a57603b004dff50a7ec +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * rbokwwdfgukkjukzunjg +adddir ./BNC +move ./BNC ./V +patch db5de13b37d37bc562426c17ef930afd688982a9 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * argiyzgbxqgskixomnuu +depend 815947a05dc371855fbaadd16235247d0c794c91 + * gbpkzeypshrtfirwnvhl +depend 8d0f0563f8834e222234f837f4b1bb93c377d3b8 + * xbkikpxxbyoohohyxnvo + +versus + +patch 8d0f0563f8834e222234f837f4b1bb93c377d3b8 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * xbkikpxxbyoohohyxnvo +move ./a ./R.txt +patch a9f36328f99a8a31a178f6fda4a8b4ddf8df6f96 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * oqdpurqjnggqiznmdifn +depend 8d0f0563f8834e222234f837f4b1bb93c377d3b8 + * xbkikpxxbyoohohyxnvo +patch 815947a05dc371855fbaadd16235247d0c794c91 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * gbpkzeypshrtfirwnvhl +hunk ./R.txt 1 ++V Y ++J W ++J +conflictor +hash -17 445e2df79b6442cdc97ecff12c439e5bdd7a7aea +move ./R.txt ./a +v v v v v v v +hash 17 445e2df79b6442cdc97ecff12c439e5bdd7a7aea +move ./a ./R.txt +************* +hash 69 6d10ffc887dcb55b698728e78460525cd9896c39 +move ./a ./g.txt +^ ^ ^ ^ ^ ^ ^ +patch b02015183418696b10c22a57603b004dff50a7ec +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * rbokwwdfgukkjukzunjg +adddir ./BNC +move ./BNC ./V +patch db5de13b37d37bc562426c17ef930afd688982a9 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * argiyzgbxqgskixomnuu +depend 815947a05dc371855fbaadd16235247d0c794c91 + * gbpkzeypshrtfirwnvhl +depend 8d0f0563f8834e222234f837f4b1bb93c377d3b8 + * xbkikpxxbyoohohyxnvo + +Sealed2 + (WithStartState2 + (WithNames V1Model [ File "a" [] ] []) + (SeqMS + (SeqMS + (ParMS + (ParMS + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "gbpkzeypshrtfirwnvhl" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 62 5d5373b4231aba09cea3b47754b5f5ece2cfe5a9) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [] [ "V Y" , "J W" , "J" ]) + } :>: + (PrimWithName + (PrimPatchId 69 6d10ffc887dcb55b698728e78460525cd9896c39) + Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "a" } ]) + (AnchoredPath [ Name { unName = "g.txt" } ]) + } :>: + NilFL)))) + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , 
_piName = "rbokwwdfgukkjukzunjg" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 21 2ceccc937280317d079b556c14fabeb372a263be) + Prim + { unPrim = DP (AnchoredPath [ Name { unName = "BNC" } ]) AddDir + } :>: + (PrimWithName + (PrimPatchId 63 cee1499945193e1b5b260a3313f217508b175b82) + Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "BNC" } ]) + (AnchoredPath [ Name { unName = "V" } ]) + } :>: + NilFL))))) + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "xbkikpxxbyoohohyxnvo" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 17 445e2df79b6442cdc97ecff12c439e5bdd7a7aea) + Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "a" } ]) + (AnchoredPath [ Name { unName = "R.txt" } ]) + } :>: + NilFL)))) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "argiyzgbxqgskixomnuu" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [ PatchInfo + { _piDate = "20240606010532" + , _piName = "gbpkzeypshrtfirwnvhl" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + , PatchInfo + { _piDate = "20240606010532" + , _piName = "xbkikpxxbyoohohyxnvo" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + ] + NilFL)) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "oqdpurqjnggqiznmdifn" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [ PatchInfo + { _piDate = "20240606010532" + , _piName = "xbkikpxxbyoohohyxnvo" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + ] + NilFL))) + +(used seed -7870230807607679712) diff --git a/tests/issue2727-resolutions-order-independent3.sh b/tests/issue2727-resolutions-order-independent3.sh new file mode 100755 index 00000000..84ba7bf0 --- /dev/null +++ b/tests/issue2727-resolutions-order-independent3.sh @@ -0,0 +1,393 @@ +#!/usr/bin/env bash + +# Manual reconstruction of +# Named RepoPatchV3: +# using V2.Prim wrapper for Prim.V1: +# resolutions are invariant under reorderings: [Failed] +# *** Failed! (after 2338 tests and 5 shrinks): +# (used seed 5663187684998127060) -q=10000 +# see below for details + +. lib + +# so we always record non-canonized patches +pwd="$PWD" +trap "cp $pwd/defaults $pwd/.darcs/" EXIT +cp .darcs/defaults . +echo ALL no-canonize >> .darcs/defaults + +# expected output for all reorderings +cat >log <a <a <a <a <log 2>&1 +darcs whatsnew >>log +cd .. + +rm -rf R2 +darcs clone B R2 +cd R2 +darcs pull -a --allow-conflicts ../B2 -p lawjerjcdzhyobidfalw +darcs pull -a --allow-conflicts ../B1 -p cyhqaafycrmqgelxxurv +darcs pull -a --allow-conflicts ../B1 -p tyqredtlxtijcwmnpeyw +darcs pull -a --allow-conflicts ../R1 -p vveiqeofcxzrtrbsyxkq +darcs mark-conflicts >log 2>&1 +darcs whatsnew >>log +cd .. + +diff R1/log R2/log + +exit # success + +# Beautified complete output of the failing QC test case + +Named RepoPatchV3: + using V2.Prim wrapper for Prim.V1: + resolutions are invariant under reorderings: [Failed] +*** Failed! 
(after 2338 tests and 5 shrinks): +resolutions differ: r1= + +[ [ Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [] [ "Z" , "p O" ]) + } :>: + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 2 [ "p O" ] [ "u k" ]) + } :>: + NilFL)) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [] [ "p O" , "O" ]) + } :>: + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) (Hunk 1 [ "p O" ] [ "s" ]) + } :>: + NilFL)) + ] +] + +r2= + +[] + +for patches + +patch 9f88ca6fc4cc5bb609273260191bb36d9743cd75 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * cyhqaafycrmqgelxxurv +hunk ./a 1 ++G ++p t +replace ./a [A-Za-z_0-9] t O +patch 324a26cca1536a76b5b279d1fc6d46e78b81ca6a +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * lawjerjcdzhyobidfalw +conflictor +hash -20 ef9bd40f01457f56f489b06b621ddc913172c77b +hunk ./a 1 +-G +-p O +v v v v v v v +hash 20 ef9bd40f01457f56f489b06b621ddc913172c77b +hunk ./a 1 ++G ++p O +************* +hash 11 b7bb145a8f75b82ab932df5a9e1e5c4c95fb89c9 +hunk ./a 1 ++p O ++O +^ ^ ^ ^ ^ ^ ^ +conflictor +v v v v v v v +hash 20 ef9bd40f01457f56f489b06b621ddc913172c77b +hunk ./a 1 ++G ++p O +************* +hash 25 421f502eae08d9bd4fd7098fa4c8fc777fb3ed35 +hunk ./a 1 ++s ++O +^ ^ ^ ^ ^ ^ ^ +patch 92281481e69782b95a5fb3283afc7260e6945635 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * tyqredtlxtijcwmnpeyw +replace ./a [A-Za-z_0-9] G Z +conflictor +v v v v v v v +hash 11 b7bb145a8f75b82ab932df5a9e1e5c4c95fb89c9 +hunk ./a 1 ++p O ++O +************* +hash 25 421f502eae08d9bd4fd7098fa4c8fc777fb3ed35 +hunk ./a 1 ++s ++O +************* +hash 11 8c36e2cc520dfd81f3343e8ef3ee04a8f6346daa +hunk ./a 1 ++Z ++u k +^ ^ ^ ^ ^ ^ ^ +patch 6c0596d530069fd1fb7e6217edafad2506893ee8 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * vveiqeofcxzrtrbsyxkq +depend 92281481e69782b95a5fb3283afc7260e6945635 + * tyqredtlxtijcwmnpeyw +depend 324a26cca1536a76b5b279d1fc6d46e78b81ca6a + * lawjerjcdzhyobidfalw + +versus + +patch 324a26cca1536a76b5b279d1fc6d46e78b81ca6a +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * lawjerjcdzhyobidfalw +hunk ./a 1 ++p t ++t +hunk ./a 1 +-p t ++s +patch 9f88ca6fc4cc5bb609273260191bb36d9743cd75 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * cyhqaafycrmqgelxxurv +conflictor +hash -25 421f502eae08d9bd4fd7098fa4c8fc777fb3ed35 +hunk ./a 1 +-s ++p t +hash -11 b7bb145a8f75b82ab932df5a9e1e5c4c95fb89c9 +hunk ./a 1 +-p t +-t +v v v v v v v +hash 11 b7bb145a8f75b82ab932df5a9e1e5c4c95fb89c9 +hunk ./a 1 ++p t ++t +************* +hash 25 421f502eae08d9bd4fd7098fa4c8fc777fb3ed35 +hunk ./a 1 ++s ++t +************* +hash 20 ef9bd40f01457f56f489b06b621ddc913172c77b +hunk ./a 1 ++G ++p t +^ ^ ^ ^ ^ ^ ^ +replace ./a [A-Za-z_0-9] t O +patch 92281481e69782b95a5fb3283afc7260e6945635 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * tyqredtlxtijcwmnpeyw +replace ./a [A-Za-z_0-9] G Z +conflictor +v v v v v v v +hash 11 b7bb145a8f75b82ab932df5a9e1e5c4c95fb89c9 +hunk ./a 1 ++p O ++O +************* +hash 25 421f502eae08d9bd4fd7098fa4c8fc777fb3ed35 +hunk ./a 1 ++s ++O +************* +hash 11 8c36e2cc520dfd81f3343e8ef3ee04a8f6346daa +hunk ./a 1 ++Z ++u k +^ ^ ^ ^ ^ ^ ^ +patch 6c0596d530069fd1fb7e6217edafad2506893ee8 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * vveiqeofcxzrtrbsyxkq +depend 92281481e69782b95a5fb3283afc7260e6945635 + * tyqredtlxtijcwmnpeyw +depend 324a26cca1536a76b5b279d1fc6d46e78b81ca6a + * lawjerjcdzhyobidfalw + +Sealed2 + 
(WithStartState2 + (WithNames V1Model [ File "a" [] ] []) + (SeqMS + (ParMS + (SeqMS + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "cyhqaafycrmqgelxxurv" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 20 ef9bd40f01457f56f489b06b621ddc913172c77b) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [] [ "G" , "p t" ]) + } :>: + (PrimWithName + (PrimPatchId 13 43be3fbfc956ba067b3aa3f52b9e57944fc80281) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (TokReplace "A-Za-z_0-9" "t" "O") + } :>: + NilFL)))) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "tyqredtlxtijcwmnpeyw" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 22 d321382f12821eb87f7dd5f0ef4b8da4a37e164a) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (TokReplace "A-Za-z_0-9" "G" "Z") + } :>: + (PrimWithName + (PrimPatchId 11 8c36e2cc520dfd81f3343e8ef3ee04a8f6346daa) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 2 [ "p O" ] [ "u k" ]) + } :>: + NilFL)))) + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "lawjerjcdzhyobidfalw" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 11 b7bb145a8f75b82ab932df5a9e1e5c4c95fb89c9) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [] [ "p t" , "t" ]) + } :>: + (PrimWithName + (PrimPatchId 25 421f502eae08d9bd4fd7098fa4c8fc777fb3ed35) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) (Hunk 1 [ "p t" ] [ "s" ]) + } :>: + NilFL))))) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "vveiqeofcxzrtrbsyxkq" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [ PatchInfo + { _piDate = "20240606010532" + , _piName = "tyqredtlxtijcwmnpeyw" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + , PatchInfo + { _piDate = "20240606010532" + , _piName = "lawjerjcdzhyobidfalw" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + ] + NilFL))) + +(used seed 5663187684998127060) diff --git a/tests/issue2727-resolutions-order-independent4.sh b/tests/issue2727-resolutions-order-independent4.sh new file mode 100755 index 00000000..bf96bc81 --- /dev/null +++ b/tests/issue2727-resolutions-order-independent4.sh @@ -0,0 +1,438 @@ +#!/usr/bin/env bash + +. lib + +# so we always record non-canonized patches +pwd="$PWD" +trap "cp $pwd/defaults $pwd/.darcs/" EXIT +cp .darcs/defaults . +echo ALL no-canonize >> .darcs/defaults + +# Expected output from mark-conflicts. +# Two versions because V1 and V2 don't sort the alternatives. +cat >log1 <log2 <./a/AJg.txt <log 2>&1 +(diff -u ../log1 log || diff -u ../log2 log) >&2 +cd .. + +rm -rf R2 +darcs init R2 +cd R2 +darcs pull ../R1 -a --allow-conflicts -p jeyfglcxqipagormgcia +darcs pull ../R1 -a --allow-conflicts -p ndatodkajykujxqtjpwp +darcs pull ../R1 -a --allow-conflicts -p dcncilcydqhpujrnvksn +darcs pull ../R1 -a --allow-conflicts -p mjyujvdcahsdqxgwzpda +darcs pull ../R1 -a --allow-conflicts -p intermediate +darcs pull ../R1 -a --allow-conflicts -p sqgzmtwutgroespcnhcc +darcs mark-conflicts >log 2>&1 +(diff -u ../log1 log || diff -u ../log2 log) >&2 +cd .. 
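+
+# Both runs accept either of the two expected logs because these formats do
+# not sort the conflicting alternatives. A minimal sketch of that "match any
+# expected output" check as a helper, should more alternatives ever be
+# needed (illustrative only, not used by this test):
+match_any_log () {
+  # usage: match_any_log ACTUAL EXPECTED...
+  local actual=$1; shift
+  local expected
+  for expected in "$@"; do
+    if diff -u "$expected" "$actual" >&2; then
+      return 0
+    fi
+  done
+  return 1
+}
+# e.g. match_any_log log ../log1 ../log2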
+ +exit # success + +# Beautified complete output of the failing QC test case + +Named RepoPatchV3: + using V2.Prim wrapper for Prim.V1: + resolutions are invariant under reorderings: [Failed] +*** Failed! (after 1383 tests and 9 shrinks): +resolutions differ: r1= + +[ [ Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (TokReplace "A-Za-z_0-9" "Y" "v") + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (TokReplace "A-Za-z_0-9" "Y" "w") + } :>: + NilFL) + ] +] + +r2= + +[ [ Sealed + (Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "b" } ]) + (AnchoredPath + [ Name { unName = "a" } , Name { unName = "AJg.txt" } ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = DP (AnchoredPath [ Name { unName = "a" } ]) RmDir } :>: + NilFL) + , Sealed + (Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "b" } ]) + (AnchoredPath [ Name { unName = "uL.txt" } ]) + } :>: + NilFL) + ] +, [ Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (TokReplace "A-Za-z_0-9" "Y" "v") + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (TokReplace "A-Za-z_0-9" "Y" "w") + } :>: + NilFL) + ] +] + +for patches + +patch 7c521ce7eed39691b2e0d4a355859bd5df787e9f +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * ndatodkajykujxqtjpwp +rmdir ./a +move ./b ./uL.txt +patch 2da3103d6832f2a134c704202f916ad28e5925f2 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * dcncilcydqhpujrnvksn +depend 7c521ce7eed39691b2e0d4a355859bd5df787e9f + * ndatodkajykujxqtjpwp +replace ./uL.txt [A-Za-z_0-9] Y w +patch 4e0ec50916bbbd4dc9210d21e75bf412b2e49ddf +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * mjyujvdcahsdqxgwzpda +conflictor +hash -81 ad3167ee294f86b047b7e942d9adf89bb16974bc +move ./uL.txt ./b +hash -57 eb0113cea8cd48d4f0708b74434ba659f885c9a3 +adddir ./a +v v v v v v v +hash 57 eb0113cea8cd48d4f0708b74434ba659f885c9a3 +rmdir ./a +************* +hash 81 ad3167ee294f86b047b7e942d9adf89bb16974bc +move ./b ./uL.txt +************* +hash 15 c7230c21b7cb710a96919dce19093e46e7cbf956 +move ./b ./a/AJg.txt +^ ^ ^ ^ ^ ^ ^ +hunk ./b 1 ++I u ++G ++w +patch baaf5a5badc48ad6f447ef4a498dc9efaef09a17 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * jeyfglcxqipagormgcia +conflictor +hash -67 ab063243f8f27665186aaf9ffccfc36eb32043a7 +replace ./b [A-Za-z_0-9] w Y +v v v v v v v +hash 67 ab063243f8f27665186aaf9ffccfc36eb32043a7 +replace ./b [A-Za-z_0-9] Y w +************* +hash 27 7a19bc7ee8e9ba40c584a3353170e0c5fac64a5d +replace ./b [A-Za-z_0-9] Y v +^ ^ ^ ^ ^ ^ ^ +patch 24047e8a378e356f88cdf97a2440bab8b77cc912 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * sqgzmtwutgroespcnhcc +depend 4e0ec50916bbbd4dc9210d21e75bf412b2e49ddf + * mjyujvdcahsdqxgwzpda +depend 7c521ce7eed39691b2e0d4a355859bd5df787e9f + * ndatodkajykujxqtjpwp +depend 2da3103d6832f2a134c704202f916ad28e5925f2 + * dcncilcydqhpujrnvksn + +versus + +patch baaf5a5badc48ad6f447ef4a498dc9efaef09a17 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * jeyfglcxqipagormgcia +replace ./b [A-Za-z_0-9] Y v +patch 7c521ce7eed39691b2e0d4a355859bd5df787e9f +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * ndatodkajykujxqtjpwp +rmdir ./a +move ./b ./uL.txt +patch 2da3103d6832f2a134c704202f916ad28e5925f2 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * dcncilcydqhpujrnvksn +depend 7c521ce7eed39691b2e0d4a355859bd5df787e9f + * ndatodkajykujxqtjpwp +conflictor +hash -27 
7a19bc7ee8e9ba40c584a3353170e0c5fac64a5d +replace ./uL.txt [A-Za-z_0-9] v Y +v v v v v v v +hash 27 7a19bc7ee8e9ba40c584a3353170e0c5fac64a5d +replace ./uL.txt [A-Za-z_0-9] Y v +************* +hash 67 ab063243f8f27665186aaf9ffccfc36eb32043a7 +replace ./uL.txt [A-Za-z_0-9] Y w +^ ^ ^ ^ ^ ^ ^ +patch 4e0ec50916bbbd4dc9210d21e75bf412b2e49ddf +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * mjyujvdcahsdqxgwzpda +conflictor +hash -81 ad3167ee294f86b047b7e942d9adf89bb16974bc +move ./uL.txt ./b +hash -57 eb0113cea8cd48d4f0708b74434ba659f885c9a3 +adddir ./a +v v v v v v v +hash 57 eb0113cea8cd48d4f0708b74434ba659f885c9a3 +rmdir ./a +************* +hash 81 ad3167ee294f86b047b7e942d9adf89bb16974bc +move ./b ./uL.txt +************* +hash 15 c7230c21b7cb710a96919dce19093e46e7cbf956 +move ./b ./a/AJg.txt +^ ^ ^ ^ ^ ^ ^ +hunk ./b 1 ++I u ++G ++Y +patch 24047e8a378e356f88cdf97a2440bab8b77cc912 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * sqgzmtwutgroespcnhcc +depend 4e0ec50916bbbd4dc9210d21e75bf412b2e49ddf + * mjyujvdcahsdqxgwzpda +depend 7c521ce7eed39691b2e0d4a355859bd5df787e9f + * ndatodkajykujxqtjpwp +depend 2da3103d6832f2a134c704202f916ad28e5925f2 + * dcncilcydqhpujrnvksn + +Sealed2 + (WithStartState2 + (WithNames V1Model [ Dir "a" , File "b" [] ] []) + (SeqMS + (ParMS + (SeqMS + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "mjyujvdcahsdqxgwzpda" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 15 c7230c21b7cb710a96919dce19093e46e7cbf956) + Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "b" } ]) + (AnchoredPath + [ Name { unName = "a" } , Name { unName = "AJg.txt" } ]) + } :>: + (PrimWithName + (PrimPatchId 63 09627d09eaf29c8a349ac2ad4a1d08e951a1c095) + Prim + { unPrim = + FP + (AnchoredPath + [ Name { unName = "a" } , Name { unName = "AJg.txt" } ]) + (Hunk 1 [] [ "I u" , "G" , "Y" ]) + } :>: + NilFL)))) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "jeyfglcxqipagormgcia" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 27 7a19bc7ee8e9ba40c584a3353170e0c5fac64a5d) + Prim + { unPrim = + FP + (AnchoredPath + [ Name { unName = "a" } , Name { unName = "AJg.txt" } ]) + (TokReplace "A-Za-z_0-9" "Y" "v") + } :>: + NilFL))) + (SeqMS + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "ndatodkajykujxqtjpwp" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 57 eb0113cea8cd48d4f0708b74434ba659f885c9a3) + Prim + { unPrim = DP (AnchoredPath [ Name { unName = "a" } ]) RmDir } :>: + (PrimWithName + (PrimPatchId 81 ad3167ee294f86b047b7e942d9adf89bb16974bc) + Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "b" } ]) + (AnchoredPath [ Name { unName = "uL.txt" } ]) + } :>: + NilFL)))) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "dcncilcydqhpujrnvksn" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [ PatchInfo + { _piDate = "20240606010532" + , _piName = "ndatodkajykujxqtjpwp" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + ] + (PrimWithName + (PrimPatchId 67 ab063243f8f27665186aaf9ffccfc36eb32043a7) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "uL.txt" } ]) + (TokReplace "A-Za-z_0-9" "Y" "w") + } :>: + NilFL)))) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "sqgzmtwutgroespcnhcc" + , 
_piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [ PatchInfo + { _piDate = "20240606010532" + , _piName = "mjyujvdcahsdqxgwzpda" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + , PatchInfo + { _piDate = "20240606010532" + , _piName = "ndatodkajykujxqtjpwp" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + , PatchInfo + { _piDate = "20240606010532" + , _piName = "dcncilcydqhpujrnvksn" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + ] + NilFL))) + +(used seed 6479663611722759425) diff --git a/tests/issue2727-resolutions-order-independent5.sh b/tests/issue2727-resolutions-order-independent5.sh new file mode 100755 index 00000000..4b906a0a --- /dev/null +++ b/tests/issue2727-resolutions-order-independent5.sh @@ -0,0 +1,852 @@ +#!/usr/bin/env bash + +. lib + +if grep myers .darcs/defaults; then + skip-formats darcs-1 +fi + +# so we always record non-canonized patches +pwd="$PWD" +trap "cp $pwd/defaults $pwd/.darcs/" EXIT +cp .darcs/defaults . +echo ALL no-canonize >> .darcs/defaults + +rm -rf B +darcs init B +cd B +mkdir a +cat >./b <./b <./b <./b <./b <./b <>./b <log 2>&1 +darcs whatsnew >>log || not darcs whatsnew >>log +cd .. + +rm -rf R2 +darcs clone B R2 +cd R2 +darcs pull -a --allow-conflicts ../R1 -p icodxgoobrqkafkptdwr +darcs pull -a --allow-conflicts ../R1 -p fxhzfgiulmnzgaojznqo +darcs pull -a --allow-conflicts ../R1 -p hlluqtinwjoxdunvzoqi +darcs pull -a --allow-conflicts ../R1 -p angtickssosgnvqxpbgw +# irrelevant +# darcs pull -a --allow-conflicts ../R1 -p dofgshqaobnokmjqygyu +darcs pull -a --allow-conflicts ../R1 -p fmujbnoeyizgucisozes +darcs mark-conflicts >log 2>&1 +darcs whatsnew >>log || not darcs whatsnew >>log +cd .. + +# In R1 we see +# move ./a ./pPF, from icodxgoobrqkafkptdwr +# versus +# move ./a ./LQ, from fxhzfgiulmnzgaojznqo +# In R2 this conflict is seen as resolved, rightly so, +# since fmujbnoeyizgucisozes transitively explicitly depends +# on both. +# With tracing one can see that the inputs to RepoPatchV3.resolveConflicts are +# the same, except that the internal order in 'resolved' (10 patches in both +# R1 and R2) differs. So this is a bug in RepoPatchV3.resolveConflicts! + +diff -u R1/log R2/log >&2 + +exit # success + +# Beautified complete output of the failing QC test case + +with -q=100000 +Named RepoPatchV3: + using V2.Prim wrapper for Prim.V1: + resolutions are invariant under reorderings: [Failed] +*** Failed! 
(after 15498 tests and 52 shrinks): +resolutions differ: r1= + +[ [ Sealed + (Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "a" } ]) + (AnchoredPath [ Name { unName = "pPF" } ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "a" } ]) + (AnchoredPath [ Name { unName = "LQ" } ]) + } :>: + NilFL) + ] +, [ Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (Hunk 21 [ "" , "" , "" , "" , "" , "" ] [ "E o" , "D" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) (Hunk 21 [ "" ] [ "J h" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (Hunk + 24 + [ "" , "" , "" , "" , "q E" , "w N" , "e" , "C C" , "" ] + [ "U r" ]) + } :>: + NilFL) + ] +] + +r2= + +[ [ Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (Hunk 21 [ "" , "" , "" , "" , "" , "" ] [ "E o" , "D" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) (Hunk 21 [ "" ] [ "J h" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (Hunk + 24 + [ "" , "" , "" , "" , "q E" , "w N" , "e" , "C C" , "" ] + [ "U r" ]) + } :>: + NilFL) + ] +] + +for patches + +patch a76aa8828b0ec2e8e3d5cc4892d143ec1dd989eb +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * hlluqtinwjoxdunvzoqi +hunk ./b 5 +- +- +- +- +-q E +-w N +-e +-C C +- ++U r +hunk ./b 2 +- ++J h +patch c4ffab4c9f7e77946aea095e503c8560693bb55b +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * icodxgoobrqkafkptdwr +move ./a ./pPF +replace ./b [A-Za-z_0-9] G n +patch daafdb7bcf593a09e7851509261775a4c8bbfe17 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * fxhzfgiulmnzgaojznqo +conflictor +hash -20 1c5266057ed19c48edeffb661b55d0258cf24293 +move ./pPF ./a +v v v v v v v +hash 20 1c5266057ed19c48edeffb661b55d0258cf24293 +move ./a ./pPF +************* +hash 65 06b0727251abb54cbb32df731daa2845e3087de4 +move ./a ./LQ +^ ^ ^ ^ ^ ^ ^ +hunk ./b 1 +- ++J ++R v ++p ++U C ++F ++W P ++Z ++k M ++W D ++T s ++h ++E o ++M n ++Y m ++j p ++F ++S ++p Q ++c y ++n p +patch 6e7e5f9b43099f1de0ee0a26a6ae418a0b6f6a9e +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * angtickssosgnvqxpbgw +conflictor +hash -59 8be3b3901e5737a34b93d153a48978f4103008d8 +hunk ./b 21 +-J h ++ +hash -90 84e09ac2f5e73d35f90726422b70e2e5857af0d4 +hunk ./b 24 +-U r ++ ++ ++ ++ ++q E ++w N ++e ++C C ++ +v v v v v v v +hash 59 8be3b3901e5737a34b93d153a48978f4103008d8 +hunk ./b 21 +- ++J h +************* +hash 90 84e09ac2f5e73d35f90726422b70e2e5857af0d4 +hunk ./b 24 +- +- +- +- +-q E +-w N +-e +-C C +- ++U r +************* +hash 49 e82d51ebb8529f70a0bbf002eecbd32f3a45e1e9 +hunk ./b 21 +- +- +- +- +- +- ++E o ++D +^ ^ ^ ^ ^ ^ ^ +conflictor +v v v v v v v +hash 90 84e09ac2f5e73d35f90726422b70e2e5857af0d4 +hunk ./b 24 +- +- +- +- +-q E +-w N +-e +-C C +- ++U r +************* +hash 54 6b11fc5e2a329e572e08eac6f9d04ede137232e2 +hunk ./b 32 +- +- ++O ++E K ++V ++W k ++b k ++I Z ++p +^ ^ ^ ^ ^ ^ ^ +patch b393b57809990125ddefcd1950bb3ae337e38b4b +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * dofgshqaobnokmjqygyu +adddir ./a/t +patch 4bfaee19885b6d7c79bd01bdf61d35bd463a0c79 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * fmujbnoeyizgucisozes +depend c4ffab4c9f7e77946aea095e503c8560693bb55b + * icodxgoobrqkafkptdwr +depend a76aa8828b0ec2e8e3d5cc4892d143ec1dd989eb + * hlluqtinwjoxdunvzoqi +depend 
daafdb7bcf593a09e7851509261775a4c8bbfe17 + * fxhzfgiulmnzgaojznqo +hunk ./b 34 ++A F ++p w ++p + +versus + +patch c4ffab4c9f7e77946aea095e503c8560693bb55b +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * icodxgoobrqkafkptdwr +move ./a ./pPF +replace ./b [A-Za-z_0-9] G n +patch daafdb7bcf593a09e7851509261775a4c8bbfe17 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * fxhzfgiulmnzgaojznqo +conflictor +hash -20 1c5266057ed19c48edeffb661b55d0258cf24293 +move ./pPF ./a +v v v v v v v +hash 20 1c5266057ed19c48edeffb661b55d0258cf24293 +move ./a ./pPF +************* +hash 65 06b0727251abb54cbb32df731daa2845e3087de4 +move ./a ./LQ +^ ^ ^ ^ ^ ^ ^ +hunk ./b 1 +- ++J ++R v ++p ++U C ++F ++W P ++Z ++k M ++W D ++T s ++h ++E o ++M n ++Y m ++j p ++F ++S ++p Q ++c y ++n p +patch a76aa8828b0ec2e8e3d5cc4892d143ec1dd989eb +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * hlluqtinwjoxdunvzoqi +hunk ./b 24 +- +- +- +- +-q E +-w N +-e +-C C +- ++U r +hunk ./b 21 +- ++J h +patch 6e7e5f9b43099f1de0ee0a26a6ae418a0b6f6a9e +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * angtickssosgnvqxpbgw +conflictor +hash -59 8be3b3901e5737a34b93d153a48978f4103008d8 +hunk ./b 21 +-J h ++ +hash -90 84e09ac2f5e73d35f90726422b70e2e5857af0d4 +hunk ./b 24 +-U r ++ ++ ++ ++ ++q E ++w N ++e ++C C ++ +v v v v v v v +hash 59 8be3b3901e5737a34b93d153a48978f4103008d8 +hunk ./b 21 +- ++J h +************* +hash 90 84e09ac2f5e73d35f90726422b70e2e5857af0d4 +hunk ./b 24 +- +- +- +- +-q E +-w N +-e +-C C +- ++U r +************* +hash 49 e82d51ebb8529f70a0bbf002eecbd32f3a45e1e9 +hunk ./b 21 +- +- +- +- +- +- ++E o ++D +^ ^ ^ ^ ^ ^ ^ +conflictor +v v v v v v v +hash 90 84e09ac2f5e73d35f90726422b70e2e5857af0d4 +hunk ./b 24 +- +- +- +- +-q E +-w N +-e +-C C +- ++U r +************* +hash 54 6b11fc5e2a329e572e08eac6f9d04ede137232e2 +hunk ./b 32 +- +- ++O ++E K ++V ++W k ++b k ++I Z ++p +^ ^ ^ ^ ^ ^ ^ +patch b393b57809990125ddefcd1950bb3ae337e38b4b +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * dofgshqaobnokmjqygyu +adddir ./a/t +patch 4bfaee19885b6d7c79bd01bdf61d35bd463a0c79 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * fmujbnoeyizgucisozes +depend c4ffab4c9f7e77946aea095e503c8560693bb55b + * icodxgoobrqkafkptdwr +depend a76aa8828b0ec2e8e3d5cc4892d143ec1dd989eb + * hlluqtinwjoxdunvzoqi +depend daafdb7bcf593a09e7851509261775a4c8bbfe17 + * fxhzfgiulmnzgaojznqo +hunk ./b 34 ++A F ++p w ++p + +Sealed2 + (WithStartState2 + (WithNames + V1Model + [ Dir "a" + , File + "b" + [ "" + , "" + , "" + , "" + , "" + , "" + , "" + , "" + , "q E" + , "w N" + , "e" + , "C C" + , "" + , "" + ] + ] + []) + (SeqMS + (ParMS + (ParMS + (ParMS + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "icodxgoobrqkafkptdwr" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 20 1c5266057ed19c48edeffb661b55d0258cf24293) + Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "a" } ]) + (AnchoredPath [ Name { unName = "pPF" } ]) + } :>: + (PrimWithName + (PrimPatchId 11 2eabc4deab38658effa885b7e3f495b4877191ee) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (TokReplace "A-Za-z_0-9" "G" "n") + } :>: + NilFL)))) + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "hlluqtinwjoxdunvzoqi" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 90 84e09ac2f5e73d35f90726422b70e2e5857af0d4) + Prim + { unPrim = + FP + (AnchoredPath [ Name { 
unName = "b" } ]) + (Hunk + 5 + [ "" , "" , "" , "" , "q E" , "w N" , "e" , "C C" , "" ] + [ "U r" ]) + } :>: + (PrimWithName + (PrimPatchId 59 8be3b3901e5737a34b93d153a48978f4103008d8) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (Hunk 2 [ "" ] [ "J h" ]) + } :>: + NilFL))))) + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "fxhzfgiulmnzgaojznqo" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 65 06b0727251abb54cbb32df731daa2845e3087de4) + Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "a" } ]) + (AnchoredPath [ Name { unName = "LQ" } ]) + } :>: + (PrimWithName + (PrimPatchId 16 b1cf48180a70c89defa9f2010c3b57e9c80c9071) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (Hunk + 1 + [ "" ] + [ "J" + , "R v" + , "p" + , "U C" + , "F" + , "W P" + , "Z" + , "k M" + , "W D" + , "T s" + , "h" + , "E o" + , "M G" + , "Y m" + , "j p" + , "F" + , "S" + , "p Q" + , "c y" + , "G p" + ]) + } :>: + NilFL))))) + (ParMS + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "angtickssosgnvqxpbgw" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 49 e82d51ebb8529f70a0bbf002eecbd32f3a45e1e9) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (Hunk 2 [ "" , "" , "" , "" , "" , "" ] [ "E o" , "D" ]) + } :>: + (PrimWithName + (PrimPatchId 54 6b11fc5e2a329e572e08eac6f9d04ede137232e2) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (Hunk + 9 + [ "" , "" ] + [ "O" , "E K" , "V" , "W k" , "b k" , "I Z" , "p" ]) + } :>: + NilFL)))) + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "dofgshqaobnokmjqygyu" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 16 460ef68ae545f13c99bfba068017c6035fc8ec3d) + Prim + { unPrim = + DP + (AnchoredPath [ Name { unName = "a" } , Name { unName = "t" } ]) + AddDir + } :>: + NilFL))))) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "fmujbnoeyizgucisozes" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [ PatchInfo + { _piDate = "20240606010532" + , _piName = "icodxgoobrqkafkptdwr" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + , PatchInfo + { _piDate = "20240606010532" + , _piName = "hlluqtinwjoxdunvzoqi" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + , PatchInfo + { _piDate = "20240606010532" + , _piName = "fxhzfgiulmnzgaojznqo" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + ] + (PrimWithName + (PrimPatchId 79 cabd8f4de1367c0461b06979a3309d2e42f1838f) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (Hunk 34 [] [ "A F" , "p w" , "p" ]) + } :>: + NilFL)))) + +(used seed 6479663611722759425) diff --git a/tests/issue2727-resolutions-order-independent6.sh b/tests/issue2727-resolutions-order-independent6.sh new file mode 100755 index 00000000..4a63e907 --- /dev/null +++ b/tests/issue2727-resolutions-order-independent6.sh @@ -0,0 +1,236 @@ +#!/usr/bin/env bash + +. lib + +rm -rf B +darcs init B +cd B +mkdir ./a +touch ./b +darcs record -lam 'initial state' +cd .. + +# 4 branches with 1 patch each + +rm -rf B1 +darcs clone B B1 +cd B1 +rmdir ./a +darcs record -am b1 +cd .. 
+ +rm -rf B2 +darcs clone B B2 +cd B2 +cat >./b <./b <log 2>&1 +not darcs whatsnew +cd .. + +rm -rf R2 +darcs clone B R2 +cd R2 +# context +darcs pull --allow-conflicts ../B2 -a +darcs pull --allow-conflicts ../B3 -a +darcs pull --allow-conflicts ../B1 -a +not darcs whatsnew +darcs pull --mark-conflicts ../B4 -a >log 2>&1 +not darcs whatsnew +cd .. + +diff -u R1/log R2/log >&2 + +exit; # success + +RepoPatchV3: + using V2.Prim wrapper for Prim.V1: + resolutions are invariant under reorderings: [Failed] +*** Failed! (after 168 tests and 34 shrinks): +resolutions differ: r1= + +[ [ Sealed + (Prim + { unPrim = DP (AnchoredPath [ Name { unName = "a" } ]) RmDir } :>: + NilFL) + , Sealed + (Prim + { unPrim = DP (AnchoredPath [ Name { unName = "a" } ]) RmDir } :>: + NilFL) + ] +, [ Sealed + (Prim + { unPrim = + FP (AnchoredPath [ Name { unName = "b" } ]) (Hunk 1 [] [ "w" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) (Hunk 1 [] [ "P" , "Y" ]) + } :>: + NilFL) + ] +] + +r2= + +[ [ Sealed + (Prim + { unPrim = DP (AnchoredPath [ Name { unName = "a" } ]) RmDir } :>: + NilFL) + , Sealed + (Prim + { unPrim = DP (AnchoredPath [ Name { unName = "a" } ]) RmDir } :>: + NilFL) + ] +] + +for context + +hash 67 34a0f2ded1b229162692b24a3fd66fa8b9731467 +hunk ./b 1 ++P ++Y +hash 45 5c42bf7895717660a24749fcbf5b02ddf81b95aa +rmdir ./a +conflictor +hash -67 34a0f2ded1b229162692b24a3fd66fa8b9731467 +hunk ./b 1 +-P +-Y +v v v v v v v +hash 67 34a0f2ded1b229162692b24a3fd66fa8b9731467 +hunk ./b 1 ++P ++Y +************* +hash 62 5fe16ca909b66e466b57a292d038c4fda79b3d2d +hunk ./b 1 ++w +^ ^ ^ ^ ^ ^ ^ + +and patches + +conflictor +hash -45 5c42bf7895717660a24749fcbf5b02ddf81b95aa +adddir ./a +v v v v v v v +hash 45 5c42bf7895717660a24749fcbf5b02ddf81b95aa +rmdir ./a +************* +hash 51 e96ccc18e8b980e2f1324a0421b081b065e160da +rmdir ./a +^ ^ ^ ^ ^ ^ ^ + +versus + +for context + +hash 62 5fe16ca909b66e466b57a292d038c4fda79b3d2d +hunk ./b 1 ++w +conflictor +hash -62 5fe16ca909b66e466b57a292d038c4fda79b3d2d +hunk ./b 1 +-w +v v v v v v v +hash 62 5fe16ca909b66e466b57a292d038c4fda79b3d2d +hunk ./b 1 ++w +************* +hash 67 34a0f2ded1b229162692b24a3fd66fa8b9731467 +hunk ./b 1 ++P ++Y +^ ^ ^ ^ ^ ^ ^ +hash 45 5c42bf7895717660a24749fcbf5b02ddf81b95aa +rmdir ./a + +and patches + +conflictor +hash -45 5c42bf7895717660a24749fcbf5b02ddf81b95aa +adddir ./a +v v v v v v v +hash 45 5c42bf7895717660a24749fcbf5b02ddf81b95aa +rmdir ./a +************* +hash 51 e96ccc18e8b980e2f1324a0421b081b065e160da +rmdir ./a +^ ^ ^ ^ ^ ^ ^ + +Sealed2 + (WithStartState2 + V1Model + [ Dir "a" , File "b" [] ] + (WithSplit + 1 + (ParMS + (ParMS + (SeqMS + NilMS + (PrimWithName + (PrimPatchId 45 5c42bf7895717660a24749fcbf5b02ddf81b95aa) + Prim + { unPrim = DP (AnchoredPath [ Name { unName = "a" } ]) RmDir })) + (SeqMS + NilMS + (PrimWithName + (PrimPatchId 62 5fe16ca909b66e466b57a292d038c4fda79b3d2d) + Prim + { unPrim = + FP (AnchoredPath [ Name { unName = "b" } ]) (Hunk 1 [] [ "w" ]) + }))) + (ParMS + (SeqMS + NilMS + (PrimWithName + (PrimPatchId 67 34a0f2ded1b229162692b24a3fd66fa8b9731467) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) (Hunk 1 [] [ "P" , "Y" ]) + })) + (SeqMS + NilMS + (PrimWithName + (PrimPatchId 51 e96ccc18e8b980e2f1324a0421b081b065e160da) + Prim + { unPrim = DP (AnchoredPath [ Name { unName = "a" } ]) RmDir + })))))) + +(use -r=5572661293594798996 and -q=1000) diff --git a/tests/issue2727-resolutions-order-independent7.sh 
b/tests/issue2727-resolutions-order-independent7.sh new file mode 100755 index 00000000..8e0da455 --- /dev/null +++ b/tests/issue2727-resolutions-order-independent7.sh @@ -0,0 +1,349 @@ +#!/usr/bin/env bash + +. lib + +# Test fails for darcs-1 patches. We can't be bothered to fix those. +skip-formats darcs-1 + +rm -rf B +darcs init B +cd B +cat >./a <./a <./a <./a <./a <&1 | grep -v 'Backing up' > log +darcs whatsnew >>log +cd .. + +rm -rf R2 +darcs clone B R2 +cd R2 +# context +darcs pull --allow-conflicts ../B2 -a +darcs pull --allow-conflicts ../B1 -a +darcs pull --allow-conflicts ../B3 -a +darcs pull --mark-conflicts ../B4 -a 2>&1 | grep -v 'Backing up' > log +darcs whatsnew >>log +cd .. + +diff -u R1/log R2/log >&2 + +exit; # success + +RepoPatchV3: + using V2.Prim wrapper for Prim.V1: + resolutions are invariant under reorderings: [Failed] +*** Failed! (after 4457 tests and 25 shrinks): +resolutions differ: r1= + +[ [ Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "" ] [ "k C" , "U W" , "L" , "f P" , "A" , "A" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "" , "" ] [ "L" , "w" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 2 [ "" , "" ] [ "L x" ]) + } :>: + NilFL) + ] +] + +r2= + +[ [ Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "" ] [ "k C" , "U W" , "L" , "f P" , "A" , "A" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 3 [ "" ] [ "t" , "w" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "" , "" ] [ "L" , "w" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 2 [ "" , "" ] [ "L x" ]) + } :>: + NilFL) + ] +] + +for context + +hash 30 fca312e3b62378403077619bfdab030feb8a1190 +hunk ./a 1 +- +- ++L ++w +conflictor +hash -30 fca312e3b62378403077619bfdab030feb8a1190 +hunk ./a 1 +-L +-w ++ ++ +v v v v v v v +hash 30 fca312e3b62378403077619bfdab030feb8a1190 +hunk ./a 1 +- +- ++L ++w +************* +hash 35 33afb5708f81f4dfa5bbc58664ab3a58ebdb57ff +hunk ./a 2 +- +- ++L x +^ ^ ^ ^ ^ ^ ^ +conflictor +v v v v v v v +hash 35 33afb5708f81f4dfa5bbc58664ab3a58ebdb57ff +hunk ./a 2 +- +- ++L x +************* +hash 9 a1d3de47f5195b3b6d2fd141760f78a27e97b105 +hunk ./a 3 +- ++t ++w +^ ^ ^ ^ ^ ^ ^ + +and patches + +conflictor +v v v v v v v +hash 30 fca312e3b62378403077619bfdab030feb8a1190 +hunk ./a 1 +- +- ++L ++w +************* +hash 2 25b39f6a7888bce66be38bf70d9af2dfa169c986 +hunk ./a 1 +- ++k C ++U W ++L ++f P ++A ++A +^ ^ ^ ^ ^ ^ ^ + +versus + +for context + +hash 35 33afb5708f81f4dfa5bbc58664ab3a58ebdb57ff +hunk ./a 2 +- +- ++L x +conflictor +hash -35 33afb5708f81f4dfa5bbc58664ab3a58ebdb57ff +hunk ./a 2 +-L x ++ ++ +v v v v v v v +hash 35 33afb5708f81f4dfa5bbc58664ab3a58ebdb57ff +hunk ./a 2 +- +- ++L x +************* +hash 30 fca312e3b62378403077619bfdab030feb8a1190 +hunk ./a 1 +- +- ++L ++w +^ ^ ^ ^ ^ ^ ^ +conflictor +v v v v v v v +hash 35 33afb5708f81f4dfa5bbc58664ab3a58ebdb57ff +hunk ./a 2 +- +- ++L x +************* +hash 9 a1d3de47f5195b3b6d2fd141760f78a27e97b105 +hunk ./a 3 +- ++t ++w +^ ^ ^ ^ ^ ^ ^ + +and patches + +conflictor +v v v v v v v +hash 30 fca312e3b62378403077619bfdab030feb8a1190 +hunk ./a 1 +- +- ++L ++w +************* +hash 2 25b39f6a7888bce66be38bf70d9af2dfa169c986 +hunk ./a 1 +- ++k C ++U W 
++L ++f P ++A ++A +^ ^ ^ ^ ^ ^ ^ + +Sealed2 + (WithStartState2 + V1Model + [ File "a" [ "" , "" , "" ] ] + (WithSplit + 1 + (ParMS + (ParMS + (SeqMS + NilMS + (PrimWithName + (PrimPatchId 30 fca312e3b62378403077619bfdab030feb8a1190) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "" , "" ] [ "L" , "w" ]) + })) + (SeqMS + NilMS + (PrimWithName + (PrimPatchId 35 33afb5708f81f4dfa5bbc58664ab3a58ebdb57ff) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 2 [ "" , "" ] [ "L x" ]) + }))) + (ParMS + (SeqMS + NilMS + (PrimWithName + (PrimPatchId 9 a1d3de47f5195b3b6d2fd141760f78a27e97b105) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 3 [ "" ] [ "t" , "w" ]) + })) + (SeqMS + NilMS + (PrimWithName + (PrimPatchId 2 25b39f6a7888bce66be38bf70d9af2dfa169c986) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "" ] [ "k C" , "U W" , "L" , "f P" , "A" , "A" ]) + })))))) + +(used seed -7633521189062312427) -q=10000 diff --git a/tests/issue2727-resolutions-order-independent8.sh b/tests/issue2727-resolutions-order-independent8.sh new file mode 100755 index 00000000..0d156a58 --- /dev/null +++ b/tests/issue2727-resolutions-order-independent8.sh @@ -0,0 +1,555 @@ +#!/usr/bin/env bash + +# This one is interesting. The difference in resolutions here is purely one +# between two separate conflicts versus one large, otherwise they are +# identical. + +. lib + +# Crashes with darcs-1 ("precondition violated" in findConflicting) +skip-formats darcs-1 + +rm -rf B +darcs init B +cd B +cat >./a <./a <./a <./a <./a <./a <&1 | grep -v /B | grep -v 'Backing up' > log +darcs whatsnew >> log +cd .. + +rm -rf R2 +darcs clone B R2 +cd R2 +darcs pull --allow-conflicts -a ../B1 -p b1 +darcs pull --allow-conflicts -a ../B3 -p b31 +darcs pull --allow-conflicts -a ../B2 -p b2 +darcs pull --mark-conflicts -a ../B3 ../B4 2>&1 | grep -v /B | grep -v 'Backing up' > log +darcs whatsnew >> log +cd .. + +diff -u R1/log R2/log >&2 + +exit + +RepoPatchV3: + using V2.Prim wrapper for Prim.V1: + resolutions are invariant under reorderings: [Failed] +*** Failed! 
(after 9489 tests and 43 shrinks): +resolutions differ: r1= + +[ [ Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "" , "" ] [ "W" , "X U" , "t b" , "r" , "b q" , "w" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk + 1 + [ "" ] + [ "K b" , "u" , "p" , "I i" , "d U" , "W" , "R d" , "e" , "g f" ]) + } :>: + NilFL) + ] +, [ Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 3 [ "" , "" ] [ "r" , "L" , "g H" , "e" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk + 4 + [ "" ] + [ "a E" + , "A v" + , "Y" + , "d n" + , "N S" + , "H" + , "w o" + , "F Z" + , "R g" + , "g H" + , "e" + ]) + } :>: + NilFL) + ] +] + +r2= + +[ [ Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "" , "" ] [ "W" , "X U" , "t b" , "r" , "b q" , "w" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk + 1 + [ "" ] + [ "K b" , "u" , "p" , "I i" , "d U" , "W" , "R d" , "e" , "g f" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 3 [ "" , "" ] [ "r" , "L" , "g H" , "e" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk + 4 + [ "" ] + [ "a E" + , "A v" + , "Y" + , "d n" + , "N S" + , "H" + , "w o" + , "F Z" + , "R g" + , "g H" + , "e" + ]) + } :>: + NilFL) + ] +] + +for context + +hash 58 c9263b1c3ee4d4c8e32683c4bb068572dcaffe96 +hunk ./a 2 +- +- ++N P ++a E +conflictor +hash -58 c9263b1c3ee4d4c8e32683c4bb068572dcaffe96 +hunk ./a 2 +-N P +-a E ++ ++ +v v v v v v v +hash 58 c9263b1c3ee4d4c8e32683c4bb068572dcaffe96 +hunk ./a 2 +- +- ++N P ++a E +************* +hash 47 f264fa4869a162df5f44340c2189a09f12e29311 +hunk ./a 3 +- +- ++r ++L ++g H ++e +^ ^ ^ ^ ^ ^ ^ +conflictor +v v v v v v v +hash 58 c9263b1c3ee4d4c8e32683c4bb068572dcaffe96 +hunk ./a 2 +- +- ++N P ++a E +************* +hash 15 12bdd7e73e75bf2040c97f1697b4b6b7faf2a37e +hunk ./a 1 +- +- ++W ++X U ++t b ++r ++b q ++w +^ ^ ^ ^ ^ ^ ^ + +and patches + +conflictor +v v v v v v v +hash 15 12bdd7e73e75bf2040c97f1697b4b6b7faf2a37e +hunk ./a 1 +- +- ++W ++X U ++t b ++r ++b q ++w +************* +hash 24 51703d6dd2cf12d87407bf7b721dda10aec9afa1 +hunk ./a 1 +- ++K b ++u ++p ++I i ++d U ++W ++R d ++e ++g f +^ ^ ^ ^ ^ ^ ^ +conflictor +v v v v v v v +hash 47 f264fa4869a162df5f44340c2189a09f12e29311 +hunk ./a 3 +- +- ++r ++L ++g H ++e +************* +hash 81 ef0c87ff7603d48eb2cc700b114937b313fce309 +hunk ./a 4 +- ++a E ++A v ++Y ++d n ++N S ++H ++w o ++F Z ++R g ++g H ++e +^ ^ ^ ^ ^ ^ ^ + +versus + +for context + +hash 47 f264fa4869a162df5f44340c2189a09f12e29311 +hunk ./a 3 +- +- ++r ++L ++g H ++e +hash 15 12bdd7e73e75bf2040c97f1697b4b6b7faf2a37e +hunk ./a 1 +- +- ++W ++X U ++t b ++r ++b q ++w +conflictor +hash -15 12bdd7e73e75bf2040c97f1697b4b6b7faf2a37e +hunk ./a 1 +-W +-X U +-t b +-r +-b q +-w ++ ++ +hash -47 f264fa4869a162df5f44340c2189a09f12e29311 +hunk ./a 3 +-r +-L +-g H +-e ++ ++ +v v v v v v v +hash 15 12bdd7e73e75bf2040c97f1697b4b6b7faf2a37e +hunk ./a 1 +- +- ++W ++X U ++t b ++r ++b q ++w +************* +hash 47 f264fa4869a162df5f44340c2189a09f12e29311 +hunk ./a 3 +- +- ++r ++L ++g H ++e +************* +hash 58 c9263b1c3ee4d4c8e32683c4bb068572dcaffe96 +hunk ./a 2 +- +- ++N P ++a E +^ ^ ^ ^ ^ ^ ^ + +and patches + +conflictor +v v v v v v v +hash 47 
f264fa4869a162df5f44340c2189a09f12e29311 +hunk ./a 3 +- +- ++r ++L ++g H ++e +************* +hash 81 ef0c87ff7603d48eb2cc700b114937b313fce309 +hunk ./a 4 +- ++a E ++A v ++Y ++d n ++N S ++H ++w o ++F Z ++R g ++g H ++e +^ ^ ^ ^ ^ ^ ^ +conflictor +v v v v v v v +hash 15 12bdd7e73e75bf2040c97f1697b4b6b7faf2a37e +hunk ./a 1 +- +- ++W ++X U ++t b ++r ++b q ++w +************* +hash 24 51703d6dd2cf12d87407bf7b721dda10aec9afa1 +hunk ./a 1 +- ++K b ++u ++p ++I i ++d U ++W ++R d ++e ++g f +^ ^ ^ ^ ^ ^ ^ + +Sealed2 + (WithStartState2 + V1Model + [ File "a" [ "" , "" , "" , "" ] ] + (WithSplit + 2 + (ParMS + (ParMS + (SeqMS + NilMS + (PrimWithName + (PrimPatchId 47 f264fa4869a162df5f44340c2189a09f12e29311) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 3 [ "" , "" ] [ "r" , "L" , "g H" , "e" ]) + })) + (SeqMS + NilMS + (PrimWithName + (PrimPatchId 58 c9263b1c3ee4d4c8e32683c4bb068572dcaffe96) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 2 [ "" , "" ] [ "N P" , "a E" ]) + }))) + (ParMS + (SeqMS + (SeqMS + NilMS + (PrimWithName + (PrimPatchId 15 12bdd7e73e75bf2040c97f1697b4b6b7faf2a37e) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [ "" , "" ] [ "W" , "X U" , "t b" , "r" , "b q" , "w" ]) + })) + (PrimWithName + (PrimPatchId 81 ef0c87ff7603d48eb2cc700b114937b313fce309) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk + 8 + [ "" ] + [ "a E" + , "A v" + , "Y" + , "d n" + , "N S" + , "H" + , "w o" + , "F Z" + , "R g" + , "g H" + , "e" + ]) + })) + (SeqMS + NilMS + (PrimWithName + (PrimPatchId 24 51703d6dd2cf12d87407bf7b721dda10aec9afa1) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk + 1 + [ "" ] + [ "K b" , "u" , "p" , "I i" , "d U" , "W" , "R d" , "e" , "g f" ]) + })))))) + +(used seed -6466895320292341779) diff --git a/tests/issue2727-resolutions-order-independent9.sh b/tests/issue2727-resolutions-order-independent9.sh new file mode 100755 index 00000000..7977537f --- /dev/null +++ b/tests/issue2727-resolutions-order-independent9.sh @@ -0,0 +1,393 @@ +#!/usr/bin/env bash + +. lib + +rm -rf B +darcs init B +cd B +touch a b +darcs record -lam 'initial state' +cd .. + +# 4 branches + +rm -rf B1 +darcs clone B B1 +cd B1 +darcs replace J y b +darcs record -am qvcryqkmrjhsjgtmxeus +cat >a <a <log 2>&1 +not darcs whatsnew >>log +cd .. + +rm -rf R2 +darcs clone B R2 +cd R2 +darcs pull -a --allow-conflicts ../R1 -p yxdgxpwxqvkavhgojjrt +darcs pull -a --allow-conflicts ../R1 -p qvcryqkmrjhsjgtmxeus +darcs pull -a --allow-conflicts ../R1 -p tkcsxlftoemrwesezlaa +darcs pull -a ../R1 -p first_three +darcs pull -a --allow-conflicts ../R1 -p btpsqlxpsfelrkidbeyf +darcs pull -a --allow-conflicts ../R1 -p mihrcprtxunbvzhayult +darcs mark-conflicts >log 2>&1 +not darcs whatsnew >>log +cd .. + +diff -u R1/log R2/log >&2 + +exit; # success + +Named RepoPatchV3: + using V2.Prim wrapper for Prim.V1: + resolutions are invariant under reorderings: [Failed] +*** Failed! 
(after 14185 tests and 10 shrinks): +resolutions differ: r1= + +[] + +r2= + +[ [ Sealed + (Prim + { unPrim = + FP (AnchoredPath [ Name { unName = "a" } ]) (Hunk 1 [] [ "G U" ]) + } :>: + NilFL) + , Sealed + (Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [] [ "M T" , "i" ]) + } :>: + NilFL) + ] +] + +for context + +patch 6852635f0e39e7a8563e00bdba8cdc1b834da65a +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * qvcryqkmrjhsjgtmxeus +hash 54 026a0ba3a52003aade8ccbd1350455b84d4f3b42 +replace ./b [A-Za-z_0-9] J y +hash 65 a50456d6e0f32f49e1dfffb9e1672582ef1dd236 +hunk ./a 1 ++M T ++i +patch 648169e7afba6bd48b0fe7eab3eb1a352ebd4535 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * yxdgxpwxqvkavhgojjrt +conflictor +hash -54 026a0ba3a52003aade8ccbd1350455b84d4f3b42 +replace ./b [A-Za-z_0-9] y J +v v v v v v v +hash 54 026a0ba3a52003aade8ccbd1350455b84d4f3b42 +replace ./b [A-Za-z_0-9] J y +************* +hash 52 4699828c612aba576e7482d89e818eedcc7bf6c2 +rmfile ./b +^ ^ ^ ^ ^ ^ ^ +patch c1e2b8d9b4ee5e7a604c7fc300ffd8da2500d7ed +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * tkcsxlftoemrwesezlaa +hash 73 5196376fbf92761a84841970cb492813aa19b3a5 +adddir ./xFR +hash 83 b41dd5ff02370ea774981eb18c3bb2b0c680f8b0 +move ./xFR ./izf + +and patches + +patch 11ff78b7c98145880da912fc1d5f5e69c7acbb10 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * btpsqlxpsfelrkidbeyf +hash 21 29334fc3358bad8f40554d902f1af6d0b9df948d +adddir ./tPS +conflictor +hash -65 a50456d6e0f32f49e1dfffb9e1672582ef1dd236 +hunk ./a 1 +-M T +-i +v v v v v v v +hash 65 a50456d6e0f32f49e1dfffb9e1672582ef1dd236 +hunk ./a 1 ++M T ++i +************* +hash 55 db8bb25ec02832bc28ab3b9dbd9353dbf2b3c270 +hunk ./a 1 ++G U +^ ^ ^ ^ ^ ^ ^ +patch ab3674a9ebc11889747f0a29e076c5230e56dbf6 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * mihrcprtxunbvzhayult +depend 6852635f0e39e7a8563e00bdba8cdc1b834da65a + * qvcryqkmrjhsjgtmxeus +depend c1e2b8d9b4ee5e7a604c7fc300ffd8da2500d7ed + * tkcsxlftoemrwesezlaa +depend 11ff78b7c98145880da912fc1d5f5e69c7acbb10 + * btpsqlxpsfelrkidbeyf + +versus + +for context + +patch 648169e7afba6bd48b0fe7eab3eb1a352ebd4535 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * yxdgxpwxqvkavhgojjrt +hash 52 4699828c612aba576e7482d89e818eedcc7bf6c2 +rmfile ./b +patch 6852635f0e39e7a8563e00bdba8cdc1b834da65a +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * qvcryqkmrjhsjgtmxeus +conflictor +hash -52 4699828c612aba576e7482d89e818eedcc7bf6c2 +addfile ./b +v v v v v v v +hash 52 4699828c612aba576e7482d89e818eedcc7bf6c2 +rmfile ./b +************* +hash 54 026a0ba3a52003aade8ccbd1350455b84d4f3b42 +replace ./b [A-Za-z_0-9] J y +^ ^ ^ ^ ^ ^ ^ +hash 65 a50456d6e0f32f49e1dfffb9e1672582ef1dd236 +hunk ./a 1 ++M T ++i +patch c1e2b8d9b4ee5e7a604c7fc300ffd8da2500d7ed +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * tkcsxlftoemrwesezlaa +hash 73 5196376fbf92761a84841970cb492813aa19b3a5 +adddir ./xFR +hash 83 b41dd5ff02370ea774981eb18c3bb2b0c680f8b0 +move ./xFR ./izf + +and patches + +patch 11ff78b7c98145880da912fc1d5f5e69c7acbb10 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * btpsqlxpsfelrkidbeyf +hash 21 29334fc3358bad8f40554d902f1af6d0b9df948d +adddir ./tPS +conflictor +hash -65 a50456d6e0f32f49e1dfffb9e1672582ef1dd236 +hunk ./a 1 +-M T +-i +v v v v v v v +hash 65 a50456d6e0f32f49e1dfffb9e1672582ef1dd236 +hunk ./a 1 ++M T ++i +************* +hash 55 db8bb25ec02832bc28ab3b9dbd9353dbf2b3c270 +hunk ./a 1 ++G U +^ ^ ^ ^ ^ ^ ^ +patch 
ab3674a9ebc11889747f0a29e076c5230e56dbf6 +Author: tester +Date: Thu Jun 6 03:05:32 CEST 2024 + * mihrcprtxunbvzhayult +depend 6852635f0e39e7a8563e00bdba8cdc1b834da65a + * qvcryqkmrjhsjgtmxeus +depend c1e2b8d9b4ee5e7a604c7fc300ffd8da2500d7ed + * tkcsxlftoemrwesezlaa +depend 11ff78b7c98145880da912fc1d5f5e69c7acbb10 + * btpsqlxpsfelrkidbeyf + +Sealed2 + (WithStartState2 + (WithNames V1Model [ File "a" [] , File "b" [] ] []) + (WithSplit + 2 + (SeqMS + (ParMS + (ParMS + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "qvcryqkmrjhsjgtmxeus" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 54 026a0ba3a52003aade8ccbd1350455b84d4f3b42) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "b" } ]) + (TokReplace "A-Za-z_0-9" "J" "y") + } :>: + (PrimWithName + (PrimPatchId 65 a50456d6e0f32f49e1dfffb9e1672582ef1dd236) + Prim + { unPrim = + FP + (AnchoredPath [ Name { unName = "a" } ]) + (Hunk 1 [] [ "M T" , "i" ]) + } :>: + NilFL)))) + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "tkcsxlftoemrwesezlaa" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 73 5196376fbf92761a84841970cb492813aa19b3a5) + Prim + { unPrim = DP (AnchoredPath [ Name { unName = "xFR" } ]) AddDir + } :>: + (PrimWithName + (PrimPatchId 83 b41dd5ff02370ea774981eb18c3bb2b0c680f8b0) + Prim + { unPrim = + Move + (AnchoredPath [ Name { unName = "xFR" } ]) + (AnchoredPath [ Name { unName = "izf" } ]) + } :>: + NilFL))))) + (ParMS + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "yxdgxpwxqvkavhgojjrt" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 52 4699828c612aba576e7482d89e818eedcc7bf6c2) + Prim + { unPrim = FP (AnchoredPath [ Name { unName = "b" } ]) RmFile } :>: + NilFL))) + (SeqMS + NilMS + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "btpsqlxpsfelrkidbeyf" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [] + (PrimWithName + (PrimPatchId 21 29334fc3358bad8f40554d902f1af6d0b9df948d) + Prim + { unPrim = DP (AnchoredPath [ Name { unName = "tPS" } ]) AddDir + } :>: + (PrimWithName + (PrimPatchId 55 db8bb25ec02832bc28ab3b9dbd9353dbf2b3c270) + Prim + { unPrim = + FP (AnchoredPath [ Name { unName = "a" } ]) (Hunk 1 [] [ "G U" ]) + } :>: + NilFL)))))) + (NamedP + PatchInfo + { _piDate = "20240606010532" + , _piName = "mihrcprtxunbvzhayult" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + [ PatchInfo + { _piDate = "20240606010532" + , _piName = "qvcryqkmrjhsjgtmxeus" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + , PatchInfo + { _piDate = "20240606010532" + , _piName = "tkcsxlftoemrwesezlaa" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + , PatchInfo + { _piDate = "20240606010532" + , _piName = "btpsqlxpsfelrkidbeyf" + , _piAuthor = "tester" + , _piLog = [] + , _piLegacyIsInverted = False + } + ] + NilFL)))) + +(used seed 5556232291165384773) -q=100000 diff --git a/tests/issue494-pending-sort.sh b/tests/issue494-pending-sort.sh index 5de5e917..e57ecfd8 100755 --- a/tests/issue494-pending-sort.sh +++ b/tests/issue494-pending-sort.sh @@ -23,6 +23,10 @@ ## CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE ## SOFTWARE. +. 
lib + +rm -rf R S + mkdir R cd R darcs init @@ -38,4 +42,5 @@ mkdir S cd S darcs init darcs pull --all ../R -darcs whatsnew | grep 'No changes' +not darcs whatsnew | grep 'No changes' +cd .. diff --git a/tests/latin9-input.sh b/tests/latin9-input.sh index 959cfcd4..95ddb0c9 100755 --- a/tests/latin9-input.sh +++ b/tests/latin9-input.sh @@ -66,12 +66,14 @@ darcs init echo 'Selbstverständlich überraschend' > something.txt darcs add something.txt -echo 'l33tking¸0r@example.org' > interaction_script.txt -echo y >> interaction_script.txt -echo y >> interaction_script.txt -echo y >> interaction_script.txt -echo '¤uroh4xx0rz' >> interaction_script.txt -echo n >> interaction_script.txt +cat >interaction_script.txt < Patch.lhs echo yy | darcs amend -a -p "get rid of" darcs changes | not grep "UNDO" -darcs changes --xml | not grep "inverted='True'" +darcs changes --xml | not grep "inverted=\"True\"" diff --git a/tests/match.sh b/tests/match.sh index 7c1e418b..c0b8134a 100755 --- a/tests/match.sh +++ b/tests/match.sh @@ -57,7 +57,7 @@ not grep author2 log not grep author3 log #hash darcs changes --xml-output --match='exact "\"second\" \ patch"' > log -hash=`grep hash log | sed -e "s/.*hash='//" -e "s/'.*//"` +hash=$(grep hash log | sed -E -e 's/.*hash="([^"]+)".*/\1/') echo $hash darcs changes --match="hash $hash" not grep author1 log diff --git a/tests/mutex-option-precedence.sh b/tests/mutex-option-precedence.sh index b8fad606..976343b4 100755 --- a/tests/mutex-option-precedence.sh +++ b/tests/mutex-option-precedence.sh @@ -24,7 +24,7 @@ ## CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE ## SOFTWARE. -. ../tests/lib # Load some portability helpers. +. lib # Load some portability helpers. rm -rf R # Another script may have left a mess. darcs init --repo R # Create our test repo. darcs init --repo S # Create our test repos. diff --git a/tests/network/failing-issue2462-remote-darcs-transfer-mode.sh b/tests/network/failing-issue2462-remote-darcs-transfer-mode.sh new file mode 100755 index 00000000..4c41cacb --- /dev/null +++ b/tests/network/failing-issue2462-remote-darcs-transfer-mode.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +. lib +. 
sshlib + +init_remote_repo R + +rm -rf R +darcs clone --remote-darcs=xyzabc "${REMOTE}:${REMOTE_DIR}/R" --debug 2>LOG +grep '"ssh" .* "xyzabc"' LOG +not grep '"ssh" .* "darcs"' LOG diff --git a/tests/printer.sh b/tests/printer.sh index a70b8b65..2bac300b 100755 --- a/tests/printer.sh +++ b/tests/printer.sh @@ -11,17 +11,14 @@ touch a darcs add a darcs rec -a -m add -env # clear all output formating environment variables -for e in DARCS_DONT_ESCAPE_ISPRINT DARCS_USE_ISPRINT\ - DARCS_ESCAPE_8BIT\ - DARCS_DONT_ESCAPE_EXTRA DARCS_ESCAPE_EXTRA\ - DARCS_DONT_ESCAPE_TRAILING_SPACES\ - DARCS_DONT_COLOR DARCS_ALWAYS_COLOR DARCS_ALTERNATIVE_COLOR\ - DARCS_DONT_ESCAPE_ANYTHING; do - unset $e -done -env +unset \ + DARCS_DONT_ESCAPE_ISPRINT DARCS_USE_ISPRINT\ + DARCS_ESCAPE_8BIT\ + DARCS_DONT_ESCAPE_EXTRA DARCS_ESCAPE_EXTRA\ + DARCS_DONT_ESCAPE_TRAILING_SPACES\ + DARCS_DONT_COLOR DARCS_ALWAYS_COLOR DARCS_ALTERNATIVE_COLOR\ + DARCS_DONT_ESCAPE_ANYTHING # make sure the locale is c export LC_ALL=C diff --git a/tests/pull.sh b/tests/pull.sh index 20cc53d9..c6f4d77c 100755 --- a/tests/pull.sh +++ b/tests/pull.sh @@ -328,7 +328,7 @@ darcs init temp1 cd temp1 echo first > a darcs record -lam 'first' -firsthash=`darcs log --xml | grep 'hash=' | sed -e "s/.*hash='//" -e "s/'>//"` +firsthash=`darcs log --xml | grep 'hash=' | sed -E -e 's/.*hash="([^"]+)".*/\1/'` echo second > b darcs record -lam 'second' cd .. diff --git a/tests/rename_shouldnt_affect_prefixes.sh b/tests/rename_shouldnt_affect_prefixes.sh index 4f8bcbad..e176bece 100755 --- a/tests/rename_shouldnt_affect_prefixes.sh +++ b/tests/rename_shouldnt_affect_prefixes.sh @@ -41,7 +41,7 @@ darcs rec -am 'Move a -> b' darcs changes --xml b bb > changes.xml -grep "original_name='./a'" < changes.xml +grep 'original_name="./a"' < changes.xml # Ensure we've not used a prefix of the filename for the move. not grep "original_name='./ab'" < changes.xml diff --git a/tests/failing-pristine-problems.sh b/tests/repair-missing-pristine-files.sh similarity index 88% rename from tests/failing-pristine-problems.sh rename to tests/repair-missing-pristine-files.sh index 7fae8780..cb942aed 100755 --- a/tests/failing-pristine-problems.sh +++ b/tests/repair-missing-pristine-files.sh @@ -39,15 +39,17 @@ roothash=`darcs show pristine | grep ' ./$' | cut -d' ' -f1` wibblehash=`darcs show pristine | grep ' wibble$' | cut -d' ' -f1` rm _darcs/pristine.hashed/$roothash +# also remove it from the cache +find $DARCS_CACHE_DIR/pristine.hashed -name $roothash -exec rm -f {} \; || true not darcs check not darcs check -# At the time of writing this test goes wrong at the line above -# I'm not 100% certain if the rest of it is right. 
darcs repair | grep -v 'The repository is already consistent' darcs check rm _darcs/pristine.hashed/$wibblehash +# also remove it from the cache +find $DARCS_CACHE_DIR/pristine.hashed -name $wibblehash -exec rm -f {} \; || true not darcs check not darcs check diff --git a/tests/resolve-conflicts-explicitly.sh b/tests/resolve-conflicts-explicitly.sh index 31facd05..865aeb7a 100755 --- a/tests/resolve-conflicts-explicitly.sh +++ b/tests/resolve-conflicts-explicitly.sh @@ -51,9 +51,8 @@ echo three > file1 darcs record -a -m resolve_file1 # test this indeed resolves only the conflict in file1 darcs mark-conflicts > LOG 2>&1 -grep 'Marking conflicts' LOG -grep 'file2' LOG -not grep 'file1' LOG +grep 'Marking conflicts .*file2' LOG +not grep 'Marking conflicts .*file1' LOG not darcs whatsnew file1 # remove the markup darcs revert -a @@ -61,8 +60,7 @@ darcs revert -a echo yyd | darcs record --ask-deps -m explicit # test resolution is still only partial darcs mark-conflicts > LOG 2>&1 -grep 'Marking conflicts' LOG -grep 'file2' LOG -not grep 'file1' LOG +grep 'Marking conflicts .*file2' LOG +not grep 'Marking conflicts .*file1' LOG not darcs whatsnew file1 cd ..
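
For orientation, every issue2727 script above reproduces a failure of the QuickCheck property "resolutions are invariant under reorderings": pulling the same set of patches into a repository in a different order must yield the same conflict resolutions. What follows is only a minimal, self-contained sketch of that property *shape*, using toy list types and plain QuickCheck; none of the darcs harness names or types are used, so this is an illustration of checking a function against reorderings of its input, not the actual resolveConflicts test.

import Data.List (permutations, sort)
import Test.QuickCheck

-- Toy stand-in for "compute resolutions from a set of patches";
-- sorting is genuinely independent of the input order.
resolve :: [Int] -> [Int]
resolve = sort

-- Property shape: any reordering of the same input must give the
-- same result as the original order (capped to keep it cheap).
prop_orderIndependent :: [Int] -> Property
prop_orderIndependent xs =
  conjoin [ resolve ys === resolve xs | ys <- take 24 (permutations xs) ]

main :: IO ()
main = quickCheck prop_orderIndependent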