diff --git a/.gitignore b/.gitignore index 9e82459c3a..b853f1b163 100644 --- a/.gitignore +++ b/.gitignore @@ -27,4 +27,3 @@ tags /etc/scripts/stack-scripts.cabal .hspec-failures better-cache/ -subs/ diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index e1eb88f35d..59e5e7e3f0 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,4 +1,4 @@ -image: registry.gitlab.fpcomplete.com/fpco/default-build-image:4116 +image: registry.gitlab.fpcomplete.com/fpco/default-build-image:4297 cache: key: "$CI_BUILD_NAME" diff --git a/.hlint.yaml b/.hlint.yaml index 7b3944648b..aac19b918d 100644 --- a/.hlint.yaml +++ b/.hlint.yaml @@ -60,7 +60,6 @@ - error: {lhs: "Network.HTTP.Client.parseUrlThrow", rhs: "Network.HTTP.StackClient.parseUrlThrow"} - error: {lhs: "Network.HTTP.Client.path", rhs: "Network.HTTP.StackClient.path"} - error: {lhs: "Network.HTTP.Client.responseHeaders", rhs: "Network.HTTP.StackClient.responseHeaders"} -- error: {lhs: "Network.HTTP.Client.withResponse", rhs: "Network.HTTP.StackClient.withResponseByManager"} - error: {lhs: "Network.HTTP.Conduit.requestHeaders", rhs: "Network.HTTP.StackClient.requestHeaders"} - error: {lhs: "Network.HTTP.Simple.HttpException", rhs: "Network.HTTP.StackClient.HttpException"} - error: {lhs: "Network.HTTP.Simple.addRequestHeader", rhs: "Network.HTTP.StackClient.addRequestHeader"} @@ -88,4 +87,3 @@ - error: {lhs: "Network.HTTP.Types.hContentMD5", rhs: "Network.HTTP.StackClient.hContentMD5"} - error: {lhs: "Network.HTTP.Types.methodPut", rhs: "Network.HTTP.StackClient.methodPut"} - ignore: {name: "Use alternative", within: "Network.HTTP.StackClient"} -- ignore: {name: "Use withResponseByManager", within: "Network.HTTP.StackClient"} diff --git a/ChangeLog.md b/ChangeLog.md index 4e7a18d5b2..9ed2097d07 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -7,6 +7,37 @@ Release notes: Major changes: +* Switch over to pantry for managing packages. This is a major change + to Stack's internals, and affects user-visible behavior in a few + places. 
Some highlights: + * Drop support for multiple package indices and legacy + `00-index.tar` style indices. See + [#4137](https://github.com/commercialhaskell/stack/issues/4137). + * Support for archives and repos in the `packages` section has + been removed. Instead, you must use `extra-deps` for such + dependencies. `packages` now only supports local filepaths. + * Addition of new configuration options for specifying a "pantry + tree" key, which provides more reproducibility around builds, + and (in the future) will be used for more efficient package + content downloads. You can also specify package name and version + for more efficient config parsing. + * __NOTE__ The new `stack freeze` command provides support + for automatically generating this additional + information. + * Package contents and metadata are stored in an SQLite database + in place of files on the filesystem. The `pantry` library can be + used for interacting with these contents. + * Internally, Stack has changed many datatypes, including moving + to Cabal's definition of many data types. As a result of such + changes, existing cache files will in general be invalidated, + resulting in Stack needing to rebuild many previously cached + builds in the new version. Sorry :(. + * A new command, `stack freeze` has been added which outputs + project and snapshot definitions with dependencies pinned to + their exact versions. + * The `ignore-revision-mismatch` setting is no longer needed, and + has been removed. + Behavior changes: Other enhancements: @@ -27,9 +58,21 @@ Other enhancements: redefine the default styles that stack uses to color some of its output. See `stack --help` for more information. * New build option `--ddump-dir`. (See [#4225](https://github.com/commercialhaskell/stack/issues/4225)) +* Stack parses and respects the `preferred-versions` information from + Hackage for choosing latest version of a package in some cases, + e.g. `stack unpack packagename`. 
+* Git repos are shared across multiple projects. See + [#3551](https://github.com/commercialhaskell/stack/issues/3551) Bug fixes: +* Ignore duplicate files for a single module when a Haskell module was + generated from a preprocessor file. See + [#4076](https://github.com/commercialhaskell/stack/issues/4076). +* Only track down components in current directory if there are no + hs-source-dirs found. This eliminates a number of false-positive + warnings, similar to + [#4076](https://github.com/commercialhaskell/stack/issues/4076). * Handle a change in GHC's hi-dump format around `addDependentFile`, which now includes a hash. See [yesodweb/yesod#1551](https://github.com/yesodweb/yesod/issues/1551) diff --git a/doc/GUIDE.md b/doc/GUIDE.md index 4c6bc46598..c317e7b41a 100644 --- a/doc/GUIDE.md +++ b/doc/GUIDE.md @@ -8,6 +8,11 @@ This guide takes a new stack user through the typical workflows. This guide will not teach Haskell or involve much code, and it requires no prior experience with the Haskell packaging system or other build tools. +__NOTE__ This document is probably out of date in some places and +deserves a refresh. If you find this document helpful, please drop a +note on [issue +#4252](https://github.com/commercialhaskell/stack/issues/4252). + ## Stack's functions stack handles the management of your toolchain (including GHC — the Glasgow diff --git a/doc/architecture.md b/doc/architecture.md index 7eeda7cfe8..05cb9d7cc5 100644 --- a/doc/architecture.md +++ b/doc/architecture.md @@ -2,6 +2,12 @@ # Architecture +__NOTE__ MSS 2018-08-22 This document is out of date, and will be made +more out of date by +[#3922](https://github.com/commercialhaskell/stack/issues/3922). I +intend to update it when implementing #3922. Tracked in +[#4251](https://github.com/commercialhaskell/stack/issues/4251). + ## Terminology * Package identifier: a package name and version, e.g. 
text-1.2.1.0 diff --git a/doc/custom_snapshot.md b/doc/custom_snapshot.md index 335ff90afc..1d85fb5f57 100644 --- a/doc/custom_snapshot.md +++ b/doc/custom_snapshot.md @@ -2,132 +2,4 @@ # Custom Snapshots -Custom snapshots were totally reworked with the extensible snapshots -overhaul in Stack 1.6.0, see -[the writeup](https://www.fpcomplete.com/blog/2017/07/stacks-new-extensible-snapshots) -and -[PR #3249](https://github.com/commercialhaskell/stack/pull/3249)). This -documentation covers the new syntax only. - -Custom snapshots allow you to create your own snapshots, which provide -a list of packages to use, along with flags, ghc-options, and a few -other settings. Custom snapshots may extend any other snapshot that -can be specified in a `resolver` field. The packages specified follow -the syntax of `extra-deps` in the `stack.yaml` file, with one -exception: to ensure reproducibility of snapshots, local directories -are not allowed for custom snapshots (as they are expected to change -regularly). - -```yaml -resolver: lts-8.21 # Inherits GHC version and package set -compiler: ghc-8.0.1 # Overwrites GHC version in the resolver, optional - -name: my-snapshot # User-friendly name - -# Additional packages, follows extra-deps syntax -packages: -- unordered-containers-0.2.7.1 -- hashable-1.2.4.0 -- text-1.2.2.1 - -# Override flags, can also override flags in the parent snapshot -flags: - unordered-containers: - debug: true - -# Packages from the parent snapshot to ignore -drop-packages: -- wai-extra - -# Packages which should be hidden (affects script command's import -# parser -hidden: - wai: true - warp: false - -# Set GHC options for specific packages -ghc-options: - warp: - - -O2 -``` - -If you put this in a `snapshot.yaml` file in the same directory as your project, -you can now use the custom snapshot like this: - -```yaml -resolver: snapshot.yaml -``` - -This is an example of a custom snapshot stored in the filesystem. 
They are -assumed to be mutable, so you are free to modify it. We detect that the snapshot -has changed by hashing the contents of the involved files, and using it to -identify the snapshot internally. It is often reasonably efficient to modify a -custom snapshot, due to stack sharing snapshot packages whenever possible. - -## Using a URL instead of a filepath - -For efficiency, URLs are treated differently. If I uploaded the snapshot to -`https://domain.org/snapshot-1.yaml`, it is expected to be immutable. If you -change that file, then you lose any reproducibility guarantees. - -### Overriding the compiler - -The following snapshot specification will be identical to `lts-7.1`, but instead -use `ghc-7.10.3` instead of `ghc-8.0.1`: - -```yaml -resolver: lts-7.1 -compiler: ghc-7.10.3 -``` - -### Dropping packages - -The following snapshot specification will be identical to `lts-7.1`, but without -the `text` package in our snapshot. Removing this package will cause all the -packages that depend on `text` to be unbuildable, but they will still be present -in the snapshot. - -```yaml -resolver: lts-7.1 -drop-packages: - - text -``` - -### Specifying ghc-options - -In order to specify ghc-options for a package, you use the same syntax as the -[ghc-options](yaml_configuration.md#ghc-options) field for build configuration. -The following snapshot specification will be identical to `lts-7.1`, but -provides `-O1` as a ghc-option for `text`: - -```yaml -resolver: lts-7.1 -packages: - - text-1.2.2.1 -ghc-options: - text: -O1 -``` - -This works somewhat differently than the stack.yaml `ghc-options` field, in that -options can only be specified for packages that are mentioned in the custom -snapshot's `packages` list. It sets the ghc-options, rather than extending those -specified in the snapshot being extended. - -Another difference is that the `*` entry for `ghc-options` applies to all -packages in the `packages` list, rather than all packages in the snapshot. 
- -### Specifying flags - -In order to specify flags for a package, you use the same syntax as the -[flags](yaml_configuration.md#flags) field for build configuration. The -following snapshot specification will be identical to `lts-7.1`, but -it enables the `developer` cabal flag: - -```yaml -resolver: lts-7.1 -packages: - - text-1.2.2.1 -flags: - text: - developer: true -``` +This content has been moved to the [docs on pantry](pantry.md). diff --git a/doc/pantry.md b/doc/pantry.md new file mode 100644 index 0000000000..1b41c698c0 --- /dev/null +++ b/doc/pantry.md @@ -0,0 +1,457 @@ +
+ +# Pantry in Stack + +Beginning with Stack 1.11, Stack uses the Pantry library for its +specification of snapshots and package locations. Under the surface, +Pantry is geared towards reproducible build plans with +cryptographically secure specification of packages and snapshots. + +There are three user-visible components to Pantry's configuration which affect usage of Stack: + +* Snapshot location specification (in the `resolver` field) +* Package location specification (in the `extra-deps` field and inside snapshots) +* Snapshot specification, for creating custom snapshots + +## Freeze command + +As you'll see throughout this document, there is a lot of additional +information that can be provided to Stack to make the configuration +more reproducible and faster to parse. However, it's tedious to +specify these values manually. Therefore, the recommended workflow is: + +* Manually write the simple version of a configuration value +* Use `stack freeze` to obtain the more reproducible version + +The standard `stack freeze` will operate on your `stack.yaml` file, and provide +you with updated `resolver` and `extra-deps` values, if relevant. If you run +`stack freeze --snapshot`, it will provide you with an update snapshot file. + +New contents will be printed to `stdout` instead of modifying your existing +files to avoid mutation of user-created files. + +## Snapshot location + +There are essentially four different ways of specifying a snapshot +location: + +* Via a compiler version, which is a "compiler only" snapshot. This + could be, e.g., `resolver: ghc-8.4.3`. +* Via a URL pointing to a snapshot configuration file, e.g. `resolver: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/nightly/2018/8/21.yaml` +* Via a local file path pointing to a snapshot configuration file, e.g. `resolver: my-local-snapshot.yaml` +* Via a _convenience synonym_, which provides a short form for some + common URLs. 
These are: + * Github: `github:user/repo:path` is treated as `https://raw.githubusercontent.com/user/repo/master/path` + * LTS Haskell: `lts-X.Y` is treated as `github:commercialhaskell/stackage-snapshots:lts/X/Y.yaml` + * Stackage Nightly: `nightly-YYYY-MM-DD` is treated as `github:commercialhaskell/stackage-snapshots:nightly/YYYY/M/D.yaml` + +For safer, more reproducible builds, you can optionally specify a URL +together with a cryptographic hash of its content, e.g.: + +```yaml +resolver: + size: 499143 + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/12/0.yaml + sha256: 781ea577595dff08b9c8794761ba1321020e3e1ec3297fb833fe951cce1bee11 +``` + +Where the `size` is the number of bytes in the file, and `sha256` is +its SHA256 hash. This information can automatically be generated with +the [`stack freeze`](#freeze-command) command. + +## Package location + +Pantry supports three types of package locations: + +* Hackage packages +* Repositories +* Archives + +All three of these formats support optional tree metadata to be added, +which can be used for reproducibility and faster downloads. This +information can automatically be generated with the [`stack +freeze`](#freeze-command) command. + +### Hackage + +Packages can be stated by a name/version combination. The basic syntax +for this is: + +```yaml +extra-deps: +- acme-missiles-0.3 +``` + +Using this syntax, the most recent Cabal file revision available will +be used. 
For more reproducibility of builds, it is recommended to +state the SHA256 hash of the cabal file contents as well, like this: + +```yaml +extra-deps: +- acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1 +``` + +Or, better yet, including the cabal file size too: + +```yaml +extra-deps: +- acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1,631 +``` + +Or a specific revision number, with `0` being the original file: + +```yaml +extra-deps: +- acme-missiles-0.3@rev:0 +``` + +Note that specifying via SHA256 is slightly more resilient in that it +does not rely on correct ordering in the package index, while revision +number is likely simpler to use. In practice, both should guarantee +equally reproducible build plans. + +Finally, you can include the Pantry tree information. The following +was generated with `stack freeze`: + +```yaml +- hackage: acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1,613 + pantry-tree: + size: 226 + sha256: 614bc0cca76937507ea0a5ccc17a504c997ce458d7f2f9e43b15a10c8eaeb033 +``` + +### Git and Mercurial repos + +You can give a Git or Mercurial repo at a specific commit, and Stack +will clone that repo. + +```yaml +extra-deps: +- git: git@github.com:commercialhaskell/stack.git + commit: 6a86ee32e5b869a877151f74064572225e1a0398 +- git: git@github.com:snoyberg/http-client.git + commit: "a5f4f3" +- hg: https://example.com/hg/repo + commit: da39a3ee5e6b4b0d3255bfef95601890afd80709 +``` + +__NOTE__ It is highly recommended that you only use SHA1 values for a +Git or Mercurial commit. Other values may work, but they are not +officially supported, and may result in unexpected behavior (namely, +Stack will not automatically pull to update to new versions). +Another problem with this is that your build will not be deterministic, +because when someone else tries to build the project they can get a +different checkout of the package. 
+ +A common practice in the Haskell world is to use "megarepos", or +repositories with multiple packages in various subdirectories. Some +common examples include [wai](https://github.com/yesodweb/wai/) and +[digestive-functors](https://github.com/jaspervdj/digestive-functors). To +support this, you may also specify `subdirs` for repositories, e.g.: + +```yaml +extra-deps: +- git: git@github.com:yesodweb/wai + commit: 2f8a8e1b771829f4a8a77c0111352ce45a14c30f + subdirs: + - auto-update + - wai +``` + +Since v1.7.1, you can specify packages from GitHub repository name using `github`: + +```yaml +extra-deps: +- github: snoyberg/http-client + commit: a5f4f30f01366738f913968163d856366d7e0342 +``` + +If unspecified, `subdirs` defaults to `['.']` meaning looking for a +package in the root of the repo. Note that if you specify a value of +`subdirs`, then `'.'` is _not_ included by default and needs to be +explicitly specified if a required package is found in the top-level +directory of the repository. + +Using the `stack freeze` command will add in additional information, +including not only the Pantry tree hash, but also package metadata +which can allow Stack to work faster by bypassing cabal file +parses. 
For example, this: + +```yaml +extra-deps: +- git: git@github.com:yesodweb/wai + commit: 2f8a8e1b771829f4a8a77c0111352ce45a14c30f + subdirs: + - auto-update + - wai +``` + +Would be converted into: + +```yaml +extra-deps: +- subdir: auto-update + cabal-file: + size: 1219 + sha256: c07b2b1a2df1199f83eef819ac9bb067567e100b60586a52f8b92fc733ae3a6d + name: auto-update + version: 0.1.2.1 + git: git@github.com:yesodweb/wai + pantry-tree: + size: 687 + sha256: 26377897f35ccd3890b4405d72523233717afb04d62f2d36031bf6b18dcef74f + commit: 2f8a8e1b771829f4a8a77c0111352ce45a14c30f +- subdir: wai + cabal-file: + size: 1717 + sha256: 7b46e7a8b121d668351fa8a684810afadf58c39276125098485203ef274fd056 + name: wai + version: 3.0.2.3 + git: git@github.com:yesodweb/wai + pantry-tree: + size: 10299 + sha256: ce33fddab13592c847fbd7acd1859dfcbb9aeb6c212db3cee27c909fa3f3ae44 + commit: 2f8a8e1b771829f4a8a77c0111352ce45a14c30f +``` + +### Archives (HTTP(S) or local filepath) + +You can use HTTP and HTTPS URLs and local filepaths referring to +either tarballs or ZIP files. + +__NOTE__ Stack assumes that these files never change after downloading +to avoid needing to make an HTTP request on each build. Use hashes to +provide more security. 
+ +```yaml +extra-deps: +- https://example.com/foo/bar/baz-0.0.2.tar.gz +- archive: http://github.com/yesodweb/wai/archive/2f8a8e1b771829f4a8a77c0111352ce45a14c30f.zip + subdirs: + - wai + - warp +- archive: ../acme-missiles-0.3.tar.gz + sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b +``` + +With the `stack freeze` command, this would be replaced with: + +```yaml +extra-deps: +- size: 1540 + url: https://hackage.haskell.org/package/acme-dont-1.1.tar.gz + cabal-file: + size: 602 + sha256: 8264ad3e5113d3e0417b46e71d5a9c0914a1f03b5b81319cc329f1dc0f49b96c + name: acme-dont + version: '1.1' + sha256: c32231ff8548bccd4f3bafcc9b1eb84947a2e5e0897c50c048e0e7609fc443ce + pantry-tree: + size: 206 + sha256: 79dbeddaf0fd507611687cefe9511c8fda489849fb0cac3894925716936290b2 +- size: 285152 + subdir: wai + url: http://github.com/yesodweb/wai/archive/2f8a8e1b771829f4a8a77c0111352ce45a14c30f.zip + cabal-file: + size: 1717 + sha256: 7b46e7a8b121d668351fa8a684810afadf58c39276125098485203ef274fd056 + name: wai + version: 3.0.2.3 + sha256: 3b6eb04f3763ca16432f3ab2135d239161fbe2c8811b8cd1778ffa67469289ba + pantry-tree: + size: 10296 + sha256: ce431f1a22fcda89375ba5e35e53aee968eea23d1124fcba7cb9eae426daa2db +- size: 285152 + subdir: warp + url: http://github.com/yesodweb/wai/archive/2f8a8e1b771829f4a8a77c0111352ce45a14c30f.zip + cabal-file: + size: 6648 + sha256: e3f01fd7417af923fd30962e9e6a4fe4de41ebc5e02af9819067fed79c9c6575 + name: warp + version: 3.0.13.1 + sha256: 3b6eb04f3763ca16432f3ab2135d239161fbe2c8811b8cd1778ffa67469289ba + pantry-tree: + size: 4292 + sha256: d6b1def306a042b5fc500930302533a3ea828e916c99cbd82c0b7e2c4e3a8e09 +- size: 1442 + filepath: acme-missiles-0.3.tar.gz + cabal-file: + size: 613 + sha256: 2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1 + name: acme-missiles + version: '0.3' + sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b + pantry-tree: + size: 226 + sha256: 
614bc0cca76937507ea0a5ccc17a504c997ce458d7f2f9e43b15a10c8eaeb033 +``` + +## Snapshots + +_NOTE_ Stack has supported custom snapshots properly since version +1.6. In version 1.11, the support for snapshots was moved to Pantry, +and Stackage snapshots have moved over to using the same +format. Therefore, there is no longer such a thing as "custom +snapshots," there are simply "snapshots." Pantry snapshots follow the +same format as Stack 1.6 "custom snapshots." + +Snapshots provide a list of packages to use, along with flags, +ghc-options, and a few other settings. Snapshots may extend any other +snapshot that can be specified in a `resolver` field. The packages +specified follow the same syntax mentioned above for +dependencies. Unlike `extra-deps`, however, no support for local +directories is available in snapshots to ensure reproducibility. + +```yaml +resolver: lts-8.21 # Inherits GHC version and package set +compiler: ghc-8.0.1 # Overwrites GHC version in the resolver, optional + +name: my-snapshot # User-friendly name + +# Additional packages, follows extra-deps syntax +packages: +- unordered-containers-0.2.7.1 +- hashable-1.2.4.0 +- text-1.2.2.1 + +# Override flags, can also override flags in the parent snapshot +flags: + unordered-containers: + debug: true + +# Packages from the parent snapshot to ignore +drop-packages: +- wai-extra + +# Packages which should be hidden (affects script command's import +# parser +hidden: + wai: true + warp: false + +# Set GHC options for specific packages +ghc-options: + warp: + - -O2 +``` + +If you put this in a `snapshot.yaml` file in the same directory as your project, +you can now use the custom snapshot like this: + +```yaml +resolver: snapshot.yaml +``` + +This is an example of a custom snapshot stored in the filesystem. They are +assumed to be mutable, so you are free to modify it. We detect that the snapshot +has changed by hashing the contents of the involved files, and using it to +identify the snapshot internally. 
It is often reasonably efficient to modify a +custom snapshot, due to stack sharing snapshot packages whenever possible. + +Running the `stack freeze --snapshot` command yields the following +output: + +```yaml +flags: + unordered-containers: + debug: true +ghc-options: + warp: + - -O2 +packages: +- hackage: unordered-containers-0.2.7.1@sha256:7a1ceb6d88c0f16ec417f28dac16f6dc7b10e88fbb536a74d84941ad2f57b74b,4367 + pantry-tree: + size: 1286 + sha256: 8a8f745cacae3c11a9c6e6c2fcefc95a13d0c153a8e14b4d28485db1b59d9ef3 +- hackage: hashable-1.2.4.0@sha256:33a49b3ea87cc4a0c89a4fd48f19e4807d8c620aff710a048a28cf7d9c9b4620,4271 + pantry-tree: + size: 1325 + sha256: cb05c31a8ec43f727004e5a6c8e35ff92e0515855a85cb01fa73623683ee4b33 +- hackage: text-1.2.2.1@sha256:1c6ffad395d1674915cc9fda1d3b8f202ddcbfda7c341eb8bd99de67d3283bf9,5724 + pantry-tree: + size: 7376 + sha256: ac2601c49cf7bc0f5d66b2793eddc8352f51a6ee989980827a0d0d8169700a03 +name: my-snapshot +hidden: + warp: false + wai: true +drop-packages: +- wai-extra +compiler: ghc-8.0.1 +resolver: + size: 515969 + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/8/21.yaml + sha256: 2ec73d520d3e55cb753eaca11a72a9ce95bd9ba7ccaf16de1150d0130a50a5a1 +``` + +### Overriding the compiler + +The following snapshot specification will be identical to `lts-7.1`, but instead +use `ghc-7.10.3` instead of `ghc-8.0.1`: + +```yaml +resolver: lts-7.1 +compiler: ghc-7.10.3 +``` + +### Dropping packages + +The following snapshot specification will be identical to `lts-7.1`, but without +the `text` package in our snapshot. Removing this package will cause all the +packages that depend on `text` to be unbuildable, but they will still be present +in the snapshot. + +```yaml +resolver: lts-7.1 +drop-packages: + - text +``` + +### Specifying ghc-options + +In order to specify ghc-options for a package, you use the same syntax as the +[ghc-options](yaml_configuration.md#ghc-options) field for build configuration. 
+The following snapshot specification will be identical to `lts-7.1`, but +provides `-O1` as a ghc-option for `text`: + +```yaml +resolver: lts-7.1 +packages: + - text-1.2.2.1 +ghc-options: + text: -O1 +``` + +This works somewhat differently than the stack.yaml `ghc-options` field, in that +options can only be specified for packages that are mentioned in the custom +snapshot's `packages` list. It sets the ghc-options, rather than extending those +specified in the snapshot being extended. + +Another difference is that the `*` entry for `ghc-options` applies to all +packages in the `packages` list, rather than all packages in the snapshot. + +### Specifying flags + +In order to specify flags for a package, you use the same syntax as the +[flags](yaml_configuration.md#flags) field for build configuration. The +following snapshot specification will be identical to `lts-7.1`, but +it enables the `developer` cabal flag: + +```yaml +resolver: lts-7.1 +packages: + - text-1.2.2.1 +flags: + text: + developer: true +``` + +## Updating frozen information + +Suppose you're depending on `foo-1.2.3` from Hackage, and have used `stack +freeze` on your file. Now you'd like to upgrade to `foo-1.2.4`. Doing so +requires you to: + +* Change the version number specified to `1.2.4` +* Remove any freeze information that may conflict, like cabal file info, pantry tree, etc +* Rerun the `stack freeze` command to generate the new freeze information diff --git a/doc/yaml_configuration.md b/doc/yaml_configuration.md index 4ed369e8c8..10b380c5ce 100644 --- a/doc/yaml_configuration.md +++ b/doc/yaml_configuration.md @@ -60,210 +60,159 @@ are currently four resolver types: * No snapshot, just use packages shipped with the compiler * For GHC this looks like `resolver: ghc-7.10.2` * For GHCJS this looks like `resolver: ghcjs-0.1.0_ghc-7.10.2`. -* [Custom snapshot](custom_snapshot.md) +* Custom snapshot, via a URL or relative file path. (See [pantry docs](pantry.md) for more information.) 
Each of these resolvers will also determine what constraints are placed on the compiler version. See the [compiler-check](#compiler-check) option for some additional control over compiler version. -### packages and extra-deps - -_NOTE_ The contents of this section have changed significantly since -extensible snapshots were implemented (see: -[writeup](https://www.fpcomplete.com/blog/2017/07/stacks-new-extensible-snapshots) -and -[PR #3249](https://github.com/commercialhaskell/stack/pull/3249)). Most -old syntax is still supported with newer versions of Stack, but will -not be documented here. Instead, this section contains the recommended -syntax as of Stack v1.6.0. - -There are two types of packages that can be defined in your -`stack.yaml` file: - -* __Project packages__, those which you are actually working on in - your current project. These are local file paths in your project - directory. -* __Extra dependencies__, which are packages provided locally on top - of the snapshot definition of available packages. These can come - from Hackage (or an alternative package index you've defined, see - [package-indices](#package-indices)), an HTTP(S) or local archive, a - Git or Mercurial repository, or a local file path. - -These two sets of packages are both installed into your local package -database within your project. However, beyond that, they are -completely different: +Since Stack 1.11, the resolver field corresponds to a Pantry snapshot +location. See [the docs on pantry](pantry.md) for more information. -* Project packages will be built by default with a `stack build` - without specific targets. Extra dependencies will only be built if - they are depended upon. -* Test suites and benchmarks may be run for project packages. They are - never run for extra dependencies. +### packages + +_NOTE_ Beginning with Stack 1.11, Stack has moved over to Pantry for +managing extra-deps, and has removed some legacy syntax for specifying +dependencies in `packages`. 
See some conversion notes below. -The `packages` key is a simple list of file paths, which will be -treated as relative to the directory containing your `stack.yaml` -file. For example: +A list of packages that are part of your local project. These are +specified via paths to local directories. The paths are considered +relative to the directory containing the `stack.yaml` file. For +example, if your `stack.yaml` is located at `/foo/bar/stack.yaml`, and +you have: ```yaml packages: -- . -- dir1/dir2 +- hello +- there/world ``` -Each package directory or location specified must have a valid cabal -file or hpack `package.yaml` file present. Note that the -subdirectories of the directory are not searched for cabal -files. Subdirectories will have to be specified as independent items -in the list of packages. +Your configuration means "I have packages in `/foo/bar/hello` and +`/foo/bar/there/world`. + +If these packages should be treated as dependencies instead, specify +them in `extra-deps`, described below. -When the `packages` field is not present, it defaults to looking for a package -in the project's root directory: +The `packages` field is _optional_. If omitted, it is treated as: ```yaml packages: - . ``` -The `extra-deps` key is given a list of all extra dependencies. If -omitted, it is taken as the empty list, e.g.: +Each package directory specified must have a valid cabal file or hpack +`package.yaml` file present. Note that the subdirectories of the +directory are not searched for cabal files. Subdirectories will have +to be specified as independent items in the list of packages. -```yaml -extra-deps: [] -``` +Meaning that your project has exactly one package, and it is located +in the current directory. 
-It supports four different styles of values: +Project packages are different from snapshot dependencies (via +`resolver`) and extra dependencies (via `extra-deps`) in multiple +ways, e.g.: -#### Package index +* Project packages will be built by default with a `stack build` + without specific targets. Dependencies will only be built if + they are depended upon. +* Test suites and benchmarks may be run for project packages. They are + never run for extra dependencies. -Packages can be stated by a name/version combination, which will be -looked up in the package index (by default, Hackage). The basic syntax -for this is: +__Legacy syntax__ Prior to Stack 1.11, it was possible to specify +dependencies in your `packages` configuration value as well. This +support has been removed to simplify the file format. Instead, these +values should be moved to `extra-deps`. As a concrete example, you +would convert: ```yaml -extra-deps: -- acme-missiles-0.3 -``` - -Using this syntax, the most recent Cabal file revision available will -be used. For more reproducibility of builds, it is recommended to -state the SHA256 hash of the cabal file contents as well, like this: +packages: +- . +- location: + git: https://github.com/bitemyapp/esqueleto.git + commit: 08c9b4cdf977d5bcd1baba046a007940c1940758 + extra-dep: true +- location: + git: https://github.com/yesodweb/wai.git + commit: 6bf765e000c6fd14e09ebdea6c4c5b1510ff5376 + subdirs: + - wai-extra + extra-dep: true -```yaml extra-deps: -- acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1 + - streaming-commons-0.2.0.0 + - time-1.9.1 + - yesod-colonnade-1.3.0.1 + - yesod-elements-1.1 ``` -Or a specific revision number, with `0` being the original file: +into ```yaml -extra-deps: -- acme-missiles-0.3@rev:0 -``` - -Note that specifying via SHA256 is slightly more resilient in that it -does not rely on correct ordering in the package index, while revision -number is likely simpler to use. 
In practice, both should guarantee -equally reproducible build plans. - -#### Local file path - -Like `packages`, local file paths can be used in `extra-deps`, and -will be relative to the directory containing the `stack.yaml` file. +packages: +- . -```yaml extra-deps: -- vendor/somelib + - streaming-commons-0.2.0.0 + - time-1.9.1 + - yesod-colonnade-1.3.0.1 + - yesod-elements-1.1 + - git: https://github.com/bitemyapp/esqueleto.git + commit: 08c9b4cdf977d5bcd1baba046a007940c1940758 + - git: https://github.com/yesodweb/wai.git + commit: 6bf765e000c6fd14e09ebdea6c4c5b1510ff5376 + subdirs: + - wai-extra ``` -Note that if a local directory can be parsed as a package identifier, -Stack will treat it as a package identifier. In other words, if you -have a local directory named `foo-1.2.3`, instead of: +And, in fact, the `packages` value could be left off entirely since +it's using the default value. -```yaml -extra-deps: -- foo-1.2.3 -``` +### extra-deps -You should use the following to be explicit: +This field allows you to specify extra dependencies on top of what is +defined in your snapshot (specified in the `resolver` field mentioned +above). These dependencies may either come from a local file path or a +Pantry package location. -```yaml -extra-deps: -- ./foo-1.2.3 -``` +For the local file path case, the same relative path rules as apply to +`packages` apply. -#### Git and Mercurial repos +Pantry package locations allow you to include dependencies from three +different kinds of sources: -You can give a Git or Mercurial repo at a specific commit, and Stack -will clone that repo. 
+* Hackage +* Archives (tarballs or zip files, either local or over HTTP(S)) +* Git or Mercurial repositories -```yaml -extra-deps: -- git: git@github.com:commercialhaskell/stack.git - commit: 6a86ee32e5b869a877151f74064572225e1a0398 -- git: git@github.com:snoyberg/http-client.git - commit: "a5f4f3" -- hg: https://example.com/hg/repo - commit: da39a3ee5e6b4b0d3255bfef95601890afd80709 -``` - -__NOTE__ It is highly recommended that you only use SHA1 values for a -Git or Mercurial commit. Other values may work, but they are not -officially supported, and may result in unexpected behavior (namely, -Stack will not automatically pull to update to new versions). -Another problem with this is that your build will not be deterministic, -because when someone else tries to build the project they can get a -different checkout of the package. - -A common practice in the Haskell world is to use "megarepos", or -repositories with multiple packages in various subdirectories. Some -common examples include [wai](https://github.com/yesodweb/wai/) and -[digestive-functors](https://github.com/jaspervdj/digestive-functors). 
To -support this, you may also specify `subdirs` for repositories, e.g.: +Here's an example using all of the above: ```yaml extra-deps: -- git: git@github.com:yesodweb/wai - commit: 2f8a8e1b771829f4a8a77c0111352ce45a14c30f +- vendor/hashable +- streaming-commons-0.2.0.0 +- time-1.9.1 +- yesod-colonnade-1.3.0.1 +- yesod-elements-1.1 +- git: https://github.com/bitemyapp/esqueleto.git + commit: 08c9b4cdf977d5bcd1baba046a007940c1940758 +- url: https://github.com/yesodweb/wai/archive/6bf765e000c6fd14e09ebdea6c4c5b1510ff5376.tar.gz subdirs: - - auto-update - - wai -``` - -Since v1.7.1, you can specify packages from GitHub repository name using `github`: - -```yaml -extra-deps: -- github: snoyberg/http-client - commit: a5f4f30f01366738f913968163d856366d7e0342 + - wai-extra +- github: snoyberg/conduit + commit: 2e3e41de93821bcfe8ec6210aeca21be3f2087bf + subdirs: + - network-conduit-tls ``` -If unspecified, `subdirs` defaults to `['.']` meaning looking for a -package in the root of the repo.. Note that if you specify a value of -`subdirs`, then `'.'` is _not_ included by default and needs to be -explicitly specified if a required package is found in the top-level -directory of the repository. - -#### Archives (HTTP(S) or local filepath) - -This one's pretty straightforward: you can use HTTP and HTTPS URLs and -local filepaths referring to either tarballs or ZIP files. - -__NOTE__ Stack assumes that these files never change after downloading -to avoid needing to make an HTTP request on each build. 
+If no `extra-deps` value is provided, it defaults to an empty list, +e.g.: ```yaml -extra-deps: -- https://example.com/foo/bar/baz-0.0.2.tar.gz -- archive: http://github.com/yesodweb/wai/archive/2f8a8e1b771829f4a8a77c0111352ce45a14c30f.zip - subdirs: - - wai - - warp -- archive: ../acme-missiles-0.3.tar.gz - sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b +extra-deps: [] ``` -Note that HTTP(S) URLs also support `subdirs` like repos to allow for -archives of megarepos. In order to leverage this, use `location: -http://...`. +For more information on the format for specifying dependencies, please +see [the Pantry docs](pantry.md). ### flags @@ -383,45 +332,38 @@ Default: `~/.local/bin` ### package-indices -```yaml -package-indices: -- name: Hackage - download-prefix: https://s3.amazonaws.com/hackage.fpcomplete.com/package/ +Since Stack 1.11, this field may only be used to specify a single +package index, which must use the Hackage Security format. For the +motivation for this change, please see [issue +#4137](https://github.com/commercialhaskell/stack/issues/4137). 
Therefore, +this field is most useful for providing an alternate Hackage mirror +either for: - # HTTP location of the package index - http: https://s3.amazonaws.com/hackage.fpcomplete.com/01-index.tar.gz +* Bypassing a firewall +* Faster download speeds - # Or, if using Hackage Security below, give the root URL: - http: https://s3.amazonaws.com/hackage.fpcomplete.com/ +The following is the default setting for this field: - # optional fields, both default to false - require-hashes: false - - # Starting with stack 1.4, we default to using Hackage Security +```yaml +package-indices: +- download-prefix: https://hackage.haskell.org/ hackage-security: - keyids: ["deadbeef", "12345"] # list of all approved keys + keyids: + - 0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d + - 1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42 + - 280b10153a522681163658cb49f632cde3f38d768b736ddbc901d99a1a772833 + - 2a96b1889dc221c17296fcc2bb34b908ca9734376f0f361660200935916ef201 + - 2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3 + - 51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921 + - 772e9f4c7db33d251d5c6e357199c819e569d130857dc225549b40845ff0890d + - aa315286e6ad281ad61182235533c41e806e5a787e0b6d1e7eef3f09d137d2e9 + - fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0 key-threshold: 3 # number of keys required ``` -One thing you should be aware of: if you change the contents of package-version -combination by setting a different package index, this *can* have an effect on -other projects by installing into your shared snapshot database. - -Note that older versions of Stack supported Git-based indices. This feature has since been removed. A line such as: - -```yaml -git: https://github.com/commercialhaskell/all-cabal-hashes.git -gpg-verify: false -``` - -Will now be ignored. - -__IMPORTANT__ Hackage and its mirrors typically have two index files -available: `00-index.tar.gz` and `01-index.tar.gz`. 
The former is a -legacy file for backwards compatibility. It does not contain the cabal -file revisions produced by Hackage, and therefore _will not work_ with -most snapshots. Instead, you need to use `01-index.tar.gz` to ensure -that exact revisions can be found, ensuring more reproducible builds. +If you provide a replacement index which does not mirror Hackage, it +is likely that you'll end up with significant breakage, such as most +snapshots failing to work. ### system-ghc @@ -894,21 +836,9 @@ Since 1.8.0 ### ignore-revision-mismatch -Cabal files in packages can be specified via exact revisions to deal -with Hackage revision metadata. The default behavior of Stack (since -1.6.0) is to fail if an exact match is not found. In some cases -(specifically, when using a legacy `00-index.tar.gz` file), users may -wish to allow a mismatch. In such cases, you can change -`ignore-revision-mismatch` from `false` to `true`. - -```yaml -ignore-revision-mismatch: false -``` - -For more information, see -[the Github issue #3520 discussion](https://github.com/commercialhaskell/stack/issues/3520). - -Since 1.6.0 +This flag was introduced in Stack 1.6, and removed in Stack 1.11 with +the move to Pantry. You will receive a warning if this configuration +value is set. 
### urls diff --git a/package.yaml b/package.yaml index a3881d7b1a..fe77864aec 100644 --- a/package.yaml +++ b/package.yaml @@ -46,8 +46,8 @@ dependencies: - base64-bytestring - bytestring - colour -- conduit -- conduit-extra +- conduit >= 1.3 +- conduit-extra >= 1.3 - containers - cryptonite - cryptonite-conduit @@ -81,6 +81,7 @@ dependencies: - network-uri - open-browser - optparse-applicative +- pantry - path - path-io - persistent @@ -138,7 +139,6 @@ library: - Paths_stack exposed-modules: - Control.Concurrent.Execute - - Data.Aeson.Extended - Data.Attoparsec.Args - Data.Attoparsec.Combinators - Data.Attoparsec.Interpreter @@ -178,8 +178,8 @@ library: - Stack.Docker - Stack.Docker.GlobalDB - Stack.Dot - - Stack.Fetch - Stack.FileWatch + - Stack.Freeze - Stack.GhcPkg - Stack.Ghci - Stack.Ghci.Script @@ -199,6 +199,7 @@ library: - Stack.Options.DockerParser - Stack.Options.DotParser - Stack.Options.ExecParser + - Stack.Options.FreezeParser - Stack.Options.GhcBuildParser - Stack.Options.GhciParser - Stack.Options.GhcVariantParser @@ -217,8 +218,6 @@ library: - Stack.Options.Utils - Stack.Package - Stack.PackageDump - - Stack.PackageIndex - - Stack.PackageLocation - Stack.Path - Stack.Prelude - Stack.PrettyPrint @@ -233,7 +232,6 @@ library: - Stack.Sig.Sign - Stack.Snapshot - Stack.Solver - - Stack.StaticBytes - Stack.Types.Build - Stack.Types.BuildPlan - Stack.Types.CompilerBuild @@ -242,15 +240,12 @@ library: - Stack.Types.Config - Stack.Types.Config.Build - Stack.Types.Docker - - Stack.Types.FlagName - Stack.Types.GhcPkgId - Stack.Types.Image - Stack.Types.NamedComponent - Stack.Types.Nix - Stack.Types.Package - Stack.Types.PackageDump - - Stack.Types.PackageIdentifier - - Stack.Types.PackageIndex - Stack.Types.PackageName - Stack.Types.PrettyPrint - Stack.Types.Resolver @@ -260,13 +255,12 @@ library: - Stack.Types.TemplateName - Stack.Types.Version - Stack.Types.VersionIntervals + - Stack.Unpack - Stack.Upgrade - Stack.Upload - Text.PrettyPrint.Leijen.Extended 
- System.Process.PagerEditor - System.Terminal - other-modules: - - Hackage.Security.Client.Repository.HttpLib.HttpClient when: - condition: 'os(windows)' then: diff --git a/snapshot.yaml b/snapshot.yaml index c3d6ba8ff5..811e3e6598 100644 --- a/snapshot.yaml +++ b/snapshot.yaml @@ -10,6 +10,7 @@ packages: - http-api-data-0.3.8.1@rev:0 - githash-0.1.0.1@rev:0 - rio-orphans-0.1.1.0@sha256:15600084c56ef4e1f22ac2091d10fa6ed62f01f531d819c6a5a19492212a76c9 +- persistent-sqlite-2.8.2@sha256:6874958eb2943c4567c30bc0069ce4868b2813c490402c22bb2e0efa5b4c4c71,3873 flags: cabal-install: diff --git a/src/Control/Concurrent/Execute.hs b/src/Control/Concurrent/Execute.hs index 97439b17bb..82bc5c55e9 100644 --- a/src/Control/Concurrent/Execute.hs +++ b/src/Control/Concurrent/Execute.hs @@ -16,7 +16,6 @@ import Control.Concurrent.STM (retry) import Stack.Prelude import Data.List (sortBy) import qualified Data.Set as Set -import Stack.Types.PackageIdentifier data ActionType = ATBuild diff --git a/src/Data/Attoparsec/Interpreter.hs b/src/Data/Attoparsec/Interpreter.hs index 7ee755cb95..110e860c84 100644 --- a/src/Data/Attoparsec/Interpreter.hs +++ b/src/Data/Attoparsec/Interpreter.hs @@ -58,9 +58,8 @@ import Data.Attoparsec.Args import Data.Attoparsec.Text (()) import qualified Data.Attoparsec.Text as P import Data.Char (isSpace) -import Data.Conduit +import Conduit import Data.Conduit.Attoparsec -import Data.Conduit.Text (decodeUtf8) import Data.List (intercalate) import Data.Text (pack) import Stack.Constants @@ -120,7 +119,7 @@ getInterpreterArgs file = do parseFile src = runConduit $ src - .| decodeUtf8 + .| decodeUtf8C .| sinkParserEither (interpreterArgsParser isLiterate stackProgName) isLiterate = takeExtension file == ".lhs" diff --git a/src/Data/Store/VersionTagged.hs b/src/Data/Store/VersionTagged.hs index 3dd14e122b..44317ce47d 100644 --- a/src/Data/Store/VersionTagged.hs +++ b/src/Data/Store/VersionTagged.hs @@ -95,6 +95,7 @@ storeVersionConfig name hash = 
(namedVersionConfig name hash) { vcIgnore = S.fromList [ "Data.Vector.Unboxed.Base.Vector GHC.Types.Word" , "Data.ByteString.Internal.ByteString" + , "Data.ByteString.Short.Internal.ShortByteString" ] , vcRenames = M.fromList [ ( "Data.Maybe.Maybe", "GHC.Base.Maybe") diff --git a/src/Network/HTTP/Download.hs b/src/Network/HTTP/Download.hs index f2a2d30200..2a17b2eda7 100644 --- a/src/Network/HTTP/Download.hs +++ b/src/Network/HTTP/Download.hs @@ -16,7 +16,6 @@ module Network.HTTP.Download , redownload , httpJSON , httpLbs - , httpLBS , parseRequest , parseUrlThrow , setGithubHeaders @@ -26,12 +25,12 @@ module Network.HTTP.Download import Stack.Prelude import Stack.Types.Runner import qualified Data.ByteString.Lazy as L -import Data.Conduit (yield) +import Conduit (yield, withSinkFileCautious, withSourceFile) import qualified Data.Conduit.Binary as CB import Data.Text.Encoding.Error (lenientDecode) import Data.Text.Encoding (decodeUtf8With) import Network.HTTP.Download.Verified -import Network.HTTP.StackClient (Request, Response, HttpException, httpJSON, httpLbs, httpLBS, withResponse, path, checkResponse, parseUrlThrow, parseRequest, setRequestHeader, getResponseHeaders, requestHeaders, getResponseBody, getResponseStatusCode) +import Network.HTTP.StackClient (Request, Response, HttpException, httpJSON, httpLbs, withResponse, path, checkResponse, parseUrlThrow, parseRequest, setRequestHeader, getResponseHeaders, requestHeaders, getResponseBody, getResponseStatusCode) import Path.IO (doesFileExist) import System.Directory (createDirectoryIfMissing, removeFile) diff --git a/src/Network/HTTP/Download/Verified.hs b/src/Network/HTTP/Download/Verified.hs index f6218ed0fb..01272c9ba6 100644 --- a/src/Network/HTTP/Download/Verified.hs +++ b/src/Network/HTTP/Download/Verified.hs @@ -22,6 +22,7 @@ module Network.HTTP.Download.Verified import qualified Data.List as List import qualified Data.ByteString as ByteString import qualified Data.ByteString.Base64 as B64 +import 
Conduit (withSinkFile) import qualified Data.Conduit.Binary as CB import qualified Data.Conduit.List as CL import qualified Data.Text as Text diff --git a/src/Network/HTTP/StackClient.hs b/src/Network/HTTP/StackClient.hs index 47b76d0965..7c98c8739e 100644 --- a/src/Network/HTTP/StackClient.hs +++ b/src/Network/HTTP/StackClient.hs @@ -1,3 +1,4 @@ +{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -- | @@ -7,27 +8,17 @@ module Network.HTTP.StackClient ( httpJSON , httpLbs - , httpLBS , httpNoBody , httpSink - , setUserAgent , withResponse - , withResponseByManager , setRequestMethod , setRequestHeader , addRequestHeader , setRequestBody - , setRequestManager , getResponseHeaders , getResponseBody , getResponseStatusCode - , Network.HTTP.Client.responseHeaders - , Network.HTTP.Client.responseStatus - , Network.HTTP.Client.responseBody , parseRequest - , parseRequest_ - , defaultRequest - , setUri , getUri , path , checkResponse @@ -39,24 +30,11 @@ module Network.HTTP.StackClient , Request , RequestBody(RequestBodyBS, RequestBodyLBS) , Response - , Manager - , Header - , HeaderName - , HttpException(HttpExceptionRequest) - , HttpExceptionContent(StatusCodeException) + , HttpException , hAccept , hContentLength , hContentMD5 - , hCacheControl - , hRange , methodPut - , ok200 - , partialContent206 - , Proxy - , useProxy - , noProxy - , proxyEnvironment - , managerSetProxy , formDataBody , partFileRequestBody , partBS @@ -65,19 +43,16 @@ module Network.HTTP.StackClient import Data.Aeson (FromJSON) import qualified Data.ByteString as Strict -import Data.ByteString.Lazy (ByteString) -import Data.Conduit (ConduitM, transPipe) +import Data.Conduit (ConduitM) import Data.Void (Void) -import qualified Network.HTTP.Client -import Network.HTTP.Client (BodyReader, Manager, Request, RequestBody(..), Response, Manager, HttpExceptionContent(..), parseRequest, parseRequest_, defaultRequest, getUri, path, checkResponse, parseUrlThrow, responseStatus, responseBody, 
useProxy, noProxy, proxyEnvironment, managerSetProxy, Proxy) -import Network.HTTP.Client.Internal (setUri) -import Network.HTTP.Simple (setRequestMethod, setRequestBody, setRequestHeader, addRequestHeader, setRequestManager, HttpException(..), getResponseBody, getResponseStatusCode, getResponseHeaders) -import Network.HTTP.Types (hAccept, hContentLength, hContentMD5, hCacheControl, hRange, methodPut, Header, HeaderName, ok200, partialContent206) +import Network.HTTP.Client (Request, RequestBody(..), Response, parseRequest, getUri, path, checkResponse, parseUrlThrow) +import Network.HTTP.Simple (setRequestMethod, setRequestBody, setRequestHeader, addRequestHeader, HttpException(..), getResponseBody, getResponseStatusCode, getResponseHeaders) +import Network.HTTP.Types (hAccept, hContentLength, hContentMD5, methodPut) import Network.HTTP.Conduit (requestHeaders) import Network.HTTP.Client.TLS (getGlobalManager, applyDigestAuth, displayDigestAuthException) import qualified Network.HTTP.Simple import Network.HTTP.Client.MultipartFormData (formDataBody, partFileRequestBody, partBS, partLBS) -import UnliftIO (MonadIO, MonadUnliftIO, withRunInIO, withUnliftIO, unliftIO) +import RIO setUserAgent :: Request -> Request @@ -88,14 +63,10 @@ httpJSON :: (MonadIO m, FromJSON a) => Request -> m (Response a) httpJSON = Network.HTTP.Simple.httpJSON . setUserAgent -httpLbs :: MonadIO m => Request -> m (Response ByteString) +httpLbs :: MonadIO m => Request -> m (Response LByteString) httpLbs = Network.HTTP.Simple.httpLbs . setUserAgent -httpLBS :: MonadIO m => Request -> m (Response ByteString) -httpLBS = httpLbs - - httpNoBody :: MonadIO m => Request -> m (Response ()) httpNoBody = Network.HTTP.Simple.httpNoBody . setUserAgent @@ -105,17 +76,10 @@ httpSink => Request -> (Response () -> ConduitM Strict.ByteString Void m a) -> m a -httpSink req inner = withUnliftIO $ \u -> - Network.HTTP.Simple.httpSink (setUserAgent req) (transPipe (unliftIO u) . 
inner) +httpSink = Network.HTTP.Simple.httpSink . setUserAgent withResponse :: (MonadUnliftIO m, MonadIO n) => Request -> (Response (ConduitM i Strict.ByteString n ()) -> m a) -> m a -withResponse req inner = withRunInIO $ \run -> - Network.HTTP.Simple.withResponse (setUserAgent req) (run . inner) - - -withResponseByManager :: MonadUnliftIO m => Request -> Manager -> (Response BodyReader -> m a) -> m a -withResponseByManager req man inner = withRunInIO $ \run -> - Network.HTTP.Client.withResponse (setUserAgent req) man (run . inner) +withResponse = Network.HTTP.Simple.withResponse . setUserAgent diff --git a/src/Options/Applicative/Complicated.hs b/src/Options/Applicative/Complicated.hs index 8e2bdaa797..ecf94aa97a 100644 --- a/src/Options/Applicative/Complicated.hs +++ b/src/Options/Applicative/Complicated.hs @@ -15,7 +15,6 @@ module Options.Applicative.Complicated import Control.Monad.Trans.Except import Control.Monad.Trans.Writer -import Data.Version import Options.Applicative import Options.Applicative.Types import Options.Applicative.Builder.Internal @@ -45,7 +44,7 @@ complicatedOptions -> ExceptT b (Writer (Mod CommandFields (b,a))) () -- ^ commands (use 'addCommand') -> IO (a,b) -complicatedOptions numericVersion versionString numericHpackVersion h pd footerStr commonParser mOnFailure commandParser = +complicatedOptions numericVersion stringVersion numericHpackVersion h pd footerStr commonParser mOnFailure commandParser = do args <- getArgs (a,(b,c)) <- case execParserPure (prefs noBacktrack) parser args of Failure _ | null args -> withArgs ["--help"] (execParser parser) @@ -56,8 +55,8 @@ complicatedOptions numericVersion versionString numericHpackVersion h pd footerS where parser = info (helpOption <*> versionOptions <*> complicatedParser "COMMAND|FILE" commonParser commandParser) desc desc = fullDesc <> header h <> progDesc pd <> footer footerStr versionOptions = - case versionString of - Nothing -> versionOption (showVersion numericVersion) + case 
stringVersion of + Nothing -> versionOption (versionString numericVersion) Just s -> versionOption s <*> numericVersionOption <*> numericHpackVersionOption versionOption s = infoOption @@ -66,7 +65,7 @@ complicatedOptions numericVersion versionString numericHpackVersion h pd footerS help "Show version") numericVersionOption = infoOption - (showVersion numericVersion) + (versionString numericVersion) (long "numeric-version" <> help "Show only version number") numericHpackVersionOption = diff --git a/src/Path/Find.hs b/src/Path/Find.hs index 74765285f9..581b639b58 100644 --- a/src/Path/Find.hs +++ b/src/Path/Find.hs @@ -10,7 +10,7 @@ module Path.Find ,findInParents) where -import Stack.Prelude +import RIO import System.IO.Error (isPermissionError) import Data.List import Path diff --git a/src/Stack/Build.hs b/src/Stack/Build.hs index cfe90f901c..c5bec20a1a 100644 --- a/src/Stack/Build.hs +++ b/src/Stack/Build.hs @@ -2,13 +2,9 @@ {-# LANGUAGE CPP #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TemplateHaskell #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE ViewPatterns #-} -- | Build the project. 
@@ -21,7 +17,7 @@ module Stack.Build ,CabalVersionException(..)) where -import Stack.Prelude +import Stack.Prelude hiding (loadPackage) import Data.Aeson (Value (Object, Array), (.=), object) import qualified Data.HashMap.Strict as HM import Data.List ((\\), isPrefixOf) @@ -36,6 +32,8 @@ import qualified Data.Text.IO as TIO import Data.Text.Read (decimal) import qualified Data.Vector as V import qualified Data.Yaml as Yaml +import Distribution.Version (mkVersion) +import Path (parent) import Stack.Build.ConstructPlan import Stack.Build.Execute import Stack.Build.Haddock @@ -43,16 +41,10 @@ import Stack.Build.Installed import Stack.Build.Source import Stack.Build.Target import Stack.Package -import Stack.PackageLocation (parseSingleCabalFileIndex) import Stack.Types.Build -import Stack.Types.BuildPlan import Stack.Types.Config -import Stack.Types.FlagName import Stack.Types.NamedComponent import Stack.Types.Package -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName -import Stack.Types.Version import Stack.Types.Compiler (compilerVersionText #ifdef WINDOWS @@ -89,10 +81,8 @@ build msetLocalFiles mbuildLk boptsCli = fixCodePage $ do -- The `locals` value above only contains local project -- packages, not local dependencies. This will get _all_ -- of the local files we're interested in - -- watching. Arguably, we should not bother watching repo - -- and archive files, since those shouldn't - -- change. That's a possible optimization to consider. - [lpFiles lp | PSFiles lp _ <- Map.elems sourceMap] + -- watching. 
+ [lpFiles lp | PSFilePath lp _ <- Map.elems sourceMap] setLocalFiles $ Set.insert stackYaml $ Set.unions files (installedMap, globalDumpPkgs, snapshotDumpPkgs, localDumpPkgs) <- @@ -158,7 +148,7 @@ checkCabalVersion = do allowNewer <- view $ configL.to configAllowNewer cabalVer <- view cabalVersionL -- https://github.com/haskell/cabal/issues/2023 - when (allowNewer && cabalVer < $(mkVersion "1.22")) $ throwM $ + when (allowNewer && cabalVer < mkVersion [1, 22]) $ throwM $ CabalVersionException $ "Error: --allow-newer requires at least Cabal version 1.22, but version " ++ versionString cabalVer ++ @@ -182,7 +172,7 @@ warnIfExecutablesWithSameNameCouldBeOverwritten locals plan = do exesText pkgs = T.intercalate ", " - ["'" <> packageNameText p <> ":" <> exe <> "'" | p <- pkgs] + ["'" <> T.pack (packageNameString p) <> ":" <> exe <> "'" | p <- pkgs] (logWarn . display . T.unlines . concat) [ [ "Building " <> exe_s <> " " <> exesText toBuild <> "." ] , [ "Only one of them will be available via 'stack exec' or locally installed." @@ -224,9 +214,9 @@ warnIfExecutablesWithSameNameCouldBeOverwritten locals plan = do exesToBuild :: Map Text (NonEmpty PackageName) exesToBuild = collect - [ (exe,pkgName) - | (pkgName,task) <- Map.toList (planTasks plan) - , TTFiles lp _ <- [taskType task] -- FIXME analyze logic here, do we need to check for Local? + [ (exe,pkgName') + | (pkgName',task) <- Map.toList (planTasks plan) + , TTFilePath lp _ <- [taskType task] , exe <- (Set.toList . exeComponents . 
lpComponents) lp ] localExes :: Map Text (NonEmpty PackageName) @@ -275,14 +265,13 @@ mkBaseConfigOpts boptsCli = do -- | Provide a function for loading package information from the package index loadPackage :: HasEnvConfig env - => PackageLocationIndex FilePath + => PackageLocationImmutable -> Map FlagName Bool -> [Text] -> RIO env Package loadPackage loc flags ghcOptions = do compiler <- view actualCompilerVersionL platform <- view platformL - root <- view projectRootL let pkgConfig = PackageConfig { packageConfigEnableTests = False , packageConfigEnableBenchmarks = False @@ -291,7 +280,7 @@ loadPackage loc flags ghcOptions = do , packageConfigCompilerVersion = compiler , packageConfigPlatform = platform } - resolvePackage pkgConfig <$> parseSingleCabalFileIndex root loc + resolvePackage pkgConfig <$> loadCabalFileImmutable loc -- | Set the code page for this process as necessary. Only applies to Windows. -- See: https://github.com/commercialhaskell/stack/issues/738 @@ -300,7 +289,7 @@ fixCodePage :: HasEnvConfig env => RIO env a -> RIO env a fixCodePage inner = do mcp <- view $ configL.to configModifyCodePage ghcVersion <- view $ actualCompilerVersionL.to getGhcVersion - if mcp && ghcVersion < $(mkVersion "7.10.3") + if mcp && ghcVersion < mkVersion [7, 10, 3] then fixCodePage' -- GHC >=7.10.3 doesn't need this code page hack. else inner @@ -385,16 +374,14 @@ queryBuildInfo selectors0 = rawBuildInfo :: HasEnvConfig env => RIO env Value rawBuildInfo = do (locals, _sourceMap) <- loadSourceMap NeedTargets defaultBuildOptsCLI - wantedCompiler <- view $ wantedCompilerVersionL.to compilerVersionText + wantedCompiler <- view $ wantedCompilerVersionL.to (utf8BuilderToText . 
display) actualCompiler <- view $ actualCompilerVersionL.to compilerVersionText - globalHints <- view globalHintsL return $ object [ "locals" .= Object (HM.fromList $ map localToPair locals) , "compiler" .= object [ "wanted" .= wantedCompiler , "actual" .= actualCompiler ] - , "global-hints" .= globalHints ] where localToPair lp = @@ -402,6 +389,6 @@ rawBuildInfo = do where p = lpPackage lp value = object - [ "version" .= packageVersion p - , "path" .= toFilePath (lpDir lp) + [ "version" .= CabalString (packageVersion p) + , "path" .= toFilePath (parent $ lpCabalFile lp) ] diff --git a/src/Stack/Build/Cache.hs b/src/Stack/Build/Cache.hs index fbb93d1f9d..e0773d755f 100644 --- a/src/Stack/Build/Cache.hs +++ b/src/Stack/Build/Cache.hs @@ -35,7 +35,6 @@ module Stack.Build.Cache import Stack.Prelude import Crypto.Hash (hashWith, SHA256(..)) -import Control.Monad.Trans.Maybe import qualified Data.ByteArray as Mem (convert) import qualified Data.ByteString.Base64.URL as B64URL import qualified Data.ByteString as B @@ -48,19 +47,15 @@ import qualified Data.Set as Set import qualified Data.Store as Store import Data.Store.VersionTagged import qualified Data.Text as T -import qualified Data.Text.Encoding as TE import Path import Path.IO import Stack.Constants.Config import Stack.Types.Build -import Stack.Types.BuildPlan import Stack.Types.Compiler import Stack.Types.Config import Stack.Types.GhcPkgId import Stack.Types.NamedComponent import Stack.Types.Package -import Stack.Types.PackageIdentifier -import Stack.Types.Version import qualified System.FilePath as FP -- | Directory containing files to mark an executable as installed @@ -82,8 +77,8 @@ getInstalledExes loc = do -- before https://github.com/commercialhaskell/stack/issues/2373 -- was fixed), then we don't know which is correct - ignore them. M.fromListWith (\_ _ -> []) $ - map (\x -> (packageIdentifierName x, [x])) $ - mapMaybe (parsePackageIdentifierFromString . toFilePath . 
filename) files + map (\x -> (pkgName x, [x])) $ + mapMaybe (parsePackageIdentifier . toFilePath . filename) files -- | Mark the given executable as installed markExeInstalled :: (MonadReader env m, HasEnvConfig env, MonadIO m, MonadThrow m) @@ -96,7 +91,7 @@ markExeInstalled loc ident = do -- Remove old install records for this package. -- TODO: This is a bit in-efficient. Put all this metadata into one file? installed <- getInstalledExes loc - forM_ (filter (\x -> packageIdentifierName ident == packageIdentifierName x) installed) + forM_ (filter (\x -> pkgName ident == pkgName x) installed) (markExeNotInstalled loc) -- TODO consideration for the future: list all of the executables -- installed, and invalidate this file in getInstalledExes if they no @@ -254,73 +249,55 @@ checkTestSuccess dir = -- We only pay attention to non-directory options. We don't want to avoid a -- cache hit just because it was installed in a different directory. precompiledCacheFile :: HasEnvConfig env - => PackageLocationIndex FilePath + => PackageLocationImmutable -> ConfigureOpts -> Set GhcPkgId -- ^ dependencies - -> RIO env (Maybe (Path Abs File)) + -> RIO env (Path Abs File) precompiledCacheFile loc copts installedPackageIDs = do ec <- view envConfigL compiler <- view actualCompilerVersionL >>= parseRelDir . compilerVersionString cabal <- view cabalVersionL >>= parseRelDir . versionString - let mpkgRaw = - -- The goal here is to come up with a string representing the - -- package location which is unique. For archives and repos, - -- we rely upon cryptographic hashes paired with - -- subdirectories to identify this specific package version. 
- case loc of - PLIndex pir -> Just $ packageIdentifierRevisionString pir - PLOther other -> case other of - PLFilePath _ -> assert False Nothing -- no PLFilePaths should end up in a snapshot - PLArchive a -> fmap - (\h -> T.unpack (staticSHA256ToText h) ++ archiveSubdirs a) - (archiveHash a) - PLRepo r -> Just $ T.unpack (repoCommit r) ++ repoSubdirs r - forM mpkgRaw $ \pkgRaw -> do - platformRelDir <- platformGhcRelDir - let precompiledDir = - view stackRootL ec - $(mkRelDir "precompiled") - platformRelDir - compiler - cabal + -- The goal here is to come up with a string representing the + -- package location which is unique. Luckily @TreeKey@s are exactly + -- that! + treeKey <- getPackageLocationTreeKey loc + pkg <- parseRelDir $ T.unpack $ utf8BuilderToText $ display treeKey - pkg <- - case parseRelDir pkgRaw of - Just x -> return x - Nothing -> parseRelDir - $ T.unpack - $ TE.decodeUtf8 - $ B64URL.encode - $ TE.encodeUtf8 - $ T.pack pkgRaw + platformRelDir <- platformGhcRelDir + let precompiledDir = + view stackRootL ec + $(mkRelDir "precompiled") + platformRelDir + compiler + cabal - -- In Cabal versions 1.22 and later, the configure options contain the - -- installed package IDs, which is what we need for a unique hash. - -- Unfortunately, earlier Cabals don't have the information, so we must - -- supplement it with the installed package IDs directly. - -- See issue: https://github.com/commercialhaskell/stack/issues/1103 - let input = (coNoDirs copts, installedPackageIDs) - hashPath <- parseRelFile $ S8.unpack $ B64URL.encode - $ Mem.convert $ hashWith SHA256 $ Store.encode input + -- In Cabal versions 1.22 and later, the configure options contain the + -- installed package IDs, which is what we need for a unique hash. + -- Unfortunately, earlier Cabals don't have the information, so we must + -- supplement it with the installed package IDs directly. 
+ -- See issue: https://github.com/commercialhaskell/stack/issues/1103 + let input = (coNoDirs copts, installedPackageIDs) + hashPath <- parseRelFile $ S8.unpack $ B64URL.encode + $ Mem.convert $ hashWith SHA256 $ Store.encode input - let longPath = precompiledDir pkg hashPath + let longPath = precompiledDir pkg hashPath - -- See #3649 - shorten the paths on windows if MAX_PATH will be - -- violated. Doing this only when necessary allows use of existing - -- precompiled packages. - if pathTooLong (toFilePath longPath) then do - shortPkg <- shaPath pkg - shortHash <- shaPath hashPath - return $ precompiledDir shortPkg shortHash - else - return longPath + -- See #3649 - shorten the paths on windows if MAX_PATH will be + -- violated. Doing this only when necessary allows use of existing + -- precompiled packages. + if pathTooLong (toFilePath longPath) then do + shortPkg <- shaPath pkg + shortHash <- shaPath hashPath + return $ precompiledDir shortPkg shortHash + else + return longPath -- | Write out information about a newly built package writePrecompiledCache :: HasEnvConfig env => BaseConfigOpts - -> PackageLocationIndex FilePath + -> PackageLocationImmutable -> ConfigureOpts -> Set GhcPkgId -- ^ dependencies -> Installed -- ^ library @@ -328,24 +305,23 @@ writePrecompiledCache :: HasEnvConfig env -> Set Text -- ^ executables -> RIO env () writePrecompiledCache baseConfigOpts loc copts depIDs mghcPkgId sublibs exes = do - mfile <- precompiledCacheFile loc copts depIDs - forM_ mfile $ \file -> do - ensureDir (parent file) - ec <- view envConfigL - let stackRootRelative = makeRelative (view stackRootL ec) - mlibpath <- case mghcPkgId of - Executable _ -> return Nothing - Library _ ipid _ -> liftM Just $ pathFromPkgId stackRootRelative ipid - sublibpaths <- mapM (pathFromPkgId stackRootRelative) sublibs - exes' <- forM (Set.toList exes) $ \exe -> do - name <- parseRelFile $ T.unpack exe - relPath <- stackRootRelative $ bcoSnapInstallRoot baseConfigOpts bindirSuffix 
name - return $ toFilePath relPath - $(versionedEncodeFile precompiledCacheVC) file PrecompiledCache - { pcLibrary = mlibpath - , pcSubLibs = sublibpaths - , pcExes = exes' - } + file <- precompiledCacheFile loc copts depIDs + ensureDir (parent file) + ec <- view envConfigL + let stackRootRelative = makeRelative (view stackRootL ec) + mlibpath <- case mghcPkgId of + Executable _ -> return Nothing + Library _ ipid _ -> liftM Just $ pathFromPkgId stackRootRelative ipid + sublibpaths <- mapM (pathFromPkgId stackRootRelative) sublibs + exes' <- forM (Set.toList exes) $ \exe -> do + name <- parseRelFile $ T.unpack exe + relPath <- stackRootRelative $ bcoSnapInstallRoot baseConfigOpts bindirSuffix name + return $ toFilePath relPath + $(versionedEncodeFile precompiledCacheVC) file PrecompiledCache + { pcLibrary = mlibpath + , pcSubLibs = sublibpaths + , pcExes = exes' + } where pathFromPkgId stackRootRelative ipid = do ipid' <- parseRelFile $ ghcPkgIdString ipid ++ ".conf" @@ -355,14 +331,14 @@ writePrecompiledCache baseConfigOpts loc copts depIDs mghcPkgId sublibs exes = d -- | Check the cache for a precompiled package matching the given -- configuration. readPrecompiledCache :: forall env. HasEnvConfig env - => PackageLocationIndex FilePath -- ^ target package + => PackageLocationImmutable -- ^ target package -> ConfigureOpts -> Set GhcPkgId -- ^ dependencies -> RIO env (Maybe PrecompiledCache) -readPrecompiledCache loc copts depIDs = runMaybeT $ - MaybeT (precompiledCacheFile loc copts depIDs) >>= - MaybeT . $(versionedDecodeFile precompiledCacheVC) >>= - lift . mkAbs +readPrecompiledCache loc copts depIDs = do + file <- precompiledCacheFile loc copts depIDs + mcache <- $(versionedDecodeFile precompiledCacheVC) file + maybe (pure Nothing) (fmap Just . 
mkAbs) mcache where -- Since commit ed9ccc08f327bad68dd2d09a1851ce0d055c0422, -- pcLibrary paths are stored as relative to the stack diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index 6734034ec1..93d002b837 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -4,12 +4,9 @@ {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE ViewPatterns #-} {-# LANGUAGE StandaloneDeriving #-} @@ -18,11 +15,9 @@ module Stack.Build.ConstructPlan ( constructPlan ) where -import Stack.Prelude hiding (Display (..)) +import Stack.Prelude hiding (Display (..), loadPackage) import Control.Monad.RWS.Strict hiding ((<>)) import Control.Monad.State.Strict (execState) -import qualified Data.HashSet as HashSet -import qualified Data.HashMap.Strict as HashMap import Data.List import qualified Data.Map.Strict as M import qualified Data.Map.Strict as Map @@ -34,7 +29,10 @@ import Data.Text.Encoding.Error (lenientDecode) import qualified Distribution.Text as Cabal import qualified Distribution.Version as Cabal import Distribution.Types.BuildType (BuildType (Configure)) +import Distribution.Types.PackageName (mkPackageName) +import Distribution.Version (mkVersion) import Generics.Deriving.Monoid (memptydefault, mappenddefault) +import Path (parent) import qualified RIO import Stack.Build.Cache import Stack.Build.Haddock @@ -43,18 +41,14 @@ import Stack.Build.Source import Stack.Constants import Stack.Package import Stack.PackageDump -import Stack.PackageIndex import Stack.PrettyPrint import Stack.Types.Build import Stack.Types.BuildPlan import Stack.Types.Compiler import Stack.Types.Config -import Stack.Types.FlagName 
import Stack.Types.GhcPkgId import Stack.Types.NamedComponent import Stack.Types.Package -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName import Stack.Types.Runner import Stack.Types.Version import System.IO (putStrLn) @@ -129,12 +123,11 @@ type M = RWST -- TODO replace with more efficient WS stack on top of StackT data Ctx = Ctx { ls :: !LoadedSnapshot , baseConfigOpts :: !BaseConfigOpts - , loadPackage :: !(PackageLocationIndex FilePath -> Map FlagName Bool -> [Text] -> M Package) + , loadPackage :: !(PackageLocationImmutable -> Map FlagName Bool -> [Text] -> M Package) , combinedMap :: !CombinedMap , ctxEnvConfig :: !EnvConfig , callStack :: ![PackageName] , extraToBuild :: !(Set PackageName) - , getVersions :: !(PackageName -> IO (HashMap Version (Maybe CabalHash))) , wanted :: !(Set PackageName) , localNames :: !(Set PackageName) } @@ -146,8 +139,8 @@ instance HasLogFunc Ctx where instance HasRunner Ctx where runnerL = configL.runnerL instance HasConfig Ctx -instance HasCabalLoader Ctx where - cabalLoaderL = configL.cabalLoaderL +instance HasPantryConfig Ctx where + pantryConfigL = configL.pantryConfigL instance HasProcessContext Ctx where processContextL = configL.processContextL instance HasBuildConfig Ctx @@ -176,7 +169,7 @@ constructPlan :: forall env. HasEnvConfig env -> [LocalPackage] -> Set PackageName -- ^ additional packages that must be built -> [DumpPackage () () ()] -- ^ locally registered - -> (PackageLocationIndex FilePath -> Map FlagName Bool -> [Text] -> RIO EnvConfig Package) -- ^ load upstream package + -> (PackageLocationImmutable -> Map FlagName Bool -> [Text] -> RIO EnvConfig Package) -- ^ load upstream package -> SourceMap -> InstalledMap -> Bool @@ -230,8 +223,8 @@ constructPlan ls0 baseConfigOpts0 locals extraToBuild0 localDumpPkgs loadPackage throwM $ ConstructPlanFailed "Plan construction failed." where hasBaseInDeps bconfig = - elem $(mkPackageName "base") - $ map (packageIdentifierName . 
pirIdent) [i | (PLIndex i) <- bcDependencies bconfig] + mkPackageName "base" `elem` + [n | (PLImmutable (PLIHackage (PackageIdentifierRevision n _ _) _)) <- bcDependencies bconfig] mkCtx econfig = Ctx { ls = ls0 @@ -241,7 +234,6 @@ constructPlan ls0 baseConfigOpts0 locals extraToBuild0 localDumpPkgs loadPackage , ctxEnvConfig = econfig , callStack = [] , extraToBuild = extraToBuild0 - , getVersions = runRIO econfig . getPackageVersions , wanted = wantedLocalPackages locals <> extraToBuild0 , localNames = Set.fromList $ map (packageName . lpPackage) locals } @@ -321,11 +313,12 @@ mkUnregisterLocal tasks dirtyReason localDumpPkgs sourceMap initialBuildSteps = = Just "Switching to snapshot installed package" -- Check if a dependency is going to be unregistered | (dep, _):_ <- mapMaybe (`Map.lookup` toUnregister) deps - = Just $ "Dependency being unregistered: " <> packageIdentifierText dep + = Just $ "Dependency being unregistered: " <> T.pack (packageIdentifierString dep) -- None of the above, keep it! | otherwise = Nothing where - name = packageIdentifierName ident + name :: PackageName + name = pkgName ident -- | Given a 'LocalPackage' and its 'lpTestBench', adds a 'Task' for -- running its tests and benchmarks. @@ -359,9 +352,9 @@ addFinal lp package isAllInOne = do Local package , taskPresent = present - , taskType = TTFiles lp Local -- FIXME we can rely on this being Local, right? + , taskType = TTFilePath lp Local -- FIXME we can rely on this being Local, right? , taskAllInOne = isAllInOne - , taskCachePkgSrc = CacheSrcLocal (toFilePath (lpDir lp)) + , taskCachePkgSrc = CacheSrcLocal (toFilePath (parent (lpCabalFile lp))) , taskAnyMissing = not $ Set.null missing , taskBuildTypeConfig = packageBuildTypeConfig package } @@ -406,7 +399,8 @@ addDep treatAsDep' name = do -- they likely won't affect executable -- names. This code does not feel right. 
tellExecutablesUpstream - (PackageIdentifierRevision (PackageIdentifier name (installedVersion installed)) CFILatest) + (PackageIdentifier name (installedVersion installed)) + (PLIHackage (PackageIdentifierRevision name (installedVersion installed) CFILatest) Nothing) loc Map.empty return $ Right $ ADRFound loc installed @@ -421,19 +415,19 @@ addDep treatAsDep' name = do -- FIXME what's the purpose of this? Add a Haddock! tellExecutables :: PackageSource -> M () -tellExecutables (PSFiles lp _) +tellExecutables (PSFilePath lp _) | lpWanted lp = tellExecutablesPackage Local $ lpPackage lp | otherwise = return () -- Ignores ghcOptions because they don't matter for enumerating -- executables. -tellExecutables (PSIndex loc flags _ghcOptions pir) = - tellExecutablesUpstream pir loc flags +tellExecutables (PSRemote loc flags _ghcOptions pkgloc ident) = + tellExecutablesUpstream ident pkgloc loc flags -tellExecutablesUpstream :: PackageIdentifierRevision -> InstallLocation -> Map FlagName Bool -> M () -tellExecutablesUpstream pir@(PackageIdentifierRevision (PackageIdentifier name _) _) loc flags = do +tellExecutablesUpstream :: PackageIdentifier -> PackageLocationImmutable -> InstallLocation -> Map FlagName Bool -> M () +tellExecutablesUpstream (PackageIdentifier name _) pkgloc loc flags = do ctx <- ask when (name `Set.member` extraToBuild ctx) $ do - p <- loadPackage ctx (PLIndex pir) flags [] + p <- loadPackage ctx pkgloc flags [] tellExecutablesPackage loc p tellExecutablesPackage :: InstallLocation -> Package -> M () @@ -447,10 +441,10 @@ tellExecutablesPackage loc p = do Just (PIOnlySource ps) -> goSource ps Just (PIBoth ps _) -> goSource ps - goSource (PSFiles lp _) + goSource (PSFilePath lp _) | lpWanted lp = exeComponents (lpComponents lp) | otherwise = Set.empty - goSource PSIndex{} = Set.empty + goSource PSRemote{} = Set.empty tell mempty { wInstall = Map.fromList $ map (, loc) $ Set.toList $ filterComps myComps $ packageExes p } where @@ -468,11 +462,11 @@ 
installPackage :: Bool -- ^ is this being used by a dependency? installPackage treatAsDep name ps minstalled = do ctx <- ask case ps of - PSIndex _ flags ghcOptions pkgLoc -> do + PSRemote _ flags ghcOptions pkgLoc _version -> do planDebug $ "installPackage: Doing all-in-one build for upstream package " ++ show name - package <- loadPackage ctx (PLIndex pkgLoc) flags ghcOptions -- FIXME be more efficient! Get this from the LoadedPackageInfo! + package <- loadPackage ctx pkgLoc flags ghcOptions resolveDepsAndInstall True treatAsDep ps package minstalled - PSFiles lp _ -> + PSFilePath lp _ -> case lpTestBench lp of Nothing -> do planDebug $ "installPackage: No test / bench component for " ++ show name ++ " so doing an all-in-one build." @@ -543,7 +537,7 @@ installPackageGivenDeps isAllInOne ps package minstalled (missing, present, minL shouldInstall <- checkDirtiness ps installed package present (wanted ctx) return $ if shouldInstall then Nothing else Just installed (Just _, False) -> do - let t = T.intercalate ", " $ map (T.pack . packageNameString . packageIdentifierName) (Set.toList missing) + let t = T.intercalate ", " $ map (T.pack . packageNameString . 
pkgName) (Set.toList missing) tell mempty { wDirty = Map.singleton name $ "missing dependencies: " <> addEllipsis t } return Nothing (Nothing, _) -> return Nothing @@ -568,8 +562,8 @@ installPackageGivenDeps isAllInOne ps package minstalled (missing, present, minL , taskPresent = present , taskType = case ps of - PSFiles lp loc -> TTFiles lp (loc <> minLoc) - PSIndex loc _ _ pkgLoc -> TTIndex package (loc <> minLoc) pkgLoc + PSFilePath lp loc -> TTFilePath lp (loc <> minLoc) + PSRemote loc _ _ pkgLoc _version -> TTRemote package (loc <> minLoc) pkgLoc , taskAllInOne = isAllInOne , taskCachePkgSrc = toCachePkgSrc ps , taskAnyMissing = not $ Set.null missing @@ -610,14 +604,14 @@ addPackageDeps treatAsDep package = do deps' <- packageDepsWithTools package deps <- forM (Map.toList deps') $ \(depname, DepValue range depType) -> do eres <- addDep treatAsDep depname - let getLatestApplicableVersionAndRev = do - vsAndRevs <- liftIO $ getVersions ctx depname - let vs = Set.fromList (HashMap.keys vsAndRevs) - case latestApplicableVersion range vs of - Nothing -> pure Nothing - Just lappVer -> do - let mlappRev = join (HashMap.lookup lappVer vsAndRevs) - pure $ (lappVer,) <$> mlappRev + let getLatestApplicableVersionAndRev :: M (Maybe (Version, BlobKey)) + getLatestApplicableVersionAndRev = do + vsAndRevs <- runRIO ctx $ getHackagePackageVersions UsePreferredVersions depname + pure $ do + lappVer <- latestApplicableVersion range $ Map.keysSet vsAndRevs + revs <- Map.lookup lappVer vsAndRevs + (cabalHash, _) <- Map.maxView revs + Just (lappVer, cabalHash) case eres of Left e -> do addParent depname range Nothing @@ -688,7 +682,7 @@ addPackageDeps treatAsDep package = do package (Map.fromList errs) where - adrVersion (ADRToInstall task) = packageIdentifierVersion $ taskProvides task + adrVersion (ADRToInstall task) = pkgVersion $ taskProvides task adrVersion (ADRFound _ installed) = installedVersion installed -- Update the parents map, for later use in plan construction errors 
-- - see 'getShortestDepsPath'. @@ -704,8 +698,8 @@ addPackageDeps treatAsDep package = do taskHasLibrary :: Task -> Bool taskHasLibrary task = case taskType task of - TTFiles lp _ -> packageHasLibrary $ lpPackage lp - TTIndex p _ _ -> packageHasLibrary p + TTFilePath lp _ -> packageHasLibrary $ lpPackage lp + TTRemote p _ _ -> packageHasLibrary p -- make sure we consider internal libraries as libraries too packageHasLibrary :: Package -> Bool @@ -721,7 +715,7 @@ checkDirtiness :: PackageSource -> Map PackageIdentifier GhcPkgId -> Set PackageName -> M Bool -checkDirtiness ps installed package present wanted = do +checkDirtiness ps installed package present wanted' = do ctx <- ask moldOpts <- runRIO ctx $ tryGetFlagCache installed let configOpts = configureOpts @@ -737,10 +731,10 @@ checkDirtiness ps installed package present wanted = do , configCacheDeps = Set.fromList $ Map.elems present , configCacheComponents = case ps of - PSFiles lp _ -> Set.map (encodeUtf8 . renderComponent) $ lpComponents lp - PSIndex{} -> Set.empty + PSFilePath lp _ -> Set.map (encodeUtf8 . renderComponent) $ lpComponents lp + PSRemote{} -> Set.empty , configCacheHaddock = - shouldHaddockPackage buildOpts wanted (packageName package) || + shouldHaddockPackage buildOpts wanted' (packageName package) || -- Disabling haddocks when old config had haddocks doesn't make dirty. 
maybe False configCacheHaddock moldOpts , configCachePkgSrc = toCachePkgSrc ps @@ -831,16 +825,16 @@ describeConfigDiff config old new pkgSrcName CacheSrcUpstream = "upstream source" psForceDirty :: PackageSource -> Bool -psForceDirty (PSFiles lp _) = lpForceDirty lp -psForceDirty PSIndex{} = False +psForceDirty (PSFilePath lp _) = lpForceDirty lp +psForceDirty PSRemote{} = False psDirty :: MonadIO m => PackageSource -> m (Maybe (Set FilePath)) -psDirty (PSFiles lp _) = runIOThunk $ lpDirtyFiles lp -psDirty PSIndex{} = pure Nothing -- files never change in an upstream package +psDirty (PSFilePath lp _) = runIOThunk $ lpDirtyFiles lp +psDirty PSRemote {} = pure Nothing -- files never change in a remote package psLocal :: PackageSource -> Bool -psLocal (PSFiles _ loc) = loc == Local -- FIXME this is probably not the right logic, see configureOptsNoDir. We probably want to check if this appears in packages: -psLocal PSIndex{} = False +psLocal (PSFilePath _ loc) = loc == Local -- FIXME this is probably not the right logic, see configureOptsNoDir. We probably want to check if this appears in packages: +psLocal PSRemote{} = False -- | Get all of the dependencies for a given package, including build -- tool dependencies. @@ -865,11 +859,11 @@ data ToolWarning = ToolWarning ExeName PackageName deriving Show toolWarningText :: ToolWarning -> Text -toolWarningText (ToolWarning (ExeName toolName) pkgName) = +toolWarningText (ToolWarning (ExeName toolName) pkgName') = "No packages found in snapshot which provide a " <> T.pack (show toolName) <> " executable, which is a build-tool dependency of " <> - T.pack (show (packageNameString pkgName)) + T.pack (packageNameString pkgName') -- | Strip out anything from the @Plan@ intended for the local database stripLocals :: Plan -> Plan @@ -889,7 +883,7 @@ stripNonDeps deps plan = plan , planInstallExes = Map.empty -- TODO maybe don't disable this? 
} where - checkTask task = packageIdentifierName (taskProvides task) `Set.member` deps + checkTask task = pkgName (taskProvides task) `Set.member` deps markAsDep :: PackageName -> M () markAsDep name = tell mempty { wDeps = Set.singleton name } @@ -898,9 +892,9 @@ markAsDep name = tell mempty { wDeps = Set.singleton name } inSnapshot :: PackageName -> Version -> M Bool inSnapshot name version = do p <- asks ls - ls <- asks localNames + ls' <- asks localNames return $ fromMaybe False $ do - guard $ not $ name `Set.member` ls + guard $ not $ name `Set.member` ls' lpi <- Map.lookup name (lsPackages p) return $ lpiVersion lpi == version @@ -915,7 +909,7 @@ deriving instance Ord VersionRange -- | The latest applicable version and it's latest cabal file revision. -- For display purposes only, Nothing if package not found -type LatestApplicableVersion = Maybe (Version, CabalHash) +type LatestApplicableVersion = Maybe (Version, BlobKey) -- | Reason why a dependency was not used data BadDependency @@ -937,7 +931,7 @@ pprintExceptions -> ParentMap -> Set PackageName -> StyleDoc -pprintExceptions exceptions stackYaml stackRoot parentMap wanted = +pprintExceptions exceptions stackYaml stackRoot parentMap wanted' = mconcat $ [ flow "While constructing the build plan, the following exceptions were encountered:" , line <> line @@ -960,7 +954,7 @@ pprintExceptions exceptions stackYaml stackRoot parentMap wanted = addExtraDepsRecommendations | Map.null extras = [] - | (Just _) <- Map.lookup $(mkPackageName "base") extras = + | (Just _) <- Map.lookup (mkPackageName "base") extras = [ " *" <+> align (flow "Build requires unattainable version of base. 
Since base is a part of GHC, you most likely need to use a different GHC version with the matching base.") , line ] @@ -986,11 +980,10 @@ pprintExceptions exceptions stackYaml stackRoot parentMap wanted = go (name, (_range, Just (version,cabalHash), DependencyMismatch{})) = Map.singleton name (version, cabalHash) go _ = Map.empty - pprintExtra (name, (version, cabalHash)) = - let cfInfo = CFIHash Nothing cabalHash - packageId = PackageIdentifier name version - packageIdRev = PackageIdentifierRevision packageId cfInfo - in fromString $ packageIdentifierRevisionString packageIdRev + pprintExtra (name, (version, BlobKey cabalHash cabalSize)) = + let cfInfo = CFIHash cabalHash (Just cabalSize) + packageIdRev = PackageIdentifierRevision name version cfInfo + in fromString $ T.unpack $ utf8BuilderToText $ RIO.display packageIdRev allNotInBuildPlan = Set.fromList $ concatMap toNotInBuildPlan exceptions' toNotInBuildPlan (DependencyPlanFailures _ pDeps) = @@ -1010,7 +1003,7 @@ pprintExceptions exceptions stackYaml stackRoot parentMap wanted = pprintException (DependencyCycleDetected pNames) = Just $ flow "Dependency cycle detected in packages:" <> line <> - indent 4 (encloseSep "[" "]" "," (map (style Error . display) pNames)) + indent 4 (encloseSep "[" "]" "," (map (style Error . fromString . packageNameString) pNames)) pprintException (DependencyPlanFailures pkg pDeps) = case mapMaybe pprintDep (Map.toList pDeps) of [] -> Nothing @@ -1018,24 +1011,24 @@ pprintExceptions exceptions stackYaml stackRoot parentMap wanted = flow "In the dependencies for" <+> pkgIdent <> pprintFlags (packageFlags pkg) <> ":" <> line <> indent 4 (vsep depErrors) <> - case getShortestDepsPath parentMap wanted (packageName pkg) of + case getShortestDepsPath parentMap wanted' (packageName pkg) of Nothing -> line <> flow "needed for unknown reason - stack invariant violated." - Just [] -> line <> flow "needed since" <+> pkgName <+> flow "is a build target." 
+ Just [] -> line <> flow "needed since" <+> pkgName' <+> flow "is a build target." Just (target:path) -> line <> flow "needed due to" <+> encloseSep "" "" " -> " pathElems where pathElems = - [style Target . display $ target] ++ - map display path ++ + [style Target . fromString . packageIdentifierString $ target] ++ + map (fromString . packageIdentifierString) path ++ [pkgIdent] where - pkgName = style Current . display $ packageName pkg - pkgIdent = style Current . display $ packageIdentifier pkg + pkgName' = style Current . fromString . packageNameString $ packageName pkg + pkgIdent = style Current . fromString . packageIdentifierString $ packageIdentifier pkg -- Skip these when they are redundant with 'NotInBuildPlan' info. pprintException (UnknownPackage name) | name `Set.member` allNotInBuildPlan = Nothing - | name `HashSet.member` wiredInPackages = - Just $ flow "Can't build a package with same name as a wired-in-package:" <+> (style Current . display $ name) - | otherwise = Just $ flow "Unknown package:" <+> (style Current . display $ name) + | name `Set.member` wiredInPackages = + Just $ flow "Can't build a package with same name as a wired-in-package:" <+> (style Current . fromString . packageNameString $ name) + | otherwise = Just $ flow "Unknown package:" <+> (style Current . fromString . 
packageNameString $ name) pprintFlags flags | Map.null flags = "" @@ -1045,7 +1038,7 @@ pprintExceptions exceptions stackYaml stackRoot parentMap wanted = pprintDep (name, (range, mlatestApplicable, badDep)) = case badDep of NotInBuildPlan -> Just $ - style Error (display name) <+> + style Error (fromString $ packageNameString name) <+> align ((if range == Cabal.anyVersion then flow "needed" else flow "must match" <+> goodRange) <> "," <> softline <> @@ -1053,7 +1046,7 @@ pprintExceptions exceptions stackYaml stackRoot parentMap wanted = latestApplicable Nothing) -- TODO: For local packages, suggest editing constraints DependencyMismatch version -> Just $ - (style Error . display) (PackageIdentifier name version) <+> + (style Error . fromString . packageIdentifierString) (PackageIdentifier name version) <+> align (flow "from stack configuration does not match" <+> goodRange <+> latestApplicable (Just version)) -- I think the main useful info is these explain why missing @@ -1061,10 +1054,10 @@ pprintExceptions exceptions stackYaml stackRoot parentMap wanted = -- path from a target to the package. 
Couldn'tResolveItsDependencies _version -> Nothing HasNoLibrary -> Just $ - style Error (display name) <+> + style Error (fromString $ packageNameString name) <+> align (flow "is a library dependency, but the package provides no library") BDDependencyCycleDetected names -> Just $ - style Error (display name) <+> + style Error (fromString $ packageNameString name) <+> align (flow $ "dependency cycle detected: " ++ intercalate ", " (map packageNameString names)) where goodRange = style Good (fromString (Cabal.display range)) @@ -1078,7 +1071,7 @@ pprintExceptions exceptions stackYaml stackRoot parentMap wanted = | Just laVer == mversion -> softline <> flow "(latest matching version is specified)" | otherwise -> softline <> - flow "(latest matching version is" <+> style Good (display laVer) <> ")" + flow "(latest matching version is" <+> style Good (fromString $ versionString laVer) <> ")" -- | Get the shortest reason for the package to be in the build plan. In -- other words, trace the parent dependencies back to a 'wanted' @@ -1088,28 +1081,28 @@ getShortestDepsPath -> Set PackageName -> PackageName -> Maybe [PackageIdentifier] -getShortestDepsPath (MonoidMap parentsMap) wanted name = - if Set.member name wanted +getShortestDepsPath (MonoidMap parentsMap) wanted' name = + if Set.member name wanted' then Just [] else case M.lookup name parentsMap of Nothing -> Nothing Just (_, parents) -> Just $ findShortest 256 paths0 where - paths0 = M.fromList $ map (\(ident, _) -> (packageIdentifierName ident, startDepsPath ident)) parents + paths0 = M.fromList $ map (\(ident, _) -> (pkgName ident, startDepsPath ident)) parents where -- The 'paths' map is a map from PackageName to the shortest path -- found to get there. It is the frontier of our breadth-first -- search of dependencies. 
findShortest :: Int -> Map PackageName DepsPath -> [PackageIdentifier] findShortest fuel _ | fuel <= 0 = - [PackageIdentifier $(mkPackageName "stack-ran-out-of-jet-fuel") $(mkVersion "0")] + [PackageIdentifier (mkPackageName "stack-ran-out-of-jet-fuel") (mkVersion [0])] findShortest _ paths | M.null paths = [] findShortest fuel paths = case targets of [] -> findShortest (fuel - 1) $ M.fromListWith chooseBest $ concatMap extendPath recurses _ -> let (DepsPath _ _ path) = minimum (map snd targets) in path where - (targets, recurses) = partition (\(n, _) -> n `Set.member` wanted) (M.toList paths) + (targets, recurses) = partition (\(n, _) -> n `Set.member` wanted') (M.toList paths) chooseBest :: DepsPath -> DepsPath -> DepsPath chooseBest x y = if x > y then x else y -- Extend a path to all its parents. @@ -1117,7 +1110,7 @@ getShortestDepsPath (MonoidMap parentsMap) wanted name = extendPath (n, dp) = case M.lookup n parentsMap of Nothing -> [] - Just (_, parents) -> map (\(pkgId, _) -> (packageIdentifierName pkgId, extendDepsPath pkgId dp)) parents + Just (_, parents) -> map (\(pkgId, _) -> (pkgName pkgId, extendDepsPath pkgId dp)) parents data DepsPath = DepsPath { dpLength :: Int -- ^ Length of dpPath @@ -1131,14 +1124,14 @@ data DepsPath = DepsPath startDepsPath :: PackageIdentifier -> DepsPath startDepsPath ident = DepsPath { dpLength = 1 - , dpNameLength = T.length (packageNameText (packageIdentifierName ident)) + , dpNameLength = length (packageNameString (pkgName ident)) , dpPath = [ident] } extendDepsPath :: PackageIdentifier -> DepsPath -> DepsPath extendDepsPath ident dp = DepsPath { dpLength = dpLength dp + 1 - , dpNameLength = dpNameLength dp + T.length (packageNameText (packageIdentifierName ident)) + , dpNameLength = dpNameLength dp + length (packageNameString (pkgName ident)) , dpPath = [ident] } diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs index 9b96ea2180..a6133f4880 100644 --- a/src/Stack/Build/Execute.hs +++ 
b/src/Stack/Build/Execute.hs @@ -33,7 +33,7 @@ import qualified Data.ByteArray as Mem (convert) import qualified Data.ByteString as S import qualified Data.ByteString.Base64.URL as B64URL import Data.Char (isSpace) -import Data.Conduit +import Conduit import qualified Data.Conduit.Binary as CB import qualified Data.Conduit.Filesystem as CF import qualified Data.Conduit.List as CL @@ -58,6 +58,8 @@ import qualified Distribution.Simple.Build.Macros as C import Distribution.System (OS (Windows), Platform (Platform)) import qualified Distribution.Text as C +import Distribution.Types.PackageName (mkPackageName) +import Distribution.Version (mkVersion) import Path import Path.CheckInstall import Path.Extra (toFilePathNoTrailingSep, rejectMissingFile) @@ -72,7 +74,6 @@ import Stack.Config import Stack.Constants import Stack.Constants.Config import Stack.Coverage -import Stack.Fetch as Fetch import Stack.GhcPkg import Stack.Package import Stack.PackageDump @@ -83,8 +84,6 @@ import Stack.Types.Config import Stack.Types.GhcPkgId import Stack.Types.NamedComponent import Stack.Types.Package -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName import Stack.Types.Runner import Stack.Types.Version import qualified System.Directory as D @@ -104,19 +103,19 @@ data ExecutableBuildStatus -- | Fetch the packages necessary for a build, for example in combination with a dry run. 
preFetch :: HasEnvConfig env => Plan -> RIO env () preFetch plan - | Set.null idents = logDebug "Nothing to fetch" + | Set.null pkgLocs = logDebug "Nothing to fetch" | otherwise = do logDebug $ "Prefetching: " <> - mconcat (intersperse ", " (RIO.display <$> Set.toList idents)) - fetchPackages idents + mconcat (intersperse ", " (RIO.display <$> Set.toList pkgLocs)) + fetchPackages pkgLocs where - idents = Set.unions $ map toIdent $ Map.elems $ planTasks plan + pkgLocs = Set.unions $ map toPkgLoc $ Map.elems $ planTasks plan - toIdent task = + toPkgLoc task = case taskType task of - TTFiles{} -> Set.empty - TTIndex _ _ (PackageIdentifierRevision ident _) -> Set.singleton ident + TTFilePath{} -> Set.empty + TTRemote _ _ pkgloc -> Set.singleton pkgloc -- | Print a description of build plan for human consumption. printPlan :: HasRunner env => Plan -> RIO env () @@ -126,7 +125,7 @@ printPlan plan = do xs -> do logInfo "Would unregister locally:" forM_ xs $ \(ident, reason) -> logInfo $ - RIO.display ident <> + fromString (packageIdentifierString ident) <> if T.null reason then "" else " (" <> RIO.display reason <> ")" @@ -170,19 +169,19 @@ printPlan plan = do -- | For a dry run displayTask :: Task -> Utf8Builder displayTask task = - RIO.display (taskProvides task) <> + fromString (packageIdentifierString (taskProvides task)) <> ": database=" <> (case taskLocation task of Snap -> "snapshot" Local -> "local") <> ", source=" <> (case taskType task of - TTFiles lp _ -> fromString $ toFilePath $ lpDir lp - TTIndex{} -> "package index") <> + TTFilePath lp _ -> fromString $ toFilePath $ parent $ lpCabalFile lp + TTRemote _ _ pl -> RIO.display pl) <> (if Set.null missing then "" else ", after: " <> - mconcat (intersperse "," (RIO.display <$> Set.toList missing))) + mconcat (intersperse "," (fromString . 
packageIdentifierString <$> Set.toList missing))) where missing = tcoMissing $ taskConfigOpts task @@ -599,7 +598,7 @@ executePlan' installedMap0 targets plan ee@ExecuteEnv {..} = do localDB <- packageDatabaseLocal forM_ ids $ \(id', (ident, reason)) -> do logInfo $ - RIO.display ident <> + fromString (packageIdentifierString ident) <> ": unregistering" <> if T.null reason then "" @@ -633,9 +632,10 @@ executePlan' installedMap0 targets plan ee@ExecuteEnv {..} = do run $ logStickyDone ("Completed " <> RIO.display total <> " action(s).") | otherwise = do inProgress <- readTVarIO actionsVar - let packageNames = map (\(ActionId pkgID _) -> packageIdentifierText pkgID) (toList inProgress) + let packageNames = map (\(ActionId pkgID _) -> pkgName pkgID) (toList inProgress) + nowBuilding :: [PackageName] -> Utf8Builder nowBuilding [] = "" - nowBuilding names = mconcat $ ": " : intersperse ", " (map RIO.display names) + nowBuilding names = mconcat $ ": " : intersperse ", " (map (fromString . packageNameString) names) when terminal $ run $ logSticky $ "Progress " <> RIO.display prev <> "/" <> RIO.display total <> @@ -669,7 +669,7 @@ executePlan' installedMap0 targets plan ee@ExecuteEnv {..} = do where installedMap' = Map.difference installedMap0 $ Map.fromList - $ map (\(ident, _) -> (packageIdentifierName ident, ())) + $ map (\(ident, _) -> (pkgName ident, ())) $ Map.elems $ planUnregisterLocal plan @@ -761,13 +761,13 @@ getConfigCache ExecuteEnv {..} task@Task {..} installedMap enableTest enableBenc -- 'stack test'. See: -- https://github.com/commercialhaskell/stack/issues/805 case taskType of - TTFiles lp _ -> + TTFilePath lp _ -> -- FIXME: make this work with exact-configuration. -- Not sure how to plumb the info atm. 
See -- https://github.com/commercialhaskell/stack/issues/2049 [ "--enable-tests" | enableTest || (not useExactConf && depsPresent installedMap (lpTestDeps lp))] ++ [ "--enable-benchmarks" | enableBench || (not useExactConf && depsPresent installedMap (lpBenchDeps lp))] - _ -> [] + TTRemote{} -> [] idMap <- liftIO $ readTVarIO eeGhcPkgIds let getMissing ident = case Map.lookup ident idMap of @@ -775,7 +775,7 @@ getConfigCache ExecuteEnv {..} task@Task {..} installedMap enableTest enableBenc -- Expect to instead find it in installedMap if it's -- an initialBuildSteps target. | boptsCLIInitialBuildSteps eeBuildOptsCLI && taskIsTarget task, - Just (_, installed) <- Map.lookup (packageIdentifierName ident) installedMap + Just (_, installed) <- Map.lookup (pkgName ident) installedMap -> installedToGhcPkgId ident installed Just installed -> installedToGhcPkgId ident installed _ -> error "singleBuild: invariant violated, missing package ID missing" @@ -792,10 +792,10 @@ getConfigCache ExecuteEnv {..} task@Task {..} installedMap enableTest enableBenc , configCacheDeps = allDeps , configCacheComponents = case taskType of - TTFiles lp _ -> Set.map (encodeUtf8 . renderComponent) $ lpComponents lp - TTIndex{} -> Set.empty + TTFilePath lp _ -> Set.map (encodeUtf8 . 
renderComponent) $ lpComponents lp + TTRemote{} -> Set.empty , configCacheHaddock = - shouldHaddockPackage eeBuildOpts eeWanted (packageIdentifierName taskProvides) + shouldHaddockPackage eeBuildOpts eeWanted (pkgName taskProvides) , configCachePkgSrc = taskCachePkgSrc } allDepsMap = Map.union missing' taskPresent @@ -839,7 +839,7 @@ ensureConfig newConfigCache pkgDir ExecuteEnv {..} announce cabal cabalfp task = deleteCaches pkgDir announce let programNames = - if eeCabalPkgVer < $(mkVersion "1.22") + if eeCabalPkgVer < mkVersion [1, 22] then ["ghc", "ghc-pkg"] else ["ghc", "ghc-pkg", "ghcjs", "ghcjs-pkg"] exes <- forM programNames $ \name -> do @@ -873,7 +873,7 @@ ensureConfig newConfigCache pkgDir ExecuteEnv {..} announce cabal cabalfp task = announceTask :: HasLogFunc env => Task -> Text -> RIO env () announceTask task x = logInfo $ - RIO.display (taskProvides task) <> + fromString (packageIdentifierString (taskProvides task)) <> ": " <> RIO.display x @@ -906,6 +906,8 @@ withSingleContext :: forall env a. HasEnvConfig env -> ( Package -- Package info -> Path Abs File -- Cabal file path -> Path Abs Dir -- Package root directory file path + -- Note that the `Path Abs Dir` argument is redundant with the `Path Abs File` + -- argument, but we provide both to avoid recalculating `parent` of the `File`. -> (ExcludeTHLoading -> [String] -> RIO env ()) -- Function to run Cabal with args -> (Text -> RIO env ()) -- An 'announce' function, for different build phases @@ -922,8 +924,8 @@ withSingleContext ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffi wanted = case taskType of - TTFiles lp _ -> lpWanted lp - TTIndex{} -> False + TTFilePath lp _ -> lpWanted lp + TTRemote{} -> False -- Output to the console if this is the last task, and the user -- asked to build it specifically. 
When the action is a @@ -941,12 +943,27 @@ withSingleContext ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffi withPackage inner = case taskType of - TTFiles lp _ -> inner (lpPackage lp) (lpCabalFile lp) (lpDir lp) - TTIndex package _ pir -> do - mdist <- distRelativeDir - dir <- unpackPackageIdent eeTempDir mdist pir - - let name = packageIdentifierName taskProvides + TTFilePath lp _ -> inner (lpPackage lp) (lpCabalFile lp) (parent $ lpCabalFile lp) + TTRemote package _ pkgloc -> do + suffix <- parseRelDir $ packageIdentifierString $ packageIdent package + let dir = eeTempDir suffix + unpackPackageLocation dir pkgloc + + -- See: https://github.com/fpco/stack/issues/157 + distDir <- distRelativeDir + let oldDist = dir $(mkRelDir "dist") + newDist = dir distDir + exists <- doesDirExist oldDist + when exists $ do + -- Previously used takeDirectory, but that got confused + -- by trailing slashes, see: + -- https://github.com/commercialhaskell/stack/issues/216 + -- + -- Instead, use Path which is a bit more resilient + ensureDir $ parent newDist + renameDir oldDist newDist + + let name = pkgName taskProvides cabalfpRel <- parseRelFile $ packageNameString name ++ ".cabal" let cabalfp = dir cabalfpRel inner package cabalfp dir @@ -954,7 +971,8 @@ withSingleContext ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffi withOutputType pkgDir package inner -- If the user requested interleaved output, dump to the console with a -- prefix. - | boptsInterleavedOutput eeBuildOpts = inner $ OTConsole $ RIO.display (packageName package) <> "> " + | boptsInterleavedOutput eeBuildOpts = + inner $ OTConsole $ fromString (packageNameString (packageName package)) <> "> " -- Not in interleaved mode. When building a single wanted package, dump -- to the console with no prefix. 
@@ -968,7 +986,7 @@ withSingleContext ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffi -- We only want to dump logs for local non-dependency packages case taskType of - TTFiles lp _ | lpWanted lp -> + TTFilePath lp _ | lpWanted lp -> liftIO $ atomically $ writeTChan eeLogFiles (pkgDir, logPath) _ -> return () @@ -1007,7 +1025,7 @@ withSingleContext ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffi -- Omit cabal package dependency when building -- Cabal. See -- https://github.com/commercialhaskell/stack/issues/1356 - | packageName package == $(mkPackageName "Cabal") = [] + | packageName package == mkPackageName "Cabal" = [] | otherwise = ["-package=" ++ packageIdentifierString (PackageIdentifier cabalPackageName @@ -1025,10 +1043,10 @@ withSingleContext ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffi warnCustomNoDeps :: RIO env () warnCustomNoDeps = case (taskType, packageBuildType package) of - (TTFiles lp Local, C.Custom) | lpWanted lp -> do + (TTFilePath lp Local, C.Custom) | lpWanted lp -> do prettyWarnL [ flow "Package" - , display $ packageName package + , fromString $ packageNameString $ packageName package , flow "uses a custom Cabal build, but does not use a custom-setup stanza" ] _ -> return () @@ -1042,9 +1060,9 @@ withSingleContext ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffi -- explicit list of dependencies, and we -- should simply use all of them. (Just customSetupDeps, _) -> do - unless (Map.member $(mkPackageName "Cabal") customSetupDeps) $ + unless (Map.member (mkPackageName "Cabal") customSetupDeps) $ prettyWarnL - [ display $ packageName package + [ fromString $ packageNameString $ packageName package , "has a setup-depends field, but it does not mention a Cabal dependency. This is likely to cause build errors." ] allDeps <- @@ -1060,10 +1078,10 @@ withSingleContext ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffi case filter (matches . 
fst) (Map.toList allDeps) of x:xs -> do unless (null xs) - (logWarn ("Found multiple installed packages for custom-setup dep: " <> RIO.display name)) - return ("-package-id=" ++ ghcPkgIdString (snd x), Just (toCabalPackageIdentifier (fst x))) + (logWarn ("Found multiple installed packages for custom-setup dep: " <> fromString (packageNameString name))) + return ("-package-id=" ++ ghcPkgIdString (snd x), Just (fst x)) [] -> do - logWarn ("Could not find custom-setup dep: " <> RIO.display name) + logWarn ("Could not find custom-setup dep: " <> fromString (packageNameString name)) return ("-package=" ++ packageNameString name, Nothing) let depsArgs = map fst matchedDeps -- Generate setup_macros.h and provide it to ghc @@ -1142,7 +1160,7 @@ withSingleContext ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffi mlogFile bss where - runAndOutput :: CompilerVersion 'CVActual -> RIO env () + runAndOutput :: ActualCompiler -> RIO env () runAndOutput compilerVer = withWorkingDir (toFilePath pkgDir) $ withProcessContext menv $ case outputType of OTLogFile _ h -> proc (toFilePath exeName) fullArgs @@ -1158,7 +1176,7 @@ withSingleContext ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffi :: HasCallStack => ExcludeTHLoading -> LogLevel - -> CompilerVersion 'CVActual + -> ActualCompiler -> Utf8Builder -> ConduitM S.ByteString Void (RIO env) () outputSink excludeTH level compilerVer prefix = @@ -1245,13 +1263,16 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap writeFlagCache installed cache liftIO $ atomically $ modifyTVar eeGhcPkgIds $ Map.insert taskProvides installed where - pname = packageIdentifierName taskProvides + pname = pkgName taskProvides shouldHaddockPackage' = shouldHaddockPackage eeBuildOpts eeWanted pname - doHaddock package = shouldHaddockPackage' && + doHaddock mcurator package + = shouldHaddockPackage' && not isFinalBuild && -- Works around haddock failing on bytestring-builder since it has no modules -- 
when bytestring is new enough. - packageHasExposedModules package + packageHasExposedModules package && + -- Special help for the curator tool to avoid haddocks that are known to fail + maybe True (Set.notMember pname . curatorSkipHaddock) mcurator buildingFinals = isFinalBuild || taskAllInOne enableTests = buildingFinals && any isCTest (taskComponents task) @@ -1267,7 +1288,7 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap , ["bench" | enableBenchmarks] ] (hasLib, hasSubLib, hasExe) = case taskType of - TTFiles lp Local -> + TTFilePath lp Local -> let package = lpPackage lp hasLibrary = case packageLibraries package of @@ -1285,8 +1306,8 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap Snap | not shouldHaddockPackage' -> do mpc <- case taskLocation task of - Snap -> readPrecompiledCache - (ttPackageLocation taskType) + Snap -> fmap join $ for (ttPackageLocation taskType) $ \loc -> readPrecompiledCache + loc (configCacheOpts cache) (configCacheDeps cache) _ -> return Nothing @@ -1320,9 +1341,9 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap -- snapshot, in case it was built with different flags. let subLibNames = map T.unpack . 
Set.toList $ case taskType of - TTFiles lp _ -> packageInternalLibraries $ lpPackage lp - TTIndex p _ _ -> packageInternalLibraries p - (name, version) = toTuple taskProvides + TTFilePath lp _ -> packageInternalLibraries $ lpPackage lp + TTRemote p _ _ -> packageInternalLibraries p + PackageIdentifier name version = taskProvides mainLibName = packageNameString name mainLibVersion = versionString version pkgName = mainLibName ++ "-" ++ mainLibVersion @@ -1382,7 +1403,7 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap executableBuildStatuses <- getExecutableBuildStatuses package pkgDir when (not (cabalIsSatisfied executableBuildStatuses) && taskIsTarget task) (logInfo - ("Building all executables for `" <> RIO.display (packageName package) <> + ("Building all executables for `" <> fromString (packageNameString (packageName package)) <> "' once. After a successful build of all of them, only specified executables will be rebuilt.")) _neededConfig <- ensureConfig cache pkgDir ee (announce ("configure" <> annSuffix executableBuildStatuses)) cabal cabalfp task @@ -1423,19 +1444,19 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap markExeNotInstalled (taskLocation task) taskProvides case taskType of - TTFiles lp _ -> do -- FIXME should this only be for local packages? + TTFilePath lp _ -> do when enableTests $ unsetTestSuccess pkgDir caches <- runIOThunk $ lpNewBuildCaches lp mapM_ (uncurry (writeBuildCache pkgDir)) (Map.toList caches) - TTIndex{} -> return () + TTRemote{} -> return () -- FIXME: only output these if they're in the build plan. preBuildTime <- modTime <$> liftIO getCurrentTime let postBuildCheck _succeeded = do mlocalWarnings <- case taskType of - TTFiles lp Local -> do + TTFilePath lp Local -> do warnings <- checkForUnlistedFiles taskType preBuildTime pkgDir -- TODO: Perhaps only emit these warnings for non extra-dep? 
return (Just (lpCabalFile lp, warnings)) @@ -1466,16 +1487,17 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap cabal stripTHLoading (("build" :) $ (++ extraOpts) $ case (taskType, taskAllInOne, isFinalBuild) of (_, True, True) -> error "Invariant violated: cannot have an all-in-one build that also has a final build step." - (TTFiles lp _, False, False) -> primaryComponentOptions executableBuildStatuses lp - (TTFiles lp _, False, True) -> finalComponentOptions lp - (TTFiles lp _, True, False) -> primaryComponentOptions executableBuildStatuses lp ++ finalComponentOptions lp - (TTIndex{}, _, _) -> []) + (TTFilePath lp _, False, False) -> primaryComponentOptions executableBuildStatuses lp + (TTFilePath lp _, False, True) -> finalComponentOptions lp + (TTFilePath lp _, True, False) -> primaryComponentOptions executableBuildStatuses lp ++ finalComponentOptions lp + (TTRemote{}, _, _) -> []) `catch` \ex -> case ex of CabalExitedUnsuccessfully{} -> postBuildCheck False >> throwM ex _ -> throwM ex postBuildCheck True - when (doHaddock package) $ do + mcurator <- view $ buildConfigL.to bcCurator + when (doHaddock mcurator package) $ do announce "haddock" sourceFlag <- if not (boptsHaddockHyperlinkSource eeBuildOpts) then return [] else do -- See #2429 for why the temp dir is used @@ -1503,8 +1525,8 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap actualCompiler <- view actualCompilerVersionL let quickjump = case actualCompiler of - GhcVersion ghcVer - | ghcVer >= $(mkVersion "8.4") -> ["--haddock-option=--quickjump"] + ACGhc ghcVer + | ghcVer >= mkVersion [8, 4] -> ["--haddock-option=--quickjump"] _ -> [] cabal KeepTHLoading $ concat @@ -1570,8 +1592,8 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap sublibsPkgIds <- fmap catMaybes $ forM (Set.toList $ packageInternalLibraries package) $ \sublib -> do -- z-haddock-library-z-attoparsec for internal lib attoparsec of 
haddock-library - let sublibName = T.concat ["z-", packageNameText $ packageName package, "-z-", sublib] - case parsePackageName sublibName of + let sublibName = T.concat ["z-", T.pack $ packageNameString $ packageName package, "-z-", sublib] + case parsePackageName $ T.unpack sublibName of Nothing -> return Nothing -- invalid lib, ignored Just subLibName -> loadInstalledPkg wc [installedPkgDb] installedDumpPkgsTVar subLibName @@ -1584,10 +1606,10 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap return (Executable ident, []) -- don't return sublibs in this case case taskLocation task of - Snap -> + Snap -> for_ (ttPackageLocation taskType) $ \loc -> writePrecompiledCache eeBaseConfigOpts - (ttPackageLocation taskType) + loc (configCacheOpts cache) (configCacheDeps cache) mpkgid sublibsPkgIds (packageExes package) @@ -1597,10 +1619,10 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap -- For packages from a package index, pkgDir is in the tmp -- directory. We eagerly delete it if no other tasks -- require it, to reduce space usage in tmp (#3018). - TTIndex{} -> do + TTRemote{} -> do let remaining = filter (\(ActionId x _) -> x == taskProvides) (Set.toList acRemaining) when (null remaining) $ removeDirRecur pkgDir - _ -> return () + TTFilePath{} -> return () return mpkgid @@ -1665,7 +1687,7 @@ checkExeStatus compiler platform distDir name = do -- | Check if any unlisted files have been found, and add them to the build cache. 
checkForUnlistedFiles :: HasEnvConfig env => TaskType -> ModTime -> Path Abs Dir -> RIO env [PackageWarning] -checkForUnlistedFiles (TTFiles lp _) preBuildTime pkgDir = do +checkForUnlistedFiles (TTFilePath lp _) preBuildTime pkgDir = do caches <- runIOThunk $ lpNewBuildCaches lp (addBuildCache,warnings) <- addUnlistedToBuildCache @@ -1679,7 +1701,7 @@ checkForUnlistedFiles (TTFiles lp _) preBuildTime pkgDir = do writeBuildCache pkgDir component $ Map.unions (cache : newToCache) return warnings -checkForUnlistedFiles TTIndex{} _ _ = return [] +checkForUnlistedFiles TTRemote{} _ _ = return [] -- | Determine if all of the dependencies given are installed depsPresent :: InstalledMap -> Map PackageName VersionRange -> Bool @@ -1885,7 +1907,7 @@ mungeBuildOutput :: forall m. MonadIO m => ExcludeTHLoading -- ^ exclude TH loading? -> ConvertPathsToAbsolute -- ^ convert paths to absolute? -> Path Abs Dir -- ^ package's root directory - -> CompilerVersion 'CVActual -- ^ compiler we're building with + -> ActualCompiler -- ^ compiler we're building with -> ConduitM Text Text m () mungeBuildOutput excludeTHLoading makeAbsolute pkgDir compilerVer = void $ CT.lines @@ -1907,7 +1929,7 @@ mungeBuildOutput excludeTHLoading makeAbsolute pkgDir compilerVer = void $ filterLinkerWarnings -- Check for ghc 7.8 since it's the only one prone to producing -- linker warnings on Windows x64 - | getGhcVersion compilerVer >= $(mkVersion "7.8") = doNothing + | getGhcVersion compilerVer >= mkVersion [7, 8] = doNothing | otherwise = CL.filter (not . isLinkerWarning) isLinkerWarning :: Text -> Bool @@ -1992,7 +2014,7 @@ primaryComponentOptions executableBuildStatuses lp = NoLibraries -> [] HasLibraries names -> map T.unpack - $ T.append "lib:" (packageNameText (packageName package)) + $ T.append "lib:" (T.pack (packageNameString (packageName package))) : map (T.append "flib:") (Set.toList names)) ++ map (T.unpack . T.append "lib:") (Set.toList $ packageInternalLibraries package) ++ map (T.unpack . 
T.append "exe:") (Set.toList $ exesToBuild executableBuildStatuses lp) @@ -2032,8 +2054,8 @@ finalComponentOptions lp = taskComponents :: Task -> Set NamedComponent taskComponents task = case taskType task of - TTFiles lp _ -> lpComponents lp -- FIXME probably just want Local, maybe even just lpWanted - TTIndex{} -> Set.empty + TTFilePath lp _ -> lpComponents lp -- FIXME probably just want lpWanted + TTRemote{} -> Set.empty -- | Take the given list of package dependencies and the contents of the global -- package database, and construct a set of installed package IDs that: @@ -2070,7 +2092,7 @@ addGlobalPackages deps globals0 = -- Create a Map of unique package names in the global database globals2 = Map.fromListWith chooseBest - $ map (packageIdentifierName . dpPackageIdent &&& id) globals1 + $ map (pkgName . dpPackageIdent &&& id) globals1 -- Final result: add in globals that have their dependencies met res = loop id (Map.elems globals2) $ Set.fromList res0 @@ -2080,18 +2102,18 @@ addGlobalPackages deps globals0 = ---------------------------------- -- Is the given package identifier for any version of Cabal - isCabal (PackageIdentifier name _) = name == $(mkPackageName "Cabal") + isCabal (PackageIdentifier name _) = name == mkPackageName "Cabal" -- Is the given package name provided by the package dependencies? - isDep dp = packageIdentifierName (dpPackageIdent dp) `Set.member` depNames - depNames = Set.map packageIdentifierName $ Map.keysSet deps + isDep dp = pkgName (dpPackageIdent dp) `Set.member` depNames + depNames = Set.map pkgName $ Map.keysSet deps -- Choose the best of two competing global packages (the newest version) chooseBest dp1 dp2 | getVer dp1 < getVer dp2 = dp2 | otherwise = dp1 where - getVer = packageIdentifierVersion . dpPackageIdent + getVer = pkgVersion . 
dpPackageIdent -- Are all dependencies of the given package met by the given Set of -- installed packages @@ -2108,3 +2130,7 @@ addGlobalPackages deps globals0 = -- None of the packages we checked can be added, therefore drop them all -- and return our results loop _ [] gids = gids + +ttPackageLocation :: TaskType -> Maybe PackageLocationImmutable +ttPackageLocation TTFilePath{} = Nothing +ttPackageLocation (TTRemote _ _ pkgloc) = Just pkgloc diff --git a/src/Stack/Build/Haddock.hs b/src/Stack/Build/Haddock.hs index 61fb39e6c1..fc7e99e644 100644 --- a/src/Stack/Build/Haddock.hs +++ b/src/Stack/Build/Haddock.hs @@ -33,8 +33,6 @@ import Stack.Types.Compiler import Stack.Types.Config import Stack.Types.GhcPkgId import Stack.Types.Package -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName import Stack.Types.Runner import qualified System.FilePath as FP import RIO.Process diff --git a/src/Stack/Build/Installed.hs b/src/Stack/Build/Installed.hs index 28cf6e44d8..9aab51f86d 100644 --- a/src/Stack/Build/Installed.hs +++ b/src/Stack/Build/Installed.hs @@ -14,7 +14,7 @@ module Stack.Build.Installed import Data.Conduit import qualified Data.Conduit.List as CL import qualified Data.Foldable as F -import qualified Data.HashSet as HashSet +import qualified Data.Set as Set import Data.List import qualified Data.Map.Strict as Map import Path @@ -28,9 +28,6 @@ import Stack.Types.Config import Stack.Types.GhcPkgId import Stack.Types.Package import Stack.Types.PackageDump -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName -import Stack.Types.Version -- | Options for 'getInstalled'. data GetInstalledOpts = GetInstalledOpts @@ -171,19 +168,19 @@ processLoadResult _ _ (Allowed, lh) = return (Just lh) processLoadResult _ True (WrongVersion actual wanted, lh) -- Allow some packages in the ghcjs global DB to have the wrong -- versions. Treat them as wired-ins by setting deps to []. 
- | fst (lhPair lh) `HashSet.member` ghcjsBootPackages = do + | fst (lhPair lh) `Set.member` ghcjsBootPackages = do logWarn $ "Ignoring that the GHCJS boot package \"" <> - display (packageNameText (fst (lhPair lh))) <> + fromString (packageNameString (fst (lhPair lh))) <> "\" has a different version, " <> - display (versionText actual) <> + fromString (versionString actual) <> ", than the resolver's wanted version, " <> - display (versionText wanted) + fromString (versionString wanted) return (Just lh) processLoadResult mdb _ (reason, lh) = do logDebug $ "Ignoring package " <> - display (packageNameText (fst (lhPair lh))) <> + fromString (packageNameString (fst (lhPair lh))) <> maybe mempty (\db -> ", from " <> displayShow db <> ",") mdb <> " due to" <> case reason of @@ -195,9 +192,9 @@ processLoadResult mdb _ (reason, lh) = do WrongLocation mloc loc -> " wrong location: " <> displayShow (mloc, loc) WrongVersion actual wanted -> " wanting version " <> - display (versionText wanted) <> + fromString (versionString wanted) <> " instead of " <> - display (versionText actual) + fromString (versionString actual) return Nothing data Allowed @@ -278,7 +275,7 @@ toLoadHelper mloc dp = LoadHelper -- minor versions of GHC, where the dependencies of wired-in -- packages may change slightly and therefore not match the -- snapshot. 
- if name `HashSet.member` wiredInPackages + if name `Set.member` wiredInPackages then [] else dpDepends dp , lhPair = (name, (toPackageLocation mloc, Library ident gid (Right <$> dpLicense dp))) diff --git a/src/Stack/Build/Source.hs b/src/Stack/Build/Source.hs index 2c6b0f0f15..abb8f2014a 100644 --- a/src/Stack/Build/Source.hs +++ b/src/Stack/Build/Source.hs @@ -16,13 +16,10 @@ module Stack.Build.Source ) where import Stack.Prelude -import Crypto.Hash (Digest, SHA256(..)) -import Crypto.Hash.Conduit (sinkHash) -import qualified Data.ByteArray as Mem (convert) +import qualified Pantry.SHA256 as SHA256 import qualified Data.ByteString as S -import Data.Conduit (ZipSink (..)) +import Conduit (ZipSink (..), withSourceFile) import qualified Data.Conduit.List as CL -import qualified Data.HashSet as HashSet import Data.List import qualified Data.Map as Map import qualified Data.Map.Strict as M @@ -32,14 +29,11 @@ import Stack.Build.Target import Stack.Config (getLocalPackages) import Stack.Constants (wiredInPackages) import Stack.Package -import Stack.PackageLocation import Stack.Types.Build import Stack.Types.BuildPlan import Stack.Types.Config -import Stack.Types.FlagName import Stack.Types.NamedComponent import Stack.Types.Package -import Stack.Types.PackageName import qualified System.Directory as D import System.FilePath (takeFileName) import System.IO.Error (isDoesNotExistError) @@ -92,19 +86,20 @@ loadSourceMapFull needTargets boptsCli = do let configOpts = getGhcOptions bconfig boptsCli n False False case lpiLocation lpi of -- NOTE: configOpts includes lpiGhcOptions for now, this may get refactored soon - PLIndex pir -> return $ PSIndex loc (lpiFlags lpi) configOpts pir - PLOther pl -> do - root <- view projectRootL - lpv <- parseSingleCabalFile root True pl + PLImmutable pkgloc -> do + ident <- getPackageLocationIdent pkgloc + return $ PSRemote loc (lpiFlags lpi) configOpts pkgloc ident + PLMutable dir -> do + lpv <- mkLocalPackageView YesPrintWarnings dir lp' 
<- loadLocalPackage False boptsCli targets (n, lpv) - return $ PSFiles lp' loc + return $ PSFilePath lp' loc sourceMap' <- Map.unions <$> sequence - [ return $ Map.fromList $ map (\lp' -> (packageName $ lpPackage lp', PSFiles lp' Local)) locals + [ return $ Map.fromList $ map (\lp' -> (packageName $ lpPackage lp', PSFilePath lp' Local)) locals , sequence $ Map.mapWithKey (goLPI Local) localDeps , sequence $ Map.mapWithKey (goLPI Snap) (lsPackages ls) ] let sourceMap = sourceMap' - `Map.difference` Map.fromList (map (, ()) (HashSet.toList wiredInPackages)) + `Map.difference` Map.fromList (map (, ()) (toList wiredInPackages)) return ( targets @@ -188,15 +183,16 @@ loadLocalPackage isLocal boptsCli targets (name, lpv) = do let mtarget = Map.lookup name targets config <- getPackageConfig boptsCli name (isJust mtarget) isLocal bopts <- view buildOptsL + mcurator <- view $ buildConfigL.to bcCurator let (exeCandidates, testCandidates, benchCandidates) = case mtarget of Just (TargetComps comps) -> splitComponents $ Set.toList comps Just (TargetAll _packageType) -> ( packageExes pkg - , if boptsTests bopts + , if boptsTests bopts && maybe True (Set.notMember name . curatorSkipTest) mcurator then Map.keysSet (packageTests pkg) else Set.empty - , if boptsBenchmarks bopts + , if boptsBenchmarks bopts && maybe True (Set.notMember name . 
curatorSkipBenchmark) mcurator then packageBenchmarks pkg else Set.empty ) @@ -302,7 +298,6 @@ loadLocalPackage isLocal boptsCli targets (name, lpv) = do , lpDirtyFiles = dirtyFiles , lpNewBuildCaches = newBuildCaches , lpCabalFile = lpvCabalFP lpv - , lpDir = lpvRoot lpv , lpWanted = isWanted , lpComponents = nonLibComponents -- TODO: refactor this so that it's easier to be sure that these @@ -315,7 +310,6 @@ loadLocalPackage isLocal boptsCli targets (name, lpv) = do (exes `Set.difference` packageExes pkg) (tests `Set.difference` Map.keysSet (packageTests pkg)) (benches `Set.difference` packageBenchmarks pkg) - , lpLocation = lpvLoc lpv } -- | Ensure that the flags specified in the stack.yaml file and on the command @@ -323,7 +317,7 @@ loadLocalPackage isLocal boptsCli targets (name, lpv) = do checkFlagsUsed :: (MonadThrow m, MonadReader env m, HasBuildConfig env) => BuildOptsCLI -> [LocalPackage] - -> Map PackageName (LoadedPackageInfo (PackageLocationIndex FilePath)) -- ^ local deps + -> Map PackageName (LoadedPackageInfo PackageLocation) -- ^ local deps -> Map PackageName snapshot -- ^ snapshot, for error messages -> m () checkFlagsUsed boptsCli lps extraDeps snapshot = do @@ -471,11 +465,11 @@ calcFci modTime' fp = liftIO $ <$> ZipSink (CL.fold (\x y -> x + fromIntegral (S.length y)) 0) - <*> ZipSink sinkHash) + <*> ZipSink SHA256.sinkHash) return FileCacheInfo { fciModTime = modTime' , fciSize = size - , fciHash = Mem.convert (digest :: Digest SHA256) + , fciHash = digest } checkComponentsBuildable :: MonadThrow m => [LocalPackage] -> m () diff --git a/src/Stack/Build/Target.hs b/src/Stack/Build/Target.hs index 64b2a22ded..e488fc354e 100644 --- a/src/Stack/Build/Target.hs +++ b/src/Stack/Build/Target.hs @@ -71,23 +71,17 @@ module Stack.Build.Target ) where import Stack.Prelude -import qualified Data.HashMap.Strict as HashMap import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Text as T -import 
Distribution.PackageDescription (GenericPackageDescription, package, packageDescription) +import Distribution.PackageDescription (GenericPackageDescription) import Path import Path.Extra (rejectMissingDir) import Path.IO import Stack.Config (getLocalPackages) -import Stack.PackageIndex -import Stack.PackageLocation import Stack.Snapshot (calculatePackagePromotion) import Stack.Types.Config import Stack.Types.NamedComponent -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName -import Stack.Types.Version import Stack.Types.Build import Stack.Types.BuildPlan import Stack.Types.GhcPkgId @@ -109,7 +103,7 @@ getRawInput boptscli locals = textTargets = -- Handle the no targets case, which means we pass in the names of all project packages if null textTargets' - then map packageNameText (Map.keys locals) + then map (T.pack . packageNameString) (Map.keys locals) else textTargets' in (textTargets', map RawInput textTargets) @@ -173,8 +167,8 @@ parseRawTargetDirs root locals ri = -- directory. 
parseRawTarget :: Text -> Maybe RawTarget parseRawTarget t = - (RTPackageIdentifier <$> parsePackageIdentifier t) - <|> (RTPackage <$> parsePackageNameFromString s) + (RTPackageIdentifier <$> parsePackageIdentifier s) + <|> (RTPackage <$> parsePackageName s) <|> (RTComponent <$> T.stripPrefix ":" t) <|> parsePackageComponent where @@ -183,13 +177,13 @@ parseRawTarget t = parsePackageComponent = case T.splitOn ":" t of [pname, "lib"] - | Just pname' <- parsePackageNameFromString (T.unpack pname) -> + | Just pname' <- parsePackageName (T.unpack pname) -> Just $ RTPackageComponent pname' $ ResolvedComponent CLib [pname, cname] - | Just pname' <- parsePackageNameFromString (T.unpack pname) -> + | Just pname' <- parsePackageName (T.unpack pname) -> Just $ RTPackageComponent pname' $ UnresolvedComponent cname [pname, typ, cname] - | Just pname' <- parsePackageNameFromString (T.unpack pname) + | Just pname' <- parsePackageName (T.unpack pname) , Just wrapper <- parseCompType typ -> Just $ RTPackageComponent pname' $ ResolvedComponent $ wrapper cname _ -> Nothing @@ -210,7 +204,7 @@ data ResolveResult = ResolveResult , rrRaw :: !RawInput , rrComponent :: !(Maybe NamedComponent) -- ^ Was a concrete component specified? - , rrAddedDep :: !(Maybe Version) + , rrAddedDep :: !(Maybe PackageLocationImmutable) -- ^ Only if we're adding this as a dependency , rrPackageType :: !PackageType } @@ -220,8 +214,8 @@ data ResolveResult = ResolveResult resolveRawTarget :: forall env. 
HasConfig env => Map PackageName (LoadedPackageInfo GhcPkgId) -- ^ globals - -> Map PackageName (LoadedPackageInfo (PackageLocationIndex FilePath)) -- ^ snapshot - -> Map PackageName (GenericPackageDescription, PackageLocationIndex FilePath) -- ^ local deps + -> Map PackageName (LoadedPackageInfo PackageLocation) -- ^ snapshot + -> Map PackageName (GenericPackageDescription, PackageLocation) -- ^ local deps -> Map PackageName LocalPackageView -- ^ project packages -> (RawInput, RawTarget) -> RIO env (Either Text ResolveResult) @@ -319,7 +313,7 @@ resolveRawTarget globals snap deps locals (ri, rt) = , rrPackageType = Dependency } | otherwise = do - mversion <- getLatestVersion name + mversion <- getLatestHackageVersion name UsePreferredVersions return $ case mversion of -- This is actually an error case. We _could_ return a -- Left value here, but it turns out to be better to defer @@ -334,20 +328,22 @@ resolveRawTarget globals snap deps locals (ri, rt) = , rrAddedDep = Nothing , rrPackageType = Dependency } - Just version -> Right ResolveResult + Just pir -> Right ResolveResult { rrName = name , rrRaw = ri , rrComponent = Nothing - , rrAddedDep = Just version + , rrAddedDep = Just $ PLIHackage pir Nothing , rrPackageType = Dependency } - where - getLatestVersion pn = - fmap fst . Set.maxView . Set.fromList . HashMap.keys <$> getPackageVersions pn + + -- Note that we use CFILatest below, even though it's + -- non-reproducible, to avoid user confusion. In any event, + -- reproducible builds should be done by updating your config + -- files! go (RTPackageIdentifier ident@(PackageIdentifier name version)) | Map.member name locals = return $ Left $ T.concat - [ packageNameText name + [ tshow (packageNameString name) , " target has a specific version number, but it is a local package." , "\nTo avoid confusion, we will not install the specified version or build the local one." , "\nTo build the local package, specify the target without an explicit version." 
@@ -356,7 +352,7 @@ resolveRawTarget globals snap deps locals (ri, rt) = case Map.lookup name allLocs of -- Installing it from the package index, so we're cool -- with overriding it if necessary - Just (PLIndex (PackageIdentifierRevision (PackageIdentifier _name versionLoc) _mcfi)) -> Right ResolveResult + Just (PLImmutable (PLIHackage (PackageIdentifierRevision _name versionLoc _mcfi) _mtree)) -> Right ResolveResult { rrName = name , rrRaw = ri , rrComponent = Nothing @@ -366,14 +362,14 @@ resolveRawTarget globals snap deps locals (ri, rt) = -- version we have then Nothing -- OK, we'll override it - else Just version + else Just $ PLIHackage (PackageIdentifierRevision name version CFILatest) Nothing , rrPackageType = Dependency } -- The package was coming from something besides the -- index, so refuse to do the override - Just (PLOther loc') -> Left $ T.concat + Just loc' -> Left $ T.concat [ "Package with identifier was targeted on the command line: " - , packageIdentifierText ident + , T.pack $ packageIdentifierString ident , ", but it was specified from a non-index location: " , T.pack $ show loc' , ".\nRecommendation: add the correctly desired version to extra-deps." 
@@ -383,17 +379,17 @@ resolveRawTarget globals snap deps locals (ri, rt) = { rrName = name , rrRaw = ri , rrComponent = Nothing - , rrAddedDep = Just version + , rrAddedDep = Just $ PLIHackage (PackageIdentifierRevision name version CFILatest) Nothing , rrPackageType = Dependency } where - allLocs :: Map PackageName (PackageLocationIndex FilePath) + allLocs :: Map PackageName PackageLocation allLocs = Map.unions [ Map.mapWithKey - (\name' lpi -> PLIndex $ PackageIdentifierRevision - (PackageIdentifier name' (lpiVersion lpi)) - CFILatest) + (\name' lpi -> PLImmutable $ PLIHackage + (PackageIdentifierRevision name' (lpiVersion lpi) CFILatest) + Nothing) globals , Map.map lpiLocation snap , Map.map snd deps @@ -416,14 +412,13 @@ data PackageType = ProjectPackage | Dependency combineResolveResults :: forall env. HasLogFunc env => [ResolveResult] - -> RIO env ([Text], Map PackageName Target, Map PackageName (PackageLocationIndex FilePath)) + -> RIO env ([Text], Map PackageName Target, Map PackageName PackageLocationImmutable) combineResolveResults results = do addedDeps <- fmap Map.unions $ forM results $ \result -> case rrAddedDep result of Nothing -> return Map.empty - Just version -> do - let ident = PackageIdentifier (rrName result) version - return $ Map.singleton (rrName result) $ PLIndex $ PackageIdentifierRevision ident CFILatest + Just pl -> do + return $ Map.singleton (rrName result) pl let m0 = Map.unionsWith (++) $ map (\rr -> Map.singleton (rrName rr) [rr]) results (errs, ms) = partitionEithers $ flip map (Map.toList m0) $ \(name, rrs) -> @@ -437,7 +432,7 @@ combineResolveResults results = do | all isJust mcomps -> Right $ Map.singleton name $ TargetComps $ Set.fromList $ catMaybes mcomps | otherwise -> Left $ T.concat [ "The package " - , packageNameText name + , T.pack $ packageNameString name , " was specified in multiple, incompatible ways: " , T.unwords $ map (unRawInput . 
rrRaw) rrs ] @@ -454,7 +449,7 @@ parseTargets -> BuildOptsCLI -> RIO env ( LoadedSnapshot -- upgraded snapshot, with some packages possibly moved to local - , Map PackageName (LoadedPackageInfo (PackageLocationIndex FilePath)) -- all local deps + , Map PackageName (LoadedPackageInfo PackageLocation) -- all local deps , Map PackageName Target ) parseTargets needTargets boptscli = do @@ -492,8 +487,6 @@ parseTargets needTargets boptscli = do | otherwise -> throwIO $ TargetParseException ["The specified targets matched no packages"] - root <- view projectRootL - let dropMaybeKey (Nothing, _) = Map.empty dropMaybeKey (Just key, value) = Map.singleton key value flags = Map.unionWith Map.union @@ -512,17 +505,17 @@ parseTargets needTargets boptscli = do (globals', snapshots, locals') <- do addedDeps' <- fmap Map.fromList $ forM (Map.toList addedDeps) $ \(name, loc) -> do - gpd <- parseSingleCabalFileIndex root loc - return (name, (gpd, loc, Nothing)) + gpd <- loadCabalFileImmutable loc + return (name, (gpd, PLImmutable loc, Nothing)) -- Calculate a list of all of the locals, based on the project -- packages, local dependencies, and added deps found from the -- command line - let allLocals :: Map PackageName (GenericPackageDescription, PackageLocationIndex FilePath, Maybe LocalPackageView) + let allLocals :: Map PackageName (GenericPackageDescription, PackageLocation, Maybe LocalPackageView) allLocals = Map.unions [ -- project packages Map.map - (\lpv -> (lpvGPD lpv, PLOther $ lpvLoc lpv, Just lpv)) + (\lpv -> (lpvGPD lpv, PLMutable $ lpvResolvedDir lpv, Just lpv)) (lpProject lp) , -- added deps take precendence over local deps addedDeps' @@ -533,7 +526,7 @@ parseTargets needTargets boptscli = do ] calculatePackagePromotion - root ls0 (Map.elems allLocals) + ls0 (Map.elems allLocals) flags hides options drops let ls = LoadedSnapshot @@ -550,9 +543,3 @@ parseTargets needTargets boptscli = do (loc, _) -> Just (name, lpi { lpiLocation = loc }) -- upgraded or local dep return 
(ls, localDeps, targets) - -gpdVersion :: GenericPackageDescription -> Version -gpdVersion gpd = - version - where - PackageIdentifier _ version = fromCabalPackageIdentifier $ package $ packageDescription gpd diff --git a/src/Stack/BuildPlan.hs b/src/Stack/BuildPlan.hs index cf068b573b..b74888c9e6 100644 --- a/src/Stack/BuildPlan.hs +++ b/src/Stack/BuildPlan.hs @@ -25,12 +25,11 @@ module Stack.BuildPlan import Stack.Prelude hiding (Display (..)) import qualified Data.Foldable as F -import qualified Data.HashSet as HashSet +import qualified Data.Set as Set import Data.List (intercalate) import Data.List.NonEmpty (NonEmpty(..)) import qualified Data.List.NonEmpty as NonEmpty import qualified Data.Map as Map -import qualified Data.Set as Set import qualified Data.Text as T import qualified Distribution.Package as C import Distribution.PackageDescription (GenericPackageDescription, @@ -45,9 +44,6 @@ import Stack.Constants import Stack.Package import Stack.Snapshot import Stack.Types.BuildPlan -import Stack.Types.FlagName -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName import Stack.Types.Version import Stack.Types.Config import Stack.Types.Compiler @@ -127,7 +123,7 @@ instance Show BuildPlanException where [ packageNameString dep , " (used by " , intercalate ", " - $ map (packageNameString . packageIdentifierName) + $ map (packageNameString . pkgName) $ Set.toList users , ")" ] @@ -142,15 +138,13 @@ instance Show BuildPlanException where ", because no 'compiler' or 'resolver' is specified." gpdPackages :: [GenericPackageDescription] -> Map PackageName Version -gpdPackages gpds = Map.fromList $ - map (fromCabalIdent . C.package . C.packageDescription) gpds +gpdPackages = Map.fromList . map (toPair . C.package . 
C.packageDescription) where - fromCabalIdent (C.PackageIdentifier name version) = - (fromCabalPackageName name, fromCabalVersion version) + toPair (C.PackageIdentifier name version) = (name, version) gpdPackageDeps :: GenericPackageDescription - -> CompilerVersion 'CVActual + -> ActualCompiler -> Platform -> Map FlagName Bool -> Map PackageName VersionRange @@ -188,10 +182,9 @@ removeSrcPkgDefaultFlags gpds flags = let tuples = map getDefault (C.genPackageFlags gpd) in Map.singleton (gpdPackageName gpd) (Map.fromList tuples) - flagName' = fromCabalFlagName . C.flagName getDefault f - | C.flagDefault f = (flagName' f, True) - | otherwise = (flagName' f, False) + | C.flagDefault f = (C.flagName f, True) + | otherwise = (C.flagName f, False) -- | Find the set of @FlagName@s necessary to get the given -- @GenericPackageDescription@ to compile against the given @BuildPlan@. Will @@ -199,7 +192,7 @@ removeSrcPkgDefaultFlags gpds flags = -- Returns the plan which produces least number of dep errors selectPackageBuildPlan :: Platform - -> CompilerVersion 'CVActual + -> ActualCompiler -> Map PackageName Version -> GenericPackageDescription -> (Map PackageName (Map FlagName Bool), DepErrors) @@ -232,13 +225,13 @@ selectPackageBuildPlan platform compiler pool gpd = | flagManual f = (fname, flagDefault f) :| [] | flagDefault f = (fname, True) :| [(fname, False)] | otherwise = (fname, False) :| [(fname, True)] - where fname = (fromCabalFlagName . flagName) f + where fname = flagName f -- | Check whether with the given set of flags a package's dependency -- constraints can be satisfied against a given build plan or pool of packages. checkPackageBuildPlan :: Platform - -> CompilerVersion 'CVActual + -> ActualCompiler -> Map PackageName Version -> Map FlagName Bool -> GenericPackageDescription @@ -292,7 +285,7 @@ combineDepError (DepError a x) (DepError b y) = -- will be chosen automatically. 
checkBundleBuildPlan :: Platform - -> CompilerVersion 'CVActual + -> ActualCompiler -> Map PackageName Version -> Maybe (Map PackageName (Map FlagName Bool)) -> [GenericPackageDescription] @@ -316,7 +309,7 @@ data BuildPlanCheck = BuildPlanCheckOk (Map PackageName (Map FlagName Bool)) | BuildPlanCheckPartial (Map PackageName (Map FlagName Bool)) DepErrors | BuildPlanCheckFail (Map PackageName (Map FlagName Bool)) DepErrors - (CompilerVersion 'CVActual) + ActualCompiler -- | Compare 'BuildPlanCheck', where GT means a better plan. compareBuildPlanCheck :: BuildPlanCheck -> BuildPlanCheck -> Ordering @@ -342,15 +335,14 @@ instance Show BuildPlanCheck where -- the packages. checkSnapBuildPlan :: (HasConfig env, HasGHCVariant env) - => Path Abs Dir -- ^ project root, used for checking out necessary files - -> [GenericPackageDescription] + => [GenericPackageDescription] -> Maybe (Map PackageName (Map FlagName Bool)) -> SnapshotDef - -> Maybe (CompilerVersion 'CVActual) + -> Maybe ActualCompiler -> RIO env BuildPlanCheck -checkSnapBuildPlan root gpds flags snapshotDef mactualCompiler = do +checkSnapBuildPlan gpds flags snapshotDef mactualCompiler = do platform <- view platformL - rs <- loadSnapshot mactualCompiler root snapshotDef + rs <- loadSnapshot mactualCompiler snapshotDef let compiler = lsCompilerVersion rs @@ -372,22 +364,23 @@ checkSnapBuildPlan root gpds flags snapshotDef mactualCompiler = do -- FIXME not sure how to handle ghcjs boot packages | otherwise = Map.empty - isGhcWiredIn p _ = p `HashSet.member` wiredInPackages + isGhcWiredIn p _ = p `Set.member` wiredInPackages ghcErrors = Map.filterWithKey isGhcWiredIn -- | Find a snapshot and set of flags that is compatible with and matches as -- best as possible with the given 'GenericPackageDescription's. 
selectBestSnapshot :: (HasConfig env, HasGHCVariant env) - => Path Abs Dir -- ^ project root, used for checking out necessary files - -> [GenericPackageDescription] + => [GenericPackageDescription] -> NonEmpty SnapName -> RIO env (SnapshotDef, BuildPlanCheck) -selectBestSnapshot root gpds snaps = do +selectBestSnapshot gpds snaps = do logInfo $ "Selecting the best among " <> displayShow (NonEmpty.length snaps) <> " snapshots...\n" - F.foldr1 go (NonEmpty.map (getResult <=< loadResolver . ResolverStackage) snaps) + let resolverStackage (LTS x y) = ltsSnapshotLocation x y + resolverStackage (Nightly d) = nightlySnapshotLocation d + F.foldr1 go (NonEmpty.map (getResult <=< flip loadResolver Nothing . resolverStackage) snaps) where go mold mnew = do old@(_snap, bpc) <- mold @@ -396,12 +389,8 @@ selectBestSnapshot root gpds snaps = do _ -> fmap (betterSnap old) mnew getResult snap = do - result <- checkSnapBuildPlan root gpds Nothing snap - -- We know that we're only dealing with ResolverStackage - -- here, where we can rely on the global package hints. - -- Therefore, we don't use an actual compiler. For more - -- info, see comments on - -- Stack.Solver.checkSnapBuildPlanActual. + result <- checkSnapBuildPlan gpds Nothing snap + -- Rely on global package hints. 
Nothing reportResult result snap return (snap, result) @@ -424,12 +413,12 @@ selectBestSnapshot root gpds snaps = do indent t = T.unlines $ fmap (" " <>) (T.lines t) -showItems :: Show a => [a] -> Text +showItems :: [String] -> Text showItems items = T.concat (map formatItem items) where formatItem item = T.concat [ " - " - , T.pack $ show item + , T.pack item , "\n" ] @@ -449,12 +438,12 @@ showPackageFlags pkg fl = formatFlags (f, v) = show f ++ " = " ++ show v showMapPackages :: Map PackageName a -> Text -showMapPackages mp = showItems $ Map.keys mp +showMapPackages mp = showItems $ map packageNameString $ Map.keys mp showCompilerErrors :: Map PackageName (Map FlagName Bool) -> DepErrors - -> CompilerVersion 'CVActual + -> ActualCompiler -> Text showCompilerErrors flags errs compiler = T.concat diff --git a/src/Stack/Clean.hs b/src/Stack/Clean.hs index 222949ee6a..81ef151d61 100644 --- a/src/Stack/Clean.hs +++ b/src/Stack/Clean.hs @@ -17,7 +17,6 @@ import qualified Data.Map.Strict as Map import Path.IO (ignoringAbsence, removeDirRecur) import Stack.Config (getLocalPackages) import Stack.Constants.Config (distDirFromDir, workDirFromDir) -import Stack.Types.PackageName import Stack.Types.Config import System.Exit (exitFailure) @@ -45,7 +44,7 @@ dirsToDelete cleanOpts = do CleanShallow targets -> do let localPkgViews = lpProject packages localPkgNames = Map.keys localPkgViews - getPkgDir pkgName = fmap lpvRoot (Map.lookup pkgName localPkgViews) + getPkgDir pkgName' = fmap lpvRoot (Map.lookup pkgName' localPkgViews) case targets \\ localPkgNames of [] -> mapM distDirFromDir (mapMaybe getPkgDir targets) xs -> throwM (NonLocalPackages xs) diff --git a/src/Stack/Config.hs b/src/Stack/Config.hs index 211c3d4dad..4eec5c0365 100644 --- a/src/Stack/Config.hs +++ b/src/Stack/Config.hs @@ -51,7 +51,9 @@ import Control.Monad.Extra (firstJustM) import Stack.Prelude import Data.Aeson.Extended import qualified Data.ByteString as S +import Data.ByteString.Builder 
(toLazyByteString) import Data.Coerce (coerce) +import Data.IORef.RunOnce (runOnce) import qualified Data.IntMap as IntMap import qualified Data.Map as Map import qualified Data.Monoid @@ -67,6 +69,7 @@ import GHC.Conc (getNumProcessors) import Lens.Micro ((.~), lens) import Network.HTTP.StackClient (httpJSON, parseUrlThrow, getResponseBody) import Options.Applicative (Parser, strOption, long, help) +import qualified Pantry.SHA256 as SHA256 import Path import Path.Extra (toFilePathNoTrailingSep) import Path.Find (findInParents) @@ -78,17 +81,11 @@ import Stack.Config.Nix import Stack.Config.Urls import Stack.Constants import qualified Stack.Image as Image -import Stack.PackageLocation -import Stack.PackageIndex (CabalLoader (..), HasCabalLoader (..)) +import Stack.Package (mkLocalPackageView) import Stack.Snapshot -import Stack.Types.BuildPlan -import Stack.Types.Compiler import Stack.Types.Config import Stack.Types.Docker import Stack.Types.Nix -import Stack.Types.PackageName (PackageName) -import Stack.Types.PackageIdentifier -import Stack.Types.PackageIndex (IndexType (ITHackageSecurity), HackageSecurity (..)) import Stack.Types.Resolver import Stack.Types.Runner import Stack.Types.Urls @@ -166,44 +163,41 @@ getSnapshots = do -- | Turn an 'AbstractResolver' into a 'Resolver'. 
makeConcreteResolver :: HasConfig env - => Maybe (Path Abs Dir) -- ^ root of project for resolving custom relative paths - -> AbstractResolver - -> RIO env Resolver -makeConcreteResolver root (ARResolver r) = parseCustomLocation root r -makeConcreteResolver root ar = do + => AbstractResolver + -> RIO env SnapshotLocation +makeConcreteResolver (ARResolver r) = pure r +makeConcreteResolver ar = do snapshots <- getSnapshots r <- case ar of - ARResolver r -> assert False $ makeConcreteResolver root $ ARResolver r + ARResolver r -> assert False $ makeConcreteResolver (ARResolver r) ARGlobal -> do config <- view configL implicitGlobalDir <- getImplicitGlobalProjectDir config let fp = implicitGlobalDir stackDotYaml - ProjectAndConfigMonoid project _ <- - loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp + iopc <- loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp + ProjectAndConfigMonoid project _ <- liftIO iopc return $ projectResolver project - ARLatestNightly -> return $ ResolverStackage $ Nightly $ snapshotsNightly snapshots + ARLatestNightly -> return $ nightlySnapshotLocation $ snapshotsNightly snapshots ARLatestLTSMajor x -> case IntMap.lookup x $ snapshotsLts snapshots of Nothing -> throwString $ "No LTS release found with major version " ++ show x - Just y -> return $ ResolverStackage $ LTS x y + Just y -> return $ ltsSnapshotLocation x y ARLatestLTS | IntMap.null $ snapshotsLts snapshots -> throwString "No LTS releases found" | otherwise -> let (x, y) = IntMap.findMax $ snapshotsLts snapshots - in return $ ResolverStackage $ LTS x y - logInfo $ "Selected resolver: " <> display (resolverRawName r) + in return $ ltsSnapshotLocation x y + logInfo $ "Selected resolver: " <> display r return r -- | Get the latest snapshot resolver available. 
-getLatestResolver :: HasConfig env => RIO env (ResolverWith a) +getLatestResolver :: HasConfig env => RIO env SnapshotLocation getLatestResolver = do snapshots <- getSnapshots - let mlts = do - (x,y) <- listToMaybe (reverse (IntMap.toList (snapshotsLts snapshots))) - return (LTS x y) - snap = fromMaybe (Nightly (snapshotsNightly snapshots)) mlts - return (ResolverStackage snap) + let mlts = uncurry ltsSnapshotLocation <$> + listToMaybe (reverse (IntMap.toList (snapshotsLts snapshots))) + pure $ fromMaybe (nightlySnapshotLocation (snapshotsNightly snapshots)) mlts -- | Create a 'Config' value when we're not using any local -- configuration files (e.g., the script command) @@ -212,9 +206,10 @@ configNoLocalConfig => Path Abs Dir -- ^ stack root -> Maybe AbstractResolver -> ConfigMonoid - -> RIO env Config -configNoLocalConfig _ Nothing _ = throwIO NoResolverWhenUsingNoLocalConfig -configNoLocalConfig stackRoot (Just resolver) configMonoid = do + -> (Config -> RIO env a) + -> RIO env a +configNoLocalConfig _ Nothing _ _ = throwIO NoResolverWhenUsingNoLocalConfig +configNoLocalConfig stackRoot (Just resolver) configMonoid inner = do userConfigPath <- liftIO $ getFakeConfigPath stackRoot resolver configFromConfigMonoid stackRoot @@ -223,6 +218,7 @@ configNoLocalConfig stackRoot (Just resolver) configMonoid = do (Just resolver) Nothing -- project configMonoid + inner -- Interprets ConfigMonoid options. configFromConfigMonoid @@ -233,10 +229,11 @@ configFromConfigMonoid -> Maybe AbstractResolver -> Maybe (Project, Path Abs File) -> ConfigMonoid - -> RIO env Config + -> (Config -> RIO env a) + -> RIO env a configFromConfigMonoid - clStackRoot configUserConfigPath configAllowLocals mresolver - mproject ConfigMonoid{..} = do + configStackRoot configUserConfigPath configAllowLocals mresolver + mproject ConfigMonoid{..} inner = do -- If --stack-work is passed, prefer it. Otherwise, if STACK_WORK -- is set, use that. 
If neither, use the default ".stack-work" mstackWorkEnv <- liftIO $ lookupEnv stackWorkEnvVar @@ -250,28 +247,6 @@ configFromConfigMonoid _ -> return (urlsFromMonoid configMonoidUrls) let clConnectionCount = fromFirst 8 configMonoidConnectionCount configHideTHLoading = fromFirst True configMonoidHideTHLoading - clIndices = fromFirst - [PackageIndex - { indexName = IndexName "Hackage" - , indexLocation = "https://s3.amazonaws.com/hackage.fpcomplete.com/" - , indexType = ITHackageSecurity HackageSecurity - { hsKeyIds = - [ "0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d" - , "1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42" - , "280b10153a522681163658cb49f632cde3f38d768b736ddbc901d99a1a772833" - , "2a96b1889dc221c17296fcc2bb34b908ca9734376f0f361660200935916ef201" - , "2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3" - , "51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921" - , "772e9f4c7db33d251d5c6e357199c819e569d130857dc225549b40845ff0890d" - , "aa315286e6ad281ad61182235533c41e806e5a787e0b6d1e7eef3f09d137d2e9" - , "fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0" - ] - , hsKeyThreshold = 3 - } - , indexDownloadPrefix = "https://s3.amazonaws.com/hackage.fpcomplete.com/package/" - , indexRequireHashes = False - }] - configMonoidPackageIndices configGHCVariant0 = getFirst configMonoidGHCVariant configGHCBuild = getFirst configMonoidGHCBuild @@ -282,7 +257,6 @@ configFromConfigMonoid configExtraIncludeDirs = configMonoidExtraIncludeDirs configExtraLibDirs = configMonoidExtraLibDirs configOverrideGccPath = getFirst configMonoidOverrideGccPath - configOverrideHpack = maybe HpackBundled HpackCommand $ getFirst configMonoidOverrideHpack -- Only place in the codebase where platform is hard-coded. In theory -- in the future, allow it to be configured. 
@@ -308,7 +282,7 @@ configFromConfigMonoid let configBuild = buildOptsFromMonoid configMonoidBuildOpts configDocker <- - dockerOptsFromMonoid (fmap fst mproject) clStackRoot mresolver configMonoidDockerOpts + dockerOptsFromMonoid (fmap fst mproject) configStackRoot mresolver configMonoidDockerOpts configNix <- nixOptsFromMonoid configMonoidNixOpts os configSystemGHC <- @@ -332,7 +306,7 @@ configFromConfigMonoid let configProcessContextSettings _ = return origEnv configLocalProgramsBase <- case getFirst configMonoidLocalProgramsBase of - Nothing -> getDefaultLocalProgramsBase clStackRoot configPlatform origEnv + Nothing -> getDefaultLocalProgramsBase configStackRoot configPlatform origEnv Just path -> return path platformOnlyDir <- runReaderT platformOnlyRelDir (configPlatform, configPlatformVariant) let configLocalPrograms = configLocalProgramsBase platformOnlyDir @@ -375,7 +349,6 @@ configFromConfigMonoid configDumpLogs = fromFirst DumpWarningLogs configMonoidDumpLogs configSaveHackageCreds = fromFirst True configMonoidSaveHackageCreds configHackageBaseUrl = fromFirst "https://hackage.haskell.org/" configMonoidHackageBaseUrl - clIgnoreRevisionMismatch = fromFirst False configMonoidIgnoreRevisionMismatch configAllowDifferentUser <- case getFirst configMonoidAllowDifferentUser of @@ -386,9 +359,6 @@ configFromConfigMonoid configRunner' <- view runnerL - clCache <- newIORef Nothing - clUpdateRef <- newMVar True - useAnsi <- liftIO $ hSupportsANSI stderr let stylesUpdate' = runnerStylesUpdate configRunner' <> @@ -404,9 +374,18 @@ configFromConfigMonoid & processContextL .~ origEnv & stylesUpdateL .~ stylesUpdate' & useColorL .~ fromMaybe useColor' mUseColor - configCabalLoader = CabalLoader {..} - return Config {..} + hsc <- + case getFirst configMonoidPackageIndices of + Nothing -> pure defaultHackageSecurityConfig + Just [hsc] -> pure hsc + Just x -> error $ "When overriding the default package index, you must provide exactly one value, received: " ++ show x + 
withPantryConfig + (configStackRoot $(mkRelDir "pantry")) + hsc + (maybe HpackBundled HpackCommand $ getFirst configMonoidOverrideHpack) + clConnectionCount + (\configPantryConfig -> inner Config {..}) -- | Get the default location of the local programs directory. getDefaultLocalProgramsBase :: MonadThrow m @@ -441,8 +420,8 @@ instance HasConfig MiniConfig where configL = lens mcConfig (\x y -> x { mcConfig = y }) instance HasProcessContext MiniConfig where processContextL = configL.processContextL -instance HasCabalLoader MiniConfig where - cabalLoaderL = configL.cabalLoaderL +instance HasPantryConfig MiniConfig where + pantryConfigL = configL.pantryConfigL instance HasPlatform MiniConfig instance HasGHCVariant MiniConfig where ghcVariantL = lens mcGHCVariant (\x y -> x { mcGHCVariant = y }) @@ -471,11 +450,12 @@ loadConfigMaybeProject -- ^ Override resolver -> LocalConfigStatus (Project, Path Abs File, ConfigMonoid) -- ^ Project config to use, if any - -> RIO env LoadConfig -loadConfigMaybeProject configArgs mresolver mproject = do + -> (LoadConfig -> RIO env a) + -> RIO env a +loadConfigMaybeProject configArgs mresolver mproject inner = do (stackRoot, userOwnsStackRoot) <- determineStackRootAndOwnership configArgs - let loadHelper mproject' = do + let loadHelper mproject' inner2 = do userConfigPath <- getDefaultUserConfigPath stackRoot extraConfigs0 <- getExtraConfigs userConfigPath >>= mapM (\file -> loadConfigYaml (parseConfigMonoid (parent file)) file) @@ -492,33 +472,35 @@ loadConfigMaybeProject configArgs mresolver mproject = do True -- allow locals mresolver (fmap (\(x, y, _) -> (x, y)) mproject') - $ mconcat $ configArgs - : maybe id (\(_, _, projectConfig) -> (projectConfig:)) mproject' extraConfigs + (mconcat $ configArgs + : maybe id (\(_, _, projectConfig) -> (projectConfig:)) mproject' extraConfigs) + inner2 - config <- - case mproject of + let withConfig = case mproject of LCSNoConfig _ -> configNoLocalConfig stackRoot mresolver configArgs 
LCSProject project -> loadHelper $ Just project LCSNoProject -> loadHelper Nothing - unless (fromCabalVersion (mkVersion' Meta.version) `withinRange` configRequireStackVersion config) - (throwM (BadStackVersionException (configRequireStackVersion config))) - - let mprojectRoot = fmap (\(_, fp, _) -> parent fp) mproject - unless (configAllowDifferentUser config) $ do - unless userOwnsStackRoot $ - throwM (UserDoesn'tOwnDirectory stackRoot) - forM_ mprojectRoot $ \dir -> - checkOwnership (dir configWorkDir config) - - return LoadConfig - { lcConfig = config - , lcLoadBuildConfig = runRIO config . loadBuildConfig mproject mresolver - , lcProjectRoot = - case mprojectRoot of - LCSProject fp -> Just fp - LCSNoProject -> Nothing - LCSNoConfig _ -> Nothing - } + + withConfig $ \config -> do + unless (mkVersion' Meta.version `withinRange` configRequireStackVersion config) + (throwM (BadStackVersionException (configRequireStackVersion config))) + + let mprojectRoot = fmap (\(_, fp, _) -> parent fp) mproject + unless (configAllowDifferentUser config) $ do + unless userOwnsStackRoot $ + throwM (UserDoesn'tOwnDirectory stackRoot) + forM_ mprojectRoot $ \dir -> + checkOwnership (dir configWorkDir config) + + inner LoadConfig + { lcConfig = config + , lcLoadBuildConfig = runRIO config . loadBuildConfig mproject mresolver + , lcProjectRoot = + case mprojectRoot of + LCSProject fp -> Just fp + LCSNoProject -> Nothing + LCSNoConfig _ -> Nothing + } -- | Load the configuration, using current directory, environment variables, -- and defaults as necessary. 
The passed @Maybe (Path Abs File)@ is an @@ -530,15 +512,16 @@ loadConfig :: HasRunner env -- ^ Override resolver -> StackYamlLoc (Path Abs File) -- ^ Override stack.yaml - -> RIO env LoadConfig -loadConfig configArgs mresolver mstackYaml = - loadProjectConfig mstackYaml >>= loadConfigMaybeProject configArgs mresolver + -> (LoadConfig -> RIO env a) + -> RIO env a +loadConfig configArgs mresolver mstackYaml inner = + loadProjectConfig mstackYaml >>= \x -> loadConfigMaybeProject configArgs mresolver x inner -- | Load the build configuration, adds build-specific values to config loaded by @loadConfig@. -- values. loadBuildConfig :: LocalConfigStatus (Project, Path Abs File, ConfigMonoid) -> Maybe AbstractResolver -- override resolver - -> Maybe (CompilerVersion 'CVWanted) -- override compiler + -> Maybe WantedCompiler -- override compiler -> RIO Config BuildConfig loadBuildConfig mproject maresolver mcompiler = do config <- ask @@ -552,29 +535,8 @@ loadBuildConfig mproject maresolver mcompiler = do -- paths). We consider the current working directory to be the -- correct base. Let's calculate the mresolver first. mresolver <- forM maresolver $ \aresolver -> do - -- For display purposes only - let name = - case aresolver of - ARResolver resolver -> resolverRawName resolver - ARLatestNightly -> "nightly" - ARLatestLTS -> "lts" - ARLatestLTSMajor x -> T.pack $ "lts-" ++ show x - ARGlobal -> "global" - logDebug ("Using resolver: " <> display name <> " specified on command line") - - -- In order to resolve custom snapshots, we need a base - -- directory to deal with relative paths. For the case of - -- LCSNoConfig, we use the parent directory provided. This is - -- because, when running the script interpreter, we assume the - -- resolver is in fact coming from the file contents itself and - -- not the command line. For the project and non project cases, - -- however, we use the current directory. 
- base <- - case mproject of - LCSNoConfig parentDir -> return parentDir - LCSProject _ -> resolveDir' "." - LCSNoProject -> resolveDir' "." - makeConcreteResolver (Just base) aresolver + logDebug ("Using resolver: " <> display aresolver <> " specified on command line") + makeConcreteResolver aresolver (project', stackYamlFP) <- case mproject of LCSProject (project, fp, _) -> do @@ -594,13 +556,14 @@ loadBuildConfig mproject maresolver mcompiler = do exists <- doesFileExist dest if exists then do - ProjectAndConfigMonoid project _ <- loadConfigYaml (parseProjectAndConfigMonoid destDir) dest + iopc <- loadConfigYaml (parseProjectAndConfigMonoid destDir) dest + ProjectAndConfigMonoid project _ <- liftIO iopc when (view terminalL config) $ case maresolver of Nothing -> logDebug $ "Using resolver: " <> - display (resolverRawName (projectResolver project)) <> + display (projectResolver project) <> " from implicit global project's config file: " <> fromString dest' Just _ -> return () @@ -625,21 +588,26 @@ loadBuildConfig mproject maresolver mcompiler = do , "outside of a real project.\n" ] return (p, dest) let project = project' - { projectCompiler = mcompiler <|> projectCompiler project' - , projectResolver = fromMaybe (projectResolver project') mresolver + { projectResolver = fromMaybe (projectResolver project') mresolver } - sd0 <- runRIO config $ loadResolver $ projectResolver project - let sd = maybe id setCompilerVersion (projectCompiler project) sd0 + sd <- runRIO config $ loadResolver (projectResolver project) mcompiler extraPackageDBs <- mapM resolveDir' (projectExtraPackageDBs project) + packages <- for (projectPackages project) $ \fp@(RelFilePath t) -> do + abs' <- resolveDir (parent stackYamlFP) (T.unpack t) + let resolved = ResolvedPath fp abs' + (resolved,) <$> runOnce (mkLocalPackageView YesPrintWarnings resolved) + + let deps = projectDependencies project + return BuildConfig { bcConfig = config , bcSnapshotDef = sd , bcGHCVariant = 
configGHCVariantDefault config - , bcPackages = projectPackages project - , bcDependencies = projectDependencies project + , bcPackages = packages + , bcDependencies = deps , bcExtraPackageDBs = extraPackageDBs , bcStackYaml = stackYamlFP , bcFlags = projectFlags project @@ -648,17 +616,18 @@ loadBuildConfig mproject maresolver mcompiler = do LCSNoProject -> True LCSProject _ -> False LCSNoConfig _ -> False + , bcCurator = projectCurator project } where - getEmptyProject :: Maybe Resolver -> RIO Config Project + getEmptyProject :: Maybe SnapshotLocation -> RIO Config Project getEmptyProject mresolver = do r <- case mresolver of Just resolver -> do - logInfo ("Using resolver: " <> display (resolverRawName resolver) <> " specified on command line") + logInfo ("Using resolver: " <> display resolver <> " specified on command line") return resolver Nothing -> do r'' <- getLatestResolver - logInfo ("Using latest snapshot resolver: " <> display (resolverRawName r'')) + logInfo ("Using latest snapshot resolver: " <> display r'') return r'' return Project { projectUserMsg = Nothing @@ -668,6 +637,7 @@ loadBuildConfig mproject maresolver mcompiler = do , projectResolver = r , projectCompiler = Nothing , projectExtraPackageDBs = [] + , projectCurator = Nothing } -- | Get packages from EnvConfig, downloading and cloning as necessary. @@ -679,24 +649,17 @@ getLocalPackages = do case mcached of Just cached -> return cached Nothing -> do - root <- view projectRootL bc <- view buildConfigL - packages <- do - let withName lpv = (lpvName lpv, lpv) - map withName . concat <$> mapM (parseMultiCabalFiles root True) (bcPackages bc) + packages <- for (bcPackages bc) $ fmap (lpvName &&& id) . liftIO . snd - let wrapGPD (gpd, loc) = - let PackageIdentifier name _version = - fromCabalPackageIdentifier - $ C.package - $ C.packageDescription gpd - in (name, (gpd, loc)) - deps <- map wrapGPD . 
concat - <$> mapM (parseMultiCabalFilesIndex root) (bcDependencies bc) + deps <- forM (bcDependencies bc) $ \plp -> do + gpd <- loadCabalFile plp + let name = pkgName $ C.package $ C.packageDescription gpd + pure (name, (gpd, plp)) checkDuplicateNames $ - map (second (PLOther . lpvLoc)) packages ++ + map (second (PLMutable . lpvResolvedDir)) packages ++ map (second snd) deps return LocalPackages @@ -706,7 +669,7 @@ getLocalPackages = do -- | Check if there are any duplicate package names and, if so, throw an -- exception. -checkDuplicateNames :: MonadThrow m => [(PackageName, PackageLocationIndex FilePath)] -> m () +checkDuplicateNames :: MonadThrow m => [(PackageName, PackageLocation)] -> m () checkDuplicateNames locals = case filter hasMultiples $ Map.toList $ Map.fromListWith (++) $ map (second return) locals of [] -> return () @@ -901,7 +864,8 @@ loadProjectConfig mstackYaml = do return (LCSNoConfig mparentDir) where load fp = do - ProjectAndConfigMonoid project config <- loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp + iopc <- loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp + ProjectAndConfigMonoid project config <- liftIO iopc return (project, fp, config) -- | Get the location of the default stack configuration file. @@ -949,7 +913,7 @@ getFakeConfigPath getFakeConfigPath stackRoot ar = do asString <- case ar of - ARResolver r -> return $ T.unpack $ resolverRawName r + ARResolver r -> pure $ T.unpack $ SHA256.toHexText $ SHA256.hashLazyBytes $ toLazyByteString $ getUtf8Builder $ display r _ -> throwM $ InvalidResolverForNoLocalConfig $ show ar -- This takeWhile is an ugly hack. We don't actually need this -- path for anything useful. 
But if we take the raw value for diff --git a/src/Stack/Config/Docker.hs b/src/Stack/Config/Docker.hs index 213c0cbf09..f670f91c66 100644 --- a/src/Stack/Config/Docker.hs +++ b/src/Stack/Config/Docker.hs @@ -1,4 +1,5 @@ {-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE CPP, DeriveDataTypeable, RecordWildCards, TemplateHaskell #-} -- | Docker configuration @@ -7,6 +8,7 @@ module Stack.Config.Docker where import Stack.Prelude import Data.List (find) import qualified Data.Text as T +import Data.Text.Read (decimal) import Distribution.Version (simplifyVersionRange) import Path import Stack.Types.Version @@ -14,6 +16,32 @@ import Stack.Types.Config import Stack.Types.Docker import Stack.Types.Resolver +-- | Add a default Docker tag name to a given base image. +addDefaultTag + :: MonadThrow m + => String -- ^ base + -> Maybe Project + -> Maybe AbstractResolver + -> m String +addDefaultTag base mproject maresolver = do + let exc = throwM $ ResolverNotSupportedException mproject maresolver + onUrl url = maybe exc pure $ do + (x, y) <- parseLtsName url + Just $ concat + [ base + , ":lts-" + , show x + , "." + , show y + ] + case maresolver of + Just (ARResolver (SLUrl url _)) -> onUrl url + Just _aresolver -> exc + Nothing -> + case projectResolver <$> mproject of + Just (SLUrl url _) -> onUrl url + _ -> exc + -- | Interprets DockerOptsMonoid options. 
dockerOptsFromMonoid :: MonadThrow m @@ -23,39 +51,17 @@ dockerOptsFromMonoid -> DockerOptsMonoid -> m DockerOpts dockerOptsFromMonoid mproject stackRoot maresolver DockerOptsMonoid{..} = do + let dockerImage = + case getFirst dockerMonoidRepoOrImage of + Nothing -> addDefaultTag "fpco/stack-build" mproject maresolver + Just (DockerMonoidImage image) -> pure image + Just (DockerMonoidRepo repo) -> + case find (`elem` (":@" :: String)) repo of + Nothing -> addDefaultTag repo mproject maresolver + -- Repo already specified a tag or digest, so don't append default + Just _ -> pure repo let dockerEnable = fromFirst (getAny dockerMonoidDefaultEnable) dockerMonoidEnable - dockerImage = - let mresolver = - case maresolver of - Just (ARResolver resolver) -> - Just (void resolver) - Just aresolver -> - impureThrow - (ResolverNotSupportedException $ - show aresolver) - Nothing -> - fmap (void . projectResolver) mproject - defaultTag = - case mresolver of - Nothing -> "" - Just resolver -> - case resolver of - ResolverStackage n@(LTS _ _) -> - ":" ++ T.unpack (renderSnapName n) - _ -> - impureThrow - (ResolverNotSupportedException $ - show resolver) - in case getFirst dockerMonoidRepoOrImage of - Nothing -> "fpco/stack-build" ++ defaultTag - Just (DockerMonoidImage image) -> image - Just (DockerMonoidRepo repo) -> - case find (`elem` (":@" :: String)) repo of - Just _ -- Repo already specified a tag or digest, so don't append default - -> - repo - Nothing -> repo ++ defaultTag dockerRegistryLogin = fromFirst (isJust (emptyToNothing (getFirst dockerMonoidRegistryUsername))) @@ -82,7 +88,7 @@ dockerOptsFromMonoid mproject stackRoot maresolver DockerOptsMonoid{..} = do -- | Exceptions thrown by Stack.Docker.Config. data StackDockerConfigException - = ResolverNotSupportedException String + = ResolverNotSupportedException !(Maybe Project) !(Maybe AbstractResolver) -- ^ Only LTS resolvers are supported for default image tag. 
| InvalidDatabasePathException SomeException -- ^ Invalid global database path. @@ -93,11 +99,25 @@ instance Exception StackDockerConfigException -- | Show instance for StackDockerConfigException. instance Show StackDockerConfigException where - show (ResolverNotSupportedException resolver) = + show (ResolverNotSupportedException mproject maresolver) = concat [ "Resolver not supported for Docker images:\n " - , resolver + , case (mproject, maresolver) of + (Nothing, Nothing) -> "no resolver specified" + (_, Just aresolver) -> T.unpack $ utf8BuilderToText $ display aresolver + (Just project, Nothing) -> T.unpack $ utf8BuilderToText $ display $ projectResolver project , "\nUse an LTS resolver, or set the '" , T.unpack dockerImageArgName , "' explicitly, in your configuration file."] show (InvalidDatabasePathException ex) = "Invalid database path: " ++ show ex + +-- | Parse an LTS major and minor number from a snapshot URL. +-- +-- This might make more sense in pantry instead. +parseLtsName :: Text -> Maybe (Int, Int) +parseLtsName t0 = do + t1 <- T.stripPrefix "https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/" t0 + Right (x, t2) <- Just $ decimal t1 + t3 <- T.stripPrefix "/" t2 + Right (y, ".yaml") <- Just $ decimal t3 + Just (x, y) diff --git a/src/Stack/Config/Nix.hs b/src/Stack/Config/Nix.hs index b499ffa130..e58e88426b 100644 --- a/src/Stack/Config/Nix.hs +++ b/src/Stack/Config/Nix.hs @@ -14,9 +14,7 @@ import qualified Data.Text as T import qualified Data.Text.IO as TIO import Distribution.System (OS (..)) import Stack.Constants -import Stack.Types.Version import Stack.Types.Nix -import Stack.Types.Compiler import Stack.Types.Runner import System.Directory (doesFileExist) @@ -53,11 +51,11 @@ nixOptsFromMonoid NixOptsMonoid{..} os = do where prefixAll p (x:xs) = p : x : prefixAll p xs prefixAll _ _ = [] -nixCompiler :: CompilerVersion a -> Either StringException T.Text +nixCompiler :: WantedCompiler -> Either StringException 
T.Text nixCompiler compilerVersion = case compilerVersion of - GhcVersion version -> - case T.split (== '.') (versionText version) of + WCGhc version -> + case T.split (== '.') (fromString $ versionString version) of x : y : minor -> Right $ case minor of @@ -72,11 +70,11 @@ nixCompiler compilerVersion = \(lib.attrNames haskell.compiler); in \ \if compilers == [] \ \then abort \"No compiler found for GHC " - <> versionText version <> "\"\ + <> T.pack (versionString version) <> "\"\ \else haskell.compiler.${builtins.head compilers})" _ -> "haskell.compiler.ghc" <> T.concat (x : y : minor) _ -> Left $ stringException "GHC major version not specified" - _ -> Left $ stringException "Only GHC is supported by stack --nix" + WCGhcjs{} -> Left $ stringException "Only GHC is supported by stack --nix" -- Exceptions thown specifically by Stack.Nix data StackNixException diff --git a/src/Stack/ConfigCmd.hs b/src/Stack/ConfigCmd.hs index 277933efaf..d57a19f550 100644 --- a/src/Stack/ConfigCmd.hs +++ b/src/Stack/ConfigCmd.hs @@ -29,7 +29,7 @@ import Stack.Types.Config import Stack.Types.Resolver data ConfigCmdSet - = ConfigCmdSetResolver AbstractResolver + = ConfigCmdSetResolver (Unresolved AbstractResolver) | ConfigCmdSetSystemGhc CommandScope Bool | ConfigCmdSetInstallGhc CommandScope @@ -81,9 +81,10 @@ cfgCmdSetValue => Path Abs Dir -- ^ root directory of project -> ConfigCmdSet -> RIO env Yaml.Value cfgCmdSetValue root (ConfigCmdSetResolver newResolver) = do - concreteResolver <- makeConcreteResolver (Just root) newResolver + newResolver' <- resolvePaths (Just root) newResolver + concreteResolver <- makeConcreteResolver newResolver' -- Check that the snapshot actually exists - void $ loadResolver concreteResolver + void $ loadResolver concreteResolver Nothing return (Yaml.toJSON concreteResolver) cfgCmdSetValue _ (ConfigCmdSetSystemGhc _ bool') = return (Yaml.Bool bool') diff --git a/src/Stack/Constants.hs b/src/Stack/Constants.hs index 28648dbd54..4a4fc6a026 100644 --- 
a/src/Stack/Constants.hs +++ b/src/Stack/Constants.hs @@ -8,7 +8,8 @@ module Stack.Constants (buildPlanDir ,buildPlanCacheDir - ,haskellModuleExts + ,haskellFileExts + ,haskellPreprocessorExts ,stackDotYaml ,stackWorkEnvVar ,stackRootEnvVar @@ -38,15 +39,11 @@ module Stack.Constants where import Data.Char (toUpper) -import qualified Data.HashSet as HashSet +import qualified Data.Set as Set +import Distribution.Package (mkPackageName) import Path as FL import Stack.Prelude import Stack.Types.Compiler -import Stack.Types.PackageName - --- | Extensions for anything that can be a Haskell module. -haskellModuleExts :: [Text] -haskellModuleExts = haskellFileExts ++ haskellPreprocessorExts -- | Extensions used for Haskell modules. Excludes preprocessor ones. haskellFileExts :: [Text] @@ -100,9 +97,9 @@ inNixShellEnvVar :: String inNixShellEnvVar = map toUpper stackProgName ++ "_IN_NIX_SHELL" -- See https://downloads.haskell.org/~ghc/7.10.1/docs/html/libraries/ghc/src/Module.html#integerPackageKey -wiredInPackages :: HashSet PackageName +wiredInPackages :: Set PackageName wiredInPackages = - maybe (error "Parse error in wiredInPackages") HashSet.fromList mparsed + maybe (error "Parse error in wiredInPackages") Set.fromList mparsed where mparsed = mapM parsePackageName [ "ghc-prim" @@ -119,9 +116,9 @@ wiredInPackages = -- TODO: Get this unwieldy list out of here and into a datafile -- generated by GHCJS! See https://github.com/ghcjs/ghcjs/issues/434 -ghcjsBootPackages :: HashSet PackageName +ghcjsBootPackages :: Set PackageName ghcjsBootPackages = - maybe (error "Parse error in ghcjsBootPackages") HashSet.fromList mparsed + maybe (error "Parse error in ghcjsBootPackages") Set.fromList mparsed where mparsed = mapM parsePackageName -- stage1a @@ -167,7 +164,7 @@ ghcjsBootPackages = -- | Just to avoid repetition and magic strings. 
cabalPackageName :: PackageName cabalPackageName = - $(mkPackageName "Cabal") + mkPackageName "Cabal" -- | Deprecated implicit global project directory used when outside of a project. implicitGlobalProjectDirDeprecated :: Path Abs Dir -- ^ Stack root. diff --git a/src/Stack/Constants/Config.hs b/src/Stack/Constants/Config.hs index 74cec0ef05..c1ca27fcab 100644 --- a/src/Stack/Constants/Config.hs +++ b/src/Stack/Constants/Config.hs @@ -23,7 +23,6 @@ import Stack.Prelude import Stack.Constants import Stack.Types.Compiler import Stack.Types.Config -import Stack.Types.PackageIdentifier import Path -- | Output .o/.hi directory. diff --git a/src/Stack/Coverage.hs b/src/Stack/Coverage.hs index 423f095b5b..7c2cb5167f 100644 --- a/src/Stack/Coverage.hs +++ b/src/Stack/Coverage.hs @@ -29,6 +29,7 @@ import qualified Data.Set as Set import qualified Data.Text as T import qualified Data.Text.IO as T import qualified Data.Text.Lazy as LT +import Distribution.Version (mkVersion) import Path import Path.Extra (toFilePathNoTrailingSep) import Path.IO @@ -41,10 +42,7 @@ import Stack.Types.Compiler import Stack.Types.Config import Stack.Types.NamedComponent import Stack.Types.Package -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName import Stack.Types.Runner -import Stack.Types.Version import System.FilePath (isPathSeparator) import qualified RIO import RIO.Process @@ -60,10 +58,10 @@ deleteHpcReports = do -- | Move a tix file into a sub-directory of the hpc report directory. Deletes the old one if one is -- present. updateTixFile :: HasEnvConfig env => PackageName -> Path Abs File -> String -> RIO env () -updateTixFile pkgName tixSrc testName = do +updateTixFile pkgName' tixSrc testName = do exists <- doesFileExist tixSrc when exists $ do - tixDest <- tixFilePath pkgName testName + tixDest <- tixFilePath pkgName' testName liftIO $ ignoringAbsence (removeFile tixDest) ensureDir (parent tixDest) -- Remove exe modules because they are problematic. 
This could be revisited if there's a GHC @@ -81,17 +79,17 @@ updateTixFile pkgName tixSrc testName = do -- | Get the directory used for hpc reports for the given pkgId. hpcPkgPath :: HasEnvConfig env => PackageName -> RIO env (Path Abs Dir) -hpcPkgPath pkgName = do +hpcPkgPath pkgName' = do outputDir <- hpcReportDir - pkgNameRel <- parseRelDir (packageNameString pkgName) + pkgNameRel <- parseRelDir (packageNameString pkgName') return (outputDir pkgNameRel) -- | Get the tix file location, given the name of the file (without extension), and the package -- identifier string. tixFilePath :: HasEnvConfig env => PackageName -> String -> RIO env (Path Abs File) -tixFilePath pkgName testName = do - pkgPath <- hpcPkgPath pkgName +tixFilePath pkgName' testName = do + pkgPath <- hpcPkgPath pkgName' tixRel <- parseRelFile (testName ++ "/" ++ testName ++ ".tix") return (pkgPath tixRel) @@ -102,7 +100,7 @@ generateHpcReport pkgDir package tests = do compilerVersion <- view actualCompilerVersionL -- If we're using > GHC 7.10, the hpc 'include' parameter must specify a ghc package key. See -- https://github.com/commercialhaskell/stack/issues/785 - let pkgName = packageNameText (packageName package) + let pkgName' = T.pack $ packageNameString (packageName package) pkgId = packageIdentifierString (packageIdentifier package) ghcVersion = getGhcVersion compilerVersion hasLibrary = @@ -112,7 +110,7 @@ generateHpcReport pkgDir package tests = do internalLibs = packageInternalLibraries package eincludeName <- -- Pre-7.8 uses plain PKG-version in tix files. - if ghcVersion < $(mkVersion "7.10") then return $ Right $ Just [pkgId] + if ghcVersion < mkVersion [7, 10] then return $ Right $ Just [pkgId] -- We don't expect to find a package key if there is no library. else if not hasLibrary && Set.null internalLibs then return $ Right Nothing -- Look in the inplace DB for the package key. 
@@ -120,7 +118,7 @@ generateHpcReport pkgDir package tests = do else do -- GHC 8.0 uses package id instead of package key. -- See https://github.com/commercialhaskell/stack/issues/2424 - let hpcNameField = if ghcVersion >= $(mkVersion "8.0") then "id" else "key" + let hpcNameField = if ghcVersion >= mkVersion [8, 0] then "id" else "key" eincludeName <- findPackageFieldForBuiltPackage pkgDir (packageIdentifier package) internalLibs hpcNameField case eincludeName of Left err -> do @@ -129,7 +127,7 @@ generateHpcReport pkgDir package tests = do Right includeNames -> return $ Right $ Just $ map T.unpack includeNames forM_ tests $ \testName -> do tixSrc <- tixFilePath (packageName package) (T.unpack testName) - let report = "coverage report for " <> pkgName <> "'s test-suite \"" <> testName <> "\"" + let report = "coverage report for " <> pkgName' <> "'s test-suite \"" <> testName <> "\"" reportDir = parent tixSrc case eincludeName of Left err -> generateHpcErrorReport reportDir (RIO.display (sanitize (T.unpack err))) @@ -442,7 +440,7 @@ findPackageFieldForBuiltPackage pkgDir pkgId internalLibs field = do Just result -> return $ Right result Nothing -> notFoundErr cabalVer <- view cabalVersionL - if cabalVer < $(mkVersion "1.24") + if cabalVer < mkVersion [1, 24] then do -- here we don't need to handle internal libs path <- liftM (inplaceDir ) $ parseRelFile (pkgIdStr ++ "-inplace.conf") diff --git a/src/Stack/Docker.hs b/src/Stack/Docker.hs index d660199839..db80f83bbe 100644 --- a/src/Stack/Docker.hs +++ b/src/Stack/Docker.hs @@ -41,8 +41,8 @@ import qualified Data.Text as T import qualified Data.Text.Encoding as T import Data.Time (UTCTime,LocalTime(..),diffDays,utcToLocalTime,getZonedTime,ZonedTime(..)) import Data.Version (showVersion) +import Distribution.Version (mkVersion) import GHC.Exts (sortWith) -import Lens.Micro (set) import Path import Path.Extra (toFilePathNoTrailingSep) import Path.IO hiding (canonicalizePath) @@ -51,8 +51,6 @@ import Stack.Config 
(getInContainer) import Stack.Constants import Stack.Constants.Config import Stack.Docker.GlobalDB -import Stack.PackageIndex -import Stack.Types.PackageIndex import Stack.Types.Runner import Stack.Types.Version import Stack.Types.Config @@ -254,7 +252,7 @@ runContainerAndExit getCmdArgs msshAuthSock = lookup "SSH_AUTH_SOCK" env muserEnv = lookup "USER" env isRemoteDocker = maybe False (isPrefixOf "tcp://") dockerHost - image = dockerImage docker + image <- either throwIO pure (dockerImage docker) when (isRemoteDocker && maybe False (isInfixOf "boot2docker") dockerCertPath) (logWarn "Warning: Using boot2docker is NOT supported, and not likely to perform well.") @@ -657,7 +655,7 @@ pull = do config <- view configL let docker = configDocker config checkDockerVersion docker - pullImage docker (dockerImage docker) + either throwIO (pullImage docker) (dockerImage docker) -- | Pull Docker image from registry. pullImage :: (HasProcessContext env, HasLogFunc env) @@ -697,7 +695,7 @@ checkDockerVersion docker = dockerVersionOut <- readDockerProcess ["--version"] case words (decodeUtf8 dockerVersionOut) of (_:_:v:_) -> - case parseVersionFromString (stripVersion v) of + case parseVersion (stripVersion v) of Just v' | v' < minimumDockerVersion -> throwIO (DockerTooOldException minimumDockerVersion v') @@ -709,7 +707,7 @@ checkDockerVersion docker = return () _ -> throwIO InvalidVersionOutputException _ -> throwIO InvalidVersionOutputException - where minimumDockerVersion = $(mkVersion "1.6.0") + where minimumDockerVersion = mkVersion [1, 6, 0] prohibitedDockerVersions = [] stripVersion v = takeWhile (/= '-') (dropWhileEnd (not . 
isDigit) v) @@ -758,25 +756,8 @@ entrypoint config@Config{..} DockerEntrypoint{..} = unless exists $ do ensureDir (parent destBuildPlan) copyFile srcBuildPlan destBuildPlan - forM_ clIndices $ \pkgIdx -> do - msrcIndex <- runRIO (set stackRootL origStackRoot config) $ do - srcIndex <- configPackageIndex (indexName pkgIdx) - exists <- doesFileExist srcIndex - return $ if exists - then Just srcIndex - else Nothing - case msrcIndex of - Nothing -> return () - Just srcIndex -> - runRIO config $ do - destIndex <- configPackageIndex (indexName pkgIdx) - exists <- doesFileExist destIndex - unless exists $ do - ensureDir (parent destIndex) - copyFile srcIndex destIndex return True where - CabalLoader {..} = configCabalLoader updateOrCreateStackUser estackUserEntry homeDir DockerUser{..} = do case estackUserEntry of Left _ -> do diff --git a/src/Stack/Dot.hs b/src/Stack/Dot.hs index 1e13c9c282..97002cb695 100644 --- a/src/Stack/Dot.hs +++ b/src/Stack/Dot.hs @@ -3,7 +3,6 @@ {-# LANGUAGE TupleSections #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TemplateHaskell #-} module Stack.Dot (dot ,listDependencies @@ -16,15 +15,15 @@ module Stack.Dot (dot ) where import qualified Data.Foldable as F -import qualified Data.HashSet as HashSet -import qualified Data.Map as Map import qualified Data.Set as Set +import qualified Data.Map as Map import qualified Data.Text as Text import qualified Data.Text.IO as Text import qualified Data.Traversable as T import Distribution.Text (display) import qualified Distribution.SPDX.License as SPDX import Distribution.License (License(BSD3), licenseFromSPDX) +import Distribution.Types.PackageName (mkPackageName) import Stack.Build (loadPackage) import Stack.Build.Installed (getInstalled, GetInstalledOpts(..)) import Stack.Build.Source @@ -33,16 +32,12 @@ import Stack.Config (getLocalPackages) import Stack.Constants import Stack.Package import Stack.PackageDump (DumpPackage(..)) -import Stack.Prelude hiding 
(Display (..)) +import Stack.Prelude hiding (Display (..), pkgName, loadPackage) +import qualified Stack.Prelude (pkgName) import Stack.Types.Build -import Stack.Types.BuildPlan import Stack.Types.Config -import Stack.Types.FlagName import Stack.Types.GhcPkgId import Stack.Types.Package -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName -import Stack.Types.Version -- | Options record for @stack dot@ data DotOpts = DotOpts @@ -121,13 +116,13 @@ createDependencyGraph dotOpts = do sourceMap -- TODO: Can there be multiple entries for wired-in-packages? If so, -- this will choose one arbitrarily.. - let globalDumpMap = Map.fromList $ map (\dp -> (packageIdentifierName (dpPackageIdent dp), dp)) globalDump + let globalDumpMap = Map.fromList $ map (\dp -> (Stack.Prelude.pkgName (dpPackageIdent dp), dp)) globalDump globalIdMap = Map.fromList $ map (\dp -> (dpGhcPkgId dp, dpPackageIdent dp)) globalDump let depLoader = createDepLoader sourceMap installedMap globalDumpMap globalIdMap loadPackageDeps loadPackageDeps name version loc flags ghcOptions -- Skip packages that can't be loaded - see -- https://github.com/commercialhaskell/stack/issues/2967 - | name `elem` [$(mkPackageName "rts"), $(mkPackageName "ghc")] = + | name `elem` [mkPackageName "rts", mkPackageName "ghc"] = return (Set.empty, DotPayload (Just version) (Just $ Right BSD3)) | otherwise = fmap (packageAllDeps &&& makePayload) (loadPackage loc flags ghcOptions) resolveDependencies (dotDependencyDepth dotOpts) graph depLoader @@ -145,7 +140,7 @@ listDependencies opts = do if listDepsLicense opts then maybe "" (Text.pack . display . either licenseFromSPDX id) (payloadLicense payload) else maybe "" (Text.pack . 
show) (payloadVersion payload) - line = packageNameText name <> listDepsSep opts <> payloadText + line = Text.pack (packageNameString name) <> listDepsSep opts <> payloadText in liftIO $ Text.putStrLn line -- | @pruneGraph dontPrune toPrune graph@ prunes all packages in @@ -204,20 +199,20 @@ createDepLoader :: Applicative m -> Map PackageName (InstallLocation, Installed) -> Map PackageName (DumpPackage () () ()) -> Map GhcPkgId PackageIdentifier - -> (PackageName -> Version -> PackageLocationIndex FilePath -> + -> (PackageName -> Version -> PackageLocationImmutable -> Map FlagName Bool -> [Text] -> m (Set PackageName, DotPayload)) -> PackageName -> m (Set PackageName, DotPayload) createDepLoader sourceMap installed globalDumpMap globalIdMap loadPackageDeps pkgName = - if not (pkgName `HashSet.member` wiredInPackages) + if not (pkgName `Set.member` wiredInPackages) then case Map.lookup pkgName sourceMap of - Just (PSFiles lp _) -> pure (packageAllDeps pkg, payloadFromLocal pkg) + Just (PSFilePath lp _) -> pure (packageAllDeps pkg, payloadFromLocal pkg) where pkg = localPackageToPackage lp - Just (PSIndex _ flags ghcOptions loc) -> + Just (PSRemote _ flags ghcOptions loc ident) -> -- FIXME pretty certain this could be cleaned up a lot by including more info in PackageSource - let PackageIdentifierRevision (PackageIdentifier name version) _ = loc - in assert (pkgName == name) (loadPackageDeps pkgName version (PLIndex loc) flags ghcOptions) + let PackageIdentifier name version = ident + in assert (pkgName == name) (loadPackageDeps pkgName version loc flags ghcOptions) Nothing -> pure (Set.empty, payloadFromInstalled (Map.lookup pkgName installed)) -- For wired-in-packages, use information from ghc-pkg (see #3084) else case Map.lookup pkgName globalDumpMap of @@ -225,7 +220,7 @@ createDepLoader sourceMap installed globalDumpMap globalIdMap loadPackageDeps pk Just dp -> pure (Set.fromList deps, payloadFromDump dp) where deps = map (\depId -> maybe (error ("Invariant 
violated: Expected to find " ++ ghcPkgIdString depId ++ " in global DB")) - packageIdentifierName + Stack.Prelude.pkgName (Map.lookup depId globalIdMap)) (dpDepends dp) where @@ -234,7 +229,7 @@ createDepLoader sourceMap installed globalDumpMap globalIdMap loadPackageDeps pk case maybePkg of Just (_, Library _ _ mlicense) -> mlicense _ -> Nothing - payloadFromDump dp = DotPayload (Just $ packageIdentifierVersion $ dpPackageIdent dp) (Right <$> dpLicense dp) + payloadFromDump dp = DotPayload (Just $ pkgVersion $ dpPackageIdent dp) (Right <$> dpLicense dp) -- | Resolve the direct (depth 0) external dependencies of the given local packages localDependencies :: DotOpts -> [LocalPackage] -> [(PackageName, (Set PackageName, DotPayload))] @@ -293,7 +288,7 @@ printEdge from to' = liftIO $ Text.putStrLn (Text.concat [ nodeName from, " -> " -- | Convert a package name to a graph node name. nodeName :: PackageName -> Text -nodeName name = "\"" <> packageNameText name <> "\"" +nodeName name = "\"" <> Text.pack (packageNameString name) <> "\"" -- | Print a node with no dependencies printLeaf :: MonadIO m => PackageName -> m () @@ -304,7 +299,7 @@ printLeaf package = liftIO . Text.putStrLn . 
Text.concat $ -- | Check if the package is wired in (shipped with) ghc isWiredIn :: PackageName -> Bool -isWiredIn = (`HashSet.member` wiredInPackages) +isWiredIn = (`Set.member` wiredInPackages) localPackageToPackage :: LocalPackage -> Package localPackageToPackage lp = diff --git a/src/Stack/Fetch.hs b/src/Stack/Fetch.hs deleted file mode 100644 index 25a4663fa3..0000000000 --- a/src/Stack/Fetch.hs +++ /dev/null @@ -1,657 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PatternGuards #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TemplateHaskell #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE PackageImports #-} -{-# LANGUAGE ViewPatterns #-} -{-# LANGUAGE RecordWildCards #-} - --- | Functionality for downloading packages securely for cabal's usage. - -module Stack.Fetch - ( unpackPackages - , unpackPackageIdent - , unpackPackageIdents - , fetchPackages - , untar - , resolvePackages - , resolvePackagesAllowMissing - , ResolvedPackage (..) 
- , withCabalFiles - , loadFromIndex - ) where - -import qualified Codec.Archive.Tar as Tar -import qualified Codec.Archive.Tar.Check as Tar -import qualified Codec.Archive.Tar.Entry as Tar -import Codec.Compression.GZip (decompress) -import Stack.Prelude -import Crypto.Hash (SHA256 (..)) -import qualified Data.ByteString as S -import qualified Data.Foldable as F -import qualified Data.HashMap.Strict as HashMap -import qualified Data.HashSet as HashSet -import Data.List (intercalate, maximum) -import Data.List.NonEmpty (NonEmpty) -import qualified Data.List.NonEmpty as NE -import qualified Data.Map as Map -import qualified Data.Set as Set -import qualified Data.Text as T -import Data.Text.Encoding (decodeUtf8) -import Data.Text.Metrics -import Lens.Micro (to) -import Network.HTTP.Download -import Path -import Path.Extra (toFilePathNoTrailingSep) -import Path.IO -import Stack.PackageIndex -import Stack.Types.BuildPlan -import Stack.Types.PackageIdentifier -import Stack.Types.PackageIndex -import Stack.Types.PackageName -import Stack.Types.Version -import qualified System.FilePath as FP -import System.IO (SeekMode (AbsoluteSeek)) -import System.PosixCompat (setFileMode) - -data FetchException - = Couldn'tReadIndexTarball FilePath Tar.FormatError - | Couldn'tReadPackageTarball FilePath SomeException - | UnpackDirectoryAlreadyExists (Set FilePath) - | CouldNotParsePackageSelectors [String] - | UnknownPackageNames (Set PackageName) - | UnknownPackageIdentifiers (HashSet PackageIdentifierRevision) String - Bool -- Do we use any 00-index.tar.gz indices? 
Just used for more informative error messages - deriving Typeable -instance Exception FetchException - -instance Show FetchException where - show (Couldn'tReadIndexTarball fp err) = concat - [ "There was an error reading the index tarball " - , fp - , ": " - , show err - ] - show (Couldn'tReadPackageTarball fp err) = concat - [ "There was an error reading the package tarball " - , fp - , ": " - , show err - ] - show (UnpackDirectoryAlreadyExists dirs) = unlines - $ "Unable to unpack due to already present directories:" - : map (" " ++) (Set.toList dirs) - show (CouldNotParsePackageSelectors strs) = - "The following package selectors are not valid package names or identifiers: " ++ - intercalate ", " strs - show (UnknownPackageNames names) = - "The following packages were not found in your indices: " ++ - intercalate ", " (map packageNameString $ Set.toList names) - show (UnknownPackageIdentifiers idents suggestions uses00Index) = - "The following package identifiers were not found in your indices: " ++ - intercalate ", " (map packageIdentifierRevisionString $ HashSet.toList idents) ++ - (if null suggestions then "" else "\n" ++ suggestions) ++ - (if uses00Index then "\n\nYou seem to be using a legacy 00-index.tar.gz tarball.\nConsider changing your configuration to use a 01-index.tar.gz file.\nAlternatively, you can set the ignore-revision-mismatch setting to true.\nFor more information, see: https://github.com/commercialhaskell/stack/issues/3520" else "") - --- | Fetch packages into the cache without unpacking -fetchPackages :: HasCabalLoader env => Set PackageIdentifier -> RIO env () -fetchPackages idents' = do - resolved <- resolvePackages Nothing idents Set.empty - ToFetchResult toFetch alreadyUnpacked <- getToFetch Nothing resolved - assert (Map.null alreadyUnpacked) (return ()) - nowUnpacked <- fetchPackages' Nothing toFetch - assert (Map.null nowUnpacked) (return ()) - where - -- Since we're just fetching tarballs and not unpacking cabal files, we can - -- 
always provide a CFILatest cabal file info - idents = map (flip PackageIdentifierRevision CFILatest) $ Set.toList idents' - --- | Intended to work for the command line command. -unpackPackages :: HasCabalLoader env - => Maybe SnapshotDef -- ^ when looking up by name, take from this build plan - -> FilePath -- ^ destination - -> [String] -- ^ names or identifiers - -> RIO env () -unpackPackages mSnapshotDef dest input = do - dest' <- resolveDir' dest - (names, idents) <- case partitionEithers $ map parse input of - ([], x) -> return $ partitionEithers x - (errs, _) -> throwM $ CouldNotParsePackageSelectors errs - resolved <- resolvePackages mSnapshotDef idents (Set.fromList names) - ToFetchResult toFetch alreadyUnpacked <- getToFetch (Just dest') resolved - unless (Map.null alreadyUnpacked) $ - throwM $ UnpackDirectoryAlreadyExists $ Set.fromList $ map toFilePath $ Map.elems alreadyUnpacked - unpacked <- fetchPackages' Nothing toFetch - F.forM_ (Map.toList unpacked) $ \(ident, dest'') -> logInfo $ - "Unpacked " <> - fromString (packageIdentifierString ident) <> - " to " <> - fromString (toFilePath dest'') - where - -- Possible future enhancement: parse names as name + version range - parse s = - case parsePackageName t of - Right x -> Right $ Left x - Left _ -> - case parsePackageIdentifierRevision t of - Right x -> Right $ Right x - Left _ -> Left s - where - t = T.pack s - --- | Same as 'unpackPackageIdents', but for a single package. 
-unpackPackageIdent - :: HasCabalLoader env - => Path Abs Dir -- ^ unpack directory - -> Path Rel Dir -- ^ the dist rename directory, see: https://github.com/fpco/stack/issues/157 - -> PackageIdentifierRevision - -> RIO env (Path Abs Dir) -unpackPackageIdent unpackDir distDir (PackageIdentifierRevision ident mcfi) = do - -- FIXME make this more direct in the future - m <- unpackPackageIdents unpackDir (Just distDir) [PackageIdentifierRevision ident mcfi] - case Map.toList m of - [(ident', dir)] - | ident /= ident' -> error "unpackPackageIdent: ident mismatch" - | otherwise -> return dir - [] -> error "unpackPackageIdent: empty list" - _ -> error "unpackPackageIdent: multiple results" - --- | Ensure that all of the given package idents are unpacked into the build --- unpack directory, and return the paths to all of the subdirectories. -unpackPackageIdents - :: HasCabalLoader env - => Path Abs Dir -- ^ unpack directory - -> Maybe (Path Rel Dir) -- ^ the dist rename directory, see: https://github.com/fpco/stack/issues/157 - -> [PackageIdentifierRevision] - -> RIO env (Map PackageIdentifier (Path Abs Dir)) -unpackPackageIdents unpackDir mdistDir idents = do - resolved <- resolvePackages Nothing idents Set.empty - ToFetchResult toFetch alreadyUnpacked <- getToFetch (Just unpackDir) resolved - nowUnpacked <- fetchPackages' mdistDir toFetch - return $ alreadyUnpacked <> nowUnpacked - -data ResolvedPackage = ResolvedPackage - { rpIdent :: !PackageIdentifier - , rpDownload :: !(Maybe PackageDownload) - , rpOffsetSize :: !OffsetSize - , rpIndex :: !PackageIndex - } - deriving Show - --- | Resolve a set of package names and identifiers into @FetchPackage@ values. 
-resolvePackages :: HasCabalLoader env - => Maybe SnapshotDef -- ^ when looking up by name, take from this build plan - -> [PackageIdentifierRevision] - -> Set PackageName - -> RIO env [ResolvedPackage] -resolvePackages mSnapshotDef idents0 names0 = do - eres <- go - case eres of - Left _ -> do - updateAllIndices - go >>= either throwM return - Right x -> return x - where - go = r <$> getUses00Index <*> resolvePackagesAllowMissing mSnapshotDef idents0 names0 - r uses00Index (missingNames, missingIdents, idents) - | not $ Set.null missingNames = Left $ UnknownPackageNames missingNames - | not $ HashSet.null missingIdents = Left $ UnknownPackageIdentifiers missingIdents "" uses00Index - | otherwise = Right idents - --- | Does the configuration use a 00-index.tar.gz file for indices? --- See -getUses00Index :: HasCabalLoader env => RIO env Bool -getUses00Index = - any is00 <$> view (cabalLoaderL.to clIndices) - where - is00 :: PackageIndex -> Bool - is00 index = "00-index.tar.gz" `T.isInfixOf` indexLocation index - --- | Turn package identifiers and package names into a list of --- @ResolvedPackage@s. Returns any unresolved names and --- identifier. These are considered unresolved even if the only --- mismatch is in the cabal file info (MSS 2017-07-17: old versions of --- this code had special handling to treat missing cabal file info as --- a warning, that's no longer necessary or desirable since all info --- should be present and checked). -resolvePackagesAllowMissing - :: forall env. HasCabalLoader env - => Maybe SnapshotDef -- ^ when looking up by name, take from this build plan - -> [PackageIdentifierRevision] - -> Set PackageName - -> RIO env (Set PackageName, HashSet PackageIdentifierRevision, [ResolvedPackage]) -resolvePackagesAllowMissing mSnapshotDef idents0 names0 = do - cache@(PackageCache cache') <- getPackageCaches - - -- Find out the latest versions of all packages in the cache - let versions = fmap (maximum . 
HashMap.keys) cache' - - -- Determines the identifier for a given name, either from - -- snapshot information or by taking the latest version - -- available - getNamed :: PackageName -> Maybe PackageIdentifierRevision - getNamed = - case mSnapshotDef of - Nothing -> getNamedFromIndex - Just sd -> getNamedFromSnapshotDef sd - - -- Use whatever is specified in the snapshot. TODO this does not - -- handle the case where a snapshot defines a package outside of - -- the index, we'd need a LoadedSnapshot for that. - getNamedFromSnapshotDef sd name = do - loop $ sdLocations sd - where - loop [] = Nothing - loop (PLIndex ident@(PackageIdentifierRevision (PackageIdentifier name' _) _):rest) - | name == name' = Just ident - | otherwise = loop rest - loop (_:rest) = loop rest - - -- Take latest version available, including latest cabal file information - getNamedFromIndex name = fmap - (\ver -> PackageIdentifierRevision (PackageIdentifier name ver) CFILatest) - (HashMap.lookup name versions) - - (missingNames, idents1) = partitionEithers $ map - (\name -> maybe (Left name) Right (getNamed name)) - (Set.toList names0) - cl <- view cabalLoaderL - let (missingIdents, resolved) = - partitionEithers - $ map (\pir -> maybe (Left pir) Right (lookupResolvedPackage cl pir cache)) - $ idents0 <> idents1 - return (Set.fromList missingNames, HashSet.fromList missingIdents, resolved) - -lookupResolvedPackage - :: CabalLoader - -> PackageIdentifierRevision - -> PackageCache PackageIndex - -> Maybe ResolvedPackage -lookupResolvedPackage cl (PackageIdentifierRevision ident@(PackageIdentifier name version) cfi) (PackageCache cache) = do - (index, mdownload, files) <- HashMap.lookup name cache >>= HashMap.lookup version - let moffsetSize = - case cfi of - CFILatest -> Just $ snd $ NE.last files - CFIHash _msize hash' -> -- TODO check size? 
- lookup hash' - $ concatMap (\(hashes, x) -> map (, x) hashes) - $ NE.toList files - CFIRevision rev -> fmap snd $ listToMaybe $ drop (fromIntegral rev) $ NE.toList files - offsetSize <- - case moffsetSize of - Just x -> Just x - Nothing - | clIgnoreRevisionMismatch cl -> Just $ snd $ NE.last files - | otherwise -> Nothing - Just ResolvedPackage - { rpIdent = ident - , rpDownload = mdownload - , rpOffsetSize = offsetSize - , rpIndex = index - } - -data ToFetch = ToFetch - { tfTarball :: !(Path Abs File) - , tfDestDir :: !(Maybe (Path Abs Dir)) - , tfUrl :: !T.Text - , tfSize :: !(Maybe Word64) - , tfSHA256 :: !(Maybe StaticSHA256) - , tfCabal :: !ByteString - -- ^ Contents of the .cabal file - } - -data ToFetchResult = ToFetchResult - { tfrToFetch :: !(Map PackageIdentifier ToFetch) - , tfrAlreadyUnpacked :: !(Map PackageIdentifier (Path Abs Dir)) - } - --- | Add the cabal files to a list of idents with their caches. -withCabalFiles - :: HasCabalLoader env - => IndexName - -> [(ResolvedPackage, a)] - -> (PackageIdentifier -> a -> ByteString -> IO b) - -> RIO env [b] -withCabalFiles name pkgs f = do - indexPath <- configPackageIndex name - withBinaryFile (toFilePath indexPath) ReadMode - $ \h -> mapM (goPkg h) pkgs - where - goPkg h (ResolvedPackage { rpIdent = ident, rpOffsetSize = OffsetSize offset size }, tf) = do - -- Did not find warning for tarballs is handled above - liftIO $ do - hSeek h AbsoluteSeek $ fromIntegral offset - cabalBS <- S.hGet h $ fromIntegral size - f ident tf cabalBS - -loadFromIndex :: HasCabalLoader env => PackageIdentifierRevision -> RIO env ByteString -loadFromIndex ident = do - -- TODO in the future, keep all of the necessary @Handle@s open - bothCaches <- getPackageCaches - mres <- lookupPackageIdentifierExact ident bothCaches - case mres of - Just bs -> return bs - -- Update the cache and try again - Nothing -> do - let fuzzy = fuzzyLookupCandidates ident bothCaches - suggestions = case fuzzy of - FRNameNotFound Nothing -> "" - 
FRNameNotFound (Just cs) -> - "Perhaps you meant " <> orSeparated cs <> "?" - FRVersionNotFound cs -> "Possible candidates: " <> - commaSeparated (NE.map packageIdentifierText cs) - <> "." - FRRevisionNotFound cs -> - "The specified revision was not found.\nPossible candidates: " <> - commaSeparated (NE.map (T.pack . packageIdentifierRevisionString) cs) - <> "." - cl <- view cabalLoaderL - join $ modifyMVar (clUpdateRef cl) $ \toUpdate -> - if toUpdate then do - logInfo $ - "Didn't see " <> - fromString (packageIdentifierRevisionString ident) <> - " in your package indices.\n" <> - "Updating and trying again." - updateAllIndices - _ <- getPackageCaches - return (False, loadFromIndex ident) - else do - uses00Index <- getUses00Index - return (toUpdate, throwIO $ UnknownPackageIdentifiers - (HashSet.singleton ident) (T.unpack suggestions) uses00Index) - -lookupPackageIdentifierExact - :: HasCabalLoader env - => PackageIdentifierRevision - -> PackageCache PackageIndex - -> RIO env (Maybe ByteString) -lookupPackageIdentifierExact identRev cache = do - cl <- view cabalLoaderL - forM (lookupResolvedPackage cl identRev cache) $ \rp -> do - [bs] <- withCabalFiles (indexName (rpIndex rp)) [(rp, ())] $ \_ _ bs -> return bs - return bs - -data FuzzyResults - = FRNameNotFound !(Maybe (NonEmpty T.Text)) - | FRVersionNotFound !(NonEmpty PackageIdentifier) - | FRRevisionNotFound !(NonEmpty PackageIdentifierRevision) - --- | Given package identifier and package caches, return list of packages --- with the same name and the same two first version number components found --- in the caches. 
-fuzzyLookupCandidates - :: PackageIdentifierRevision - -> PackageCache index - -> FuzzyResults -fuzzyLookupCandidates (PackageIdentifierRevision (PackageIdentifier name ver) _rev) (PackageCache caches) = - case HashMap.lookup name caches of - Nothing -> FRNameNotFound $ typoCorrectionCandidates name (PackageCache caches) - Just m -> - case HashMap.lookup ver m of - Nothing -> - case NE.nonEmpty $ filter sameMajor $ HashMap.keys m of - Just vers -> FRVersionNotFound $ NE.map (PackageIdentifier name) vers - Nothing -> - case NE.nonEmpty $ HashMap.keys m of - Nothing -> error "fuzzyLookupCandidates: no versions" - Just vers -> FRVersionNotFound $ NE.map (PackageIdentifier name) vers - Just (_index, _mpd, revisions) -> - let hashes = concatMap fst $ NE.toList revisions - pirs = map (PackageIdentifierRevision (PackageIdentifier name ver) . CFIHash Nothing) hashes - in case NE.nonEmpty pirs of - Nothing -> error "fuzzyLookupCandidates: no revisions" - Just pirs' -> FRRevisionNotFound pirs' - where - sameMajor v = toMajorVersion v == toMajorVersion ver - --- | Try to come up with typo corrections for given package identifier using --- package caches. This should be called before giving up, i.e. when --- 'fuzzyLookupCandidates' cannot return anything. -typoCorrectionCandidates - :: PackageName - -> PackageCache index - -> Maybe (NonEmpty T.Text) -typoCorrectionCandidates name' (PackageCache cache) = - let name = packageNameText name' - in NE.nonEmpty - . take 10 - . map snd - . filter (\(distance, _) -> distance < 4) - . map (\k -> (damerauLevenshtein name (packageNameText k), packageNameText k)) - . HashMap.keys - $ cache - --- | Figure out where to fetch from. 
-getToFetch :: HasCabalLoader env - => Maybe (Path Abs Dir) -- ^ directory to unpack into, @Nothing@ means no unpack - -> [ResolvedPackage] - -> RIO env ToFetchResult -getToFetch mdest resolvedAll = do - (toFetch0, unpacked) <- liftM partitionEithers $ mapM checkUnpacked resolvedAll - toFetch1 <- mapM goIndex $ Map.toList $ Map.fromListWith (++) toFetch0 - return ToFetchResult - { tfrToFetch = Map.unions toFetch1 - , tfrAlreadyUnpacked = Map.fromList unpacked - } - where - checkUnpacked resolved = do - let ident = rpIdent resolved - dirRel <- parseRelDir $ packageIdentifierString ident - let mdestDir = ( dirRel) <$> mdest - mexists <- - case mdestDir of - Nothing -> return Nothing - Just destDir -> do - exists <- doesDirExist destDir - return $ if exists then Just destDir else Nothing - case mexists of - Just destDir -> return $ Right (ident, destDir) - Nothing -> do - let index = rpIndex resolved - d = rpDownload resolved - targz = T.pack $ packageIdentifierString ident ++ ".tar.gz" - tarball <- configPackageTarball (indexName index) ident - return $ Left (indexName index, [(resolved, ToFetch - { tfTarball = tarball - , tfDestDir = mdestDir - , tfUrl = case fmap pdUrl d of - Just url | not (S.null url) -> decodeUtf8 url - _ -> indexDownloadPrefix index <> targz - , tfSize = fmap pdSize d - , tfSHA256 = fmap pdSHA256 d - , tfCabal = S.empty -- filled in by goIndex - })]) - - goIndex (name, pkgs) = - liftM Map.fromList $ - withCabalFiles name pkgs $ \ident tf cabalBS -> - return (ident, tf { tfCabal = cabalBS }) - --- | Download the given name,version pairs into the directory expected by cabal. --- --- For each package it downloads, it will optionally unpack it to the given --- @Path@ (if present). Note that unpacking is not simply a matter of --- untarring, but also of grabbing the cabal file from the package index. The --- destinations should not include package identifiers. --- --- Returns the list of paths unpacked, including package identifiers. 
E.g.: --- --- @ --- fetchPackages [("foo-1.2.3", Just "/some/dest")] ==> ["/some/dest/foo-1.2.3"] --- @ --- --- Since 0.1.0.0 -fetchPackages' :: forall env. HasCabalLoader env - => Maybe (Path Rel Dir) -- ^ the dist rename directory, see: https://github.com/fpco/stack/issues/157 - -> Map PackageIdentifier ToFetch - -> RIO env (Map PackageIdentifier (Path Abs Dir)) -fetchPackages' mdistDir toFetchAll = do - connCount <- view $ cabalLoaderL.to clConnectionCount - outputVar <- newTVarIO Map.empty - - parMapM_ - connCount - (go outputVar) - (Map.toList toFetchAll) - - readTVarIO outputVar - where - go :: TVar (Map PackageIdentifier (Path Abs Dir)) - -> (PackageIdentifier, ToFetch) - -> RIO env () - go outputVar (ident, toFetch) = do - req <- parseUrlThrow $ T.unpack $ tfUrl toFetch - let destpath = tfTarball toFetch - - let toHashCheck bs = HashCheck SHA256 (CheckHexDigestByteString bs) - let downloadReq = DownloadRequest - { drRequest = req - , drHashChecks = map (toHashCheck . staticSHA256ToBase16) $ maybeToList (tfSHA256 toFetch) - , drLengthCheck = fromIntegral <$> tfSize toFetch - , drRetryPolicy = drRetryPolicyDefault - } - let progressSink _ = - logInfo $ display ident <> ": download" - _ <- verifiedDownload downloadReq destpath progressSink - - identStrP <- parseRelDir $ packageIdentifierString ident - - F.forM_ (tfDestDir toFetch) $ \destDir -> do - let innerDest = toFilePath destDir - - unexpectedEntries <- liftIO $ untar destpath identStrP (parent destDir) - - liftIO $ do - case mdistDir of - Nothing -> return () - -- See: https://github.com/fpco/stack/issues/157 - Just distDir -> do - let inner = parent destDir identStrP - oldDist = inner $(mkRelDir "dist") - newDist = inner distDir - exists <- doesDirExist oldDist - when exists $ do - -- Previously used takeDirectory, but that got confused - -- by trailing slashes, see: - -- https://github.com/commercialhaskell/stack/issues/216 - -- - -- Instead, use Path which is a bit more resilient - ensureDir $ parent 
newDist - renameDir oldDist newDist - - let cabalFP = - innerDest FP. - packageNameString (packageIdentifierName ident) - FP.<.> "cabal" - S.writeFile cabalFP $ tfCabal toFetch - - atomically $ modifyTVar outputVar $ Map.insert ident destDir - - F.forM_ unexpectedEntries $ \(path, entryType) -> - logWarn $ "Unexpected entry type " <> display entryType <> " for entry " <> fromString path - --- | Internal function used to unpack tarball. --- --- Takes a path to a .tar.gz file, the name of the directory it should contain, --- and a destination folder to extract the tarball into. Returns unexpected --- entries, as pairs of paths and descriptions. -untar :: forall b1 b2. Path b1 File -> Path Rel Dir -> Path b2 Dir -> IO [(FilePath, T.Text)] -untar tarPath expectedTarFolder destDirParent = do - ensureDir destDirParent - withLazyFile (toFilePath tarPath) $ \lbs -> do - let rawEntries = fmap (either wrap wrap) - $ Tar.checkTarbomb (toFilePathNoTrailingSep expectedTarFolder) - $ Tar.read $ decompress lbs - - filterEntries - :: (Semigroup w, Monoid w) => (Tar.Entry -> (Bool, w)) - -> Tar.Entries b -> (Tar.Entries b, w) - -- Allow collecting warnings, Writer-monad style. 
- filterEntries f = - Tar.foldEntries - (\e -> let (res, w) = f e in - \(rest, wOld) -> ((if res then Tar.Next e else id) rest, wOld <> w)) - (Tar.Done, mempty) - (\err -> (Tar.Fail err, mempty)) - - extractableEntry e = - case Tar.entryContent e of - Tar.NormalFile _ _ -> (True, []) - Tar.Directory -> (True, []) - Tar.SymbolicLink _ -> (True, []) - Tar.HardLink _ -> (True, []) - Tar.OtherEntryType 'g' _ _ -> (False, []) - Tar.OtherEntryType 'x' _ _ -> (False, []) - Tar.CharacterDevice _ _ -> (False, [(path, "character device")]) - Tar.BlockDevice _ _ -> (False, [(path, "block device")]) - Tar.NamedPipe -> (False, [(path, "named pipe")]) - Tar.OtherEntryType code _ _ -> (False, [(path, "other entry type with code " <> T.pack (show code))]) - where - path = Tar.fromTarPath $ Tar.entryTarPath e - (entries, unexpectedEntries) = filterEntries extractableEntry rawEntries - - wrap :: Exception e => e -> FetchException - wrap = Couldn'tReadPackageTarball (toFilePath tarPath) . toException - - getPerms :: Tar.Entry -> (FilePath, Tar.Permissions) - getPerms e = (toFilePath destDirParent FP. Tar.fromTarPath (Tar.entryTarPath e), - Tar.entryPermissions e) - - filePerms :: [(FilePath, Tar.Permissions)] - filePerms = catMaybes $ Tar.foldEntries (\e -> (:) (Just $ getPerms e)) - [] (const []) entries - Tar.unpack (toFilePath destDirParent) entries - -- Reset file permissions as they were in the tarball, but only - -- for extracted entries (whence filterEntries extractableEntry above). 
- -- See https://github.com/commercialhaskell/stack/issues/2361 - mapM_ (\(fp, perm) -> setFileMode - (FP.dropTrailingPathSeparator fp) - perm) filePerms - return unexpectedEntries - -parMapM_ :: (F.Foldable f,MonadUnliftIO m) - => Int - -> (a -> m ()) - -> f a - -> m () -parMapM_ (max 1 -> 1) f xs = F.mapM_ f xs -parMapM_ cnt f xs0 = withRunInIO $ \run -> do - var <- newTVarIO $ F.toList xs0 - - replicateConcurrently_ cnt $ fix $ \loop -> join $ atomically $ do - xs <- readTVar var - case xs of - [] -> return $ return () - x:xs' -> do - writeTVar var xs' - return $ do - run $ f x - loop - -orSeparated :: NonEmpty T.Text -> T.Text -orSeparated xs - | NE.length xs == 1 = NE.head xs - | NE.length xs == 2 = NE.head xs <> " or " <> NE.last xs - | otherwise = T.intercalate ", " (NE.init xs) <> ", or " <> NE.last xs - -commaSeparated :: NonEmpty T.Text -> T.Text -commaSeparated = F.fold . NE.intersperse ", " - --- | Location of a package tarball -configPackageTarball :: HasCabalLoader env => IndexName -> PackageIdentifier -> RIO env (Path Abs File) -configPackageTarball iname ident = do - root <- configPackageIndexRoot iname - name <- parseRelDir $ packageNameString $ packageIdentifierName ident - ver <- parseRelDir $ versionString $ packageIdentifierVersion ident - base <- parseRelFile $ packageIdentifierString ident ++ ".tar.gz" - return (root $(mkRelDir "packages") name ver base) diff --git a/src/Stack/Freeze.hs b/src/Stack/Freeze.hs new file mode 100644 index 0000000000..4921918ade --- /dev/null +++ b/src/Stack/Freeze.hs @@ -0,0 +1,66 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +module Stack.Freeze + ( freeze + , FreezeOpts (..) + , FreezeMode (..) 
+ ) where + +import Data.Aeson ((.=), object) +import qualified Data.Yaml as Yaml +import qualified RIO.ByteString as B +import Stack.Prelude +import Stack.Types.BuildPlan +import Stack.Types.Config + +data FreezeMode = FreezeProject | FreezeSnapshot + +newtype FreezeOpts = FreezeOpts + { freezeMode :: FreezeMode + } + +freeze :: HasEnvConfig env => FreezeOpts -> RIO env () +freeze (FreezeOpts FreezeProject) = do + mproject <- view $ configL.to configMaybeProject + case mproject of + Just (p, _) -> do + let deps = projectDependencies p + resolver = projectResolver p + completePackageLocation' pl = + case pl of + PLImmutable pli -> PLImmutable <$> completePackageLocation pli + plm@(PLMutable _) -> pure plm + resolver' <- completeSnapshotLocation resolver + deps' <- mapM completePackageLocation' deps + if deps' == deps && resolver' == resolver + then + logInfo "No freezing is required for this project" + else do + logInfo "# Fields not mentioned below do not need to be updated" + + if resolver' == resolver + then logInfo "# No update to resolver is needed" + else do + logInfo "# Frozen version of resolver" + B.putStr $ Yaml.encode $ object ["resolver" .= resolver'] + + if deps' == deps + then logInfo "# No update to extra-deps is needed" + else do + logInfo "# Frozen version of extra-deps" + B.putStr $ Yaml.encode $ object ["extra-deps" .= deps'] + Nothing -> logWarn "No project was found: nothing to freeze" + +freeze (FreezeOpts FreezeSnapshot) = do + msnapshot <- view $ buildConfigL.to bcSnapshotDef.to sdSnapshot + case msnapshot of + Just (snap, _) -> do + snap' <- completeSnapshot snap + if snap' == snap + then + logInfo "No freezing is required for the snapshot of this project" + else + liftIO $ B.putStr $ Yaml.encode snap' + Nothing -> + logWarn "No snapshot was found: nothing to freeze" diff --git a/src/Stack/GhcPkg.hs b/src/Stack/GhcPkg.hs index f15fe9e354..67c93c891d 100644 --- a/src/Stack/GhcPkg.hs +++ b/src/Stack/GhcPkg.hs @@ -27,16 +27,14 @@ import 
qualified Data.ByteString.Lazy as BL import Data.List import qualified Data.Text as T import qualified Data.Text.Encoding as T +import Distribution.Version (mkVersion) import Path (parent, mkRelFile, ()) import Path.Extra (toFilePathNoTrailingSep) import Path.IO import Stack.Constants import Stack.Types.Build import Stack.Types.GhcPkgId -import Stack.Types.PackageIdentifier import Stack.Types.Compiler -import Stack.Types.PackageName -import Stack.Types.Version import System.FilePath (searchPathSeparator) import RIO.Process @@ -150,12 +148,12 @@ findGhcPkgVersion :: (HasProcessContext env, HasLogFunc env) findGhcPkgVersion wc pkgDbs name = do mv <- findGhcPkgField wc pkgDbs (packageNameString name) "version" case mv of - Just !v -> return (parseVersion v) + Just !v -> return (parseVersion $ T.unpack v) _ -> return Nothing unregisterGhcPkgId :: (HasProcessContext env, HasLogFunc env) => WhichCompiler - -> CompilerVersion 'CVActual + -> ActualCompiler -> Path Abs Dir -- ^ package database -> GhcPkgId -> PackageIdentifier @@ -169,7 +167,7 @@ unregisterGhcPkgId wc cv pkgDb gid ident = do -- TODO ideally we'd tell ghc-pkg a GhcPkgId instead args = "unregister" : "--user" : "--force" : (case cv of - GhcVersion v | v < $(mkVersion "7.9") -> + ACGhc v | v < mkVersion [7, 9] -> [packageIdentifierString ident] _ -> ["--ipid", ghcPkgIdString gid]) diff --git a/src/Stack/Ghci.hs b/src/Stack/Ghci.hs index aafcc9bfac..33d6b3e8af 100644 --- a/src/Stack/Ghci.hs +++ b/src/Stack/Ghci.hs @@ -45,11 +45,8 @@ import Stack.PrettyPrint import Stack.Types.Build import Stack.Types.Compiler import Stack.Types.Config -import Stack.Types.FlagName import Stack.Types.NamedComponent import Stack.Types.Package -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName import Stack.Types.Runner import System.IO (putStrLn) import System.IO.Temp (getCanonicalTemporaryDirectory) @@ -178,7 +175,7 @@ ghci opts@GhciOpts{..} = do figureOutMainFile bopts mainIsTargets localTargets pkgs0 -- Build 
required dependencies and setup local packages. stackYaml <- view stackYamlL - buildDepsAndInitialSteps opts (map (packageNameText . fst) localTargets) + buildDepsAndInitialSteps opts (map (T.pack . packageNameString . fst) localTargets) targetWarnings stackYaml localTargets nonLocalTargets mfileTargets -- Load the list of modules _after_ building, to catch changes in -- unlisted dependencies (#1180) @@ -295,7 +292,7 @@ getAllLocalTargets GhciOpts{..} targets0 mainIsTargets sourceMap = do then return directlyWanted else do let extraList = - mconcat $ intersperse ", " (map (RIO.display . fst) extraLoadDeps) + mconcat $ intersperse ", " (map (fromString . packageNameString . fst) extraLoadDeps) if ghciLoadLocalDeps then logInfo $ "The following libraries will also be loaded into GHCi because " <> @@ -336,8 +333,8 @@ buildDepsAndInitialSteps GhciOpts{..} targets0 = do checkAdditionalPackages :: MonadThrow m => [String] -> m [PackageName] checkAdditionalPackages pkgs = forM pkgs $ \name -> do - let mres = (packageIdentifierName <$> parsePackageIdentifierFromString name) - <|> parsePackageNameFromString name + let mres = (pkgName <$> parsePackageIdentifier name) + <|> parsePackageNameThrowing name maybe (throwM $ InvalidPackageOption name) return mres runGhci @@ -388,7 +385,7 @@ runGhci GhciOpts{..} targets mainFile pkgs extraFiles exposePackages = do , "-hidir=" <> toFilePathNoTrailingSep oiDir ] logInfo $ "Configuring GHCi with the following packages: " <> - mconcat (intersperse ", " (map (RIO.display . ghciPkgName) pkgs)) + mconcat (intersperse ", " (map (fromString . packageNameString . ghciPkgName) pkgs)) let execGhci extras = do menv <- liftIO $ configProcessContextSettings config defaultEnvSettings withProcessContext menv $ exec @@ -545,7 +542,7 @@ figureOutMainFile bopts mainIsTargets targets0 packages = do renderCandidate c@(pkgName,namedComponent,mainIs) = let candidateIndex = T.pack . show . (+1) . fromMaybe 0 . elemIndex c in candidateIndex candidates <> ". 
Package `" <> - packageNameText pkgName <> + T.pack (packageNameString pkgName) <> "' component " <> renderComp namedComponent <> " with main-is file: " <> @@ -578,9 +575,9 @@ figureOutMainFile bopts mainIsTargets targets0 packages = do CTest name -> "test:" <> name CBench name -> "bench:" <> name sampleTargetArg (pkg,comp,_) = - packageNameText pkg <> ":" <> renderComp comp + T.pack (packageNameString pkg) <> ":" <> renderComp comp sampleMainIsArg (pkg,comp,_) = - "--main-is " <> packageNameText pkg <> ":" <> renderComp comp + "--main-is " <> T.pack (packageNameString pkg) <> ":" <> renderComp comp loadGhciPkgDescs :: HasEnvConfig env @@ -616,11 +613,11 @@ loadGhciPkgDesc buildOptsCLI name cabalfp target = do -- wouldn't have figured out the cabalfp already. In the future: -- retain that GenericPackageDescription in the relevant data -- structures to avoid reparsing. - (gpkgdesc, _cabalfp) <- readPackageUnresolvedDir (parent cabalfp) True + (gpkgdesc, _cabalfp) <- loadCabalFilePath (parent cabalfp) YesPrintWarnings -- Source the package's *.buildinfo file created by configure if any. See -- https://www.haskell.org/cabal/users-guide/developing-packages.html#system-dependent-parameters - buildinfofp <- parseRelFile (T.unpack (packageNameText name) ++ ".buildinfo") + buildinfofp <- parseRelFile (packageNameString name ++ ".buildinfo") hasDotBuildinfo <- doesFileExist (parent cabalfp buildinfofp) let mbuildinfofp | hasDotBuildinfo = Just (parent cabalfp buildinfofp) @@ -822,7 +819,7 @@ targetWarnings stackYaml localTargets nonLocalTargets mfileTargets = do unless (null nonLocalTargets) $ prettyWarnL [ flow "Some targets" - , parens $ fillSep $ punctuate "," $ map (style Good . display) nonLocalTargets + , parens $ fillSep $ punctuate "," $ map (style Good . fromString . packageNameString) nonLocalTargets , flow "are not local packages, and so cannot be directly loaded." 
, flow "In future versions of stack, this might be supported - see" , style Url "https://github.com/commercialhaskell/stack/issues/1441" @@ -869,7 +866,7 @@ getExtraLoadDeps loadAllDeps sourceMap targets = getDeps :: PackageName -> [PackageName] getDeps name = case M.lookup name sourceMap of - Just (PSFiles lp _) -> M.keys (packageDeps (lpPackage lp)) -- FIXME just Local? + Just (PSFilePath lp _) -> M.keys (packageDeps (lpPackage lp)) -- FIXME just Local? _ -> [] go :: PackageName -> State (Map PackageName (Maybe (Path Abs File, Target))) Bool go name = do @@ -877,7 +874,7 @@ getExtraLoadDeps loadAllDeps sourceMap targets = case (M.lookup name cache, M.lookup name sourceMap) of (Just (Just _), _) -> return True (Just Nothing, _) | not loadAllDeps -> return False - (_, Just (PSFiles lp _)) -> do + (_, Just (PSFilePath lp _)) -> do let deps = M.keys (packageDeps (lpPackage lp)) shouldLoad <- liftM or $ mapM go deps if shouldLoad @@ -887,7 +884,7 @@ getExtraLoadDeps loadAllDeps sourceMap targets = else do modify (M.insert name Nothing) return False - (_, Just PSIndex{}) -> return loadAllDeps + (_, Just PSRemote{}) -> return loadAllDeps (_, _) -> return False setScriptPerms :: MonadIO m => FilePath -> m () diff --git a/src/Stack/Hoogle.hs b/src/Stack/Hoogle.hs index 0718f60ed7..2e24ceceeb 100644 --- a/src/Stack/Hoogle.hs +++ b/src/Stack/Hoogle.hs @@ -1,7 +1,6 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TemplateHaskell #-} -- | A wrapper around hoogle. 
module Stack.Hoogle @@ -11,18 +10,14 @@ module Stack.Hoogle import Stack.Prelude import qualified Data.ByteString.Lazy.Char8 as BL8 import Data.Char (isSpace) -import Data.List (find) -import qualified Data.Set as Set import qualified Data.Text as T +import Distribution.Types.PackageName (mkPackageName) +import Distribution.Version (mkVersion) import Path (parseAbsFile) import Path.IO hiding (findExecutable) import qualified Stack.Build -import Stack.Fetch import Stack.Runners import Stack.Types.Config -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName -import Stack.Types.Version import System.Exit import RIO.Process @@ -78,43 +73,32 @@ hoogleCmd (args,setup,rebuild,startServer) go = withBuildConfig go $ do defaultBuildOptsCLI)) (\(_ :: ExitCode) -> return ())) - hooglePackageName = $(mkPackageName "hoogle") - hoogleMinVersion = $(mkVersion "5.0") + hooglePackageName = mkPackageName "hoogle" + hoogleMinVersion = mkVersion [5, 0] hoogleMinIdent = PackageIdentifier hooglePackageName hoogleMinVersion installHoogle :: RIO EnvConfig () installHoogle = do - hooglePackageIdentifier <- - do (_,_,resolved) <- - resolvePackagesAllowMissing + hooglePackageIdentifier <- do + mversion <- getLatestHackageVersion hooglePackageName UsePreferredVersions - -- FIXME this Nothing means "do not follow any - -- specific snapshot", which matches old - -- behavior. However, since introducing the - -- logic to pin a name to a package in a - -- snapshot, we may arguably want to ensure - -- that we're grabbing the version of Hoogle - -- present in the snapshot currently being - -- used. - Nothing + -- FIXME For a while, we've been following the logic of + -- taking the latest Hoogle version available. However, we + -- may want to instead grab the version of Hoogle present in + -- the snapshot current being used instead. 
+ pure $ fromMaybe (Left hoogleMinIdent) $ do + pir@(PackageIdentifierRevision _ ver _) <- mversion + guard $ ver >= hoogleMinVersion + Just $ Right pir - mempty - (Set.fromList [hooglePackageName]) - return - (case find - ((== hooglePackageName) . packageIdentifierName) - (map rpIdent resolved) of - Just ident@(PackageIdentifier _ ver) - | ver >= hoogleMinVersion -> Right ident - _ -> Left hoogleMinIdent) case hooglePackageIdentifier of Left{} -> logInfo $ "Minimum " <> - display hoogleMinIdent <> + fromString (packageIdentifierString hoogleMinIdent) <> " is not in your index. Installing the minimum version." Right ident -> logInfo $ "Minimum version is " <> - display hoogleMinIdent <> + fromString (packageIdentifierString hoogleMinIdent) <> ". Found acceptable " <> display ident <> " in your index, installing it." @@ -129,11 +113,12 @@ hoogleCmd (args,setup,rebuild,startServer) go = withBuildConfig go $ do Nothing lk defaultBuildOptsCLI - { boptsCLITargets = [ packageIdentifierText - (either - id - id - hooglePackageIdentifier)] + { boptsCLITargets = + pure $ + either + (T.pack . packageIdentifierString) + (utf8BuilderToText . display) + hooglePackageIdentifier })) (\(e :: ExitCode) -> case e of @@ -173,7 +158,7 @@ hoogleCmd (args,setup,rebuild,startServer) go = withBuildConfig go $ do ] return $ case result of Left err -> unexpectedResult $ T.pack (show err) - Right bs -> case parseVersionFromString (takeWhile (not . isSpace) (BL8.unpack bs)) of + Right bs -> case parseVersion (takeWhile (not . isSpace) (BL8.unpack bs)) of Nothing -> unexpectedResult $ T.pack (BL8.unpack bs) Just ver | ver >= hoogleMinVersion -> Right hooglePath @@ -181,7 +166,7 @@ hoogleCmd (args,setup,rebuild,startServer) go = withBuildConfig go $ do [ "Installed Hoogle is too old, " , T.pack hooglePath , " is version " - , versionText ver + , T.pack $ versionString ver , " but >= 5.0 is required." 
] case eres of diff --git a/src/Stack/IDE.hs b/src/Stack/IDE.hs index ea56890533..03bceec4ed 100644 --- a/src/Stack/IDE.hs +++ b/src/Stack/IDE.hs @@ -14,7 +14,6 @@ import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Text as T import Stack.Config (getLocalPackages) -import Stack.Package (readPackageUnresolvedDir, gpdPackageName) import Stack.Prelude import Stack.Types.Config import Stack.Types.NamedComponent @@ -27,8 +26,8 @@ listPackages = do -- the directory. packageDirs <- liftM (map lpvRoot . Map.elems . lpProject) getLocalPackages forM_ packageDirs $ \dir -> do - (gpd, _) <- readPackageUnresolvedDir dir False - (logInfo . display) (gpdPackageName gpd) + (gpd, _) <- loadCabalFilePath dir NoPrintWarnings + (logInfo . fromString . packageNameString) (gpdPackageName gpd) -- | List the targets in the current project. listTargets :: HasEnvConfig env => RIO env () @@ -43,5 +42,5 @@ listTargets = toNameAndComponent (Map.toList rawLocals)))) where - toNameAndComponent (pkgName,view') = - map (pkgName, ) (Set.toList (lpvComponents view')) + toNameAndComponent (pkgName',view') = + map (pkgName', ) (Set.toList (lpvComponents view')) diff --git a/src/Stack/Init.hs b/src/Stack/Init.hs index df066a9123..b67bb38a78 100644 --- a/src/Stack/Init.hs +++ b/src/Stack/Init.hs @@ -39,9 +39,6 @@ import Stack.Solver import Stack.Types.Build import Stack.Types.BuildPlan import Stack.Types.Config -import Stack.Types.FlagName -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName import Stack.Types.Resolver import Stack.Types.Version import qualified System.FilePath as FP @@ -75,13 +72,7 @@ initProject whichCmd currDir initOpts mresolver = do cabaldirs <- Set.toList . Set.unions <$> mapM find dirs' (bundle, dupPkgs) <- cabalPackagesCheck cabaldirs noPkgMsg Nothing - (sd, flags, extraDeps, rbundle) <- getDefaultResolver whichCmd dest initOpts - mresolver bundle - - -- Kind of inefficient, since we've already parsed this value. 
But - -- better to reparse in this one case than carry the unneeded data - -- around everywhere in the codebase. - resolver <- parseCustomLocation (Just (parent dest)) (void (sdResolver sd)) + (sd, flags, extraDeps, rbundle) <- getDefaultResolver whichCmd initOpts mresolver bundle let ignored = Map.difference bundle rbundle dupPkgMsg @@ -114,16 +105,19 @@ initProject whichCmd currDir initOpts mresolver = do userMsg = makeUserMsg [dupPkgMsg, missingPkgMsg, extraDepMsg] gpds = Map.elems $ fmap snd rbundle - p = Project + + deps <- for (Map.toList extraDeps) $ \(n, v) -> + PLImmutable <$> completePackageLocation (PLIHackage (PackageIdentifierRevision n v CFILatest) Nothing) + + let p = Project { projectUserMsg = if userMsg == "" then Nothing else Just userMsg - , projectPackages = pkgs - , projectDependencies = map - (\(n, v) -> PLIndex $ PackageIdentifierRevision (PackageIdentifier n v) CFILatest) - (Map.toList extraDeps) + , projectPackages = RelFilePath . T.pack <$> pkgs + , projectDependencies = deps , projectFlags = removeSrcPkgDefaultFlags gpds flags - , projectResolver = resolver + , projectResolver = sdResolver sd , projectCompiler = Nothing , projectExtraPackageDBs = [] + , projectCurator = Nothing } makeRelDir dir = @@ -136,7 +130,7 @@ initProject whichCmd currDir initOpts mresolver = do makeRel = fmap toFilePath . makeRelativeToCurrentDir pkgs = map toPkg $ Map.elems (fmap (parent . 
fst) rbundle) - toPkg dir = PLFilePath $ makeRelDir dir + toPkg dir = makeRelDir dir indent t = T.unlines $ fmap (" " <>) (T.lines t) logInfo $ "Initialising configuration using resolver: " <> display (sdResolverName sd) @@ -290,8 +284,7 @@ renderStackYaml p ignoredPackages dupPackages = ] footerHelp = - let major = toCabalVersion - $ toMajorVersion $ fromCabalVersion $ C.mkVersion' Meta.version + let major = toMajorVersion $ C.mkVersion' Meta.version in commentHelp [ "Control whether we use the GHC we find on the path" , "system-ghc: true" @@ -336,7 +329,6 @@ getSnapshots' = do getDefaultResolver :: (HasConfig env, HasGHCVariant env) => WhichSolverCmd - -> Path Abs File -- ^ stack.yaml -> InitOpts -> Maybe AbstractResolver -> Map PackageName (Path Abs File, C.GenericPackageDescription) @@ -350,16 +342,15 @@ getDefaultResolver -- , Flags for src packages and extra deps -- , Extra dependencies -- , Src packages actually considered) -getDefaultResolver whichCmd stackYaml initOpts mresolver bundle = do - sd <- maybe selectSnapResolver (makeConcreteResolver (Just root) >=> loadResolver) mresolver - getWorkingResolverPlan whichCmd stackYaml initOpts bundle sd +getDefaultResolver whichCmd initOpts mresolver bundle = do + sd <- maybe selectSnapResolver (makeConcreteResolver >=> flip loadResolver Nothing) mresolver + getWorkingResolverPlan whichCmd initOpts bundle sd where - root = parent stackYaml -- TODO support selecting best across regular and custom snapshots selectSnapResolver = do let gpds = Map.elems (fmap snd bundle) snaps <- fmap getRecommendedSnapshots getSnapshots' - (s, r) <- selectBestSnapshot (parent stackYaml) gpds snaps + (s, r) <- selectBestSnapshot gpds snaps case r of BuildPlanCheckFail {} | not (omitPackages initOpts) -> throwM (NoMatchingSnapshot whichCmd snaps) @@ -368,7 +359,6 @@ getDefaultResolver whichCmd stackYaml initOpts mresolver bundle = do getWorkingResolverPlan :: (HasConfig env, HasGHCVariant env) => WhichSolverCmd - -> Path Abs File -- ^ 
stack.yaml -> InitOpts -> Map PackageName (Path Abs File, C.GenericPackageDescription) -- ^ Src package name: cabal dir, cabal package description @@ -382,12 +372,12 @@ getWorkingResolverPlan -- , Flags for src packages and extra deps -- , Extra dependencies -- , Src packages actually considered) -getWorkingResolverPlan whichCmd stackYaml initOpts bundle sd = do +getWorkingResolverPlan whichCmd initOpts bundle sd = do logInfo $ "Selected resolver: " <> display (sdResolverName sd) go bundle where go info = do - eres <- checkBundleResolver whichCmd stackYaml initOpts info sd + eres <- checkBundleResolver whichCmd initOpts info sd -- if some packages failed try again using the rest case eres of Right (f, edeps)-> return (sd, f, edeps, info) @@ -403,13 +393,13 @@ getWorkingResolverPlan whichCmd stackYaml initOpts bundle sd = do if length ignored > 1 then do logWarn "*** Ignoring packages:" - logWarn $ display $ indent $ showItems ignored + logWarn $ display $ indent $ showItems $ map packageNameString ignored else logWarn $ "*** Ignoring package: " - <> display + <> fromString (case ignored of [] -> error "getWorkingResolverPlan.head" - x:_ -> x) + x:_ -> packageNameString x) go available where @@ -420,7 +410,6 @@ getWorkingResolverPlan whichCmd stackYaml initOpts bundle sd = do checkBundleResolver :: (HasConfig env, HasGHCVariant env) => WhichSolverCmd - -> Path Abs File -- ^ stack.yaml -> InitOpts -> Map PackageName (Path Abs File, C.GenericPackageDescription) -- ^ Src package name: cabal dir, cabal package description @@ -428,14 +417,14 @@ checkBundleResolver -> RIO env (Either [PackageName] ( Map PackageName (Map FlagName Bool) , Map PackageName Version)) -checkBundleResolver whichCmd stackYaml initOpts bundle sd = do - result <- checkSnapBuildPlanActual (parent stackYaml) gpds Nothing sd +checkBundleResolver whichCmd initOpts bundle sd = do + result <- checkSnapBuildPlan gpds Nothing sd Nothing case result of BuildPlanCheckOk f -> return $ Right (f, Map.empty) 
BuildPlanCheckPartial f e -> do shouldUseSolver <- case (resolver, initOpts) of (_, InitOpts { useSolver = True }) -> return True - (ResolverCompiler _, _) -> do + (SLCompiler _, _) -> do logInfo "Using solver because a compiler resolver was specified." return True _ -> return False @@ -471,8 +460,7 @@ checkBundleResolver whichCmd stackYaml initOpts bundle sd = do let cabalDirs = map parent (Map.elems (fmap fst bundle)) srcConstraints = mergeConstraints (gpdPackages gpds) flags - eresult <- solveResolverSpec stackYaml cabalDirs - (sd, srcConstraints, Map.empty) + eresult <- solveResolverSpec cabalDirs (sd, srcConstraints, Map.empty) case eresult of Right (src, ext) -> return $ Right (fmap snd (Map.union src ext), fmap fst ext) @@ -489,7 +477,7 @@ checkBundleResolver whichCmd stackYaml initOpts bundle sd = do -- set of packages. findOneIndependent packages flags = do platform <- view platformL - (compiler, _) <- getResolverConstraints Nothing stackYaml sd + (compiler, _) <- getResolverConstraints Nothing sd let getGpd pkg = snd (fromMaybe (error "findOneIndependent: getGpd") (Map.lookup pkg bundle)) getFlags pkg = fromMaybe (error "fromOneIndependent: getFlags") (Map.lookup pkg flags) deps pkg = gpdPackageDeps (getGpd pkg) compiler platform diff --git a/src/Stack/Ls.hs b/src/Stack/Ls.hs index 4187721934..0f9405555f 100644 --- a/src/Stack/Ls.hs +++ b/src/Stack/Ls.hs @@ -17,7 +17,7 @@ import Control.Monad (when) import Data.Aeson import Data.Array.IArray ((//), elems) import Stack.DefaultStyles (defaultStyles) -import Stack.Prelude +import Stack.Prelude hiding (Snapshot (..)) import Stack.Types.Runner import qualified Data.Aeson.Types as A import qualified Data.List as L @@ -26,8 +26,7 @@ import qualified Data.Text as T import qualified Data.Text.IO as T import Data.Typeable (Typeable) import qualified Data.Vector as V -import Network.HTTP.StackClient (httpJSON, getGlobalManager, addRequestHeader, getResponseBody, parseRequest, - setRequestManager, hAccept) +import 
Network.HTTP.StackClient (httpJSON, addRequestHeader, getResponseBody, parseRequest, hAccept) import qualified Options.Applicative as OA import Options.Applicative ((<|>), idm) import Options.Applicative.Builder.Extra (boolFlags) @@ -250,11 +249,8 @@ handleRemote => LsCmdOpts -> m () handleRemote lsOpts = do req <- liftIO $ parseRequest urlInfo - mgr <- liftIO getGlobalManager isStdoutTerminal <- view terminalL - let req' = - setRequestManager mgr $ - addRequestHeader hAccept "application/json" req + let req' = addRequestHeader hAccept "application/json" req result <- httpJSON req' let snapData = getResponseBody result case lsView lsOpts of diff --git a/src/Stack/New.hs b/src/Stack/New.hs index 99b4631df2..b7a1fb664c 100644 --- a/src/Stack/New.hs +++ b/src/Stack/New.hs @@ -37,7 +37,6 @@ import Path.IO import Stack.Constants import Stack.Constants.Config import Stack.Types.Config -import Stack.Types.PackageName import Stack.Types.TemplateName import RIO.Process import qualified Text.Mustache as Mustache @@ -100,7 +99,7 @@ new opts forceOverwrite = do logInfo (loading <> " template \"" <> display (templateName template) <> "\" to create project \"" <> - display (packageNameText project) <> + fromString (packageNameString project) <> "\" in " <> if bare then "the current directory" else fromString (toFilePath (dirname absDir)) <> @@ -198,9 +197,9 @@ applyTemplate project template nonceParams dir templateText = do return $ T.pack . 
show $ year let context = M.unions [nonceParams, nameParams, configParams, yearParam] where - nameAsVarId = T.replace "-" "_" $ packageNameText project - nameAsModule = T.filter (/= '-') $ T.toTitle $ packageNameText project - nameParams = M.fromList [ ("name", packageNameText project) + nameAsVarId = T.replace "-" "_" $ T.pack $ packageNameString project + nameAsModule = T.filter (/= '-') $ T.toTitle $ T.pack $ packageNameString project + nameParams = M.fromList [ ("name", T.pack $ packageNameString project) , ("name-as-varid", nameAsVarId) , ("name-as-module", nameAsModule) ] configParams = configTemplateParams config diff --git a/src/Stack/Nix.hs b/src/Stack/Nix.hs index 460dc23f13..ea4cdbe657 100644 --- a/src/Stack/Nix.hs +++ b/src/Stack/Nix.hs @@ -25,7 +25,6 @@ import Stack.Types.Config import Stack.Types.Docker import Stack.Types.Nix import Stack.Types.Runner -import Stack.Types.Compiler import System.Environment (getArgs,getExecutablePath,lookupEnv) import qualified System.FilePath as F import RIO.Process (processContextL, exec) @@ -35,7 +34,7 @@ import RIO.Process (processContextL, exec) reexecWithOptionalShell :: HasConfig env => Maybe (Path Abs Dir) - -> IO (CompilerVersion 'CVWanted) + -> IO WantedCompiler -> IO () -> RIO env () reexecWithOptionalShell mprojectRoot getCompilerVersion inner = @@ -59,7 +58,7 @@ reexecWithOptionalShell mprojectRoot getCompilerVersion inner = runShellAndExit :: HasConfig env => Maybe (Path Abs Dir) - -> IO (CompilerVersion 'CVWanted) + -> IO WantedCompiler -> RIO env (String, [String]) -> RIO env () runShellAndExit mprojectRoot getCompilerVersion getCmdArgs = do diff --git a/src/Stack/Options/BuildParser.hs b/src/Stack/Options/BuildParser.hs index 5234f0530a..9a0529fdcb 100644 --- a/src/Stack/Options/BuildParser.hs +++ b/src/Stack/Options/BuildParser.hs @@ -11,8 +11,6 @@ import Stack.Options.Completion import Stack.Options.PackageParser (readFlag) import Stack.Prelude import Stack.Types.Config -import Stack.Types.FlagName 
-import Stack.Types.PackageName import Stack.Types.Version -- | Parser for CLI-only build arguments diff --git a/src/Stack/Options/Completion.hs b/src/Stack/Options/Completion.hs index 27983b9732..426cf297f7 100644 --- a/src/Stack/Options/Completion.hs +++ b/src/Stack/Options/Completion.hs @@ -26,9 +26,7 @@ import Stack.Runners (loadConfigWithOpts) import Stack.Prelude hiding (lift) import Stack.Setup import Stack.Types.Config -import Stack.Types.FlagName import Stack.Types.NamedComponent -import Stack.Types.PackageName import System.Process (readProcess) import Language.Haskell.TH.Syntax (runIO, lift) @@ -57,8 +55,8 @@ buildConfigCompleter inner = mkCompleter $ \inputRaw -> do -- If it looks like a flag, skip this more costly completion. ('-': _) -> return [] _ -> do - let go = (globalOptsFromMonoid False mempty) - { globalLogLevel = LevelOther "silent" } + go' <- globalOptsFromMonoid False mempty + let go = go' { globalLogLevel = LevelOther "silent" } loadConfigWithOpts go $ \lc -> do bconfig <- liftIO $ lcLoadBuildConfig lc (globalCompiler go) envConfig <- runRIO bconfig (setupEnv Nothing) @@ -94,7 +92,7 @@ flagCompleter = buildConfigCompleter $ \input -> do in (if flagEnabled name fl then "-" else "") ++ flname flagEnabled name fl = fromMaybe (C.flagDefault fl) $ - Map.lookup (fromCabalFlagName (C.flagName fl)) $ + Map.lookup (C.flagName fl) $ Map.findWithDefault Map.empty name (bcFlags bconfig) return $ filter (input `isPrefixOf`) $ case input of diff --git a/src/Stack/Options/ConfigParser.hs b/src/Stack/Options/ConfigParser.hs index 7a9b284e8b..36ef92ad45 100644 --- a/src/Stack/Options/ConfigParser.hs +++ b/src/Stack/Options/ConfigParser.hs @@ -122,7 +122,7 @@ configOptsParser currentDir hide0 = "skip-msys" "skipping the local MSYS installation (Windows only)" hide - <*> optionalFirst (strOption + <*> optionalFirst ((currentDir FilePath.) 
<$> strOption ( long "local-bin-path" <> metavar "DIR" <> completer dirCompleter diff --git a/src/Stack/Options/FreezeParser.hs b/src/Stack/Options/FreezeParser.hs new file mode 100644 index 0000000000..65c2068aa9 --- /dev/null +++ b/src/Stack/Options/FreezeParser.hs @@ -0,0 +1,16 @@ +{-# LANGUAGE NoImplicitPrelude #-} + +module Stack.Options.FreezeParser where + +import Data.Semigroup ((<>)) +import Options.Applicative +import Stack.Freeze + + +-- | Parser for arguments to `stack freeze` +freezeOptsParser :: Parser FreezeOpts +freezeOptsParser = + FreezeOpts <$> flag FreezeProject FreezeSnapshot + ( long "snapshot" + <> short 's' + <> help "Freeze snapshot definition instead of project's stack.yaml" ) diff --git a/src/Stack/Options/GlobalParser.hs b/src/Stack/Options/GlobalParser.hs index 73ce7b7a0f..997fbcdfe7 100644 --- a/src/Stack/Options/GlobalParser.hs +++ b/src/Stack/Options/GlobalParser.hs @@ -5,6 +5,7 @@ module Stack.Options.GlobalParser where import Options.Applicative import Options.Applicative.Builder.Extra +import Path.IO (getCurrentDir) import qualified Stack.Docker as Docker import Stack.Init import Stack.Prelude @@ -63,19 +64,24 @@ globalOptsParser currentDir kind defLogLevel = hide0 = kind /= OuterGlobalOpts -- | Create GlobalOpts from GlobalOptsMonoid. 
-globalOptsFromMonoid :: Bool -> GlobalOptsMonoid -> GlobalOpts -globalOptsFromMonoid defaultTerminal GlobalOptsMonoid{..} = GlobalOpts +globalOptsFromMonoid :: MonadIO m => Bool -> GlobalOptsMonoid -> m GlobalOpts +globalOptsFromMonoid defaultTerminal GlobalOptsMonoid{..} = do + resolver <- for (getFirst globalMonoidResolver) $ \ur -> do + cwd <- getCurrentDir + resolvePaths (Just cwd) ur + pure GlobalOpts { globalReExecVersion = getFirst globalMonoidReExecVersion , globalDockerEntrypoint = getFirst globalMonoidDockerEntrypoint , globalLogLevel = fromFirst defaultLogLevel globalMonoidLogLevel , globalTimeInLog = fromFirst True globalMonoidTimeInLog , globalConfigMonoid = globalMonoidConfigMonoid - , globalResolver = getFirst globalMonoidResolver + , globalResolver = resolver , globalCompiler = getFirst globalMonoidCompiler , globalTerminal = fromFirst defaultTerminal globalMonoidTerminal , globalStylesUpdate = globalMonoidStyles , globalTermWidth = getFirst globalMonoidTermWidth - , globalStackYaml = maybe SYLDefault SYLOverride $ getFirst globalMonoidStackYaml } + , globalStackYaml = maybe SYLDefault SYLOverride $ getFirst globalMonoidStackYaml + } initOptsParser :: Parser InitOpts initOptsParser = diff --git a/src/Stack/Options/PackageParser.hs b/src/Stack/Options/PackageParser.hs index a2b7968eb1..e515c9da67 100644 --- a/src/Stack/Options/PackageParser.hs +++ b/src/Stack/Options/PackageParser.hs @@ -5,8 +5,6 @@ import qualified Data.Map as Map import Options.Applicative import Options.Applicative.Types (readerAsk) import Stack.Prelude -import Stack.Types.FlagName -import Stack.Types.PackageName -- | Parser for package:[-]flag readFlag :: ReadM (Map (Maybe PackageName) (Map FlagName Bool)) @@ -15,7 +13,7 @@ readFlag = do case break (== ':') s of (pn, ':':mflag) -> do pn' <- - case parsePackageNameFromString pn of + case parsePackageName pn of Nothing | pn == "*" -> return Nothing | otherwise -> readerError $ "Invalid package name: " ++ pn @@ -25,7 +23,7 @@ 
readFlag = do '-':x -> (False, x) _ -> (True, mflag) flagN <- - case parseFlagNameFromString flagS of + case parseFlagName flagS of Nothing -> readerError $ "Invalid flag name: " ++ flagS Just x -> return x return $ Map.singleton pn' $ Map.singleton flagN b diff --git a/src/Stack/Options/ResolverParser.hs b/src/Stack/Options/ResolverParser.hs index 242aa6d7f9..c80475e64f 100644 --- a/src/Stack/Options/ResolverParser.hs +++ b/src/Stack/Options/ResolverParser.hs @@ -7,11 +7,10 @@ import Options.Applicative import Options.Applicative.Types (readerAsk) import Stack.Options.Utils import Stack.Prelude -import Stack.Types.Compiler import Stack.Types.Resolver -- | Parser for the resolver -abstractResolverOptsParser :: Bool -> Parser AbstractResolver +abstractResolverOptsParser :: Bool -> Parser (Unresolved AbstractResolver) abstractResolverOptsParser hide = option readAbstractResolver (long "resolver" <> @@ -19,7 +18,7 @@ abstractResolverOptsParser hide = help "Override resolver in project file" <> hideMods hide) -compilerOptsParser :: Bool -> Parser (CompilerVersion 'CVWanted) +compilerOptsParser :: Bool -> Parser WantedCompiler compilerOptsParser hide = option readCompilerVersion (long "compiler" <> @@ -27,9 +26,9 @@ compilerOptsParser hide = help "Use the specified compiler" <> hideMods hide) -readCompilerVersion :: ReadM (CompilerVersion 'CVWanted) +readCompilerVersion :: ReadM WantedCompiler readCompilerVersion = do s <- readerAsk - case parseCompilerVersion (T.pack s) of - Nothing -> readerError $ "Failed to parse compiler: " ++ s - Just x -> return x + case parseWantedCompiler (T.pack s) of + Left{} -> readerError $ "Failed to parse compiler: " ++ s + Right x -> return x diff --git a/src/Stack/Package.hs b/src/Stack/Package.hs index 957c8942cc..b5a12d62a4 100644 --- a/src/Stack/Package.hs +++ b/src/Stack/Package.hs @@ -16,8 +16,6 @@ module Stack.Package (readPackageDir - ,readPackageUnresolvedDir - ,readPackageUnresolvedIndex ,readPackageDescriptionDir 
,readDotBuildinfo ,resolvePackage @@ -31,13 +29,9 @@ module Stack.Package ,PackageException (..) ,resolvePackageDescription ,packageDependencies - ,cabalFilePackageId - ,gpdPackageIdentifier - ,gpdPackageName - ,gpdVersion) + ,mkLocalPackageView) where -import qualified Data.ByteString as BS import qualified Data.ByteString.Lazy.Char8 as CL8 import Data.List (isSuffixOf, isPrefixOf, unzip) import Data.Maybe (maybe) @@ -54,8 +48,6 @@ import Distribution.Package hiding (Package,PackageName,packageName,pa import qualified Distribution.PackageDescription as D import Distribution.PackageDescription hiding (FlagName) import Distribution.PackageDescription.Parsec -import qualified Distribution.PackageDescription.Parsec as D -import Distribution.Parsec.Common (PWarning (..), showPos) import Distribution.Simple.Utils import Distribution.System (OS (..), Arch, Platform (..)) import qualified Distribution.Text as D @@ -66,18 +58,15 @@ import qualified Distribution.Types.LegacyExeDependency as Cabal import Distribution.Types.MungedPackageName import qualified Distribution.Types.UnqualComponentName as Cabal import qualified Distribution.Verbosity as D +import Distribution.Version (mkVersion) import Lens.Micro (lens) -import qualified Hpack import qualified Hpack.Config as Hpack import Path as FL import Path.Extra -import Path.Find import Path.IO hiding (findFiles) import Stack.Build.Installed import Stack.Constants import Stack.Constants.Config -import Stack.Fetch (loadFromIndex) -import Stack.PackageIndex (HasCabalLoader (..)) import Stack.Prelude hiding (Display (..)) import Stack.PrettyPrint import qualified Stack.PrettyPrint as PP (Style (Module)) @@ -85,12 +74,9 @@ import Stack.Types.Build import Stack.Types.BuildPlan (ExeName (..)) import Stack.Types.Compiler import Stack.Types.Config -import Stack.Types.FlagName import Stack.Types.GhcPkgId import Stack.Types.NamedComponent import Stack.Types.Package -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName 
import Stack.Types.Runner import Stack.Types.Version import qualified System.Directory as D @@ -111,111 +97,23 @@ instance HasLogFunc Ctx where instance HasRunner Ctx where runnerL = configL.runnerL instance HasConfig Ctx -instance HasCabalLoader Ctx where - cabalLoaderL = configL.cabalLoaderL +instance HasPantryConfig Ctx where + pantryConfigL = configL.pantryConfigL instance HasProcessContext Ctx where processContextL = configL.processContextL instance HasBuildConfig Ctx instance HasEnvConfig Ctx where envConfigL = lens ctxEnvConfig (\x y -> x { ctxEnvConfig = y }) --- | A helper function that performs the basic character encoding --- necessary. -rawParseGPD - :: MonadThrow m - => Either PackageIdentifierRevision (Path Abs File) - -> BS.ByteString - -> m ([PWarning], GenericPackageDescription) -rawParseGPD key bs = - case eres of - Left (mversion, errs) -> throwM $ PackageInvalidCabalFile key - (fromCabalVersion <$> mversion) - errs - warnings - Right gpkg -> return (warnings, gpkg) - where - (warnings, eres) = runParseResult $ parseGenericPackageDescription bs - --- | Read the raw, unresolved package information from a file. -readPackageUnresolvedDir - :: forall env. HasConfig env - => Path Abs Dir -- ^ directory holding the cabal file - -> Bool -- ^ print warnings? - -> RIO env (GenericPackageDescription, Path Abs File) -readPackageUnresolvedDir dir printWarnings = do - ref <- view $ runnerL.to runnerParsedCabalFiles - (_, m) <- readIORef ref - case M.lookup dir m of - Just x -> return x - Nothing -> do - cabalfp <- findOrGenerateCabalFile dir - bs <- liftIO $ BS.readFile $ toFilePath cabalfp - (warnings, gpd) <- rawParseGPD (Right cabalfp) bs - when printWarnings - $ mapM_ (prettyWarnL . 
toPretty (toFilePath cabalfp)) warnings - checkCabalFileName (gpdPackageName gpd) cabalfp - let ret = (gpd, cabalfp) - atomicModifyIORef' ref $ \(m1, m2) -> - ((m1, M.insert dir ret m2), ret) - where - toPretty :: String -> PWarning -> [Doc StyleAnn] - toPretty src (PWarning _type pos msg) = - [ flow "Cabal file warning in" - , fromString src <> "@" - , fromString (showPos pos) <> ":" - , flow msg - ] - - -- | Check if the given name in the @Package@ matches the name of the .cabal file - checkCabalFileName :: MonadThrow m => PackageName -> Path Abs File -> m () - checkCabalFileName name cabalfp = do - -- Previously, we just use parsePackageNameFromFilePath. However, that can - -- lead to confusing error messages. See: - -- https://github.com/commercialhaskell/stack/issues/895 - let expected = packageNameString name ++ ".cabal" - when (expected /= toFilePath (filename cabalfp)) - $ throwM $ MismatchedCabalName cabalfp name - -gpdPackageIdentifier :: GenericPackageDescription -> PackageIdentifier -gpdPackageIdentifier = fromCabalPackageIdentifier . D.package . D.packageDescription - -gpdPackageName :: GenericPackageDescription -> PackageName -gpdPackageName = packageIdentifierName . gpdPackageIdentifier - -gpdVersion :: GenericPackageDescription -> Version -gpdVersion = packageIdentifierVersion . gpdPackageIdentifier - --- | Read the 'GenericPackageDescription' from the given --- 'PackageIdentifierRevision'. -readPackageUnresolvedIndex - :: forall env. 
HasCabalLoader env - => PackageIdentifierRevision - -> RIO env GenericPackageDescription -readPackageUnresolvedIndex pir@(PackageIdentifierRevision pi' _) = do - ref <- view $ runnerL.to runnerParsedCabalFiles - (m, _) <- readIORef ref - case M.lookup pir m of - Just gpd -> return gpd - Nothing -> do - bs <- loadFromIndex pir - (_warnings, gpd) <- rawParseGPD (Left pir) bs - let foundPI = - fromCabalPackageIdentifier - $ D.package - $ D.packageDescription gpd - unless (pi' == foundPI) $ throwM $ MismatchedCabalIdentifier pir foundPI - atomicModifyIORef' ref $ \(m1, m2) -> - ((M.insert pir gpd m1, m2), gpd) - -- | Reads and exposes the package information readPackageDir :: forall env. HasConfig env => PackageConfig -> Path Abs Dir - -> Bool -- ^ print warnings from cabal file parsing? + -> PrintWarnings -> RIO env (Package, Path Abs File) readPackageDir packageConfig dir printWarnings = - first (resolvePackage packageConfig) <$> readPackageUnresolvedDir dir printWarnings + first (resolvePackage packageConfig) <$> loadCabalFilePath dir printWarnings -- | Get 'GenericPackageDescription' and 'PackageDescription' reading info -- from given directory. @@ -223,10 +121,10 @@ readPackageDescriptionDir :: forall env. HasConfig env => PackageConfig -> Path Abs Dir - -> Bool -- ^ print warnings? + -> PrintWarnings -> RIO env (GenericPackageDescription, PackageDescriptionPair) readPackageDescriptionDir config pkgDir printWarnings = do - (gdesc, _) <- readPackageUnresolvedDir pkgDir printWarnings + (gdesc, _) <- loadCabalFilePath pkgDir printWarnings return (gdesc, resolvePackageDescription config gdesc) -- | Read @.buildinfo@ ancillary files produced by some Setup.hs hooks. 
@@ -259,7 +157,7 @@ packageFromPackageDescription :: PackageConfig packageFromPackageDescription packageConfig pkgFlags (PackageDescriptionPair pkgNoMod pkg) = Package { packageName = name - , packageVersion = fromCabalVersion (pkgVersion pkgId) + , packageVersion = pkgVersion pkgId , packageLicense = licenseRaw pkg , packageDeps = deps , packageFiles = pkgFiles @@ -267,7 +165,7 @@ packageFromPackageDescription packageConfig pkgFlags (PackageDescriptionPair pkg , packageGhcOptions = packageConfigGhcOptions packageConfig , packageFlags = packageConfigFlags packageConfig , packageDefaultFlags = M.fromList - [(fromCabalFlagName (flagName flag), flagDefault flag) | flag <- pkgFlags] + [(flagName flag, flagDefault flag) | flag <- pkgFlags] , packageAllDeps = S.fromList (M.keys deps) , packageLibraries = let mlib = do @@ -300,8 +198,9 @@ packageFromPackageDescription packageConfig pkgFlags (PackageDescriptionPair pkg \sourceMap installedMap omitPkgs addPkgs cabalfp -> do (componentsModules,componentFiles,_,_) <- getPackageFiles pkgFiles cabalfp let internals = S.toList $ internalLibComponents $ M.keysSet componentsModules - excludedInternals <- mapM parsePackageName internals - mungedInternals <- mapM (parsePackageName . toInternalPackageMungedName) internals + excludedInternals <- mapM (parsePackageNameThrowing . T.unpack) internals + mungedInternals <- mapM (parsePackageNameThrowing . T.unpack . 
+ toInternalPackageMungedName) internals componentsOpts <- generatePkgDescOpts sourceMap installedMap (excludedInternals ++ omitPkgs) (mungedInternals ++ addPkgs) @@ -362,7 +261,7 @@ packageFromPackageDescription packageConfig pkgFlags (PackageDescriptionPair pkg return $ if hpackExists then S.singleton hpackPath else S.empty return (componentModules, componentFiles, buildFiles <> dataFiles', warnings) pkgId = package pkg - name = fromCabalPackageName (pkgName pkgId) + name = pkgName pkgId (unknownTools, knownTools) = packageDescTools pkg @@ -387,7 +286,7 @@ packageFromPackageDescription packageConfig pkgFlags (PackageDescriptionPair pkg -- Is the package dependency mentioned here me: either the package -- name itself, or the name of one of the sub libraries - isMe name' = name' == name || packageNameText name' `S.member` extraLibNames + isMe name' = name' == name || fromString (packageNameString name') `S.member` extraLibNames -- | Generate GHC options for the package's components, and a list of -- options which apply generally to the package, not one specific @@ -511,8 +410,7 @@ generateBuildInfoOpts BioInput {..} = pkgs = biAddPackages ++ [ name - | Dependency cname _ <- targetBuildDepends biBuildInfo - , let name = fromCabalPackageName cname + | Dependency name _ <- targetBuildDepends biBuildInfo , name `notElem` biOmitPackages] ghcOpts = concatMap snd . filter (isGhc . fst) $ options biBuildInfo where @@ -600,7 +498,7 @@ makeObjectFilePathFromC cabalDir namedComponent distDir cFilePath = do -- | Make the global autogen dir if Cabal version is new enough. packageAutogenDir :: Version -> Path Abs Dir -> Maybe (Path Abs Dir) packageAutogenDir cabalVer distDir - | cabalVer < $(mkVersion "2.0") = Nothing + | cabalVer < mkVersion [2, 0] = Nothing | otherwise = Just $ buildDir distDir $(mkRelDir "global-autogen") -- | Make the autogen dir. 
@@ -611,7 +509,7 @@ componentAutogenDir cabalVer component distDir = -- | See 'Distribution.Simple.LocalBuildInfo.componentBuildDir' componentBuildDir :: Version -> NamedComponent -> Path Abs Dir -> Path Abs Dir componentBuildDir cabalVer component distDir - | cabalVer < $(mkVersion "2.0") = buildDir distDir + | cabalVer < mkVersion [2, 0] = buildDir distDir | otherwise = case component of CLib -> buildDir distDir @@ -664,7 +562,7 @@ packageDependencies pkgConfig pkg' = maybe [] setupDepends (setupBuildInfo pkg) where pkg - | getGhcVersion (packageConfigCompilerVersion pkgConfig) >= $(mkVersion "8.0") = pkg' + | getGhcVersion (packageConfigCompilerVersion pkgConfig) >= mkVersion [8, 0] = pkg' -- Set all components to buildable. Only need to worry about -- library, exe, test, and bench, since others didn't exist in -- older Cabal versions @@ -716,7 +614,7 @@ packageDescTools pd = go2 (Cabal.ExeDependency pkg _name range) | pkg `S.member` preInstalledPackages = Nothing | otherwise = Just - ( fromCabalPackageName pkg + ( pkg , DepValue { dvVersionRange = range , dvType = AsBuildTool @@ -965,9 +863,8 @@ resolveComponentFiles component build names = do (modules,files,warnings) <- resolveFilesAndDeps component - (dirs ++ [dir]) + (if null dirs then [dir] else dirs) names - haskellModuleExts cfiles <- buildOtherSources build return (modules, files <> cfiles, warnings) @@ -1089,17 +986,17 @@ resolvePackageDescription packageConfig (GenericPackageDescription desc defaultF flagMap :: [Flag] -> Map FlagName Bool flagMap = M.fromList . map pair where pair :: Flag -> (FlagName, Bool) - pair (MkFlag (fromCabalFlagName -> name) _desc def _manual) = (name,def) + pair = flagName &&& flagDefault data ResolveConditions = ResolveConditions { rcFlags :: Map FlagName Bool - , rcCompilerVersion :: CompilerVersion 'CVActual + , rcCompilerVersion :: ActualCompiler , rcOS :: OS , rcArch :: Arch } -- | Generic a @ResolveConditions@ using sensible defaults. 
-mkResolveConditions :: CompilerVersion 'CVActual -- ^ Compiler version +mkResolveConditions :: ActualCompiler -- ^ Compiler version -> Platform -- ^ installation target platform -> Map FlagName Bool -- ^ enabled flags -> ResolveConditions @@ -1138,21 +1035,21 @@ resolveConditions rc addDeps (CondNode lib deps cs) = basic <> children OS os -> os == rcOS rc Arch arch -> arch == rcArch rc Flag flag -> - fromMaybe False $ M.lookup (fromCabalFlagName flag) (rcFlags rc) + fromMaybe False $ M.lookup flag (rcFlags rc) -- NOTE: ^^^^^ This should never happen, as all flags -- which are used must be declared. Defaulting to -- False. Impl flavor range -> case (flavor, rcCompilerVersion rc) of - (GHC, GhcVersion vghc) -> vghc `withinRange` range - (GHC, GhcjsVersion _ vghc) -> vghc `withinRange` range - (GHCJS, GhcjsVersion vghcjs _) -> + (GHC, ACGhc vghc) -> vghc `withinRange` range + (GHC, ACGhcjs _ vghc) -> vghc `withinRange` range + (GHCJS, ACGhcjs vghcjs _) -> vghcjs `withinRange` range _ -> False -- | Get the name of a dependency. depName :: Dependency -> PackageName -depName (Dependency n _) = fromCabalPackageName n +depName (Dependency n _) = n -- | Get the version range of a dependency. depRange :: Dependency -> VersionRange @@ -1166,16 +1063,15 @@ resolveFilesAndDeps :: NamedComponent -- ^ Package component name -> [Path Abs Dir] -- ^ Directories to look in. -> [DotCabalDescriptor] -- ^ Base names. - -> [Text] -- ^ Extensions. 
-> RIO Ctx (Map ModuleName (Path Abs File),Set DotCabalPath,[PackageWarning]) -resolveFilesAndDeps component dirs names0 exts = do +resolveFilesAndDeps component dirs names0 = do (dotCabalPaths, foundModules, missingModules) <- loop names0 S.empty warnings <- liftM2 (++) (warnUnlisted foundModules) (warnMissing missingModules) return (foundModules, dotCabalPaths, warnings) where loop [] _ = return (S.empty, M.empty, []) loop names doneModules0 = do - resolved <- resolveFiles dirs names exts + resolved <- resolveFiles dirs names let foundFiles = mapMaybe snd resolved foundModules = mapMaybe toResolvedModule resolved missingModules = mapMaybe toMissingModule resolved @@ -1325,19 +1221,38 @@ parseDumpHI dumpHIPath = do resolveFiles :: [Path Abs Dir] -- ^ Directories to look in. -> [DotCabalDescriptor] -- ^ Base names. - -> [Text] -- ^ Extensions. -> RIO Ctx [(DotCabalDescriptor, Maybe DotCabalPath)] -resolveFiles dirs names exts = - forM names (\name -> liftM (name, ) (findCandidate dirs exts name)) +resolveFiles dirs names = + forM names (\name -> liftM (name, ) (findCandidate dirs name)) + +data CabalFileNameParseFail + = CabalFileNameParseFail FilePath + | CabalFileNameInvalidPackageName FilePath + deriving (Typeable) + +instance Exception CabalFileNameParseFail +instance Show CabalFileNameParseFail where + show (CabalFileNameParseFail fp) = "Invalid file path for cabal file, must have a .cabal extension: " ++ fp + show (CabalFileNameInvalidPackageName fp) = "cabal file names must use valid package names followed by a .cabal extension, the following is invalid: " ++ fp + +-- | Parse a package name from a file path. +parsePackageNameFromFilePath :: MonadThrow m => Path a File -> m PackageName +parsePackageNameFromFilePath fp = do + base <- clean $ toFilePath $ filename fp + case parsePackageName base of + Nothing -> throwM $ CabalFileNameInvalidPackageName $ toFilePath fp + Just x -> return x + where clean = liftM reverse . strip . 
reverse + strip ('l':'a':'b':'a':'c':'.':xs) = return xs + strip _ = throwM (CabalFileNameParseFail (toFilePath fp)) -- | Find a candidate for the given module-or-filename from the list -- of directories and given extensions. findCandidate :: [Path Abs Dir] - -> [Text] -> DotCabalDescriptor -> RIO Ctx (Maybe DotCabalPath) -findCandidate dirs exts name = do +findCandidate dirs name = do pkg <- asks ctxFile >>= parsePackageNameFromFilePath candidates <- liftIO makeNameCandidates case candidates of @@ -1368,13 +1283,22 @@ findCandidate dirs exts name = do DotCabalMain fp -> resolveCandidate dir fp DotCabalFile fp -> resolveCandidate dir fp DotCabalCFile fp -> resolveCandidate dir fp - DotCabalModule mn -> - liftM concat - $ mapM - ((\ ext -> - resolveCandidate dir (Cabal.toFilePath mn ++ "." ++ ext)) - . T.unpack) - exts + DotCabalModule mn -> do + let perExt ext = + resolveCandidate dir (Cabal.toFilePath mn ++ "." ++ T.unpack ext) + withHaskellExts <- mapM perExt haskellFileExts + withPPExts <- mapM perExt haskellPreprocessorExts + pure $ + case (concat withHaskellExts, concat withPPExts) of + -- If we have exactly 1 Haskell extension and exactly + -- 1 preprocessor extension, assume the former file is + -- generated from the latter + -- + -- See https://github.com/commercialhaskell/stack/issues/4076 + ([_], [y]) -> [y] + + -- Otherwise, return everything + (xs, ys) -> xs ++ ys resolveCandidate :: (MonadIO m, MonadThrow m) => Path Abs Dir -> FilePath.FilePath -> m [Path Abs File] @@ -1391,9 +1315,9 @@ warnMultiple warnMultiple name candidate rest = -- TODO: figure out how to style 'name' and the dispOne stuff prettyWarnL - [ flow "There were multiple candidates for the Cabal entry \"" + [ flow "There were multiple candidates for the Cabal entry" , fromString . 
showName $ name - , line <> bulletedList (map dispOne rest) + , line <> bulletedList (map dispOne (candidate:rest)) , line <> flow "picking:" , dispOne candidate ] @@ -1438,79 +1362,6 @@ logPossibilities dirs mn = do files))) dirs --- | Get the filename for the cabal file in the given directory. --- --- If no .cabal file is present, or more than one is present, an exception is --- thrown via 'throwM'. --- --- If the directory contains a file named package.yaml, hpack is used to --- generate a .cabal file from it. -findOrGenerateCabalFile - :: forall env. HasConfig env - => Path Abs Dir -- ^ package directory - -> RIO env (Path Abs File) -findOrGenerateCabalFile pkgDir = do - hpack pkgDir - findCabalFile - where - findCabalFile :: RIO env (Path Abs File) - findCabalFile = findCabalFile' >>= either throwIO return - - findCabalFile' :: RIO env (Either PackageException (Path Abs File)) - findCabalFile' = do - files <- liftIO $ findFiles - pkgDir - (flip hasExtension "cabal" . FL.toFilePath) - (const False) - return $ case files of - [] -> Left $ PackageNoCabalFileFound pkgDir - [x] -> Right x - -- If there are multiple files, ignore files that start with - -- ".". On unixlike environments these are hidden, and this - -- character is not valid in package names. The main goal is - -- to ignore emacs lock files - see - -- https://github.com/commercialhaskell/stack/issues/1897. - (filter (not . ("." `isPrefixOf`) . toFilePath . filename) -> [x]) -> Right x - _:_ -> Left $ PackageMultipleCabalFilesFound pkgDir files - where hasExtension fp x = FilePath.takeExtension fp == "." ++ x - --- | Generate .cabal file from package.yaml, if necessary. 
-hpack :: HasConfig env => Path Abs Dir -> RIO env () -hpack pkgDir = do - let hpackFile = pkgDir $(mkRelFile Hpack.packageConfig) - exists <- liftIO $ doesFileExist hpackFile - when exists $ do - prettyDebugL [flow "Running hpack on", display hpackFile] - - config <- view configL - case configOverrideHpack config of - HpackBundled -> do - r <- liftIO $ Hpack.hpackResult $ Hpack.setProgramName "stack" $ Hpack.setTarget (toFilePath hpackFile) Hpack.defaultOptions - forM_ (Hpack.resultWarnings r) prettyWarnS - let cabalFile = style File . fromString . Hpack.resultCabalFile $ r - case Hpack.resultStatus r of - Hpack.Generated -> prettyDebugL - [flow "hpack generated a modified version of", cabalFile] - Hpack.OutputUnchanged -> prettyDebugL - [flow "hpack output unchanged in", cabalFile] - Hpack.AlreadyGeneratedByNewerHpack -> prettyWarnL - [ cabalFile - , flow "was generated with a newer version of hpack," - , flow "please upgrade and try again." - ] - Hpack.ExistingCabalFileWasModifiedManually -> prettyWarnL - [ cabalFile - , flow "was modified manually. Ignoring" - , display hpackFile - , flow "in favor of the cabal file. If you want to use the" - , display . filename $ hpackFile - , flow "file instead of the cabal file," - , flow "then please delete the cabal file." - ] - HpackCommand command -> - withWorkingDir (toFilePath pkgDir) $ - proc command [] runProcess_ - -- | Path for the package's build log. buildLogPath :: (MonadReader env m, HasBuildConfig env, MonadThrow m) => Package -> Maybe String -> m (Path Abs File) @@ -1556,16 +1407,16 @@ resolveDirOrWarn :: FilePath.FilePath resolveDirOrWarn = resolveOrWarn "Directory" f where f p x = liftIO (forgivingAbsence (resolveDir p x)) >>= rejectMissingDir --- | Extract the @PackageIdentifier@ given an exploded haskell package --- path. 
-cabalFilePackageId - :: (MonadIO m, MonadThrow m) - => Path Abs File -> m PackageIdentifier -cabalFilePackageId fp = do - pkgDescr <- liftIO (D.readGenericPackageDescription D.silent $ toFilePath fp) - (toStackPI . D.package . D.packageDescription) pkgDescr - where - toStackPI (D.PackageIdentifier (D.unPackageName -> name) ver) = do - name' <- parsePackageNameFromString name - let ver' = fromCabalVersion ver - return (PackageIdentifier name' ver') +-- | Create a 'LocalPackageView' from a directory containing a package. +mkLocalPackageView + :: forall env. HasConfig env + => PrintWarnings + -> ResolvedPath Dir + -> RIO env LocalPackageView +mkLocalPackageView printWarnings dir = do + (gpd, cabalfp) <- loadCabalFilePath (resolvedAbsolute dir) printWarnings + return LocalPackageView + { lpvCabalFP = cabalfp + , lpvGPD = gpd + , lpvResolvedDir = dir + } diff --git a/src/Stack/PackageDump.hs b/src/Stack/PackageDump.hs index 31fc5da7fb..59529557e4 100644 --- a/src/Stack/PackageDump.hs +++ b/src/Stack/PackageDump.hs @@ -35,8 +35,9 @@ import Data.List (isPrefixOf) import qualified Data.Map as Map import qualified Data.Set as Set import Data.Store.VersionTagged -import qualified Data.Text as T +import qualified RIO.Text as T import qualified Distribution.License as C +import Distribution.ModuleName (ModuleName) import qualified Distribution.System as OS import qualified Distribution.Text as C import Path.Extra (toFilePathNoTrailingSep) @@ -44,9 +45,6 @@ import Stack.GhcPkg import Stack.Types.Compiler import Stack.Types.GhcPkgId import Stack.Types.PackageDump -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName -import Stack.Types.Version import System.Directory (getDirectoryContents, doesFileExist) import System.Process (readProcess) -- FIXME confirm that this is correct import RIO.Process hiding (readProcess) @@ -68,7 +66,7 @@ ghcPkgDescribe -> [Path Abs Dir] -- ^ if empty, use global -> ConduitM Text Void (RIO env) a -> RIO env a -ghcPkgDescribe pkgName 
= ghcPkgCmdArgs ["describe", "--simple-output", packageNameString pkgName] +ghcPkgDescribe pkgName' = ghcPkgCmdArgs ["describe", "--simple-output", packageNameString pkgName'] -- | Call ghc-pkg and stream to the given @Sink@, for a single database ghcPkgCmdArgs @@ -162,7 +160,7 @@ sinkMatching :: Monad m (Map PackageName (DumpPackage Bool Bool Bool)) sinkMatching reqProfiling reqHaddock reqSymbols allowed = Map.fromList - . map (packageIdentifierName . dpPackageIdent &&& id) + . map (pkgName . dpPackageIdent &&& id) . Map.elems . pruneDeps id @@ -287,7 +285,7 @@ data DumpPackage profiling haddock symbols = DumpPackage , dpLibDirs :: ![FilePath] , dpLibraries :: ![Text] , dpHasExposedModules :: !Bool - , dpExposedModules :: ![Text] + , dpExposedModules :: !(Set ModuleName) , dpDepends :: ![GhcPkgId] , dpHaddockInterfaces :: ![FilePath] , dpHaddockHtml :: !(Maybe FilePath) @@ -342,8 +340,8 @@ conduitDumpPackage = (.| CL.catMaybes) $ eachSection $ do case Map.lookup "id" m of Just ["builtin_rts"] -> return Nothing _ -> do - name <- parseS "name" >>= parsePackageName - version <- parseS "version" >>= parseVersion + name <- parseS "name" >>= parsePackageNameThrowing . T.unpack + version <- parseS "version" >>= parseVersionThrowing . T.unpack ghcPkgId <- parseS "id" >>= parseGhcPkgId -- if a package has no modules, these won't exist @@ -360,7 +358,8 @@ conduitDumpPackage = (.| CL.catMaybes) $ eachSection $ do -- Handle sublibs by recording the name of the parent library -- If name of parent library is missing, this is not a sublib. let mkParentLib n = PackageIdentifier n version - parentLib = mkParentLib <$> (parseS "package-name" >>= parsePackageName) + parentLib = mkParentLib <$> (parseS "package-name" >>= + parsePackageNameThrowing . 
T.unpack) let parseQuoted key = case mapM (P.parseOnly (argsParser NoEscaping)) val of @@ -380,7 +379,15 @@ conduitDumpPackage = (.| CL.catMaybes) $ eachSection $ do , dpLibDirs = libDirPaths , dpLibraries = T.words $ T.unwords libraries , dpHasExposedModules = not (null libraries || null exposedModules) - , dpExposedModules = T.words $ T.unwords exposedModules + + -- Strip trailing commas from ghc package exposed-modules (looks buggy to me...). + -- Then try to parse the module names. + , dpExposedModules = + Set.fromList + $ mapMaybe (C.simpleParse . T.unpack . T.dropSuffix ",") + $ T.words + $ T.unwords exposedModules + , dpDepends = depends , dpHaddockInterfaces = haddockInterfaces , dpHaddockHtml = listToMaybe haddockHtml diff --git a/src/Stack/PackageIndex.hs b/src/Stack/PackageIndex.hs deleted file mode 100644 index 66d11fdf79..0000000000 --- a/src/Stack/PackageIndex.hs +++ /dev/null @@ -1,501 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE CPP #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE GADTs #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE KindSignatures #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PatternGuards #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE TemplateHaskell #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE ViewPatterns #-} -{-# LANGUAGE ScopedTypeVariables #-} - --- | Dealing with the 01-index file and all its cabal files. -module Stack.PackageIndex - ( updateAllIndices - , getPackageCaches - , getPackageVersions - , lookupPackageVersions - , CabalLoader (..) - , HasCabalLoader (..) 
- , configPackageIndex - , configPackageIndexRoot - ) where - -import qualified Codec.Archive.Tar as Tar -import Stack.Prelude -import Data.Aeson.Extended -import qualified Data.ByteString.Char8 as B8 -import qualified Data.ByteString.Lazy as L -import Data.Conduit.Zlib (ungzip) -import qualified Data.List.NonEmpty as NE -import qualified Data.HashMap.Strict as HashMap -import Data.Store.Version -import Data.Store.VersionTagged -import qualified Data.Text as T -import Data.Text.Unsafe (unsafeTail) -import Data.Time (getCurrentTime) -import qualified Hackage.Security.Client as HS -import qualified Hackage.Security.Client.Repository.Cache as HS -import qualified Hackage.Security.Client.Repository.Remote as HS -import qualified Hackage.Security.Client.Repository.HttpLib.HttpClient as HS -import qualified Hackage.Security.Util.Path as HS -import qualified Hackage.Security.Util.Pretty as HS -import Network.HTTP.StackClient (getGlobalManager) -import Network.HTTP.Download -import Network.URI (parseURI) -import Path (toFilePath, parseAbsFile, mkRelDir, mkRelFile, (), parseRelDir) -import Path.Extra (tryGetModificationTime) -import Path.IO -import Stack.Types.PackageIdentifier -import Stack.Types.PackageIndex -import Stack.Types.PackageName -import Stack.Types.Runner (HasRunner) -import Stack.Types.Version -import qualified System.Directory as D -import System.FilePath ((<.>)) - --- | Populate the package index caches and return them. -populateCache :: HasCabalLoader env => PackageIndex -> RIO env (PackageCache ()) -populateCache index = do - requireIndex index - -- This uses full on lazy I/O instead of ResourceT to provide some - -- protections. Caveat emptor - path <- configPackageIndex (indexName index) - let loadPIS = withLazyFile (Path.toFilePath path) $ \lbs -> do - logSticky "Populating index cache ..." 
- loop 0 HashMap.empty (Tar.read lbs) - pis0 <- loadPIS `catch` \e -> do - logWarn $ "Exception encountered when parsing index tarball: " - <> displayShow (e :: Tar.FormatError) - logWarn "Automatically updating index and trying again" - updateIndex index - loadPIS - - when (indexRequireHashes index) $ forM_ (HashMap.toList pis0) $ \(ident, (mpd, _)) -> - case mpd :: Maybe PackageDownload of - Just _ -> return () - Nothing -> throwM $ MissingRequiredHashes (indexName index) ident - - cache <- fmap mconcat $ mapM convertPI $ HashMap.toList pis0 - - logStickyDone "Populated index cache." - - return cache - where - convertPI :: MonadIO m - => (PackageIdentifier, (Maybe PackageDownload, Endo [([CabalHash], OffsetSize)])) - -> m (PackageCache ()) - convertPI (ident@(PackageIdentifier name version), (mpd, Endo front)) = - case NE.nonEmpty $ front [] of - Nothing -> throwString $ "Missing cabal file info for: " ++ show ident - Just files -> return - $ PackageCache - $ HashMap.singleton name - $ HashMap.singleton version - ((), mpd, files) - - loop :: MonadThrow m - => Int64 - -> HashMap PackageIdentifier (Maybe PackageDownload, Endo [([CabalHash], OffsetSize)]) - -> Tar.Entries Tar.FormatError - -> m (HashMap PackageIdentifier (Maybe PackageDownload, Endo [([CabalHash], OffsetSize)])) - loop !blockNo !m (Tar.Next e es) = - loop (blockNo + entrySizeInBlocks e) (goE blockNo m e) es - loop _ m Tar.Done = return m - loop _ _ (Tar.Fail e) = throwM e - - goE :: Int64 - -> HashMap PackageIdentifier (Maybe PackageDownload, Endo [([CabalHash], OffsetSize)]) - -> Tar.Entry - -> HashMap PackageIdentifier (Maybe PackageDownload, Endo [([CabalHash], OffsetSize)]) - goE blockNo m e = - case Tar.entryContent e of - Tar.NormalFile lbs size -> - case parseNameVersionSuffix $ Tar.entryPath e of - Just (ident, ".cabal") -> addCabal lbs ident size - Just (ident, ".json") -> addJSON id ident lbs - _ -> - case parsePackageJSON $ Tar.entryPath e of - Just ident -> addJSON unHSPackageDownload 
ident lbs - Nothing -> m - _ -> m - where - addCabal lbs ident size = HashMap.alter - (\case - Nothing -> Just (Nothing, newEndo) - Just (mpd, oldEndo) -> Just (mpd, oldEndo <> newEndo)) - ident - m - where - !cabalHash = computeCabalHash lbs - - -- Some older Stackage snapshots ended up with slightly - -- modified cabal files, in particular having DOS-style - -- line endings (CRLF) converted to Unix-style (LF). As a - -- result, we track both hashes with and without CR - -- characters stripped for compatibility with these older - -- snapshots. - cr = 13 - cabalHashes - | cr `L.elem` lbs = - let !cabalHash' = computeCabalHash (L.filter (/= cr) lbs) - in [cabalHash, cabalHash'] - | otherwise = [cabalHash] - offsetSize = OffsetSize ((blockNo + 1) * 512) size - newPair = (cabalHashes, offsetSize) - newEndo = Endo (newPair:) - - addJSON :: FromJSON a - => (a -> PackageDownload) - -> PackageIdentifier - -> L.ByteString - -> HashMap PackageIdentifier (Maybe PackageDownload, Endo [([CabalHash], OffsetSize)]) - addJSON unwrap ident lbs = - case decode lbs of - Nothing -> m - Just (unwrap -> pd) -> HashMap.alter - (\case - Nothing -> Just (Just pd, mempty) - Just (Just oldPD, _) - | oldPD /= pd -> error $ concat - [ "Conflicting package hash information discovered for " - , packageIdentifierString ident - , "\nFound both: \n- " - , show oldPD - , "\n- " - , show pd - , "\n\nThis should not happen. 
See: https://github.com/haskell/hackage-security/issues/189" - ] - Just (_, files) -> Just (Just pd, files)) - ident - m - - breakSlash x - | T.null z = Nothing - | otherwise = Just (y, unsafeTail z) - where - (y, z) = T.break (== '/') x - - parseNameVersion t1 = do - (p', t3) <- breakSlash - $ T.map (\c -> if c == '\\' then '/' else c) - $ T.pack t1 - p <- parsePackageName p' - (v', t5) <- breakSlash t3 - v <- parseVersion v' - return (p', p, v, t5) - - parseNameVersionSuffix t1 = do - (p', p, v, t5) <- parseNameVersion t1 - let (t6, suffix) = T.break (== '.') t5 - guard $ t6 == p' - return (PackageIdentifier p v, suffix) - - parsePackageJSON t1 = do - (_, p, v, t5) <- parseNameVersion t1 - guard $ t5 == "package.json" - return $ PackageIdentifier p v - -data PackageIndexException - = GitNotAvailable IndexName - | MissingRequiredHashes IndexName PackageIdentifier - deriving Typeable -instance Exception PackageIndexException -instance Show PackageIndexException where - show (GitNotAvailable name) = concat - [ "Package index " - , T.unpack $ indexNameText name - , " only provides Git access, and you do not have" - , " the git executable on your PATH" - ] - show (MissingRequiredHashes name ident) = concat - [ "Package index " - , T.unpack $ indexNameText name - , " is configured to require package hashes, but no" - , " hash is available for " - , packageIdentifierString ident - ] - --- | Require that an index be present, updating if it isn't. 
-requireIndex :: HasCabalLoader env => PackageIndex -> RIO env () -requireIndex index = do - tarFile <- configPackageIndex $ indexName index - exists <- doesFileExist tarFile - unless exists $ updateIndex index - --- | Update all of the package indices -updateAllIndices :: HasCabalLoader env => RIO env () -updateAllIndices = do - clearPackageCaches - cl <- view cabalLoaderL - mapM_ updateIndex (clIndices cl) - --- | Update the index tarball -updateIndex :: HasCabalLoader env => PackageIndex -> RIO env () -updateIndex index = - do let name = indexName index - url = indexLocation index - logSticky $ "Updating package index " - <> display (indexNameText (indexName index)) - <> " (mirrored at " - <> display url - <> ") ..." - case indexType index of - ITVanilla -> updateIndexHTTP name url - ITHackageSecurity hs -> updateIndexHackageSecurity name url hs - logStickyDone "Update complete" - - -- Copy to the 00-index.tar filename for backwards - -- compatibility. First wipe out the cache file if present. 
- tarFile <- configPackageIndex name - oldTarFile <- configPackageIndexOld name - oldCacheFile <- configPackageIndexCacheOld name - liftIO $ ignoringAbsence (removeFile oldCacheFile) - withSourceFile (toFilePath tarFile) $ \src -> - withSinkFile (toFilePath oldTarFile) $ \sink -> - runConduit $ src .| sink - --- | Update the index tarball via HTTP -updateIndexHTTP :: HasCabalLoader env - => IndexName - -> Text -- ^ url - -> RIO env () -updateIndexHTTP indexName' url = do - req <- parseRequest $ T.unpack url - logInfo ("Downloading package index from " <> display url) - gz <- configPackageIndexGz indexName' - tar <- configPackageIndex indexName' - wasDownloaded <- redownload req gz - shouldUnpack <- - if wasDownloaded - then return True - else not `liftM` doesFileExist tar - - if not shouldUnpack - then packageIndexNotUpdated indexName' - else do - let tmp = toFilePath tar <.> "tmp" - tmpPath <- parseAbsFile tmp - - deleteCache indexName' - - liftIO $ do - withSourceFile (toFilePath gz) $ \input -> - withSinkFile tmp $ \output -> -- FIXME use withSinkFileCautious - runConduit $ input .| ungzip .| output - renameFile tmpPath tar - --- | Update the index tarball via Hackage Security -updateIndexHackageSecurity - :: HasCabalLoader env - => IndexName - -> Text -- ^ base URL - -> HackageSecurity - -> RIO env () -updateIndexHackageSecurity indexName' url (HackageSecurity keyIds threshold) = do - baseURI <- - case parseURI $ T.unpack url of - Nothing -> throwString $ "Invalid Hackage Security base URL: " ++ T.unpack url - Just x -> return x - manager <- liftIO getGlobalManager - root <- configPackageIndexRoot indexName' - run <- askRunInIO - let logTUF = run . logInfo . fromString . 
HS.pretty - withRepo = HS.withRepository - (HS.makeHttpLib manager) - [baseURI] - HS.defaultRepoOpts - HS.Cache - { HS.cacheRoot = HS.fromAbsoluteFilePath $ toFilePath root - , HS.cacheLayout = HS.cabalCacheLayout - -- Have Hackage Security write to a temporary file - -- to avoid invalidating the cache... continued - -- below at case didUpdate - { HS.cacheLayoutIndexTar = HS.rootPath $ HS.fragment "01-index.tar-tmp" - } - } - HS.hackageRepoLayout - HS.hackageIndexLayout - logTUF - didUpdate <- liftIO $ withRepo $ \repo -> HS.uncheckClientErrors $ do - needBootstrap <- HS.requiresBootstrap repo - when needBootstrap $ do - HS.bootstrap - repo - (map (HS.KeyId . T.unpack) keyIds) - (HS.KeyThreshold (fromIntegral threshold)) - now <- getCurrentTime - HS.checkForUpdates repo (Just now) - - case didUpdate of - HS.NoUpdates -> packageIndexNotUpdated indexName' - HS.HasUpdates -> do - -- The index actually updated. Delete the old cache, and - -- then move the temporary unpacked file to its real - -- location - tar <- configPackageIndex indexName' - deleteCache indexName' - liftIO $ D.renameFile (toFilePath tar ++ "-tmp") (toFilePath tar) - logInfo "Updated package index downloaded" - --- If the index is newer than the cache, delete it so that --- the next 'getPackageCaches' call recomputes it. This --- could happen if a prior run of stack updated the index, --- but exited before deleting the cache. 
--- --- See https://github.com/commercialhaskell/stack/issues/3033 -packageIndexNotUpdated :: HasCabalLoader env => IndexName -> RIO env () -packageIndexNotUpdated indexName' = do - mindexModTime <- tryGetModificationTime =<< configPackageIndex indexName' - mcacheModTime <- tryGetModificationTime =<< configPackageIndexCache indexName' - case (mindexModTime, mcacheModTime) of - (Right indexModTime, Right cacheModTime) | cacheModTime < indexModTime -> do - deleteCache indexName' - logInfo "No updates to your package index were found, but clearing the index cache as it is older than the index." - (Left _, _) -> do - deleteCache indexName' - logError "Error: No updates to your package index were found, but downloaded index is missing." - _ -> logInfo "No updates to your package index were found" - --- | Delete the package index cache -deleteCache :: HasCabalLoader env => IndexName -> RIO env () -deleteCache indexName' = do - fp <- configPackageIndexCache indexName' - eres <- liftIO $ tryIO $ removeFile fp - case eres of - Left e -> logDebug $ "Could not delete cache: " <> displayShow e - Right () -> logDebug $ "Deleted index cache at " <> fromString (toFilePath fp) - --- | Get the known versions for a given package from the package caches. --- --- See 'getPackageCaches' for performance notes. -getPackageVersions :: HasCabalLoader env => PackageName -> RIO env (HashMap Version (Maybe CabalHash)) -getPackageVersions pkgName = lookupPackageVersions pkgName <$> getPackageCaches - -lookupPackageVersions :: PackageName -> PackageCache index -> HashMap Version (Maybe CabalHash) -lookupPackageVersions pkgName (PackageCache m) = - maybe HashMap.empty (HashMap.map extractOrigRevHash) $ HashMap.lookup pkgName m - where - -- Extract the original cabal file hash (the first element of the one or two - -- element list currently representing the cabal file hashes). 
- extractOrigRevHash (_,_, neRevHashesAndOffsets) = - listToMaybe $ fst (NE.last neRevHashesAndOffsets) - --- | Load the package caches, or create the caches if necessary. --- --- This has two levels of caching: in memory, and the on-disk cache. So, --- feel free to call this function multiple times. -getPackageCaches :: HasCabalLoader env => RIO env (PackageCache PackageIndex) -getPackageCaches = do - cl <- view cabalLoaderL - mcached <- readIORef (clCache cl) - case mcached of - Just cached -> return cached - Nothing -> do - result <- liftM mconcat $ forM (clIndices cl) $ \index -> do - fp <- configPackageIndexCache (indexName index) - PackageCache pis <- -#if MIN_VERSION_template_haskell(2,13,0) - $(versionedDecodeOrLoad (storeVersionConfig "pkg-v5" "LLL6OCcimOqRm3r0JmsSlLHcaLE=" -#else - $(versionedDecodeOrLoad (storeVersionConfig "pkg-v5" "A607WaDwhg5VVvZTxNgU9g52DO8=" -#endif - :: VersionConfig (PackageCache ()))) - fp - (populateCache index) - return $ PackageCache ((fmap.fmap) (\((), mpd, files) -> (index, mpd, files)) pis) - liftIO $ writeIORef (clCache cl) (Just result) - return result - --- | Clear the in-memory hackage index cache. This is needed when the --- hackage index is updated. -clearPackageCaches :: HasCabalLoader env => RIO env () -clearPackageCaches = do - cl <- view cabalLoaderL - writeIORef (clCache cl) Nothing - -class HasRunner env => HasCabalLoader env where - cabalLoaderL :: Lens' env CabalLoader - -data CabalLoader = CabalLoader - { clCache :: !(IORef (Maybe (PackageCache PackageIndex))) - , clIndices :: ![PackageIndex] - -- ^ Information on package indices. This is left biased, meaning that - -- packages in an earlier index will shadow those in a later index. - -- - -- Warning: if you override packages in an index vs what's available - -- upstream, you may correct your compiled snapshots, as different - -- projects may have different definitions of what pkg-ver means! 
This - -- feature is primarily intended for adding local packages, not - -- overriding. Overriding is better accomplished by adding to your - -- list of packages. - -- - -- Note that indices specified in a later config file will override - -- previous indices, /not/ extend them. - -- - -- Using an assoc list instead of a Map to keep track of priority - , clStackRoot :: !(Path Abs Dir) - -- ^ ~/.stack more often than not - , clUpdateRef :: !(MVar Bool) - -- ^ Want to try updating the index once during a single run for missing - -- package identifiers. We also want to ensure we only update once at a - -- time. Start at @True@. - -- - -- TODO: probably makes sense to move this concern into getPackageCaches - , clConnectionCount :: !Int - -- ^ How many concurrent connections are allowed when downloading - , clIgnoreRevisionMismatch :: !Bool - -- ^ Ignore a revision mismatch when loading up cabal files, - -- and fall back to the latest revision. See: - -- - } - --- | Root for a specific package index -configPackageIndexRoot :: HasCabalLoader env => IndexName -> RIO env (Path Abs Dir) -configPackageIndexRoot (IndexName name) = do - cl <- view cabalLoaderL - let root = clStackRoot cl - dir <- parseRelDir $ B8.unpack name - return (root $(mkRelDir "indices") dir) - --- | Location of the 01-index.tar file -configPackageIndex :: HasCabalLoader env => IndexName -> RIO env (Path Abs File) -configPackageIndex name = ( $(mkRelFile "01-index.tar")) <$> configPackageIndexRoot name - --- | Location of the 01-index.cache file -configPackageIndexCache :: HasCabalLoader env => IndexName -> RIO env (Path Abs File) -configPackageIndexCache name = ( $(mkRelFile "01-index.cache")) <$> configPackageIndexRoot name - --- | Location of the 00-index.cache file -configPackageIndexCacheOld :: HasCabalLoader env => IndexName -> RIO env (Path Abs File) -configPackageIndexCacheOld = liftM ( $(mkRelFile "00-index.cache")) . configPackageIndexRoot - --- | Location of the 00-index.tar file. 
This file is just a copy of --- the 01-index.tar file, provided for tools which still look for the --- 00-index.tar file. -configPackageIndexOld :: HasCabalLoader env => IndexName -> RIO env (Path Abs File) -configPackageIndexOld = liftM ( $(mkRelFile "00-index.tar")) . configPackageIndexRoot - --- | Location of the 01-index.tar.gz file -configPackageIndexGz :: HasCabalLoader env => IndexName -> RIO env (Path Abs File) -configPackageIndexGz = liftM ( $(mkRelFile "01-index.tar.gz")) . configPackageIndexRoot - ---------------- Lifted from cabal-install, Distribution.Client.Tar: --- | Return the number of blocks in an entry. -entrySizeInBlocks :: Tar.Entry -> Int64 -entrySizeInBlocks entry = 1 + case Tar.entryContent entry of - Tar.NormalFile _ size -> bytesToBlocks size - Tar.OtherEntryType _ _ size -> bytesToBlocks size - _ -> 0 - where - bytesToBlocks s = 1 + ((fromIntegral s - 1) `div` 512) diff --git a/src/Stack/PackageLocation.hs b/src/Stack/PackageLocation.hs deleted file mode 100644 index c3ce58afb2..0000000000 --- a/src/Stack/PackageLocation.hs +++ /dev/null @@ -1,291 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TemplateHaskell #-} -{-# LANGUAGE TupleSections #-} - --- | Deal with downloading, cloning, or whatever else is necessary for --- getting a 'PackageLocation' into something Stack can work with. 
-module Stack.PackageLocation - ( resolveSinglePackageLocation - , resolveMultiPackageLocation - , parseSingleCabalFile - , parseSingleCabalFileIndex - , parseMultiCabalFiles - , parseMultiCabalFilesIndex - ) where - -import qualified Codec.Archive.Tar as Tar -import qualified Codec.Archive.Zip as Zip -import qualified Codec.Compression.GZip as GZip -import Stack.Prelude -import Crypto.Hash (hashWith, SHA256(..)) -import qualified Data.ByteArray as Mem (convert) -import qualified Data.ByteString as S -import qualified Data.ByteString.Base64.URL as B64URL -import qualified Data.Text as T -import Data.Text.Encoding (encodeUtf8, decodeUtf8) -import Distribution.PackageDescription (GenericPackageDescription) -import Network.HTTP.StackClient (parseUrlThrow) -import Network.HTTP.Download.Verified -import Path -import Path.Extra -import Path.IO -import Stack.Package -import Stack.Types.BuildPlan -import Stack.Types.Config -import Stack.Types.PackageIdentifier -import qualified System.Directory as Dir -import RIO.Process - --- | Same as 'resolveMultiPackageLocation', but works on a --- 'SinglePackageLocation'. -resolveSinglePackageLocation - :: HasConfig env - => Path Abs Dir -- ^ project root - -> PackageLocation FilePath - -> RIO env (Path Abs Dir) -resolveSinglePackageLocation projRoot (PLFilePath fp) = resolveDir projRoot fp -resolveSinglePackageLocation projRoot (PLArchive (Archive url subdir msha)) = do - workDir <- view workDirL - - -- TODO: dedupe with code for snapshot hash? 
- let name = T.unpack $ decodeUtf8 $ S.take 12 $ B64URL.encode $ Mem.convert $ hashWith SHA256 $ encodeUtf8 url - root = projRoot workDir $(mkRelDir "downloaded") - fileExtension' = ".http-archive" - - fileRel <- parseRelFile $ name ++ fileExtension' - dirRel <- parseRelDir name - dirRelTmp <- parseRelDir $ name ++ ".tmp" - let fileDownload = root fileRel - dir = root dirRel - - exists <- doesDirExist dir - unless exists $ do - liftIO $ ignoringAbsence (removeDirRecur dir) - - let dirTmp = root dirRelTmp - liftIO $ ignoringAbsence (removeDirRecur dirTmp) - - urlExists <- liftIO $ Dir.doesFileExist $ T.unpack url - file <- - if urlExists - then do - file <- liftIO $ Dir.canonicalizePath (T.unpack url) >>= parseAbsFile - case msha of - Nothing -> return () - Just sha -> do - actualSha <- mkStaticSHA256FromFile file - when (sha /= actualSha) $ error $ concat - [ "Invalid SHA256 found for local archive " - , show file - , "\nExpected: " - , T.unpack $ staticSHA256ToText sha - , "\nActual: " - , T.unpack $ staticSHA256ToText actualSha - ] - return file - else do - req <- parseUrlThrow $ T.unpack url - let dreq = DownloadRequest - { drRequest = req - , drHashChecks = - case msha of - Nothing -> [] - Just sha -> - [HashCheck - { hashCheckAlgorithm = SHA256 - , hashCheckHexDigest = CheckHexDigestByteString $ staticSHA256ToBase16 sha - }] - , drLengthCheck = Nothing -- TODO add length info? - , drRetryPolicy = drRetryPolicyDefault - } - _ <- verifiedDownload dreq fileDownload (const $ return ()) - return fileDownload - - let fp = toFilePath file - - withLazyFile fp $ \lbs -> do - -- Share a single file read among all of the different - -- parsing attempts. We're not worried about unbounded - -- memory usage, as we will detect almost immediately if - -- this is the wrong type of file. 
- - let tryTargz = do - logDebug $ "Trying to ungzip/untar " <> fromString fp - let entries = Tar.read $ GZip.decompress lbs - liftIO $ Tar.unpack (toFilePath dirTmp) entries - tryZip = do - logDebug $ "Trying to unzip " <> fromString fp - let archive = Zip.toArchive lbs - liftIO $ Zip.extractFilesFromArchive [Zip.OptDestination - (toFilePath dirTmp)] archive - tryTar = do - logDebug $ "Trying to untar (no ungzip) " <> fromString fp - let entries = Tar.read lbs - liftIO $ Tar.unpack (toFilePath dirTmp) entries - err = throwM $ UnableToExtractArchive url file - - catchAnyLog goodpath handler = - catchAny goodpath $ \e -> do - logDebug $ "Got exception: " <> displayShow e - handler - - tryTargz `catchAnyLog` tryZip `catchAnyLog` tryTar `catchAnyLog` err - renameDir dirTmp dir - - x <- listDir dir - case x of - ([dir'], []) -> resolveDir dir' subdir - (dirs, files) -> liftIO $ do - ignoringAbsence (removeFile fileDownload) - ignoringAbsence (removeDirRecur dir) - throwIO $ UnexpectedArchiveContents dirs files -resolveSinglePackageLocation projRoot (PLRepo (Repo url commit repoType' subdir)) = - cloneRepo projRoot url commit repoType' >>= flip resolveDir subdir - --- | Resolve a PackageLocation into a path, downloading and cloning as --- necessary. --- --- Returns the updated PackageLocation value with just a single subdir --- (if relevant). -resolveMultiPackageLocation - :: HasConfig env - => Path Abs Dir -- ^ project root - -> PackageLocation Subdirs - -> RIO env [(Path Abs Dir, PackageLocation FilePath)] -resolveMultiPackageLocation y (PLFilePath fp) = do - dir <- resolveSinglePackageLocation y (PLFilePath fp) - return [(dir, PLFilePath fp)] -resolveMultiPackageLocation y (PLArchive (Archive url subdirs msha)) = do - dir <- resolveSinglePackageLocation y (PLArchive (Archive url "." 
msha)) - let subdirs' = - case subdirs of - DefaultSubdirs -> ["."] - ExplicitSubdirs subs -> subs - forM subdirs' $ \subdir -> do - dir' <- resolveDir dir subdir - return (dir', PLArchive (Archive url subdir msha)) -resolveMultiPackageLocation projRoot (PLRepo (Repo url commit repoType' subdirs)) = do - dir <- cloneRepo projRoot url commit repoType' - - let subdirs' = - case subdirs of - DefaultSubdirs -> ["."] - ExplicitSubdirs subs -> subs - forM subdirs' $ \subdir -> do - dir' <- resolveDir dir subdir - return (dir', PLRepo $ Repo url commit repoType' subdir) - -cloneRepo - :: HasConfig env - => Path Abs Dir -- ^ project root - -> Text -- ^ URL - -> Text -- ^ commit - -> RepoType - -> RIO env (Path Abs Dir) -cloneRepo projRoot url commit repoType' = do - workDir <- view workDirL - let nameBeforeHashing = case repoType' of - RepoGit -> T.unwords [url, commit] - RepoHg -> T.unwords [url, commit, "hg"] - -- TODO: dedupe with code for snapshot hash? - name = T.unpack $ decodeUtf8 $ S.take 12 $ B64URL.encode $ Mem.convert $ hashWith SHA256 $ encodeUtf8 nameBeforeHashing - root = projRoot workDir $(mkRelDir "downloaded") - - dirRel <- parseRelDir name - let dir = root dirRel - - exists <- doesDirExist dir - unless exists $ do - liftIO $ ignoringAbsence (removeDirRecur dir) - - let cloneAndExtract commandName cloneArgs resetCommand = - withWorkingDir (toFilePath root) $ do - ensureDir root - logInfo $ "Cloning " <> display commit <> " from " <> display url - proc commandName - ("clone" : - cloneArgs ++ - [ T.unpack url - , toFilePathNoTrailingSep dir - ]) runProcess_ - created <- doesDirExist dir - unless created $ throwM $ FailedToCloneRepo commandName - withWorkingDir (toFilePath dir) $ readProcessNull commandName - (resetCommand ++ [T.unpack commit, "--"]) - `catchAny` \case - ex -> do - logInfo $ - "Please ensure that commit " <> - display commit <> - " exists within " <> - display url - throwM ex - - case repoType' of - RepoGit -> cloneAndExtract "git" 
["--recursive"] ["--git-dir=.git", "reset", "--hard"] - RepoHg -> cloneAndExtract "hg" [] ["--repository", ".", "update", "-C"] - - return dir - --- | Parse the cabal files present in the given --- 'PackageLocationIndex FilePath'. -parseSingleCabalFileIndex - :: forall env. - HasConfig env - => Path Abs Dir -- ^ project root, used for checking out necessary files - -> PackageLocationIndex FilePath - -> RIO env GenericPackageDescription --- Need special handling of PLIndex for efficiency (just read from the --- index tarball) and correctness (get the cabal file from the index, --- not the package tarball itself, yay Hackage revisions). -parseSingleCabalFileIndex _ (PLIndex pir) = readPackageUnresolvedIndex pir -parseSingleCabalFileIndex root (PLOther loc) = lpvGPD <$> parseSingleCabalFile root False loc - -parseSingleCabalFile - :: forall env. HasConfig env - => Path Abs Dir -- ^ project root, used for checking out necessary files - -> Bool -- ^ print warnings? - -> PackageLocation FilePath - -> RIO env LocalPackageView -parseSingleCabalFile root printWarnings loc = do - dir <- resolveSinglePackageLocation root loc - (gpd, cabalfp) <- readPackageUnresolvedDir dir printWarnings - return LocalPackageView - { lpvCabalFP = cabalfp - , lpvGPD = gpd - , lpvLoc = loc - } - --- | Load and parse cabal files into 'GenericPackageDescription's -parseMultiCabalFiles - :: forall env. HasConfig env - => Path Abs Dir -- ^ project root, used for checking out necessary files - -> Bool -- ^ print warnings? - -> PackageLocation Subdirs - -> RIO env [LocalPackageView] -parseMultiCabalFiles root printWarnings loc0 = - resolveMultiPackageLocation root loc0 >>= - mapM (\(dir, loc1) -> do - (gpd, cabalfp) <- readPackageUnresolvedDir dir printWarnings - return LocalPackageView - { lpvCabalFP = cabalfp - , lpvGPD = gpd - , lpvLoc = loc1 - }) - --- | 'parseMultiCabalFiles' but supports 'PLIndex' -parseMultiCabalFilesIndex - :: forall env. 
HasConfig env - => Path Abs Dir -- ^ project root, used for checking out necessary files - -> PackageLocationIndex Subdirs - -> RIO env [(GenericPackageDescription, PackageLocationIndex FilePath)] -parseMultiCabalFilesIndex _root (PLIndex pir) = - pure . (, PLIndex pir) <$> - readPackageUnresolvedIndex pir -parseMultiCabalFilesIndex root (PLOther loc0) = - map (\lpv -> (lpvGPD lpv, PLOther $ lpvLoc lpv)) <$> - parseMultiCabalFiles root False loc0 diff --git a/src/Stack/Path.hs b/src/Stack/Path.hs index 2f5f797608..752154456f 100644 --- a/src/Stack/Path.hs +++ b/src/Stack/Path.hs @@ -15,12 +15,12 @@ import qualified Data.Text as T import qualified Data.Text.IO as T import Lens.Micro (lens) import qualified Options.Applicative as OA +import Pantry (HasPantryConfig (..)) import Path import Path.Extra import Stack.Constants import Stack.Constants.Config import Stack.GhcPkg as GhcPkg -import Stack.PackageIndex (HasCabalLoader (..)) import Stack.Types.Config import Stack.Types.Runner import qualified System.FilePath as FP @@ -119,8 +119,8 @@ instance HasLogFunc PathInfo where instance HasRunner PathInfo where runnerL = configL.runnerL instance HasConfig PathInfo -instance HasCabalLoader PathInfo where - cabalLoaderL = configL.cabalLoaderL +instance HasPantryConfig PathInfo where + pantryConfigL = configL.pantryConfigL instance HasProcessContext PathInfo where processContextL = configL.processContextL instance HasBuildConfig PathInfo where diff --git a/src/Stack/Prelude.hs b/src/Stack/Prelude.hs index 7e3c60c18f..edb000f478 100644 --- a/src/Stack/Prelude.hs +++ b/src/Stack/Prelude.hs @@ -3,10 +3,7 @@ {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} module Stack.Prelude - ( withSourceFile - , withSinkFile - , withSinkFileCautious - , withSystemTempDir + ( withSystemTempDir , withKeepSystemTempDir , sinkProcessStderrStdout , sinkProcessStdout @@ -25,22 +22,18 @@ import RIO as X import Data.Conduit as X (ConduitM, runConduit, (.|)) import Path as X 
(Abs, Dir, File, Path, Rel, toFilePath) +import Pantry as X hiding (Package (..), loadSnapshot) import Data.Monoid as X (First (..), Any (..), Sum (..), Endo (..)) import qualified Path.IO -import qualified System.IO as IO -import qualified System.Directory as Dir -import qualified System.FilePath as FP import System.IO.Echo (withoutInputEcho) -import System.IO.Error (isDoesNotExistError) #ifdef WINDOWS import System.Win32 (isMinTTYHandle, withHandleToHANDLE) #endif -import Data.Conduit.Binary (sourceHandle, sinkHandle) import qualified Data.Conduit.Binary as CB import qualified Data.Conduit.List as CL import Data.Conduit.Process.Typed (withLoggedProcess_, createSource) @@ -52,36 +45,6 @@ import Data.Text.Encoding.Error (lenientDecode) import qualified Data.Text.IO as T import qualified RIO.Text as T --- | Get a source for a file. Unlike @sourceFile@, doesn't require --- @ResourceT@. Unlike explicit @withBinaryFile@ and @sourceHandle@ --- usage, you can't accidentally use @WriteMode@ instead of --- @ReadMode@. -withSourceFile :: MonadUnliftIO m => FilePath -> (ConduitM i ByteString m () -> m a) -> m a -withSourceFile fp inner = withBinaryFile fp ReadMode $ inner . sourceHandle - --- | Same idea as 'withSourceFile', see comments there. -withSinkFile :: MonadUnliftIO m => FilePath -> (ConduitM ByteString o m () -> m a) -> m a -withSinkFile fp inner = withBinaryFile fp WriteMode $ inner . sinkHandle - --- | Like 'withSinkFile', but ensures that the file is atomically --- moved after all contents are written. 
-withSinkFileCautious - :: MonadUnliftIO m - => FilePath - -> (ConduitM ByteString o m () -> m a) - -> m a -withSinkFileCautious fp inner = - withRunInIO $ \run -> bracket acquire cleanup $ \(tmpFP, h) -> - run (inner $ sinkHandle h) <* (IO.hClose h *> Dir.renameFile tmpFP fp) - where - acquire = IO.openBinaryTempFile (FP.takeDirectory fp) (FP.takeFileName fp FP.<.> "tmp") - cleanup (tmpFP, h) = do - IO.hClose h - Dir.removeFile tmpFP `catch` \e -> - if isDoesNotExistError e - then return () - else throwIO e - -- | Path version withSystemTempDir :: MonadUnliftIO m => String -> (Path Abs Dir -> m a) -> m a withSystemTempDir str inner = withRunInIO $ \run -> Path.IO.withSystemTempDir str $ run . inner diff --git a/src/Stack/PrettyPrint.hs b/src/Stack/PrettyPrint.hs index 8fbea30edc..ba549869fc 100644 --- a/src/Stack/PrettyPrint.hs +++ b/src/Stack/PrettyPrint.hs @@ -41,11 +41,8 @@ import qualified Data.Text as T import qualified Distribution.ModuleName as C (ModuleName) import qualified Distribution.Text as C (display) import Stack.Types.NamedComponent -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName import Stack.Types.PrettyPrint (Style (..)) import Stack.Types.Runner -import Stack.Types.Version import Text.PrettyPrint.Leijen.Extended (Ann, Display (display), Doc, HasStyleAnn (..), StyleAnn (..), StyleDoc, (<+>), align, angles, braces, brackets, cat, @@ -165,15 +162,6 @@ debugBracket msg f = do style :: Style -> StyleDoc -> StyleDoc style = styleAnn -instance Display PackageName where - display = fromString . packageNameString - -instance Display PackageIdentifier where - display = fromString . packageIdentifierString - -instance Display Version where - display = fromString . versionString - instance Display (Path b File) where display = style File . fromString . 
toFilePath diff --git a/src/Stack/Runners.hs b/src/Stack/Runners.hs index 1211d6ed79..2bc8dffb95 100644 --- a/src/Stack/Runners.hs +++ b/src/Stack/Runners.hs @@ -29,7 +29,6 @@ import Stack.DefaultColorWhen (defaultColorWhen) import qualified Stack.Docker as Docker import qualified Stack.Nix as Nix import Stack.Setup -import Stack.Types.Compiler (CompilerVersion, CVType (..)) import Stack.Types.Config import Stack.Types.Runner import System.Environment (getEnvironment) @@ -40,7 +39,7 @@ import Stack.Dot -- FIXME it seems wrong that we call lcLoadBuildConfig multiple times loadCompilerVersion :: GlobalOpts -> LoadConfig - -> IO (CompilerVersion 'CVWanted) + -> IO WantedCompiler loadCompilerVersion go lc = view wantedCompilerVersionL <$> lcLoadBuildConfig lc (globalCompiler go) @@ -104,14 +103,14 @@ withGlobalConfigAndLock :: GlobalOpts -> RIO Config () -> IO () -withGlobalConfigAndLock go@GlobalOpts{..} inner = withRunnerGlobal go $ \runner -> do - lc <- runRIO runner $ - loadConfigMaybeProject - globalConfigMonoid - Nothing - LCSNoProject - withUserFileLock go (view stackRootL lc) $ \_lk -> - runRIO (lcConfig lc) inner +withGlobalConfigAndLock go@GlobalOpts{..} inner = + withRunnerGlobal go $ \runner -> + runRIO runner $ loadConfigMaybeProject + globalConfigMonoid + Nothing + LCSNoProject $ \lc -> + withUserFileLock go (view stackRootL lc) $ \_lk -> + runRIO (lcConfig lc) inner -- For now the non-locking version just unlocks immediately. -- That is, there's still a serialization point. @@ -202,8 +201,8 @@ loadConfigWithOpts -> IO a loadConfigWithOpts go@GlobalOpts{..} inner = withRunnerGlobal go $ \runner -> do mstackYaml <- forM globalStackYaml resolveFile' - runRIO runner $ do - lc <- loadConfig globalConfigMonoid globalResolver mstackYaml + runRIO runner $ + loadConfig globalConfigMonoid globalResolver mstackYaml $ \lc -> do -- If we have been relaunched in a Docker container, perform in-container initialization -- (switch UID, etc.). 
We do this after first loading the configuration since it must -- happen ASAP but needs a configuration. @@ -229,14 +228,11 @@ withMiniConfigAndLock :: GlobalOpts -> RIO MiniConfig () -> IO () -withMiniConfigAndLock go@GlobalOpts{..} inner = withRunnerGlobal go $ \runner -> do - miniConfig <- - runRIO runner $ - loadMiniConfig . lcConfig <$> - loadConfigMaybeProject - globalConfigMonoid - globalResolver - LCSNoProject +withMiniConfigAndLock go@GlobalOpts{..} inner = + withRunnerGlobal go $ \runner -> + runRIO runner $ + loadConfigMaybeProject globalConfigMonoid globalResolver LCSNoProject $ \lc -> do + let miniConfig = loadMiniConfig $ lcConfig lc runRIO miniConfig inner -- | Unlock a lock file, if the value is Just diff --git a/src/Stack/SDist.hs b/src/Stack/SDist.hs index 5ce853866f..440de2fd53 100644 --- a/src/Stack/SDist.hs +++ b/src/Stack/SDist.hs @@ -25,6 +25,7 @@ import qualified Data.ByteString.Char8 as S8 import qualified Data.ByteString.Lazy as L import Data.Char (toLower) import Data.Data (cast) +import Data.IORef.RunOnce (runOnce) import Data.List import Data.List.NonEmpty (NonEmpty) import qualified Data.List.NonEmpty as NE @@ -42,27 +43,21 @@ import qualified Distribution.PackageDescription.Check as Check import qualified Distribution.PackageDescription.Parsec as Cabal import Distribution.PackageDescription.PrettyPrint (showGenericPackageDescription) import qualified Distribution.Types.UnqualComponentName as Cabal -import qualified Distribution.Text as Cabal import Distribution.Version (simplifyVersionRange, orLaterVersion, earlierVersion, hasUpperBound, hasLowerBound) import Lens.Micro (set) import Path import Path.IO hiding (getModificationTime, getPermissions, withSystemTempDir) -import qualified RIO import Stack.Build (mkBaseConfigOpts, build) import Stack.Build.Execute import Stack.Build.Installed import Stack.Build.Source (loadSourceMap) import Stack.Build.Target hiding (PackageType (..)) -import Stack.PackageLocation 
(resolveMultiPackageLocation) import Stack.PrettyPrint import Stack.Constants import Stack.Package import Stack.Types.Build -import Stack.Types.BuildPlan import Stack.Types.Config import Stack.Types.Package -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName import Stack.Types.Runner import Stack.Types.Version import System.Directory (getModificationTime, getPermissions) @@ -171,7 +166,7 @@ getCabalLbs :: HasEnvConfig env -> Path Abs File -- ^ cabal file -> RIO env (PackageIdentifier, L.ByteString) getCabalLbs pvpBounds mrev cabalfp = do - (gpd, cabalfp') <- readPackageUnresolvedDir (parent cabalfp) False + (gpd, cabalfp') <- loadCabalFilePath (parent cabalfp) NoPrintWarnings unless (cabalfp == cabalfp') $ error $ "getCabalLbs: cabalfp /= cabalfp': " ++ show (cabalfp, cabalfp') (_, sourceMap) <- loadSourceMap AllowNoTargets defaultBuildOptsCLI @@ -183,7 +178,7 @@ getCabalLbs pvpBounds mrev cabalfp = do sourceMap let internalPackages = Set.fromList $ gpdPackageName gpd : - map (fromCabalPackageName . Cabal.unqualComponentNameToPackageName . fst) (Cabal.condSubLibraries gpd) + map (Cabal.unqualComponentNameToPackageName . 
fst) (Cabal.condSubLibraries gpd) gpd' = gtraverseT (addBounds internalPackages sourceMap installedMap) gpd gpd'' = case mrev of @@ -198,7 +193,7 @@ getCabalLbs pvpBounds mrev cabalfp = do $ Cabal.packageDescription gpd' } } - ident <- parsePackageIdentifierFromString $ Cabal.display $ Cabal.package $ Cabal.packageDescription gpd'' + ident = Cabal.package $ Cabal.packageDescription gpd'' -- Sanity rendering and reparsing the input, to ensure there are no -- cabal bugs, since there have been bugs here before, and currently -- are at the time of writing: @@ -257,17 +252,16 @@ getCabalLbs pvpBounds mrev cabalfp = do ) where addBounds :: Set PackageName -> SourceMap -> InstalledMap -> Dependency -> Dependency - addBounds internalPackages sourceMap installedMap dep@(Dependency cname range) = + addBounds internalPackages sourceMap installedMap dep@(Dependency name range) = if name `Set.member` internalPackages then dep else case foundVersion of Nothing -> dep - Just version -> Dependency cname $ simplifyVersionRange + Just version -> Dependency name $ simplifyVersionRange $ (if toAddUpper && not (hasUpperBound range) then addUpper version else id) $ (if toAddLower && not (hasLowerBound range) then addLower version else id) range where - name = fromCabalPackageName cname foundVersion = case Map.lookup name sourceMap of Just ps -> Just (piiVersion ps) @@ -277,9 +271,8 @@ getCabalLbs pvpBounds mrev cabalfp = do Nothing -> Nothing addUpper version = intersectVersionRanges - (earlierVersion $ toCabalVersion $ nextMajorVersion version) - addLower version = intersectVersionRanges - (orLaterVersion (toCabalVersion version)) + (earlierVersion $ nextMajorVersion version) + addLower version = intersectVersionRanges (orLaterVersion version) (toAddLower, toAddUpper) = case pvpBounds of @@ -301,11 +294,10 @@ gtraverseT f = readLocalPackage :: HasEnvConfig env => Path Abs Dir -> RIO env LocalPackage readLocalPackage pkgDir = do config <- getDefaultPackageConfig - (package, cabalfp) <- 
readPackageDir config pkgDir True + (package, cabalfp) <- readPackageDir config pkgDir YesPrintWarnings return LocalPackage { lpPackage = package , lpWanted = False -- HACK: makes it so that sdist output goes to a log instead of a file. - , lpDir = pkgDir , lpCabalFile = cabalfp -- NOTE: these aren't the 'correct values, but aren't used in -- the usage of this function in this module. @@ -318,7 +310,6 @@ readLocalPackage pkgDir = do , lpComponentFiles = pure Map.empty , lpComponents = Set.empty , lpUnbuildable = Set.empty - , lpLocation = PLFilePath $ toFilePath pkgDir } -- | Returns a newline-separate list of paths, and the absolute path to the .cabal file. @@ -342,14 +333,14 @@ getSDistFileList lp = ac = ActionContext Set.empty [] ConcurrencyAllowed task = Task { taskProvides = PackageIdentifier (packageName package) (packageVersion package) - , taskType = TTFiles lp Local + , taskType = TTFilePath lp Local , taskConfigOpts = TaskConfigOpts { tcoMissing = Set.empty , tcoOpts = \_ -> ConfigureOpts [] [] } , taskPresent = Map.empty , taskAllInOne = True - , taskCachePkgSrc = CacheSrcLocal (toFilePath (lpDir lp)) + , taskCachePkgSrc = CacheSrcLocal (toFilePath (parent $ lpCabalFile lp)) , taskAnyMissing = True , taskBuildTypeConfig = False } @@ -396,7 +387,10 @@ checkSDistTarball opts tarball = withTempTarGzContents tarball $ \pkgDir' -> do pkgDir <- (pkgDir' ) `liftM` (parseRelDir . FP.takeBaseName . FP.takeBaseName . 
toFilePath $ tarball) -- ^ drop ".tar" ^ drop ".gz" - when (sdoptsBuildTarball opts) (buildExtractedTarball pkgDir) + when (sdoptsBuildTarball opts) (buildExtractedTarball ResolvedPath + { resolvedRelative = RelFilePath "this-is-not-used" -- ugly hack + , resolvedAbsolute = pkgDir + }) unless (sdoptsIgnoreCheck opts) (checkPackageInExtractedTarball pkgDir) checkPackageInExtractedTarball @@ -404,12 +398,12 @@ checkPackageInExtractedTarball => Path Abs Dir -- ^ Absolute path to tarball -> RIO env () checkPackageInExtractedTarball pkgDir = do - (gpd, _cabalfp) <- readPackageUnresolvedDir pkgDir True + (gpd, _cabalfp) <- loadCabalFilePath pkgDir YesPrintWarnings let name = gpdPackageName gpd config <- getDefaultPackageConfig - (gdesc, PackageDescriptionPair pkgDesc _) <- readPackageDescriptionDir config pkgDir False + (gdesc, PackageDescriptionPair pkgDesc _) <- readPackageDescriptionDir config pkgDir NoPrintWarnings logInfo $ - "Checking package '" <> RIO.display name <> "' for common mistakes" + "Checking package '" <> fromString (packageNameString name) <> "' for common mistakes" let pkgChecks = -- MSS 2017-12-12: Try out a few different variants of -- pkgDesc to try and provoke an error or warning. I don't @@ -438,19 +432,17 @@ checkPackageInExtractedTarball pkgDir = do Nothing -> return () Just ne -> throwM $ CheckException ne -buildExtractedTarball :: HasEnvConfig env => Path Abs Dir -> RIO env () +buildExtractedTarball :: HasEnvConfig env => ResolvedPath Dir -> RIO env () buildExtractedTarball pkgDir = do - projectRoot <- view projectRootL envConfig <- view envConfigL - localPackageToBuild <- readLocalPackage pkgDir - let packageEntries = bcPackages (envConfigBuildConfig envConfig) - getPaths = resolveMultiPackageLocation projectRoot - allPackagePaths <- fmap (map fst . 
mconcat) (mapM getPaths packageEntries) + localPackageToBuild <- readLocalPackage $ resolvedAbsolute pkgDir + let allPackagePaths = bcPackages (envConfigBuildConfig envConfig) -- We remove the path based on the name of the package let isPathToRemove path = do localPackage <- readLocalPackage path return $ packageName (lpPackage localPackage) == packageName (lpPackage localPackageToBuild) - pathsToKeep <- filterM (fmap not . isPathToRemove) allPackagePaths + pathsToKeep <- filterM (fmap not . isPathToRemove . resolvedAbsolute . fst) allPackagePaths + getLPV <- runOnce $ mkLocalPackageView YesPrintWarnings pkgDir newPackagesRef <- liftIO (newIORef Nothing) let adjustEnvForBuild env = let updatedEnvConfig = envConfig @@ -459,7 +451,7 @@ buildExtractedTarball pkgDir = do } in set envConfigL updatedEnvConfig env updatePackageInBuildConfig buildConfig = buildConfig - { bcPackages = map (PLFilePath . toFilePath) $ pkgDir : pathsToKeep + { bcPackages = (pkgDir, getLPV) : pathsToKeep , bcConfig = (bcConfig buildConfig) { configBuild = defaultBuildOpts { boptsTests = True diff --git a/src/Stack/Script.hs b/src/Stack/Script.hs index 7c892b12b1..7db68ccffa 100644 --- a/src/Stack/Script.hs +++ b/src/Stack/Script.hs @@ -1,7 +1,6 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE CPP #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TupleSections #-} module Stack.Script ( scriptCmd @@ -13,6 +12,7 @@ import qualified Data.Conduit.List as CL import Data.List.Split (splitWhen) import qualified Data.Map.Strict as Map import qualified Data.Set as Set +import Distribution.Types.PackageName (mkPackageName) import Path import Path.IO import qualified Stack.Build @@ -23,9 +23,9 @@ import Stack.Runners import Stack.Types.BuildPlan import Stack.Types.Compiler import Stack.Types.Config -import Stack.Types.PackageName import System.FilePath (dropExtension, replaceExtension) import RIO.Process +import qualified RIO.Text as T -- | Run a Stack Script scriptCmd 
:: ScriptOpts -> GlobalOpts -> IO () @@ -64,7 +64,7 @@ scriptCmd opts go' = do getPackagesFromModuleInfo moduleInfo (soFile opts) packages -> do let targets = concatMap wordsComma packages - targets' <- mapM parsePackageNameFromString targets + targets' <- mapM parsePackageNameThrowing targets return $ Set.fromList targets' unless (Set.null targetsSet) $ do @@ -85,7 +85,7 @@ scriptCmd opts go' = do else do logDebug "Missing packages, performing installation" Stack.Build.build Nothing lk defaultBuildOptsCLI - { boptsCLITargets = map packageNameText $ Set.toList targetsSet + { boptsCLITargets = map (T.pack . packageNameString) $ Set.toList targetsSet } let ghcArgs = concat @@ -146,7 +146,7 @@ getPackagesFromModuleInfo mi scriptFP = do [pn] -> return $ Set.singleton pn pns' -> throwString $ concat [ "Module " - , S8.unpack $ unModuleName mn + , moduleNameString mn , " appears in multiple packages: " , unwords $ map packageNameString pns' ] @@ -162,48 +162,48 @@ getPackagesFromModuleInfo mi scriptFP = do -- packages that should never be auto-parsed in. 
blacklist :: Set PackageName blacklist = Set.fromList - [ $(mkPackageName "async-dejafu") - , $(mkPackageName "monads-tf") - , $(mkPackageName "crypto-api") - , $(mkPackageName "fay-base") - , $(mkPackageName "hashmap") - , $(mkPackageName "hxt-unicode") - , $(mkPackageName "hledger-web") - , $(mkPackageName "plot-gtk3") - , $(mkPackageName "gtk3") - , $(mkPackageName "regex-pcre-builtin") - , $(mkPackageName "regex-compat-tdfa") - , $(mkPackageName "log") - , $(mkPackageName "zip") - , $(mkPackageName "monad-extras") - , $(mkPackageName "control-monad-free") - , $(mkPackageName "prompt") - , $(mkPackageName "kawhi") - , $(mkPackageName "language-c") - , $(mkPackageName "gl") - , $(mkPackageName "svg-tree") - , $(mkPackageName "Glob") - , $(mkPackageName "nanospec") - , $(mkPackageName "HTF") - , $(mkPackageName "courier") - , $(mkPackageName "newtype-generics") - , $(mkPackageName "objective") - , $(mkPackageName "binary-ieee754") - , $(mkPackageName "rerebase") - , $(mkPackageName "cipher-aes") - , $(mkPackageName "cipher-blowfish") - , $(mkPackageName "cipher-camellia") - , $(mkPackageName "cipher-des") - , $(mkPackageName "cipher-rc4") - , $(mkPackageName "crypto-cipher-types") - , $(mkPackageName "crypto-numbers") - , $(mkPackageName "crypto-pubkey") - , $(mkPackageName "crypto-random") - , $(mkPackageName "cryptohash") - , $(mkPackageName "cryptohash-conduit") - , $(mkPackageName "cryptohash-md5") - , $(mkPackageName "cryptohash-sha1") - , $(mkPackageName "cryptohash-sha256") + [ mkPackageName "async-dejafu" + , mkPackageName "monads-tf" + , mkPackageName "crypto-api" + , mkPackageName "fay-base" + , mkPackageName "hashmap" + , mkPackageName "hxt-unicode" + , mkPackageName "hledger-web" + , mkPackageName "plot-gtk3" + , mkPackageName "gtk3" + , mkPackageName "regex-pcre-builtin" + , mkPackageName "regex-compat-tdfa" + , mkPackageName "log" + , mkPackageName "zip" + , mkPackageName "monad-extras" + , mkPackageName "control-monad-free" + , mkPackageName 
"prompt" + , mkPackageName "kawhi" + , mkPackageName "language-c" + , mkPackageName "gl" + , mkPackageName "svg-tree" + , mkPackageName "Glob" + , mkPackageName "nanospec" + , mkPackageName "HTF" + , mkPackageName "courier" + , mkPackageName "newtype-generics" + , mkPackageName "objective" + , mkPackageName "binary-ieee754" + , mkPackageName "rerebase" + , mkPackageName "cipher-aes" + , mkPackageName "cipher-blowfish" + , mkPackageName "cipher-camellia" + , mkPackageName "cipher-des" + , mkPackageName "cipher-rc4" + , mkPackageName "crypto-cipher-types" + , mkPackageName "crypto-numbers" + , mkPackageName "crypto-pubkey" + , mkPackageName "crypto-random" + , mkPackageName "cryptohash" + , mkPackageName "cryptohash-conduit" + , mkPackageName "cryptohash-md5" + , mkPackageName "cryptohash-sha1" + , mkPackageName "cryptohash-sha256" ] toModuleInfo :: LoadedSnapshot -> ModuleInfo @@ -242,11 +242,13 @@ parseImports = bs3 = fromMaybe bs2 $ stripPrefix "qualified " bs2 case stripPrefix "\"" bs3 of Just bs4 -> do - pn <- parsePackageNameFromString $ S8.unpack $ S8.takeWhile (/= '"') bs4 + pn <- parsePackageNameThrowing $ S8.unpack $ S8.takeWhile (/= '"') bs4 Just (Set.singleton pn, Set.empty) Nothing -> Just ( Set.empty , Set.singleton - $ ModuleName + $ fromString + $ T.unpack + $ decodeUtf8With lenientDecode $ S8.takeWhile (\c -> c /= ' ' && c /= '(') bs3 ) diff --git a/src/Stack/Setup.hs b/src/Stack/Setup.hs index b2d8ec26da..67018b67bc 100644 --- a/src/Stack/Setup.hs +++ b/src/Stack/Setup.hs @@ -63,6 +63,8 @@ import qualified Data.Yaml as Yaml import Distribution.System (OS, Arch (..), Platform (..)) import qualified Distribution.System as Cabal import Distribution.Text (simpleParse) +import Distribution.Types.PackageName (mkPackageName) +import Distribution.Version (mkVersion) import Lens.Micro (set) import Network.HTTP.StackClient (getResponseBody, getResponseStatusCode) import Network.HTTP.Download @@ -76,7 +78,6 @@ import Stack.Build (build) import Stack.Config 
(loadConfig) import Stack.Constants (stackProgName) import Stack.Constants.Config (distRelativeDir) -import Stack.Fetch import Stack.GhcPkg (createDatabase, getCabalPkgVer, getGlobalDB, mkGhcPackagePath, ghcPkgPathEnvVar) import Stack.Prelude hiding (Display (..)) import Stack.PrettyPrint @@ -87,8 +88,6 @@ import Stack.Types.Compiler import Stack.Types.CompilerBuild import Stack.Types.Config import Stack.Types.Docker -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName import Stack.Types.Runner import Stack.Types.Version import qualified System.Directory as D @@ -117,7 +116,7 @@ data SetupOpts = SetupOpts { soptsInstallIfMissing :: !Bool , soptsUseSystem :: !Bool -- ^ Should we use a system compiler installation, if available? - , soptsWantedCompiler :: !(CompilerVersion 'CVWanted) + , soptsWantedCompiler :: !WantedCompiler , soptsCompilerCheck :: !VersionCheck , soptsStackYaml :: !(Maybe (Path Abs File)) -- ^ If we got the desired GHC version from that file @@ -143,7 +142,7 @@ data SetupOpts = SetupOpts deriving Show data SetupException = UnsupportedSetupCombo OS Arch | MissingDependencies [String] - | UnknownCompilerVersion (Set.Set Text) (CompilerVersion 'CVWanted) (Set.Set (CompilerVersion 'CVActual)) + | UnknownCompilerVersion (Set.Set Text) WantedCompiler (Set.Set ActualCompiler) | UnknownOSKey Text | GHCSanityCheckCompileFailed SomeException (Path Abs File) | WantedMustBeGHC @@ -167,7 +166,7 @@ instance Show SetupException where intercalate ", " tools show (UnknownCompilerVersion oskeys wanted known) = concat [ "No setup information found for " - , compilerVersionString wanted + , T.unpack $ utf8BuilderToText $ RIO.display wanted , " on your platform.\nThis probably means a GHC bindist has not yet been added for OS key '" , T.unpack (T.intercalate "', '" (sort $ Set.toList oskeys)) , "'.\nSupported versions: " @@ -218,7 +217,7 @@ setupEnv mResolveMissingGHC = do let stackYaml = bcStackYaml bconfig platform <- view platformL wcVersion <- view 
wantedCompilerVersionL - wc <- view $ wantedCompilerVersionL.whichCompilerL + wc <- view $ wantedCompilerVersionL.to wantedToActual.whichCompilerL let sopts = SetupOpts { soptsInstallIfMissing = configInstallGHC config , soptsUseSystem = configSystemGHC config @@ -264,7 +263,6 @@ setupEnv mResolveMissingGHC = do ls <- runRIO bcPath $ loadSnapshot (Just compilerVer) - (view projectRootL bc) (bcSnapshotDef bc) let envConfig0 = EnvConfig { envConfigBuildConfig = bc @@ -379,8 +377,8 @@ ensureCompiler :: (HasConfig env, HasGHCVariant env) => SetupOpts -> RIO env (Maybe ExtraDirs, CompilerBuild, Bool) ensureCompiler sopts = do - let wc = whichCompiler (soptsWantedCompiler sopts) - when (getGhcVersion (soptsWantedCompiler sopts) < $(mkVersion "7.8")) $ do + let wc = whichCompiler (wantedToActual (soptsWantedCompiler sopts)) + when (getGhcVersion (wantedToActual (soptsWantedCompiler sopts)) < mkVersion [7, 8]) $ do logWarn "Stack will almost certainly fail with GHC below version 7.8" logWarn "Valiantly attempting to run anyway, but I know this is doomed" logWarn "For more information, see: https://github.com/commercialhaskell/stack/issues/648" @@ -410,7 +408,7 @@ ensureCompiler sopts = do case platform of Platform _ Cabal.Windows | not (soptsSkipMsys sopts) -> - case getInstalledTool installed $(mkPackageName "msys2") (const True) of + case getInstalledTool installed (mkPackageName "msys2") (const True) of Just tool -> return (Just tool) Nothing | soptsInstallIfMissing sopts -> do @@ -421,7 +419,7 @@ ensureCompiler sopts = do case Map.lookup osKey $ siMsys2 si of Just x -> return x Nothing -> throwString $ "MSYS2 not found for " ++ T.unpack osKey - let tool = Tool (PackageIdentifier $(mkPackageName "msys2") version) + let tool = Tool (PackageIdentifier (mkPackageName "msys2") version) Just <$> downloadAndInstallTool (configLocalPrograms config) si info tool (installMsys2Windows osKey) | otherwise -> do logWarn "Continuing despite missing tool: msys2" @@ -445,8 +443,8 @@ 
ensureCompiler sopts = do Ghc -> do ghcBuilds <- getGhcBuilds forM ghcBuilds $ \ghcBuild -> do - ghcPkgName <- parsePackageNameFromString ("ghc" ++ ghcVariantSuffix ghcVariant ++ compilerBuildSuffix ghcBuild) - return (getInstalledTool installed ghcPkgName (isWanted . GhcVersion), ghcBuild) + ghcPkgName <- parsePackageNameThrowing ("ghc" ++ ghcVariantSuffix ghcVariant ++ compilerBuildSuffix ghcBuild) + return (getInstalledTool installed ghcPkgName (isWanted . ACGhc), ghcBuild) Ghcjs -> return [(getInstalledGhcjs installed isWanted, CompilerBuildStandard)] let existingCompilers = concatMap (\(installedCompiler, compilerBuild) -> @@ -664,7 +662,7 @@ ensureDockerStackExe containerPlatform = do config <- view configL containerPlatformDir <- runReaderT platformOnlyRelDir (containerPlatform,PlatformVariantNone) let programsPath = configLocalProgramsBase config containerPlatformDir - tool = Tool (PackageIdentifier $(mkPackageName "stack") stackVersion) + tool = Tool (PackageIdentifier (mkPackageName "stack") stackVersion) stackExeDir <- installDir programsPath tool let stackExePath = stackExeDir $(mkRelFile "stack") stackExeExists <- doesFileExist stackExePath @@ -686,8 +684,7 @@ upgradeCabal :: (HasConfig env, HasGHCVariant env) upgradeCabal wc upgradeTo = do logWarn "Using deprecated --upgrade-cabal feature, this is not recommended" logWarn "Manipulating the global Cabal is only for debugging purposes" - let name = $(mkPackageName "Cabal") - rmap <- resolvePackages Nothing mempty (Set.singleton name) + let name = mkPackageName "Cabal" installed <- getCabalPkgVer wc case upgradeTo of Specific wantedVersion -> do @@ -696,20 +693,21 @@ upgradeCabal wc upgradeTo = do else logInfo $ "No install necessary. 
Cabal " <> - RIO.display installed <> + fromString (versionString installed) <> " is already installed" - Latest -> case map rpIdent rmap of - [] -> throwString "No Cabal library found in index, cannot upgrade" - [PackageIdentifier name' latestVersion] | name == name' -> do + Latest -> do + mversion <- getLatestHackageVersion name UsePreferredVersions + case mversion of + Nothing -> throwString "No Cabal library found in index, cannot upgrade" + Just (PackageIdentifierRevision _name latestVersion _cabalHash) -> do if installed < latestVersion then doCabalInstall wc installed latestVersion else logInfo $ "No upgrade necessary: Cabal-" <> - RIO.display latestVersion <> + fromString (versionString latestVersion) <> " is the same or newer than latest hackage version " <> - RIO.display installed - x -> error $ "Unexpected results for resolvePackages: " ++ show x + fromString (versionString installed) -- Configure and run the necessary commands for a cabal install doCabalInstall :: (HasConfig env, HasGHCVariant env) @@ -718,31 +716,31 @@ doCabalInstall :: (HasConfig env, HasGHCVariant env) -> Version -> RIO env () doCabalInstall wc installed wantedVersion = do - when (wantedVersion >= $(mkVersion "2.2")) $ do + when (wantedVersion >= mkVersion [2, 2]) $ do logWarn "--upgrade-cabal will almost certainly fail for Cabal 2.2 or later" logWarn "See: https://github.com/commercialhaskell/stack/issues/4070" logWarn "Valiantly attempting to build it anyway, but I know this is doomed" withSystemTempDir "stack-cabal-upgrade" $ \tmpdir -> do logInfo $ "Installing Cabal-" <> - RIO.display wantedVersion <> + fromString (versionString wantedVersion) <> " to replace " <> - RIO.display installed - let name = $(mkPackageName "Cabal") - ident = PackageIdentifier name wantedVersion - m <- unpackPackageIdents tmpdir Nothing [PackageIdentifierRevision ident CFILatest] + fromString (versionString installed) + let name = mkPackageName "Cabal" + suffix <- parseRelDir $ "Cabal-" ++ versionString 
wantedVersion + let dir = tmpdir suffix + unpackPackageLocation dir $ PLIHackage + (PackageIdentifierRevision name wantedVersion CFILatest) + Nothing compilerPath <- findExecutable (compilerExeName wc) >>= either throwM parseAbsFile versionDir <- parseRelDir $ versionString wantedVersion let installRoot = toFilePath $ parent (parent compilerPath) $(mkRelDir "new-cabal") versionDir - dir <- case Map.lookup ident m of - Nothing -> error "upgradeCabal: Invariant violated, dir missing" - Just dir -> return dir withWorkingDir (toFilePath dir) $ proc (compilerExeName wc) ["Setup.hs"] runProcess_ platform <- view platformL - let setupExe = toFilePath $ dir case platform of + let setupExe = dir case platform of Platform _ Cabal.Windows -> $(mkRelFile "Setup.exe") _ -> $(mkRelFile "Setup") dirArgument name' = concat [ "--" @@ -751,17 +749,18 @@ doCabalInstall wc installed wantedVersion = do , installRoot FP. name' ] args = "configure" : map dirArgument (words "lib bin data doc") - withWorkingDir (toFilePath dir) $ do - proc setupExe args runProcess_ - proc setupExe ["build"] runProcess_ - proc setupExe ["install"] runProcess_ + withWorkingDir (toFilePath dir) $ mapM_ (\args' -> proc (toFilePath setupExe) args' runProcess_) + [ args + , ["build"] + , ["install"] + ] logInfo "New Cabal library installed" -- | Get the version of the system compiler, if available getSystemCompiler :: (HasProcessContext env, HasLogFunc env) => WhichCompiler - -> RIO env (Maybe (CompilerVersion 'CVActual, Arch)) + -> RIO env (Maybe (ActualCompiler, Arch)) getSystemCompiler wc = do let exeName = case wc of Ghc -> "ghc" @@ -773,11 +772,11 @@ getSystemCompiler wc = do let minfo = do Right lbs <- Just eres pairs_ <- readMaybe $ BL8.unpack lbs :: Maybe [(String, String)] - version <- lookup "Project version" pairs_ >>= parseVersionFromString + version <- lookup "Project version" pairs_ >>= parseVersionThrowing arch <- lookup "Target platform" pairs_ >>= simpleParse . 
takeWhile (/= '-') return (version, arch) case (wc, minfo) of - (Ghc, Just (version, arch)) -> return (Just (GhcVersion version, arch)) + (Ghc, Just (version, arch)) -> return (Just (ACGhc version, arch)) (Ghcjs, Just (_, arch)) -> do eversion <- tryAny $ getCompilerVersion Ghcjs case eversion of @@ -801,7 +800,7 @@ getSetupInfo stackSetupYaml = do loadSetupInfo (SetupInfoFileOrURL urlOrFile) = do bs <- case parseUrlThrow urlOrFile of - Just req -> liftM (LBS.toStrict . getResponseBody) $ httpLBS req + Just req -> liftM (LBS.toStrict . getResponseBody) $ httpLbs req Nothing -> liftIO $ S.readFile urlOrFile WithJSONWarnings si warnings <- either throwM return (Yaml.decodeEither' bs) when (urlOrFile /= defaultSetupInfoYaml) $ @@ -815,18 +814,18 @@ getInstalledTool :: [Tool] -- ^ already installed getInstalledTool installed name goodVersion = if null available then Nothing - else Just $ Tool $ maximumBy (comparing packageIdentifierVersion) available + else Just $ Tool $ maximumBy (comparing pkgVersion) available where available = mapMaybe goodPackage installed goodPackage (Tool pi') = - if packageIdentifierName pi' == name && - goodVersion (packageIdentifierVersion pi') + if pkgName pi' == name && + goodVersion (pkgVersion pi') then Just pi' else Nothing goodPackage _ = Nothing getInstalledGhcjs :: [Tool] - -> (CompilerVersion 'CVActual -> Bool) + -> (ActualCompiler -> Bool) -> Maybe Tool getInstalledGhcjs installed goodVersion = if null available @@ -860,11 +859,11 @@ downloadAndInstallTool programsDir si downloadInfo tool installer = do downloadAndInstallCompiler :: (HasConfig env, HasGHCVariant env) => CompilerBuild -> SetupInfo - -> CompilerVersion 'CVWanted + -> WantedCompiler -> VersionCheck -> Maybe String -> RIO env Tool -downloadAndInstallCompiler ghcBuild si wanted@GhcVersion{} versionCheck mbindistURL = do +downloadAndInstallCompiler ghcBuild si wanted@WCGhc{} versionCheck mbindistURL = do ghcVariant <- view ghcVariantL (selectedVersion, downloadInfo) <- 
case mbindistURL of Just bindistURL -> do @@ -872,7 +871,7 @@ downloadAndInstallCompiler ghcBuild si wanted@GhcVersion{} versionCheck mbindist GHCCustom _ -> return () _ -> throwM RequireCustomGHCVariant case wanted of - GhcVersion version -> + WCGhc version -> return (version, GHCDownloadInfo mempty mempty DownloadInfo { downloadInfoUrl = T.pack bindistURL , downloadInfoContentLength = Nothing @@ -885,7 +884,7 @@ downloadAndInstallCompiler ghcBuild si wanted@GhcVersion{} versionCheck mbindist ghcKey <- getGhcKey ghcBuild case Map.lookup ghcKey $ siGHCs si of Nothing -> throwM $ UnknownOSKey ghcKey - Just pairs_ -> getWantedCompilerInfo ghcKey versionCheck wanted GhcVersion pairs_ + Just pairs_ -> getWantedCompilerInfo ghcKey versionCheck wanted ACGhc pairs_ config <- view configL let installer = case configPlatform config of @@ -901,7 +900,7 @@ downloadAndInstallCompiler ghcBuild si wanted@GhcVersion{} versionCheck mbindist b -> " (" <> fromString (compilerBuildName b) <> ")") <> " to an isolated location." logInfo "This will not interfere with any system-level installation." 
- ghcPkgName <- parsePackageNameFromString ("ghc" ++ ghcVariantSuffix ghcVariant ++ compilerBuildSuffix ghcBuild) + ghcPkgName <- parsePackageNameThrowing ("ghc" ++ ghcVariantSuffix ghcVariant ++ compilerBuildSuffix ghcBuild) let tool = Tool $ PackageIdentifier ghcPkgName selectedVersion downloadAndInstallTool (configLocalPrograms config) si (gdiDownloadInfo downloadInfo) tool installer downloadAndInstallCompiler compilerBuild si wanted versionCheck _mbindistUrl = do @@ -921,8 +920,8 @@ downloadAndInstallCompiler compilerBuild si wanted versionCheck _mbindistUrl = d getWantedCompilerInfo :: (Ord k, MonadThrow m) => Text -> VersionCheck - -> CompilerVersion 'CVWanted - -> (k -> CompilerVersion 'CVActual) + -> WantedCompiler + -> (k -> ActualCompiler) -> Map k a -> m (k, a) getWantedCompilerInfo key versionCheck wanted toCV pairs_ = @@ -940,7 +939,7 @@ downloadAndInstallPossibleCompilers :: (HasGHCVariant env, HasConfig env) => [CompilerBuild] -> SetupInfo - -> CompilerVersion 'CVWanted + -> WantedCompiler -> VersionCheck -> Maybe String -> RIO env (Tool, CompilerBuild) @@ -1200,29 +1199,29 @@ installGHCJS si archiveFile archiveType _tempDir destDir = do let stackYaml = unpackDir $(mkRelFile "stack.yaml") destBinDir = destDir $(mkRelDir "bin") ensureDir destBinDir - envConfig' <- loadGhcjsEnvConfig stackYaml destBinDir - - -- On windows we need to copy options files out of the install dir. Argh! - -- This is done before the build, so that if it fails, things fail - -- earlier. - mwindowsInstallDir <- case platform of - Platform _ Cabal.Windows -> - liftM Just $ runRIO envConfig' installationRootLocal - _ -> return Nothing - - logSticky "Installing GHCJS (this will take a long time) ..." - buildInGhcjsEnv envConfig' defaultBuildOptsCLI - -- Copy over *.options files needed on windows. - forM_ mwindowsInstallDir $ \dir -> do - (_, files) <- listDir (dir $(mkRelDir "bin")) - forM_ (filter ((".options" `isSuffixOf`). 
toFilePath) files) $ \optionsFile -> do - let dest = destDir $(mkRelDir "bin") filename optionsFile - liftIO $ ignoringAbsence (removeFile dest) - copyFile optionsFile dest - logStickyDone "Installed GHCJS." + loadGhcjsEnvConfig stackYaml destBinDir $ \envConfig' -> do + + -- On windows we need to copy options files out of the install dir. Argh! + -- This is done before the build, so that if it fails, things fail + -- earlier. + mwindowsInstallDir <- case platform of + Platform _ Cabal.Windows -> + liftM Just $ runRIO envConfig' installationRootLocal + _ -> return Nothing + + logSticky "Installing GHCJS (this will take a long time) ..." + buildInGhcjsEnv envConfig' defaultBuildOptsCLI + -- Copy over *.options files needed on windows. + forM_ mwindowsInstallDir $ \dir -> do + (_, files) <- listDir (dir $(mkRelDir "bin")) + forM_ (filter ((".options" `isSuffixOf`). toFilePath) files) $ \optionsFile -> do + let dest = destDir $(mkRelDir "bin") filename optionsFile + liftIO $ ignoringAbsence (removeFile dest) + copyFile optionsFile dest + logStickyDone "Installed GHCJS." ensureGhcjsBooted :: HasConfig env - => CompilerVersion 'CVActual -> Bool -> [String] + => ActualCompiler -> Bool -> [String] -> RIO env () ensureGhcjsBooted cv shouldBoot bootOpts = do eres <- try $ sinkProcessStdout "ghcjs" [] (return ()) @@ -1245,7 +1244,7 @@ ensureGhcjsBooted cv shouldBoot bootOpts = do -- installed with an older version and not yet booted. 
stackYamlExists <- doesFileExist stackYaml ghcjsVersion <- case cv of - GhcjsVersion version _ -> return version + ACGhcjs version _ -> return version _ -> error "ensureGhcjsBooted invoked on non GhcjsVersion" actualStackYaml <- if stackYamlExists then return stackYaml else @@ -1259,8 +1258,8 @@ ensureGhcjsBooted cv shouldBoot bootOpts = do bootGhcjs :: (HasRunner env, HasProcessContext env) => Version -> Path Abs File -> Path Abs Dir -> [String] -> RIO env () -bootGhcjs ghcjsVersion stackYaml destDir bootOpts = do - envConfig <- loadGhcjsEnvConfig stackYaml (destDir $(mkRelDir "bin")) +bootGhcjs ghcjsVersion stackYaml destDir bootOpts = + loadGhcjsEnvConfig stackYaml (destDir $(mkRelDir "bin")) $ \envConfig -> do menv <- liftIO $ configProcessContextSettings (view configL envConfig) defaultEnvSettings -- Install cabal-install if missing, or if the installed one is old. mcabal <- withProcessContext menv getCabalInstallVersion @@ -1269,23 +1268,23 @@ bootGhcjs ghcjsVersion stackYaml destDir bootOpts = do logInfo "No cabal-install binary found for use with GHCJS." return True Just v - | v < $(mkVersion "1.22.4") -> do + | v < mkVersion [1, 22, 4] -> do logInfo $ "The cabal-install found on PATH is too old to be used for booting GHCJS (version " <> - RIO.display v <> + fromString (versionString v) <> ")." return True - | v >= $(mkVersion "1.23") -> do + | v >= mkVersion [1, 23] -> do logWarn $ "The cabal-install found on PATH is a version stack doesn't know about, version " <> - RIO.display v <> + fromString (versionString v) <> ". 
This may or may not work.\n" <> "See this issue: https://github.com/ghcjs/ghcjs/issues/470" return False - | ghcjsVersion >= $(mkVersion "0.2.0.20160413") && v >= $(mkVersion "1.22.8") -> do + | ghcjsVersion >= mkVersion [0, 2, 0, 20160413] && v >= mkVersion [1, 22, 8] -> do logWarn $ "The cabal-install found on PATH, version " <> - RIO.display v <> + fromString (versionString v) <> ", is >= 1.22.8.\n" <> "That version has a bug preventing ghcjs < 0.2.0.20160413 from booting.\n" <> "See this issue: https://github.com/ghcjs/ghcjs/issues/470" @@ -1317,7 +1316,7 @@ bootGhcjs ghcjsVersion stackYaml destDir bootOpts = do Nothing -> do logError "Failed to get cabal-install version after installing it." failedToFindErr - Just v | v >= $(mkVersion "1.22.8") && v < $(mkVersion "1.23") -> + Just v | v >= mkVersion [1, 22, 8] && v < mkVersion [1, 23] -> logWarn $ "Installed version of cabal-install is in a version range which may not work.\n" <> "See this issue: https://github.com/ghcjs/ghcjs/issues/470\n" <> @@ -1337,18 +1336,23 @@ bootGhcjs ghcjsVersion stackYaml destDir bootOpts = do withProcessContext menv' $ proc "ghcjs-boot" bootOpts logProcessStderrStdout logStickyDone "GHCJS booted." 
-loadGhcjsEnvConfig :: HasRunner env - => Path Abs File -> Path b t -> RIO env EnvConfig -loadGhcjsEnvConfig stackYaml binPath = do - lc <- loadConfig - (mempty - { configMonoidInstallGHC = First (Just True) - , configMonoidLocalBinPath = First (Just (toFilePath binPath)) - }) - Nothing - (SYLOverride stackYaml) - bconfig <- liftIO $ lcLoadBuildConfig lc Nothing - runRIO bconfig $ setupEnv Nothing +loadGhcjsEnvConfig + :: HasRunner env + => Path Abs File + -> Path b t + -> (EnvConfig -> RIO env a) + -> RIO env a +loadGhcjsEnvConfig stackYaml binPath inner = do + loadConfig + (mempty + { configMonoidInstallGHC = First (Just True) + , configMonoidLocalBinPath = First (Just (toFilePath binPath)) + }) + Nothing + (SYLOverride stackYaml) $ \lc -> do + bconfig <- liftIO $ lcLoadBuildConfig lc Nothing + envConfig <- runRIO bconfig $ setupEnv Nothing + inner envConfig buildInGhcjsEnv :: (HasEnvConfig env, MonadIO m) => env -> BuildOptsCLI -> m () buildInGhcjsEnv envConfig boptsCli = do @@ -1360,8 +1364,10 @@ getCabalInstallVersion :: (HasProcessContext env, HasLogFunc env) => RIO env (Ma getCabalInstallVersion = do ebs <- tryAny $ proc "cabal" ["--numeric-version"] readProcess_ case ebs of - Left _ -> return Nothing - Right (bs, _) -> Just <$> parseVersion (T.dropWhileEnd isSpace (T.decodeUtf8 (LBS.toStrict bs))) + Left _ -> + return Nothing + Right (bs, _) -> + Just <$> parseVersionThrowing (T.unpack $ T.dropWhileEnd isSpace (T.decodeUtf8 (LBS.toStrict bs))) -- | Check if given processes appear to be present, throwing an exception if -- missing. 
@@ -1672,10 +1678,10 @@ removeHaskellEnvVars = -- | Get map of environment variables to set to change the GHC's encoding to UTF-8 getUtf8EnvVars :: (HasProcessContext env, HasPlatform env, HasLogFunc env) - => CompilerVersion 'CVActual + => ActualCompiler -> RIO env (Map Text Text) getUtf8EnvVars compilerVer = - if getGhcVersion compilerVer >= $(mkVersion "7.10.3") + if getGhcVersion compilerVer >= mkVersion [7, 10, 3] -- GHC_CHARENC supported by GHC >=7.10.3 then return $ Map.singleton "GHC_CHARENC" "UTF-8" else legacyLocale @@ -2029,4 +2035,4 @@ getDownloadVersion (StackReleaseInfo val) = do Object o <- Just val String rawName <- HashMap.lookup "name" o -- drop the "v" at the beginning of the name - parseVersion $ T.drop 1 rawName + parseVersion $ T.unpack (T.drop 1 rawName) diff --git a/src/Stack/Setup/Installed.hs b/src/Stack/Setup/Installed.hs index 794f657db0..e6be31a45e 100644 --- a/src/Stack/Setup/Installed.hs +++ b/src/Stack/Setup/Installed.hs @@ -37,26 +37,23 @@ import Path import Path.IO import Stack.Types.Compiler import Stack.Types.Config -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName -import Stack.Types.Version import RIO.Process data Tool = Tool PackageIdentifier -- ^ e.g. ghc-7.8.4, msys2-20150512 - | ToolGhcjs (CompilerVersion 'CVActual) -- ^ e.g. ghcjs-0.1.0_ghc-7.10.2 + | ToolGhcjs ActualCompiler -- ^ e.g. ghcjs-0.1.0_ghc-7.10.2 toolString :: Tool -> String toolString (Tool ident) = packageIdentifierString ident toolString (ToolGhcjs cv) = compilerVersionString cv toolNameString :: Tool -> String -toolNameString (Tool ident) = packageNameString $ packageIdentifierName ident +toolNameString (Tool ident) = packageNameString $ pkgName ident toolNameString ToolGhcjs{} = "ghcjs" parseToolText :: Text -> Maybe Tool -parseToolText (parseCompilerVersion -> Just cv@GhcjsVersion{}) = Just (ToolGhcjs cv) -parseToolText (parsePackageIdentifierFromString . 
T.unpack -> Just pkgId) = Just (Tool pkgId) +parseToolText (parseWantedCompiler -> Right (WCGhcjs x y)) = Just (ToolGhcjs (ACGhcjs x y)) +parseToolText (parsePackageIdentifier . T.unpack -> Just pkgId) = Just (Tool pkgId) parseToolText _ = Nothing markInstalled :: (MonadIO m, MonadThrow m) @@ -101,14 +98,14 @@ ghcjsWarning = unwords getCompilerVersion :: (HasProcessContext env, HasLogFunc env) => WhichCompiler - -> RIO env (CompilerVersion 'CVActual) + -> RIO env ActualCompiler getCompilerVersion wc = case wc of Ghc -> do logDebug "Asking GHC for its version" bs <- fst <$> proc "ghc" ["--numeric-version"] readProcess_ let (_, ghcVersion) = versionFromEnd $ BL.toStrict bs - x <- GhcVersion <$> parseVersion (T.decodeUtf8 ghcVersion) + x <- ACGhc <$> parseVersionThrowing (T.unpack $ T.decodeUtf8 ghcVersion) logDebug $ "GHC version is: " <> display x return x Ghcjs -> do @@ -118,9 +115,9 @@ getCompilerVersion wc = -- -- The Glorious Glasgow Haskell Compilation System for JavaScript, version 0.1.0 (GHC 7.10.2) bs <- fst <$> proc "ghcjs" ["--version"] readProcess_ - let (rest, ghcVersion) = T.decodeUtf8 <$> versionFromEnd (BL.toStrict bs) - (_, ghcjsVersion) = T.decodeUtf8 <$> versionFromEnd rest - GhcjsVersion <$> parseVersion ghcjsVersion <*> parseVersion ghcVersion + let (rest, ghcVersion) = T.unpack . T.decodeUtf8 <$> versionFromEnd (BL.toStrict bs) + (_, ghcjsVersion) = T.unpack . T.decodeUtf8 <$> versionFromEnd rest + ACGhcjs <$> parseVersionThrowing ghcjsVersion <*> parseVersionThrowing ghcVersion where versionFromEnd = S8.spanEnd isValid . fst . S8.breakEnd isValid isValid c = c == '.' 
|| ('0' <= c && c <= '9') diff --git a/src/Stack/SetupCmd.hs b/src/Stack/SetupCmd.hs index 5b25fb7494..223a57c141 100644 --- a/src/Stack/SetupCmd.hs +++ b/src/Stack/SetupCmd.hs @@ -13,7 +13,6 @@ module Stack.SetupCmd ) where import Control.Applicative -import Control.Monad.Logger () import Control.Monad.Reader import qualified Data.Text as T import qualified Options.Applicative as OA @@ -22,12 +21,11 @@ import qualified Options.Applicative.Types as OA import Path import Stack.Prelude import Stack.Setup -import Stack.Types.Compiler import Stack.Types.Config import Stack.Types.Version data SetupCmdOpts = SetupCmdOpts - { scoCompilerVersion :: !(Maybe (CompilerVersion 'CVWanted)) + { scoCompilerVersion :: !(Maybe WantedCompiler) , scoForceReinstall :: !Bool , scoUpgradeCabal :: !(Maybe UpgradeTo) , scoSetupInfoYaml :: !String @@ -54,7 +52,7 @@ cabalUpgradeParser = Specific <$> version' <|> latestParser where versionReader = do s <- OA.readerAsk - case parseVersion (T.pack s) of + case parseVersion s of Nothing -> OA.readerError $ "Invalid version: " ++ s Just v -> return v version' = OA.option versionReader ( @@ -92,17 +90,17 @@ setupParser = SetupCmdOpts where readVersion = do s <- OA.readerAsk - case parseCompilerVersion ("ghc-" <> T.pack s) of - Nothing -> - case parseCompilerVersion (T.pack s) of - Nothing -> OA.readerError $ "Invalid version: " ++ s - Just x -> return x - Just x -> return x + case parseWantedCompiler ("ghc-" <> T.pack s) of + Left _ -> + case parseWantedCompiler (T.pack s) of + Left _ -> OA.readerError $ "Invalid version: " ++ s + Right x -> return x + Right x -> return x setup :: (HasConfig env, HasGHCVariant env) => SetupCmdOpts - -> CompilerVersion 'CVWanted + -> WantedCompiler -> VersionCheck -> Maybe (Path Abs File) -> RIO env () @@ -125,8 +123,8 @@ setup SetupCmdOpts{..} wantedCompiler compilerCheck mstack = do , soptsGHCJSBootOpts = scoGHCJSBootOpts ++ ["--clean" | scoGHCJSBootClean] } let compiler = case wantedCompiler of - GhcVersion _ 
-> "GHC" - GhcjsVersion {} -> "GHCJS" + WCGhc _ -> "GHC" + WCGhcjs {} -> "GHCJS" if sandboxedGhc then logInfo $ "stack will use a sandboxed " <> compiler <> " it installed" else logInfo $ "stack will use the " <> compiler <> " on your PATH" diff --git a/src/Stack/Sig/Sign.hs b/src/Stack/Sig/Sign.hs index eb2cf6ca4d..becfd3e52e 100644 --- a/src/Stack/Sig/Sign.hs +++ b/src/Stack/Sig/Sign.hs @@ -21,12 +21,13 @@ import qualified Codec.Compression.GZip as GZip import Stack.Prelude import qualified Data.ByteString.Lazy as BS import qualified Data.ByteString.Lazy as L +import qualified Distribution.PackageDescription as D +import qualified Distribution.PackageDescription.Parsec as D +import qualified Distribution.Verbosity as D import Network.HTTP.Download import Network.HTTP.StackClient (RequestBody (RequestBodyBS), setRequestMethod, setRequestBody, getResponseStatusCode, methodPut) import Path -import Stack.Package import Stack.Sig.GPG -import Stack.Types.PackageIdentifier import Stack.Types.Sig import qualified System.FilePath as FP @@ -95,7 +96,7 @@ signPackage url pkg filePath = do let (PackageIdentifier name version) = pkg fingerprint <- gpgVerify sig filePath let fullUrl = - url <> "/upload/signature/" <> show name <> "/" <> show version <> + url <> "/upload/signature/" <> packageNameString name <> "/" <> versionString version <> "/" <> show fingerprint req <- parseUrlThrow fullUrl @@ -107,3 +108,11 @@ signPackage url pkg filePath = do (throwM (GPGSignException "unable to sign & upload package")) logInfo ("Signature uploaded to " <> fromString fullUrl) return sig + +-- | Extract the @PackageIdentifier@ given an exploded haskell package +-- path. +cabalFilePackageId + :: (MonadIO m, MonadThrow m) + => Path Abs File -> m PackageIdentifier +cabalFilePackageId fp = do + D.package . 
D.packageDescription <$> liftIO (D.readGenericPackageDescription D.silent $ toFilePath fp) diff --git a/src/Stack/Snapshot.hs b/src/Stack/Snapshot.hs index 81df12d45f..19b6c5a400 100644 --- a/src/Stack/Snapshot.hs +++ b/src/Stack/Snapshot.hs @@ -19,58 +19,42 @@ module Stack.Snapshot ( loadResolver , loadSnapshot , calculatePackagePromotion + , loadGlobalHints ) where import Stack.Prelude hiding (Display (..)) import Control.Monad.State.Strict (get, put, StateT, execStateT) -import Crypto.Hash.Conduit (hashFile) -import Data.Aeson (withObject, (.!=), (.:), (.:?), Value (Object)) -import Data.Aeson.Extended (WithJSONWarnings(..), logJSONWarnings, (..!=), (..:?), jsonSubWarningsT, withObjectWarnings, (..:)) -import Data.Aeson.Types (Parser, parseEither) import Data.Store.VersionTagged import qualified Data.Conduit.List as CL -import qualified Data.HashMap.Strict as HashMap import qualified Data.Map as Map import qualified Data.Set as Set -import Data.Time (toGregorian) import qualified Data.Text as T -import Data.Text.Encoding (encodeUtf8) -import Data.Yaml (decodeFileEither, ParseException (AesonException)) +import Data.Yaml (ParseException (AesonException), decodeFileThrow) import Distribution.InstalledPackageInfo (PError) import Distribution.PackageDescription (GenericPackageDescription) import qualified Distribution.PackageDescription as C import Distribution.System (Platform) import Distribution.Text (display) import qualified Distribution.Version as C -import Network.HTTP.StackClient (Request) -import Network.HTTP.Download +import Network.HTTP.Download (download, redownload) +import Network.HTTP.StackClient (Request, parseRequest) import qualified RIO -import Network.URI (isURI) -import Path -import Path.IO -import Stack.Constants +import Data.ByteString.Builder (toLazyByteString) +import qualified Pantry +import qualified Pantry.SHA256 as SHA256 import Stack.Package import Stack.PackageDump -import Stack.PackageLocation import Stack.Types.BuildPlan -import 
Stack.Types.FlagName import Stack.Types.GhcPkgId -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName -import Stack.Types.Version import Stack.Types.VersionIntervals import Stack.Types.Config -import Stack.Types.Urls import Stack.Types.Compiler import Stack.Types.Resolver -import qualified System.Directory as Dir -import qualified System.FilePath as FilePath - -type SinglePackageLocation = PackageLocationIndex FilePath +import Stack.Types.Runner (HasRunner) data SnapshotException - = InvalidCabalFileInSnapshot !SinglePackageLocation !PError - | PackageDefinedTwice !PackageName !SinglePackageLocation !SinglePackageLocation + = InvalidCabalFileInSnapshot !PackageLocation !PError + | PackageDefinedTwice !PackageName !PackageLocation !PackageLocation | UnmetDeps !(Map PackageName (Map PackageName (VersionIntervals, Maybe Version))) | FilepathInCustomSnapshot !Text | NeedResolverOrCompiler !Text @@ -145,238 +129,48 @@ instance Show SnapshotException where -- | Convert a 'Resolver' into a 'SnapshotDef' loadResolver :: forall env. 
HasConfig env - => Resolver + => SnapshotLocation + -> Maybe WantedCompiler -> RIO env SnapshotDef -loadResolver (ResolverStackage name) = do - stackage <- view stackRootL - file' <- parseRelFile $ T.unpack file - cachePath <- (buildPlanCacheDir stackage ) <$> parseRelFile (T.unpack (renderSnapName name <> ".cache")) - let fp = buildPlanDir stackage file' - tryDecode = tryAny $ $(versionedDecodeOrLoad snapshotDefVC) cachePath $ liftIO $ do - evalue <- decodeFileEither $ toFilePath fp - case evalue of - Left e -> throwIO e - Right value -> - case parseEither parseStackageSnapshot value of - Left s -> throwIO $ InvalidStackageException name s - Right x -> return x - logDebug $ "Decoding build plan from: " <> fromString (toFilePath fp) - eres <- tryDecode - case eres of - Right sd -> return sd - Left e -> do - logDebug $ - "Decoding Stackage snapshot definition from file failed: " <> - displayShow e - ensureDir (parent fp) - url <- buildBuildPlanUrl name file - req <- parseRequest $ T.unpack url - logSticky $ "Downloading " <> RIO.display name <> " build plan ..." - logDebug $ "Downloading build plan from: " <> RIO.display url - wasDownloaded <- redownload req fp - if wasDownloaded - then logStickyDone $ "Downloaded " <> RIO.display name <> " build plan." - else logStickyDone $ "Skipped download of " <> RIO.display name <> " because its the stored entity tag matches the server version" - tryDecode >>= either throwM return - - where - file = renderSnapName name <> ".yaml" - - buildBuildPlanUrl :: (MonadReader env m, HasConfig env) => SnapName -> Text -> m Text - buildBuildPlanUrl snapName file' = do - urls <- view $ configL.to configUrls - return $ - case snapName of - LTS _ _ -> urlsLtsBuildPlans urls <> "/" <> file' - Nightly _ -> urlsNightlyBuildPlans urls <> "/" <> file' - - parseStackageSnapshot = withObject "StackageSnapshotDef" $ \o -> do - Object si <- o .: "system-info" - ghcVersion <- si .:? "ghc-version" - compilerVersion <- si .:? 
"compiler-version" - compilerVersion' <- - case (ghcVersion, compilerVersion) of - (Just _, Just _) -> fail "can't have both compiler-version and ghc-version fields" - (Just ghc, _) -> return (GhcVersion ghc) - (_, Just compiler) -> return compiler - _ -> fail "expected field \"ghc-version\" or \"compiler-version\" not present" - let sdParent = Left compilerVersion' - sdGlobalHints <- si .: "core-packages" - - packages <- o .: "packages" - (Endo mkLocs, sdFlags, sdHidden) <- fmap mconcat $ mapM (uncurry goPkg) $ Map.toList packages - let sdLocations = mkLocs [] - - let sdGhcOptions = Map.empty -- Stackage snapshots do not allow setting GHC options - - -- Not dropping any packages in a Stackage snapshot - let sdDropPackages = Set.empty - - let sdResolver = ResolverStackage name - sdResolverName = renderSnapName name - - return SnapshotDef {..} - where - goPkg name' = withObject "StackagePackageDef" $ \o -> do - version <- o .: "version" - mcabalFileInfo <- o .:? "cabal-file-info" - mcabalFileInfo' <- forM mcabalFileInfo $ \o' -> do - msize <- Just <$> o' .: "size" - cfiHashes <- o' .: "hashes" - hash' <- - case HashMap.lookup ("SHA256" :: Text) cfiHashes of - Nothing -> fail "Could not find SHA256" - Just shaText -> - case mkCabalHashFromSHA256 shaText of - Left e -> fail $ "Invalid SHA256: " ++ show e - Right x -> return x - return $ CFIHash msize hash' - - Object constraints <- o .: "constraints" - - flags <- constraints .: "flags" - let flags' = Map.singleton name' flags - - hide <- constraints .:? 
"hide" .!= False - let hide' = if hide then Map.singleton name' True else Map.empty - - let location = PLIndex $ PackageIdentifierRevision (PackageIdentifier name' version) (fromMaybe CFILatest mcabalFileInfo') - - return (Endo (location:), flags', hide') -loadResolver (ResolverCompiler compiler) = return SnapshotDef - { sdParent = Left compiler - , sdResolver = ResolverCompiler compiler - , sdResolverName = compilerVersionText compiler - , sdLocations = [] - , sdDropPackages = Set.empty - , sdFlags = Map.empty - , sdHidden = Map.empty - , sdGhcOptions = Map.empty - , sdGlobalHints = Map.empty +loadResolver (SLCompiler c1) (Just c2) = throwIO $ InvalidOverrideCompiler c1 c2 +loadResolver sl mcompiler = do + esnap <- Pantry.loadSnapshot sl + (compiler, msnap, uniqueHash) <- + case esnap of + Left compiler -> pure (compiler, Nothing, mkUniqueHash compiler) + Right (snap, sha) -> do + sd <- loadResolver (snapshotParent snap) (snapshotCompiler snap) + pure + ( sdWantedCompilerVersion sd + , Just (snap, sd) + , combineHashes sha $ sdUniqueHash sd + ) + pure SnapshotDef + { sdResolver = sl + , sdSnapshot = msnap + , sdWantedCompilerVersion = fromMaybe compiler mcompiler + , sdUniqueHash = uniqueHash } -loadResolver (ResolverCustom url loc) = do - logDebug $ "Loading " <> RIO.display url <> " build plan from " <> displayShow loc - case loc of - Left req -> download' req >>= load . 
toFilePath - Right fp -> load fp + where - download' :: Request -> RIO env (Path Abs File) - download' req = do - let urlHash = T.unpack $ trimmedSnapshotHash $ snapshotHashFromBS $ encodeUtf8 url - hashFP <- parseRelFile $ urlHash ++ ".yaml" - customPlanDir <- getCustomPlanDir - let cacheFP = customPlanDir $(mkRelDir "yaml") hashFP - void (download req cacheFP :: RIO env Bool) - return cacheFP - - getCustomPlanDir = do - root <- view stackRootL - return $ root $(mkRelDir "custom-plan") - - load :: FilePath -> RIO env SnapshotDef - load fp = do - let resolveLocalArchives sd = sd { - sdLocations = resolveLocalArchive <$> sdLocations sd - } - resolveLocalArchive (PLOther (PLArchive archive)) = - PLOther $ PLArchive $ archive { - archiveUrl = T.pack $ resolveLocalFilePath (T.unpack $ archiveUrl archive) - } - resolveLocalArchive pl = pl - resolveLocalFilePath path = - if isURI path || FilePath.isAbsolute path - then path - else FilePath.dropFileName fp FilePath. FilePath.normalise path - - WithJSONWarnings (sd0, mparentResolver, mcompiler) warnings <- - liftIO (decodeFileEither fp) >>= either - (throwM . CustomResolverException url loc) - (either (throwM . CustomResolverException url loc . AesonException) return . parseEither parseCustom) - logJSONWarnings (T.unpack url) warnings - forM_ (sdLocations sd0) $ \loc' -> - case loc' of - PLOther (PLFilePath _) -> throwM $ FilepathInCustomSnapshot url - _ -> return () - let sd0' = resolveLocalArchives sd0 - -- The fp above may just be the download location for a URL, - -- which we don't want to use. Instead, look back at loc from - -- above. - mdir <- - case loc of - Left _ -> return Nothing - Right fp' -> Just . 
parent <$> liftIO (Dir.canonicalizePath fp' >>= parseAbsFile) - - -- Deal with the dual nature of the compiler key, which either - -- means "use this compiler" or "override the compiler in the - -- resolver" - (parentResolver, overrideCompiler) <- - case (mparentResolver, mcompiler) of - (Nothing, Nothing) -> throwM $ NeedResolverOrCompiler url - (Just parentResolver, Nothing) -> return (parentResolver, id) - (Nothing, Just compiler) -> return (ResolverCompiler compiler, id) - (Just parentResolver, Just compiler) -> return - ( parentResolver - , setCompilerVersion compiler - ) - - parentResolver' <- parseCustomLocation mdir parentResolver - - -- Calculate the hash of the current file, and then combine it - -- with parent hashes if necessary below. - rawHash :: SnapshotHash <- snapshotHashFromDigest <$> hashFile fp :: RIO env SnapshotHash - - (parent', hash') <- - case parentResolver' of - ResolverCompiler cv -> return (Left cv, rawHash) -- just a small optimization - _ -> do - parent' :: SnapshotDef <- loadResolver (parentResolver' :: Resolver) :: RIO env SnapshotDef - let hash' :: SnapshotHash - hash' = combineHash rawHash $ - case sdResolver parent' of - ResolverStackage snapName -> snapNameToHash snapName - ResolverCustom _ parentHash -> parentHash - ResolverCompiler _ -> error "loadResolver: Received ResolverCompiler in impossible location" - return (Right parent', hash') - return $ overrideCompiler sd0' - { sdParent = parent' - , sdResolver = ResolverCustom url hash' - } - -- | Note that the 'sdParent' and 'sdResolver' fields returned - -- here are bogus, and need to be replaced with information only - -- available after further processing. - parseCustom :: Value - -> Parser (WithJSONWarnings (SnapshotDef, Maybe (ResolverWith ()), Maybe (CompilerVersion 'CVWanted))) - parseCustom = withObjectWarnings "CustomSnapshot" $ \o -> (,,) - <$> (SnapshotDef (Left (error "loadResolver")) (ResolverStackage (LTS 0 0)) - <$> (o ..: "name") - <*> jsonSubWarningsT (o ..:? 
"packages" ..!= []) - <*> o ..:? "drop-packages" ..!= Set.empty - <*> o ..:? "flags" ..!= Map.empty - <*> o ..:? "hidden" ..!= Map.empty - <*> o ..:? "ghc-options" ..!= Map.empty - <*> o ..:? "global-hints" ..!= Map.empty) - <*> (o ..:? "resolver") - <*> (o ..:? "compiler") - - combineHash :: SnapshotHash -> SnapshotHash -> SnapshotHash - combineHash x y = snapshotHashFromBS (snapshotHashToBS x <> snapshotHashToBS y) - - snapNameToHash :: SnapName -> SnapshotHash - snapNameToHash = snapshotHashFromBS . encodeUtf8 . renderSnapName + mkUniqueHash :: WantedCompiler -> SHA256 + mkUniqueHash = SHA256.hashLazyBytes . toLazyByteString . getUtf8Builder . RIO.display + + combineHashes :: SHA256 -> SHA256 -> SHA256 + combineHashes x y = SHA256.hashBytes (SHA256.toRaw x <> SHA256.toRaw y) -- | Fully load up a 'SnapshotDef' into a 'LoadedSnapshot' loadSnapshot :: forall env. (HasConfig env, HasGHCVariant env) - => Maybe (CompilerVersion 'CVActual) -- ^ installed GHC we should query; if none provided, use the global hints - -> Path Abs Dir -- ^ project root, used for checking out necessary files + => Maybe ActualCompiler -- ^ installed GHC we should query; if none provided, use the global hints -> SnapshotDef -> RIO env LoadedSnapshot -loadSnapshot mcompiler root = +loadSnapshot mcompiler = start where - start (snapshotDefFixes -> sd) = do + start sd = do path <- configLoadedSnapshotCache sd (maybe GISSnapshotHints GISCompiler mcompiler) @@ -384,44 +178,38 @@ loadSnapshot mcompiler root = inner :: SnapshotDef -> RIO env LoadedSnapshot inner sd = do - ls0 <- - case sdParent sd of - Left cv -> - case mcompiler of - Nothing -> return LoadedSnapshot - { lsCompilerVersion = wantedToActual cv - , lsGlobals = fromGlobalHints $ sdGlobalHints sd + logInfo $ "Loading a snapshot from a SnapshotDef: " <> RIO.display (sdResolverName sd) + case sdSnapshot sd of + Nothing -> + case mcompiler of + Nothing -> do + ghfp <- globalHintsFile + mglobalHints <- loadGlobalHints ghfp $ 
sdWantedCompilerVersion sd + globalHints <- + case mglobalHints of + Just x -> pure x + Nothing -> do + logWarn $ "Unable to load global hints for " <> RIO.display (sdWantedCompilerVersion sd) + pure mempty + return LoadedSnapshot + { lsCompilerVersion = wantedToActual $ sdWantedCompilerVersion sd + , lsGlobals = fromGlobalHints globalHints , lsPackages = Map.empty } - Just cv' -> loadCompiler cv' - Right sd' -> start sd' + Just cv' -> loadCompiler cv' + Just (snapshot, sd') -> start sd' >>= inner2 snapshot + inner2 snap ls0 = do gpds <- - (concat <$> mapM (parseMultiCabalFilesIndex root) (sdLocations sd)) - `onException` do - logError "Unable to load cabal files for snapshot" - case sdResolver sd of - ResolverStackage name -> do - stackRoot <- view stackRootL - file <- parseRelFile $ T.unpack $ renderSnapName name <> ".yaml" - let fp = buildPlanDir stackRoot file - liftIO $ ignoringAbsence $ removeFile fp - logError "" - logError "----" - logError $ "Deleting cached snapshot file: " <> fromString (toFilePath fp) - logError "Recommendation: try running again. If this fails again, open an upstream issue at:" - logError $ - case name of - LTS _ _ -> "https://github.com/fpco/lts-haskell/issues/new" - Nightly _ -> "https://github.com/fpco/stackage-nightly/issues/new" - logError "----" - logError "" - _ -> return () + forM (snapshotLocations snap) $ \loc -> (, PLImmutable loc) <$> loadCabalFileImmutable loc (globals, snapshot, locals) <- - calculatePackagePromotion root ls0 + calculatePackagePromotion ls0 (map (\(x, y) -> (x, y, ())) gpds) - (sdFlags sd) (sdHidden sd) (sdGhcOptions sd) (sdDropPackages sd) + (snapshotFlags snap) + (snapshotHidden snap) + (snapshotGhcOptions snap) + (snapshotDropPackages snap) return LoadedSnapshot { lsCompilerVersion = lsCompilerVersion ls0 @@ -440,20 +228,19 @@ loadSnapshot mcompiler root = calculatePackagePromotion :: forall env localLocation. 
(HasConfig env, HasGHCVariant env) - => Path Abs Dir -- ^ project root - -> LoadedSnapshot - -> [(GenericPackageDescription, SinglePackageLocation, localLocation)] -- ^ packages we want to add on top of this snapshot + => LoadedSnapshot + -> [(GenericPackageDescription, PackageLocation, localLocation)] -- ^ packages we want to add on top of this snapshot -> Map PackageName (Map FlagName Bool) -- ^ flags -> Map PackageName Bool -- ^ overrides whether a package should be registered hidden -> Map PackageName [Text] -- ^ GHC options -> Set PackageName -- ^ packages in the snapshot to drop -> RIO env ( Map PackageName (LoadedPackageInfo GhcPkgId) -- new globals - , Map PackageName (LoadedPackageInfo SinglePackageLocation) -- new snapshot - , Map PackageName (LoadedPackageInfo (SinglePackageLocation, Maybe localLocation)) -- new locals + , Map PackageName (LoadedPackageInfo PackageLocation) -- new snapshot + , Map PackageName (LoadedPackageInfo (PackageLocation, Maybe localLocation)) -- new locals ) calculatePackagePromotion - root (LoadedSnapshot compilerVersion globals0 parentPackages0) + (LoadedSnapshot compilerVersion globals0 parentPackages0) gpds flags0 hides0 options0 drops0 = do platform <- view platformL @@ -496,7 +283,7 @@ calculatePackagePromotion (globals3, noLongerGlobals2) = splitUnmetDeps Map.empty globals2 -- Put together the two split out groups of packages - noLongerGlobals3 :: Map PackageName (LoadedPackageInfo SinglePackageLocation) + noLongerGlobals3 :: Map PackageName (LoadedPackageInfo PackageLocation) noLongerGlobals3 = Map.mapWithKey globalToSnapshot (Map.union noLongerGlobals1 noLongerGlobals2) -- Now do the same thing with parent packages: take out the @@ -515,7 +302,7 @@ calculatePackagePromotion -- ... 
so recalculate based on new values upgraded <- fmap Map.fromList - $ mapM (recalculate root compilerVersion flags hide ghcOptions) + $ mapM (recalculate compilerVersion flags hide ghcOptions) $ Map.toList allToUpgrade -- Could be nice to check snapshot early... but disabling @@ -541,39 +328,40 @@ calculatePackagePromotion -- hide values, and GHC options. recalculate :: forall env. (HasConfig env, HasGHCVariant env) - => Path Abs Dir -- ^ root - -> CompilerVersion 'CVActual + => ActualCompiler -> Map PackageName (Map FlagName Bool) -> Map PackageName Bool -- ^ hide? -> Map PackageName [Text] -- ^ GHC options - -> (PackageName, LoadedPackageInfo SinglePackageLocation) - -> RIO env (PackageName, LoadedPackageInfo SinglePackageLocation) -recalculate root compilerVersion allFlags allHide allOptions (name, lpi0) = do + -> (PackageName, LoadedPackageInfo PackageLocation) + -> RIO env (PackageName, LoadedPackageInfo PackageLocation) +recalculate compilerVersion allFlags allHide allOptions (name, lpi0) = do let hide = fromMaybe (lpiHide lpi0) (Map.lookup name allHide) options = fromMaybe (lpiGhcOptions lpi0) (Map.lookup name allOptions) case Map.lookup name allFlags of Nothing -> return (name, lpi0 { lpiHide = hide, lpiGhcOptions = options }) -- optimization Just flags -> do let loc = lpiLocation lpi0 - gpd <- parseSingleCabalFileIndex root loc + gpd <- loadCabalFile loc platform <- view platformL let res@(name', lpi) = calculate gpd platform compilerVersion loc flags hide options unless (name == name' && lpiVersion lpi0 == lpiVersion lpi) $ error "recalculate invariant violated" return res -fromGlobalHints :: Map PackageName (Maybe Version) -> Map PackageName (LoadedPackageInfo GhcPkgId) +fromGlobalHints + :: Map PackageName Version + -> Map PackageName (LoadedPackageInfo GhcPkgId) fromGlobalHints = Map.unions . map go . 
Map.toList where - go (_, Nothing) = Map.empty - go (name, Just ver) = Map.singleton name LoadedPackageInfo + go (name, ver) = Map.singleton name LoadedPackageInfo { lpiVersion = ver -- For global hint purposes, we only care about the -- version. All other fields are ignored when checking -- project compatibility. , lpiLocation = either impureThrow id $ parseGhcPkgId - $ packageIdentifierText + $ fromString + $ packageIdentifierString $ PackageIdentifier name ver , lpiFlags = Map.empty , lpiGhcOptions = [] @@ -615,7 +403,7 @@ checkDepsMet available m -- information in the global package database. loadCompiler :: forall env. HasConfig env - => CompilerVersion 'CVActual + => ActualCompiler -> RIO env LoadedSnapshot loadCompiler cv = do m <- ghcPkgDump (whichCompiler cv) [] @@ -651,12 +439,12 @@ loadCompiler cv = do , lpiFlags = Map.empty , lpiGhcOptions = [] , lpiPackageDeps = Map.unions $ map goDep $ dpDepends dp - , lpiExposedModules = Set.fromList $ map (ModuleName . encodeUtf8) $ dpExposedModules dp + , lpiExposedModules = dpExposedModules dp , lpiHide = not $ dpIsExposed dp } type FindPackageS localLocation = - ( Map PackageName (LoadedPackageInfo (SinglePackageLocation, localLocation)) + ( Map PackageName (LoadedPackageInfo (PackageLocation, localLocation)) , Map PackageName (Map FlagName Bool) -- flags , Map PackageName Bool -- hide , Map PackageName [Text] -- ghc options @@ -669,8 +457,8 @@ type FindPackageS localLocation = findPackage :: forall m localLocation. 
MonadThrow m => Platform - -> CompilerVersion 'CVActual - -> (GenericPackageDescription, SinglePackageLocation, localLocation) + -> ActualCompiler + -> (GenericPackageDescription, PackageLocation, localLocation) -> StateT (FindPackageS localLocation) m () findPackage platform compilerVersion (gpd, loc, localLoc) = do (m, allFlags, allHide, allOptions) <- get @@ -693,32 +481,13 @@ findPackage platform compilerVersion (gpd, loc, localLoc) = do assert (name == name') $ put (m', allFlags', allHide', allOptions') where - PackageIdentifier name _version = fromCabalPackageIdentifier $ C.package $ C.packageDescription gpd - --- | Some hard-coded fixes for build plans, only for hysterical raisins. -snapshotDefFixes :: SnapshotDef -> SnapshotDef -snapshotDefFixes sd | isOldStackage (sdResolver sd) = sd - { sdFlags = Map.unionWith Map.union overrides $ sdFlags sd - } - where - overrides = Map.fromList - [ ($(mkPackageName "persistent-sqlite"), Map.singleton $(mkFlagName "systemlib") False) - , ($(mkPackageName "yaml"), Map.singleton $(mkFlagName "system-libyaml") False) - ] - - -- Only apply this hack to older Stackage snapshots. In - -- particular, nightly-2018-03-13 did not contain these two - -- packages. - isOldStackage (ResolverStackage (LTS major _)) = major < 11 - isOldStackage (ResolverStackage (Nightly (toGregorian -> (year, _, _)))) = year < 2018 - isOldStackage _ = False -snapshotDefFixes sd = sd + PackageIdentifier name _version = C.package $ C.packageDescription gpd -- | Convert a global 'LoadedPackageInfo' to a snapshot one by -- creating a 'PackageLocation'. 
-globalToSnapshot :: PackageName -> LoadedPackageInfo loc -> LoadedPackageInfo (PackageLocationIndex FilePath) +globalToSnapshot :: PackageName -> LoadedPackageInfo loc -> LoadedPackageInfo PackageLocation globalToSnapshot name lpi = lpi - { lpiLocation = PLIndex (PackageIdentifierRevision (PackageIdentifier name (lpiVersion lpi)) CFILatest) + { lpiLocation = PLImmutable (PLIHackage (PackageIdentifierRevision name (lpiVersion lpi) CFILatest) Nothing) } -- | Split the packages into those which have their dependencies met, @@ -785,7 +554,7 @@ splitUnmetDeps extra = -- | Calculate a 'LoadedPackageInfo' from the given 'GenericPackageDescription' calculate :: GenericPackageDescription -> Platform - -> CompilerVersion 'CVActual + -> ActualCompiler -> loc -> Map FlagName Bool -> Bool -- ^ hidden? @@ -805,7 +574,7 @@ calculate gpd platform compilerVersion loc flags hide options = -- We want to ignore test suites and benchmarks, therefore choose -- the package description which modifies buildable pd = pdpModifiedBuildable $ resolvePackageDescription pconfig gpd - PackageIdentifier name version = fromCabalPackageIdentifier $ C.package pd + PackageIdentifier name version = C.package pd lpi = LoadedPackageInfo { lpiVersion = version , lpiLocation = loc @@ -816,7 +585,42 @@ calculate gpd platform compilerVersion loc flags hide options = $ packageDependencies pconfig pd , lpiExposedModules = maybe Set.empty - (Set.fromList . map fromCabalModuleName . C.exposedModules) + (Set.fromList . C.exposedModules) (C.library pd) , lpiHide = hide } + +-- | Load the global hints from GitHub. 
+loadGlobalHints + :: HasRunner env + => Path Abs File -- ^ local cached file location + -> WantedCompiler + -> RIO env (Maybe (Map PackageName Version)) +loadGlobalHints dest wc = + inner False + where + inner alreadyDownloaded = do + req <- parseRequest "https://raw.githubusercontent.com/fpco/stackage-content/master/stack/global-hints.yaml" + downloaded <- download req dest + eres <- tryAny inner2 + mres <- + case eres of + Left e -> Nothing <$ logError ("Error when parsing global hints: " <> displayShow e) + Right x -> pure x + case mres of + Nothing | not alreadyDownloaded && not downloaded -> do + logInfo $ + "Could not find local global hints for " <> + RIO.display wc <> + ", forcing a redownload" + x <- redownload req dest + if x + then inner True + else do + logInfo "Redownload didn't happen" + pure Nothing + _ -> pure mres + + inner2 = liftIO + $ Map.lookup wc . fmap (fmap unCabalString . unCabalStringMap) + <$> decodeFileThrow (toFilePath dest) diff --git a/src/Stack/Solver.hs b/src/Stack/Solver.hs index b5112a3edb..68113ec899 100644 --- a/src/Stack/Solver.hs +++ b/src/Stack/Solver.hs @@ -4,7 +4,6 @@ {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE TypeFamilies #-} @@ -15,7 +14,6 @@ module Stack.Solver , mergeConstraints , solveExtraDeps , solveResolverSpec - , checkSnapBuildPlanActual -- * Internal - for tests , parseCabalOutputLine ) where @@ -27,7 +25,6 @@ import qualified Data.ByteString.Lazy as BL import Data.Char (isSpace) import Data.Conduit.Process.Typed (eceStderr) import qualified Data.HashMap.Strict as HashMap -import qualified Data.HashSet as HashSet import Data.List ( (\\), isSuffixOf , minimumBy, isPrefixOf , intersperse) @@ -41,6 +38,7 @@ import qualified Data.Yaml as Yaml import qualified Distribution.Package as C import qualified Distribution.PackageDescription as C 
import qualified Distribution.Text as C +import Distribution.Version (mkVersion) import Path import Path.Find (findFiles) import Path.IO hiding (findExecutable, findFiles, withSystemTempDir) @@ -49,8 +47,6 @@ import Stack.Build.Target (gpdVersion) import Stack.BuildPlan import Stack.Config (getLocalPackages, loadConfigYaml) import Stack.Constants (stackDotYaml, wiredInPackages) -import Stack.Package (readPackageUnresolvedDir, gpdPackageName) -import Stack.PackageIndex import Stack.PrettyPrint import Stack.Setup import Stack.Setup.Installed @@ -59,11 +55,6 @@ import Stack.Types.Build import Stack.Types.BuildPlan import Stack.Types.Compiler import Stack.Types.Config -import Stack.Types.FlagName -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName -import Stack.Types.Resolver -import Stack.Types.Version import qualified System.Directory as D import qualified System.FilePath as FP import RIO.Process @@ -137,12 +128,11 @@ cabalSolver cabalfps constraintType logInfo $ RIO.display $ cabalBuildErrMsg msg let pkgs = parseConflictingPkgs msg mPkgNames = map (C.simpleParse . T.unpack) pkgs - pkgNames = map (fromCabalPackageName . C.pkgName) - (catMaybes mPkgNames) + pkgNames = map C.pkgName (catMaybes mPkgNames) when (any isNothing mPkgNames) $ do logInfo $ "*** Only some package names could be parsed: " <> - mconcat (intersperse ", " (map displayShow pkgNames)) + mconcat (intersperse ", " (map (fromString . packageNameString) pkgNames)) error $ T.unpack $ "*** User packages involved in cabal failure: " <> T.intercalate ", " (parseConflictingPkgs msg) @@ -157,11 +147,11 @@ cabalSolver cabalfps constraintType select s = (T.isPrefixOf "trying:" s || T.isPrefixOf "next goal:" s) && T.isSuffixOf "(user goal)" s - pkgName = take 1 + pkgName' = take 1 . T.words . T.drop 1 . 
T.dropWhile (/= ':') - in concatMap pkgName (filter select ls) + in concatMap pkgName' (filter select ls) parseCabalOutput bs = do let ls = drop 1 @@ -214,9 +204,8 @@ parseCabalOutputLine t0 = maybe (Left t0) Right . join . match re $ t0 mk :: String -> [Maybe (Bool, String)] -> Maybe (PackageName, (Version, Map FlagName Bool)) mk ident fl = do - PackageIdentifier name version <- - parsePackageIdentifierFromString ident - fl' <- (traverse . traverse) parseFlagNameFromString $ catMaybes fl + PackageIdentifier name version <- parsePackageIdentifier ident + fl' <- (traverse . traverse) parseFlagName $ catMaybes fl return (name, (version, Map.fromList $ map swap fl')) lexeme r = some (psym isSpace) *> r @@ -227,34 +216,27 @@ getCabalConfig :: HasConfig env -> Map PackageName Version -- ^ constraints -> RIO env [Text] getCabalConfig dir constraintType constraints = do - indices <- view $ cabalLoaderL.to clIndices - remotes <- mapM goIndex indices + src <- view $ hackageIndexTarballL.to toFilePath + let dstdir = dir FP. "hackage" + -- NOTE: see https://github.com/commercialhaskell/stack/issues/2888 + -- for why we are pretending that a 01-index.tar is actually a + -- 00-index.tar file. + dst0 = dstdir FP. "00-index.tar" + dst1 = dstdir FP. "01-index.tar" + liftIO $ void $ tryIO $ do + D.createDirectoryIfMissing True dstdir + D.copyFile src dst0 + D.copyFile src dst1 + let cache = T.pack $ "remote-repo-cache: " ++ dir - return $ cache : remotes ++ map goConstraint (Map.toList constraints) + remote = "remote-repo: hackage:http://0.0.0.0/fake-url" + return $ cache : remote : map goConstraint (Map.toList constraints) where - goIndex index = do - src <- configPackageIndex $ indexName index - let dstdir = dir FP. T.unpack (indexNameText $ indexName index) - -- NOTE: see https://github.com/commercialhaskell/stack/issues/2888 - -- for why we are pretending that a 01-index.tar is actually a - -- 00-index.tar file. - dst0 = dstdir FP. "00-index.tar" - dst1 = dstdir FP. 
"01-index.tar" - liftIO $ void $ tryIO $ do - D.createDirectoryIfMissing True dstdir - D.copyFile (toFilePath src) dst0 - D.copyFile (toFilePath src) dst1 - return $ T.concat - [ "remote-repo: " - , indexNameText $ indexName index - , ":http://0.0.0.0/fake-url" - ] - goConstraint (name, version) = assert (not . null . versionString $ version) $ T.concat [ if constraintType == Constraint - || name `HashSet.member` wiredInPackages + || name `Set.member` wiredInPackages then "constraint: " else "preference: " , T.pack $ packageNameString name @@ -264,15 +246,15 @@ getCabalConfig dir constraintType constraints = do setupCompiler :: (HasConfig env, HasGHCVariant env) - => CompilerVersion 'CVWanted + => WantedCompiler -> RIO env (Maybe ExtraDirs) setupCompiler compiler = do - let msg = Just $ T.concat - [ "Compiler version (" <> compilerVersionText compiler <> ") " - , "required by your resolver specification cannot be found.\n\n" - , "Please use '--install-ghc' command line switch to automatically " - , "install the compiler or '--system-ghc' to use a suitable " - , "compiler available on your PATH." ] + let msg = Just $ utf8BuilderToText $ + "Compiler version (" <> RIO.display compiler <> ") " <> + "required by your resolver specification cannot be found.\n\n" <> + "Please use '--install-ghc' command line switch to automatically " <> + "install the compiler or '--system-ghc' to use a suitable " <> + "compiler available on your PATH." config <- view configL (dirs, _, _) <- ensureCompiler SetupOpts @@ -297,8 +279,8 @@ setupCompiler compiler = do -- has the desired GHC on the PATH. 
setupCabalEnv :: (HasConfig env, HasGHCVariant env) - => CompilerVersion 'CVWanted - -> (CompilerVersion 'CVActual -> RIO env a) + => WantedCompiler + -> (ActualCompiler -> RIO env a) -> RIO env a setupCabalEnv compiler inner = do mpaths <- setupCompiler compiler @@ -312,18 +294,18 @@ setupCabalEnv compiler inner = do case mcabal of Nothing -> throwM SolverMissingCabalInstall Just version - | version < $(mkVersion "1.24") -> prettyWarn $ + | version < mkVersion [1, 24] -> prettyWarn $ "Installed version of cabal-install (" <> - display version <> + fromString (versionString version) <> ") doesn't support custom-setup clause, and so may not yield correct results." <> line <> "To resolve this, install a newer version via 'stack install cabal-install'." <> line - | version >= $(mkVersion "1.25") -> prettyWarn $ + | version >= mkVersion [1, 25] -> prettyWarn $ "Installed version of cabal-install (" <> - display version <> + fromString (versionString version) <> ") is newer than stack has been tested with. If you run into difficulties, consider downgrading." <> line | otherwise -> return () - mver <- getSystemCompiler (whichCompiler compiler) + mver <- getSystemCompiler (whichCompiler (wantedToActual compiler)) version <- case mver of Just (version, _) -> do logInfo $ "Using compiler: " <> RIO.display version @@ -363,8 +345,7 @@ mergeConstraints = Map.mergeWithKey -- dependencies. 
solveResolverSpec :: (HasConfig env, HasGHCVariant env) - => Path Abs File -- ^ stack.yaml file location - -> [Path Abs Dir] -- ^ package dirs containing cabal files + => [Path Abs Dir] -- ^ package dirs containing cabal files -> ( SnapshotDef , ConstraintSpec , ConstraintSpec) -- ^ ( resolver @@ -375,12 +356,12 @@ solveResolverSpec -- ^ (Conflicting packages -- (resulting src package specs, external dependency specs)) -solveResolverSpec stackYaml cabalDirs +solveResolverSpec cabalDirs (sd, srcConstraints, extraConstraints) = do logInfo $ "Using resolver: " <> RIO.display (sdResolverName sd) let wantedCompilerVersion = sdWantedCompilerVersion sd setupCabalEnv wantedCompilerVersion $ \compilerVersion -> do - (compilerVer, snapConstraints) <- getResolverConstraints (Just compilerVersion) stackYaml sd + (compilerVer, snapConstraints) <- getResolverConstraints (Just compilerVersion) sd let -- Note - The order in Map.union below is important. -- We want to override snapshot with extra deps @@ -481,14 +462,13 @@ solveResolverSpec stackYaml cabalDirs -- for that resolver. 
getResolverConstraints :: (HasConfig env, HasGHCVariant env) - => Maybe (CompilerVersion 'CVActual) -- ^ actually installed compiler - -> Path Abs File + => Maybe ActualCompiler -- ^ actually installed compiler -> SnapshotDef -> RIO env - (CompilerVersion 'CVActual, + (ActualCompiler, Map PackageName (Version, Map FlagName Bool)) -getResolverConstraints mcompilerVersion stackYaml sd = do - ls <- loadSnapshot mcompilerVersion (parent stackYaml) sd +getResolverConstraints mcompilerVersion sd = do + ls <- loadSnapshot mcompilerVersion sd return (lsCompilerVersion ls, lsConstraints ls) where lpiConstraints lpi = (lpiVersion lpi, lpiFlags lpi) @@ -545,7 +525,7 @@ cabalPackagesCheck cabaldirs noPkgMsg dupErrMsg = do logInfo $ formatGroup relpaths packages <- map (\(x, y) -> (y, x)) <$> - mapM (flip readPackageUnresolvedDir True) + mapM (flip loadCabalFilePath YesPrintWarnings) cabaldirs -- package name cannot be empty or missing otherwise @@ -555,7 +535,7 @@ cabalPackagesCheck cabaldirs noPkgMsg dupErrMsg = do let normalizeString = T.unpack . T.normalize T.NFC . T.pack getNameMismatchPkg (fp, gpd) - | (normalizeString . show . gpdPackageName) gpd /= (normalizeString . FP.takeBaseName . toFilePath) fp + | (normalizeString . packageNameString . gpdPackageName) gpd /= (normalizeString . FP.takeBaseName . toFilePath) fp = Just fp | otherwise = Nothing nameMismatchPkgs = mapMaybe getNameMismatchPkg packages @@ -621,7 +601,7 @@ reportMissingCabalFiles cabalfps includeSubdirs = do -- dependencies in an existing stack.yaml and suggest changes in flags or -- extra dependencies so that the specified packages can be compiled. solveExtraDeps - :: HasEnvConfig env + :: forall env. HasEnvConfig env => Bool -- ^ modify stack.yaml? 
-> RIO env () solveExtraDeps modStackYaml = do @@ -659,14 +639,14 @@ solveExtraDeps modStackYaml = do srcConstraints = mergeConstraints oldSrcs oldSrcFlags extraConstraints = mergeConstraints oldExtraVersions oldExtraFlags - resolverResult <- checkSnapBuildPlanActual (parent stackYaml) gpds (Just oldSrcFlags) sd + actualCompiler <- view actualCompilerVersionL + resolverResult <- checkSnapBuildPlan gpds (Just oldSrcFlags) sd (Just actualCompiler) resultSpecs <- case resolverResult of BuildPlanCheckOk flags -> return $ Just (mergeConstraints oldSrcs flags, Map.empty) BuildPlanCheckPartial {} -> either (const Nothing) Just <$> - solveResolverSpec stackYaml cabalDirs - (sd, srcConstraints, extraConstraints) + solveResolverSpec cabalDirs (sd, srcConstraints, extraConstraints) -- TODO Solver should also use the init code to ignore incompatible -- packages BuildPlanCheckFail {} -> @@ -694,14 +674,14 @@ solveExtraDeps modStackYaml = do changed = any (not . Map.null) [newVersions, goneVersions] || any (not . 
Map.null) [newFlags, goneFlags] - || any (/= void resolver) (fmap void mOldResolver) + || any (/= resolver) mOldResolver if changed then do logInfo "" logInfo $ "The following changes will be made to " <> fromString relStackYaml <> ":" - printResolver (fmap void mOldResolver) (void resolver) + printResolver mOldResolver resolver printFlags newFlags "* Flags to be added" printDeps newVersions "* Dependencies to be added" @@ -727,22 +707,28 @@ solveExtraDeps modStackYaml = do when (res /= oldRes) $ do logInfo $ "* Resolver changes from " <> - RIO.display (resolverRawName oldRes) <> + RIO.display oldRes <> " to " <> - RIO.display (resolverRawName res) + RIO.display res printFlags fl msg = do unless (Map.null fl) $ do logInfo $ fromString msg logInfo $ RIO.display $ indentLines $ decodeUtf8 $ Yaml.encode - $ object ["flags" .= fl] + $ object ["flags" .= toCabalStringMap (fmap toCabalStringMap fl)] printDeps deps msg = do unless (Map.null deps) $ do logInfo $ fromString msg logInfo $ RIO.display $ indentLines $ decodeUtf8 $ Yaml.encode $ object - ["extra-deps" .= map fromTuple (Map.toList deps)] - + ["extra-deps" .= map (CabalString . uncurry PackageIdentifier) (Map.toList deps)] + + writeStackYaml + :: Path Abs File + -> SnapshotLocation + -> Map PackageName Version + -> Map PackageName (Map FlagName Bool) + -> RIO env () writeStackYaml path res deps fl = do let fp = toFilePath path obj <- liftIO (Yaml.decodeFileEither fp) >>= either throwM return @@ -750,8 +736,8 @@ solveExtraDeps modStackYaml = do _ <- loadConfigYaml (parseProjectAndConfigMonoid (parent path)) path let obj' = HashMap.insert "extra-deps" - (toJSON $ map fromTuple $ Map.toList deps) - $ HashMap.insert ("flags" :: Text) (toJSON fl) + (toJSON $ map (CabalString . 
uncurry PackageIdentifier) $ Map.toList deps) + $ HashMap.insert ("flags" :: Text) (toJSON $ toCabalStringMap $ toCabalStringMap <$> fl) $ HashMap.insert ("resolver" :: Text) (toJSON res) obj liftIO $ Yaml.encodeFile fp obj' @@ -764,31 +750,6 @@ solveExtraDeps modStackYaml = do , " - Adjust resolver.\n" ] --- | Same as 'checkSnapBuildPLan', but set up a real GHC if needed. --- --- If we're using a Stackage snapshot, we can use the snapshot hints --- to determine global library information. This will not be available --- for custom and GHC resolvers, however. Therefore, we insist that it --- be installed first. Fortunately, the standard `stack solver` --- behavior only chooses Stackage snapshots, so the common case will --- not force the installation of a bunch of GHC versions. -checkSnapBuildPlanActual - :: (HasConfig env, HasGHCVariant env) - => Path Abs Dir -- ^ project root, used for checking out necessary files - -> [C.GenericPackageDescription] - -> Maybe (Map PackageName (Map FlagName Bool)) - -> SnapshotDef - -> RIO env BuildPlanCheck -checkSnapBuildPlanActual root gpds flags sd = do - let forNonSnapshot inner = setupCabalEnv (sdWantedCompilerVersion sd) (inner . Just) - runner = - case sdResolver sd of - ResolverStackage _ -> ($ Nothing) - ResolverCompiler _ -> forNonSnapshot - ResolverCustom _ _ -> forNonSnapshot - - runner $ checkSnapBuildPlan root gpds flags sd - prettyPath :: forall r t m. (MonadIO m, RelPath (Path r t) ~ Path Rel t, AnyPath (Path r t)) => Path r t -> m String diff --git a/src/Stack/Types/Build.hs b/src/Stack/Types/Build.hs index 67bc55c29f..6987292012 100644 --- a/src/Stack/Types/Build.hs +++ b/src/Stack/Types/Build.hs @@ -32,7 +32,6 @@ module Stack.Types.Build ,BuildSubset(..) ,defaultBuildOpts ,TaskType(..) - ,ttPackageLocation ,TaskConfigOpts(..) ,BuildCache(..) 
,buildCacheVC @@ -65,19 +64,16 @@ import Data.Time.Clock import Distribution.PackageDescription (TestSuiteInterface) import Distribution.System (Arch) import qualified Distribution.Text as C -import Path (mkRelDir, parseRelDir, ()) +import Distribution.Version (mkVersion) +import Path (mkRelDir, parseRelDir, (), parent) import Path.Extra (toFilePathNoTrailingSep) import Stack.Constants -import Stack.Types.BuildPlan import Stack.Types.Compiler import Stack.Types.CompilerBuild import Stack.Types.Config -import Stack.Types.FlagName import Stack.Types.GhcPkgId import Stack.Types.NamedComponent import Stack.Types.Package -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName import Stack.Types.Version import System.Exit (ExitCode (ExitFailure)) import System.FilePath (pathSeparator) @@ -88,8 +84,8 @@ import RIO.Process (showProcessArgDebug) data StackBuildException = Couldn'tFindPkgId PackageName | CompilerVersionMismatch - (Maybe (CompilerVersion 'CVActual, Arch)) -- found - (CompilerVersion 'CVWanted, Arch) -- expected + (Maybe (ActualCompiler, Arch)) -- found + (WantedCompiler, Arch) -- expected GHCVariant -- expected CompilerBuild -- expected VersionCheck @@ -162,7 +158,7 @@ instance Show StackBuildException where MatchMinor -> "minor version match with " MatchExact -> "exact version " NewerMinor -> "minor version match or newer with " - , compilerVersionString expected + , T.unpack $ utf8BuilderToText $ display expected , " (" , C.display earch , ghcVariantSuffix ghcVariant @@ -295,12 +291,12 @@ instance Show StackBuildException where "The following components have 'buildable: False' set in the cabal configuration, and so cannot be targets:\n " ++ T.unpack (renderPkgComponents xs) ++ "\nTo resolve this, either provide flags such that these components are buildable, or only specify buildable targets." 
- show (TestSuiteExeMissing isSimpleBuildType exeName pkgName testName) = + show (TestSuiteExeMissing isSimpleBuildType exeName pkgName' testName) = missingExeError isSimpleBuildType $ concat [ "Test suite executable \"" , exeName , " not found for " - , pkgName + , pkgName' , ":test:" , testName ] @@ -347,9 +343,9 @@ showBuildError isBuildingSetup exitCode mtaskProvides execName fullArgs logFiles in "\n-- While building " ++ (case (isBuildingSetup, mtaskProvides) of (False, Nothing) -> error "Invariant violated: unexpected case in showBuildError" - (False, Just taskProvides') -> "package " ++ dropQuotes (show taskProvides') + (False, Just taskProvides') -> "package " ++ dropQuotes (packageIdentifierString taskProvides') (True, Nothing) -> "simple Setup.hs" - (True, Just taskProvides') -> "custom Setup.hs for package " ++ dropQuotes (show taskProvides') + (True, Just taskProvides') -> "custom Setup.hs for package " ++ dropQuotes (packageIdentifierString taskProvides') ) ++ " using:\n " ++ fullCmd ++ "\n" ++ " Process exited with code: " ++ show exitCode ++ @@ -371,7 +367,7 @@ instance Exception StackBuildException -- | Package dependency oracle. newtype PkgDepsOracle = PkgDeps PackageName - deriving (Show,Typeable,Eq,Hashable,Store,NFData) + deriving (Show,Typeable,Eq,Store,NFData) -- | Stored on disk to know whether the files have changed. newtype BuildCache = BuildCache @@ -383,7 +379,7 @@ instance NFData BuildCache instance Store BuildCache buildCacheVC :: VersionConfig BuildCache -buildCacheVC = storeVersionConfig "build-v1" "KVUoviSWWAd7tiRRGeWAvd0UIN4=" +buildCacheVC = storeVersionConfig "build-v2" "c9BeiWP7Mpe9OBDAPPEYPDaFEGM=" -- | Stored on disk to know whether the flags have changed. 
data ConfigCache = ConfigCache @@ -412,8 +408,8 @@ instance Store CachePkgSrc instance NFData CachePkgSrc toCachePkgSrc :: PackageSource -> CachePkgSrc -toCachePkgSrc (PSFiles lp _) = CacheSrcLocal (toFilePath (lpDir lp)) -toCachePkgSrc PSIndex{} = CacheSrcUpstream +toCachePkgSrc (PSFilePath lp _) = CacheSrcLocal (toFilePath (parent (lpCabalFile lp))) +toCachePkgSrc PSRemote{} = CacheSrcUpstream configCacheVC :: VersionConfig ConfigCache configCacheVC = storeVersionConfig "config-v3" "z7N_NxX7Gbz41Gi9AGEa1zoLE-4=" @@ -464,25 +460,22 @@ instance Show TaskConfigOpts where -- | The type of a task, either building local code or something from the -- package index (upstream) -data TaskType = TTFiles LocalPackage InstallLocation - | TTIndex Package InstallLocation PackageIdentifierRevision -- FIXME major overhaul for PackageLocation? +data TaskType + = TTFilePath LocalPackage InstallLocation + | TTRemote Package InstallLocation PackageLocationImmutable deriving Show -ttPackageLocation :: TaskType -> PackageLocationIndex FilePath -ttPackageLocation (TTFiles lp _) = PLOther (lpLocation lp) -ttPackageLocation (TTIndex _ _ pir) = PLIndex pir - taskIsTarget :: Task -> Bool taskIsTarget t = case taskType t of - TTFiles lp _ -> lpWanted lp + TTFilePath lp _ -> lpWanted lp _ -> False taskLocation :: Task -> InstallLocation taskLocation task = case taskType task of - TTFiles _ loc -> loc - TTIndex _ loc _ -> loc + TTFilePath _ loc -> loc + TTRemote _ loc _ -> loc -- | A complete plan of what needs to be built and how to do it data Plan = Plan @@ -618,7 +611,7 @@ configureOptsNoDir econfig bco deps isLocal package = concat -- earlier. Cabal also might do less work then. useExactConf = configAllowNewer config - newerCabal = view cabalVersionL econfig >= $(mkVersion "1.22") + newerCabal = view cabalVersionL econfig >= mkVersion [1, 22] -- Unioning atop defaults is needed so that all flags are specified -- with --exact-configuration. 
@@ -629,9 +622,9 @@ configureOptsNoDir econfig bco deps isLocal package = concat where toDepOption = if newerCabal then toDepOption1_22 else toDepOption1_18 - toDepOption1_22 ident gid = concat + toDepOption1_22 (PackageIdentifier name _) gid = concat [ "--dependency=" - , packageNameString $ packageIdentifierName ident + , packageNameString name , "=" , ghcPkgIdString gid ] diff --git a/src/Stack/Types/BuildPlan.hs b/src/Stack/Types/BuildPlan.hs index 414788ddb6..669bfde3c4 100644 --- a/src/Stack/Types/BuildPlan.hs +++ b/src/Stack/Types/BuildPlan.hs @@ -5,53 +5,35 @@ {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} {-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE TupleSections #-} {-# LANGUAGE TypeFamilies #-} -- | Shared types for various stackage packages. module Stack.Types.BuildPlan ( -- * Types SnapshotDef (..) , snapshotDefVC - , sdRawPathName - , PackageLocation (..) - , PackageLocationIndex (..) - , RepoType (..) - , Subdirs (..) - , Repo (..) - , Archive (..) , ExeName (..) , LoadedSnapshot (..) , loadedSnapshotVC , LoadedPackageInfo (..) - , ModuleName (..) - , fromCabalModuleName + , C.ModuleName , ModuleInfo (..) 
, moduleInfoVC - , setCompilerVersion - , sdWantedCompilerVersion + , sdSnapshots + , sdResolverName ) where -import Data.Aeson (ToJSON (..), FromJSON (..), withText, object, (.=)) -import Data.Aeson.Extended (WithJSONWarnings (..), (..:), (..:?), withObjectWarnings, noJSONWarnings, (..!=)) import qualified Data.Map as Map import qualified Data.Set as Set import Data.Store.Version import Data.Store.VersionTagged -import qualified Data.Text as T -import Data.Text.Encoding (encodeUtf8) import qualified Distribution.ModuleName as C +import Distribution.ModuleName (ModuleName) import qualified Distribution.Version as C -import Network.HTTP.StackClient (parseRequest) +import Pantry import Stack.Prelude import Stack.Types.Compiler -import Stack.Types.FlagName import Stack.Types.GhcPkgId -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName -import Stack.Types.Resolver -import Stack.Types.Version import Stack.Types.VersionIntervals -- | A definition of a snapshot. This could be a Stackage snapshot or @@ -65,230 +47,30 @@ import Stack.Types.VersionIntervals -- of this additional information by package name, and later in the -- snapshot load step we will resolve the contents of tarballs and -- repos, figure out package names, and assigned values appropriately. -data SnapshotDef = SnapshotDef - { sdParent :: !(Either (CompilerVersion 'CVWanted) SnapshotDef) - -- ^ The snapshot to extend from. This is either a specific - -- compiler, or a @SnapshotDef@ which gives us more information - -- (like packages). Ultimately, we'll end up with a - -- @CompilerVersion@. - , sdResolver :: !LoadedResolver - -- ^ The resolver that provides this definition. - , sdResolverName :: !Text - -- ^ A user-friendly way of referring to this resolver. - , sdLocations :: ![PackageLocationIndex Subdirs] - -- ^ Where to grab all of the packages from. - , sdDropPackages :: !(Set PackageName) - -- ^ Packages present in the parent which should not be included - -- here. 
- , sdFlags :: !(Map PackageName (Map FlagName Bool)) - -- ^ Flag values to override from the defaults - , sdHidden :: !(Map PackageName Bool) - -- ^ Packages which should be hidden when registering. This will - -- affect, for example, the import parser in the script - -- command. We use a 'Map' instead of just a 'Set' to allow - -- overriding the hidden settings in a parent snapshot. - , sdGhcOptions :: !(Map PackageName [Text]) - -- ^ GHC options per package - , sdGlobalHints :: !(Map PackageName (Maybe Version)) - -- ^ Hints about which packages are available globally. When - -- actually building code, we trust the package database provided - -- by GHC itself, since it may be different based on platform or - -- GHC install. However, when we want to check the compatibility - -- of a snapshot with some codebase without installing GHC (e.g., - -- during stack init), we would use this field. +data SnapshotDef = SnapshotDef -- To be removed as part of https://github.com/commercialhaskell/stack/issues/3922 + { sdResolver :: !SnapshotLocation + , sdSnapshot :: !(Maybe (Snapshot, SnapshotDef)) + , sdWantedCompilerVersion :: !WantedCompiler + , sdUniqueHash :: !SHA256 } deriving (Show, Eq, Data, Generic, Typeable) instance Store SnapshotDef instance NFData SnapshotDef -snapshotDefVC :: VersionConfig SnapshotDef -snapshotDefVC = storeVersionConfig "sd-v1" "CKo7nln8EXkw07Gq-4ATxszNZiE=" - --- | A relative file path including a unique string for the given --- snapshot. -sdRawPathName :: SnapshotDef -> String -sdRawPathName sd = - T.unpack $ go $ sdResolver sd - where - go (ResolverStackage name) = renderSnapName name - go (ResolverCompiler version) = compilerVersionText version - go (ResolverCustom _ hash) = "custom-" <> sdResolverName sd <> "-" <> trimmedSnapshotHash hash - --- | Modify the wanted compiler version in this snapshot. This is used --- when overriding via the `compiler` value in a custom snapshot or --- stack.yaml file. 
We do _not_ need to modify the snapshot's hash for --- this: all binary caches of a snapshot are stored in a filepath that --- encodes the actual compiler version in addition to the --- hash. Therefore, modifications here will not lead to any invalid --- data. -setCompilerVersion :: CompilerVersion 'CVWanted -> SnapshotDef -> SnapshotDef -setCompilerVersion cv = - go - where - go sd = - case sdParent sd of - Left _ -> sd { sdParent = Left cv } - Right sd' -> sd { sdParent = Right $ go sd' } - --- | Where to get the contents of a package (including cabal file --- revisions) from. --- --- A GADT may be more logical than the index parameter, but this plays --- more nicely with Generic deriving. -data PackageLocation subdirs - = PLFilePath !FilePath - -- ^ Note that we use @FilePath@ and not @Path@s. The goal is: first parse - -- the value raw, and then use @canonicalizePath@ and @parseAbsDir@. - | PLArchive !(Archive subdirs) - | PLRepo !(Repo subdirs) - -- ^ Stored in a source control repository - deriving (Generic, Show, Eq, Ord, Data, Typeable, Functor) -instance (Store a) => Store (PackageLocation a) -instance (NFData a) => NFData (PackageLocation a) - --- | Add in the possibility of getting packages from the index --- (including cabal file revisions). We have special handling of this --- case in many places in the codebase, and therefore represent it --- with a separate data type from 'PackageLocation'. -data PackageLocationIndex subdirs - = PLIndex !PackageIdentifierRevision - -- ^ Grab the package from the package index with the given - -- version and (optional) cabal file info to specify the correct - -- revision. - | PLOther !(PackageLocation subdirs) - deriving (Generic, Show, Eq, Ord, Data, Typeable, Functor) -instance (Store a) => Store (PackageLocationIndex a) -instance (NFData a) => NFData (PackageLocationIndex a) - --- | A package archive, could be from a URL or a local file --- path. 
Local file path archives are assumed to be unchanging --- over time, and so are allowed in custom snapshots. -data Archive subdirs = Archive - { archiveUrl :: !Text - , archiveSubdirs :: !subdirs - , archiveHash :: !(Maybe StaticSHA256) - } - deriving (Generic, Show, Eq, Ord, Data, Typeable, Functor) -instance Store a => Store (Archive a) -instance NFData a => NFData (Archive a) - --- | The type of a source control repository. -data RepoType = RepoGit | RepoHg - deriving (Generic, Show, Eq, Ord, Data, Typeable) -instance Store RepoType -instance NFData RepoType - -data Subdirs - = DefaultSubdirs - | ExplicitSubdirs ![FilePath] - deriving (Generic, Show, Eq, Data, Typeable) -instance Store Subdirs -instance NFData Subdirs -instance FromJSON Subdirs where - parseJSON = fmap ExplicitSubdirs . parseJSON - --- | Information on packages stored in a source control repository. -data Repo subdirs = Repo - { repoUrl :: !Text - , repoCommit :: !Text - , repoType :: !RepoType - , repoSubdirs :: !subdirs - } - deriving (Generic, Show, Eq, Ord, Data, Typeable, Functor) -instance Store a => Store (Repo a) -instance NFData a => NFData (Repo a) - -instance subdirs ~ Subdirs => ToJSON (PackageLocationIndex subdirs) where - toJSON (PLIndex ident) = toJSON ident - toJSON (PLOther loc) = toJSON loc - -instance subdirs ~ Subdirs => ToJSON (PackageLocation subdirs) where - toJSON (PLFilePath fp) = toJSON fp - toJSON (PLArchive (Archive t DefaultSubdirs Nothing)) = toJSON t - toJSON (PLArchive (Archive t subdirs msha)) = object $ concat - [ ["location" .= t] - , case subdirs of - DefaultSubdirs -> [] - ExplicitSubdirs x -> ["subdirs" .= x] - , case msha of - Nothing -> [] - Just sha -> ["sha256" .= staticSHA256ToText sha] - ] - toJSON (PLRepo (Repo url commit typ subdirs)) = object $ concat - [ case subdirs of - DefaultSubdirs -> [] - ExplicitSubdirs x -> ["subdirs" .= x] - , [urlKey .= url] - , ["commit" .= commit] - ] - where - urlKey = - case typ of - RepoGit -> "git" - RepoHg -> "hg" 
- -instance subdirs ~ Subdirs => FromJSON (WithJSONWarnings (PackageLocationIndex subdirs)) where - parseJSON v - = (noJSONWarnings . PLIndex <$> parseJSON v) - <|> (fmap PLOther <$> parseJSON v) - -instance subdirs ~ Subdirs => FromJSON (WithJSONWarnings (PackageLocation subdirs)) where - parseJSON v - = (noJSONWarnings <$> withText "PackageLocation" (\t -> http t <|> file t) v) - <|> repo v - <|> archiveObject v - <|> github v - where - file t = pure $ PLFilePath $ T.unpack t - http t = - case parseRequest $ T.unpack t of - Left _ -> fail $ "Could not parse URL: " ++ T.unpack t - Right _ -> return $ PLArchive $ Archive t DefaultSubdirs Nothing - - repo = withObjectWarnings "PLRepo" $ \o -> do - (repoType, repoUrl) <- - ((RepoGit, ) <$> o ..: "git") <|> - ((RepoHg, ) <$> o ..: "hg") - repoCommit <- o ..: "commit" - repoSubdirs <- o ..:? "subdirs" ..!= DefaultSubdirs - return $ PLRepo Repo {..} - - archiveObject = withObjectWarnings "PLArchive" $ \o -> do - url <- o ..: "archive" - subdirs <- o ..:? "subdirs" ..!= DefaultSubdirs - msha <- o ..:? "sha256" - msha' <- - case msha of - Nothing -> return Nothing - Just t -> - case mkStaticSHA256FromText t of - Left e -> fail $ "Invalid SHA256: " ++ T.unpack t ++ ", " ++ show e - Right x -> return $ Just x - return $ PLArchive Archive - { archiveUrl = url - , archiveSubdirs = subdirs :: Subdirs - , archiveHash = msha' - } +sdResolverName :: SnapshotDef -> Text +sdResolverName sd = + case sdSnapshot sd of + Nothing -> utf8BuilderToText $ display $ sdWantedCompilerVersion sd + Just (snapshot, _) -> snapshotName snapshot - github = withObjectWarnings "PLArchive:github" $ \o -> do - GitHubRepo ghRepo <- o ..: "github" - commit <- o ..: "commit" - subdirs <- o ..:? 
"subdirs" ..!= DefaultSubdirs - return $ PLArchive Archive - { archiveUrl = "https://github.com/" <> ghRepo <> "/archive/" <> commit <> ".tar.gz" - , archiveSubdirs = subdirs - , archiveHash = Nothing - } +sdSnapshots :: SnapshotDef -> [Snapshot] +sdSnapshots sd = + case sdSnapshot sd of + Nothing -> [] + Just (snap, sd') -> snap : sdSnapshots sd' --- An unexported newtype wrapper to hang a 'FromJSON' instance off of. Contains --- a GitHub user and repo name separated by a forward slash, e.g. "foo/bar". -newtype GitHubRepo = GitHubRepo Text - -instance FromJSON GitHubRepo where - parseJSON = withText "GitHubRepo" $ \s -> do - case T.split (== '/') s of - [x, y] | not (T.null x || T.null y) -> return (GitHubRepo s) - _ -> fail "expecting \"user/repo\"" +snapshotDefVC :: VersionConfig SnapshotDef +snapshotDefVC = storeVersionConfig "sd-v3" "MpkgNx8qOHakJTSePR1czDElNiU=" -- | Name of an executable. newtype ExeName = ExeName { unExeName :: Text } @@ -301,16 +83,68 @@ newtype ExeName = ExeName { unExeName :: Text } -- a snapshot may not depend upon a local or project, and all -- dependencies must be satisfied. data LoadedSnapshot = LoadedSnapshot - { lsCompilerVersion :: !(CompilerVersion 'CVActual) + { lsCompilerVersion :: !ActualCompiler , lsGlobals :: !(Map PackageName (LoadedPackageInfo GhcPkgId)) - , lsPackages :: !(Map PackageName (LoadedPackageInfo (PackageLocationIndex FilePath))) + , lsPackages :: !(Map PackageName (LoadedPackageInfo PackageLocation)) + -- ^ Snapshots themselves may not have a filepath in them, but once + -- we start adding in local configuration it's possible. } deriving (Generic, Show, Data, Eq, Typeable) instance Store LoadedSnapshot instance NFData LoadedSnapshot +{- + +MSS 2018-08-02: There's a big refactoring laid out in +https://github.com/commercialhaskell/stack/issues/3922. 
While working +on the pantry refactoring, I think I found a straightforward way to +approach implementing this (though there will still be a lot of code +churn involved). I don't want to lose the idea, but I also don't want +to include this change in the pantry work, so writing a note here. + +Right now, we eagerly load up all packages in a snapshot the first +time we use it. This was necessary for build tool dependencies in the +past, but not anymore +(https://github.com/commercialhaskell/stack/pull/4132). Therefore: +let's delete the @LoadedSnapshot@ data type entirely! + +Once you start down this path, you'll get to a point of not using the +@calculatePackagePromotion@ stuff as much. This is good! Delete that +function too! + +Instead, we have a @SnapshotLocation@, which can be turned into a +@Snapshot@ via @loadPantrySnapshot@. We want to traverse that +@Snapshot@ and all of its parent @Snapshot@s and come up with a few +pieces of information: + +* The wanted compiler version + +* A @type SourceMap = Map PackageName PackageSource@ + +We'll want to augment that @SourceMap@ with information from the +@stack.yaml@ file, namely: extra-deps and local packages. We'll also +need to extend it with command line parameters, such as if a user runs +@stack build acme-missiles-0.3@. + +There will be a lot of information in @PackageSource@ taken from these +various sources, but it will contain information on where the package +is from, flags, GHC options, and so on, whether it's a dependency or +part of the project, etc. + +It will be easy to see if a package is _immutable_ or not: everything +but local file paths are immutable. Awesome. + +In ConstructPlan, when figuring out dependencies of a package, we'll +use a simple rule: if the package and all of its dependencies are +immutable, we stick it in the precompiled cache, with a hash based on +the full transitive set of dependencies and their +configuration. Otherwise, we don't cache. 
+ + +-} + loadedSnapshotVC :: VersionConfig LoadedSnapshot -loadedSnapshotVC = storeVersionConfig "ls-v5" "CeSRWh1VU8v0__kwA__msbe6WlU=" +loadedSnapshotVC = storeVersionConfig "ls-v6" "KG2o7Yvkg0tAjIOSKjQ4fEM0BKY=" -- | Information on a single package for the 'LoadedSnapshot' which -- can be installed. @@ -375,12 +209,6 @@ data Component = CompLibrary instance Store Component instance NFData Component -newtype ModuleName = ModuleName { unModuleName :: ByteString } - deriving (Show, Eq, Ord, Generic, Store, NFData, Typeable, Data) - -fromCabalModuleName :: C.ModuleName -> ModuleName -fromCabalModuleName = ModuleName . encodeUtf8 . T.intercalate "." . map T.pack . C.components - newtype ModuleInfo = ModuleInfo { miModules :: Map ModuleName (Set PackageName) } @@ -398,7 +226,3 @@ instance Monoid ModuleInfo where moduleInfoVC :: VersionConfig ModuleInfo moduleInfoVC = storeVersionConfig "mi-v2" "8ImAfrwMVmqoSoEpt85pLvFeV3s=" - --- | Determined the desired compiler version for this 'SnapshotDef'. -sdWantedCompilerVersion :: SnapshotDef -> CompilerVersion 'CVWanted -sdWantedCompilerVersion = either id sdWantedCompilerVersion . sdParent diff --git a/src/Stack/Types/Compiler.hs b/src/Stack/Types/Compiler.hs index f6f9e545de..c7832032dc 100644 --- a/src/Stack/Types/Compiler.hs +++ b/src/Stack/Types/Compiler.hs @@ -4,10 +4,20 @@ {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE KindSignatures #-} {-# LANGUAGE TypeFamilies #-} -module Stack.Types.Compiler where +module Stack.Types.Compiler + ( ActualCompiler (..) + , WhichCompiler (..) 
+ , getGhcVersion + , whichCompiler + , compilerExeName + , compilerVersionText + , compilerVersionString + , haddockExeName + , isWantedCompiler + , wantedToActual + ) where import Data.Aeson import Data.Data @@ -21,81 +31,59 @@ data WhichCompiler | Ghcjs deriving (Show, Eq, Ord) --- | Whether the compiler version given is the wanted version (what --- the stack.yaml file, snapshot file, or --resolver argument --- request), or the actual installed GHC. Depending on the matching --- requirements, these values could be different. -data CVType = CVWanted | CVActual - -- | Specifies a compiler and its version number(s). -- -- Note that despite having this datatype, stack isn't in a hurry to -- support compilers other than GHC. -data CompilerVersion (cvType :: CVType) - = GhcVersion {-# UNPACK #-} !Version - | GhcjsVersion - {-# UNPACK #-} !Version -- GHCJS version - {-# UNPACK #-} !Version -- GHC version +data ActualCompiler + = ACGhc !Version + | ACGhcjs + !Version -- GHCJS version + !Version -- GHC version deriving (Generic, Show, Eq, Ord, Data, Typeable) -instance Store (CompilerVersion a) -instance NFData (CompilerVersion a) -instance Display (CompilerVersion a) where - display = display . compilerVersionText -instance ToJSON (CompilerVersion a) where +instance Store ActualCompiler +instance NFData ActualCompiler +instance Display ActualCompiler where + display (ACGhc x) = display (WCGhc x) + display (ACGhcjs x y) = display (WCGhcjs x y) +instance ToJSON ActualCompiler where toJSON = toJSON . 
compilerVersionText -instance FromJSON (CompilerVersion a) where - parseJSON (String t) = maybe (fail "Failed to parse compiler version") return (parseCompilerVersion t) +instance FromJSON ActualCompiler where + parseJSON (String t) = either (const $ fail "Failed to parse compiler version") return (parseActualCompiler t) parseJSON _ = fail "Invalid CompilerVersion, must be String" -instance FromJSONKey (CompilerVersion a) where +instance FromJSONKey ActualCompiler where fromJSONKey = FromJSONKeyTextParser $ \k -> - case parseCompilerVersion k of - Nothing -> fail $ "Failed to parse CompilerVersion " ++ T.unpack k - Just parsed -> return parsed - -actualToWanted :: CompilerVersion 'CVActual -> CompilerVersion 'CVWanted -actualToWanted (GhcVersion x) = GhcVersion x -actualToWanted (GhcjsVersion x y) = GhcjsVersion x y + case parseActualCompiler k of + Left _ -> fail $ "Failed to parse CompilerVersion " ++ T.unpack k + Right parsed -> return parsed -wantedToActual :: CompilerVersion 'CVWanted -> CompilerVersion 'CVActual -wantedToActual (GhcVersion x) = GhcVersion x -wantedToActual (GhcjsVersion x y) = GhcjsVersion x y +wantedToActual :: WantedCompiler -> ActualCompiler +wantedToActual (WCGhc x) = ACGhc x +wantedToActual (WCGhcjs x y) = ACGhcjs x y -parseCompilerVersion :: T.Text -> Maybe (CompilerVersion a) -parseCompilerVersion t - | Just t' <- T.stripPrefix "ghc-" t - , Just v <- parseVersionFromString $ T.unpack t' - = Just (GhcVersion v) - | Just t' <- T.stripPrefix "ghcjs-" t - , [tghcjs, tghc] <- T.splitOn "_ghc-" t' - , Just vghcjs <- parseVersionFromString $ T.unpack tghcjs - , Just vghc <- parseVersionFromString $ T.unpack tghc - = Just (GhcjsVersion vghcjs vghc) - | otherwise - = Nothing +parseActualCompiler :: T.Text -> Either PantryException ActualCompiler +parseActualCompiler = fmap wantedToActual . 
parseWantedCompiler -compilerVersionText :: CompilerVersion a -> T.Text -compilerVersionText (GhcVersion vghc) = - "ghc-" <> versionText vghc -compilerVersionText (GhcjsVersion vghcjs vghc) = - "ghcjs-" <> versionText vghcjs <> "_ghc-" <> versionText vghc +compilerVersionText :: ActualCompiler -> T.Text +compilerVersionText = utf8BuilderToText . display -compilerVersionString :: CompilerVersion a -> String +compilerVersionString :: ActualCompiler -> String compilerVersionString = T.unpack . compilerVersionText -whichCompiler :: CompilerVersion a -> WhichCompiler -whichCompiler GhcVersion {} = Ghc -whichCompiler GhcjsVersion {} = Ghcjs +whichCompiler :: ActualCompiler -> WhichCompiler +whichCompiler ACGhc{} = Ghc +whichCompiler ACGhcjs{} = Ghcjs -isWantedCompiler :: VersionCheck -> CompilerVersion 'CVWanted -> CompilerVersion 'CVActual -> Bool -isWantedCompiler check (GhcVersion wanted) (GhcVersion actual) = +isWantedCompiler :: VersionCheck -> WantedCompiler -> ActualCompiler -> Bool +isWantedCompiler check (WCGhc wanted) (ACGhc actual) = checkVersion check wanted actual -isWantedCompiler check (GhcjsVersion wanted wantedGhc) (GhcjsVersion actual actualGhc) = +isWantedCompiler check (WCGhcjs wanted wantedGhc) (ACGhcjs actual actualGhc) = checkVersion check wanted actual && checkVersion check wantedGhc actualGhc isWantedCompiler _ _ _ = False -getGhcVersion :: CompilerVersion a -> Version -getGhcVersion (GhcVersion v) = v -getGhcVersion (GhcjsVersion _ v) = v +getGhcVersion :: ActualCompiler -> Version +getGhcVersion (ACGhc v) = v +getGhcVersion (ACGhcjs _ v) = v compilerExeName :: WhichCompiler -> String compilerExeName Ghc = "ghc" diff --git a/src/Stack/Types/Config.hs b/src/Stack/Types/Config.hs index fb7e4ef8e1..4dced8f446 100644 --- a/src/Stack/Types/Config.hs +++ b/src/Stack/Types/Config.hs @@ -21,6 +21,7 @@ {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ViewPatterns #-} -- | The Config type. 
@@ -53,6 +54,7 @@ module Stack.Types.Config ,parseGHCVariant ,HasGHCVariant(..) ,snapshotsDir + ,globalHintsFile -- ** EnvConfig & HasEnvConfig ,EnvConfig(..) ,HasEnvConfig(..) @@ -84,14 +86,10 @@ module Stack.Types.Config ,defaultLogLevel -- ** LoadConfig ,LoadConfig(..) - -- ** PackageIndex, IndexName & IndexLocation - -- Re-exports - ,PackageIndex(..) - ,IndexName(..) - ,indexNameText -- ** Project & ProjectAndConfigMonoid ,Project(..) + ,Curator(..) ,ProjectAndConfigMonoid(..) ,parseProjectAndConfigMonoid -- ** PvpBounds @@ -164,7 +162,6 @@ module Stack.Types.Config ,whichCompilerL ,envOverrideSettingsL ,loadedSnapshotL - ,globalHintsL ,shouldForceGhcColorFlag ,appropriateGhcColorFlag -- * Lens reexport @@ -177,7 +174,7 @@ import Crypto.Hash (hashWith, SHA1(..)) import Stack.Prelude import Data.Aeson.Extended (ToJSON, toJSON, FromJSON, FromJSONKey (..), parseJSON, withText, object, - (.=), (..:), (..:?), (..!=), Value(Bool, String), + (.=), (..:), (..:?), (..!=), Value(Bool), withObjectWarnings, WarningParser, Object, jsonSubWarnings, jsonSubWarningsT, jsonSubWarningsTT, WithJSONWarnings(..), noJSONWarnings, FromJSONKeyFunction (FromJSONKeyTextParser)) @@ -202,27 +199,23 @@ import qualified Distribution.PackageDescription as C import Distribution.System (Platform) import qualified Distribution.Text import qualified Distribution.Types.UnqualComponentName as C -import Distribution.Version (anyVersion, mkVersion') +import Distribution.Version (anyVersion, mkVersion', mkVersion) import Generics.Deriving.Monoid (memptydefault, mappenddefault) import Lens.Micro (Lens', lens, _1, _2, to) import Options.Applicative (ReadM) import qualified Options.Applicative as OA import qualified Options.Applicative.Types as OA +import qualified Pantry.SHA256 as SHA256 import Path import qualified Paths_stack as Meta import Stack.Constants -import Stack.PackageIndex (HasCabalLoader (..), CabalLoader (clStackRoot)) import Stack.Types.BuildPlan import Stack.Types.Compiler import 
Stack.Types.CompilerBuild import Stack.Types.Docker -import Stack.Types.FlagName import Stack.Types.Image import Stack.Types.NamedComponent import Stack.Types.Nix -import Stack.Types.PackageIdentifier -import Stack.Types.PackageIndex -import Stack.Types.PackageName import Stack.Types.Resolver import Stack.Types.Runner import Stack.Types.StylesUpdate (StylesUpdate, @@ -294,8 +287,6 @@ data Config = -- ^ How many concurrent jobs to run, defaults to number of capabilities ,configOverrideGccPath :: !(Maybe (Path Abs File)) -- ^ Optional gcc override path - ,configOverrideHpack :: !HpackExecutable - -- ^ Use Hpack executable (overrides bundled Hpack) ,configExtraIncludeDirs :: !(Set FilePath) -- ^ --extra-include-dirs arguments ,configExtraLibDirs :: !(Set FilePath) @@ -345,14 +336,10 @@ data Config = ,configHackageBaseUrl :: !Text -- ^ Hackage base URL used when uploading packages ,configRunner :: !Runner - ,configCabalLoader :: !CabalLoader + ,configPantryConfig :: !PantryConfig + ,configStackRoot :: !(Path Abs Dir) } -data HpackExecutable - = HpackBundled - | HpackCommand String - deriving (Show, Read, Eq, Ord) - -- | Which packages do ghc-options on the command line apply to? data ApplyGhcOptions = AGOTargets -- ^ all local targets | AGOLocals -- ^ all local packages, even non-targets @@ -439,7 +426,7 @@ data GlobalOpts = GlobalOpts , globalTimeInLog :: !Bool -- ^ Whether to include timings in logs. , globalConfigMonoid :: !ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig' , globalResolver :: !(Maybe AbstractResolver) -- ^ Resolver override - , globalCompiler :: !(Maybe (CompilerVersion 'CVWanted)) -- ^ Compiler override + , globalCompiler :: !(Maybe WantedCompiler) -- ^ Compiler override , globalTerminal :: !Bool -- ^ We're in a terminal? 
, globalStylesUpdate :: !StylesUpdate -- ^ SGR (Ansi) codes for styles , globalTermWidth :: !(Maybe Int) -- ^ Terminal width override @@ -462,13 +449,13 @@ data GlobalOptsMonoid = GlobalOptsMonoid , globalMonoidLogLevel :: !(First LogLevel) -- ^ Log level , globalMonoidTimeInLog :: !(First Bool) -- ^ Whether to include timings in logs. , globalMonoidConfigMonoid :: !ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig' - , globalMonoidResolver :: !(First AbstractResolver) -- ^ Resolver override - , globalMonoidCompiler :: !(First (CompilerVersion 'CVWanted)) -- ^ Compiler override + , globalMonoidResolver :: !(First (Unresolved AbstractResolver)) -- ^ Resolver override + , globalMonoidCompiler :: !(First WantedCompiler) -- ^ Compiler override , globalMonoidTerminal :: !(First Bool) -- ^ We're in a terminal? , globalMonoidStyles :: !StylesUpdate -- ^ Stack's output styles , globalMonoidTermWidth :: !(First Int) -- ^ Terminal width override , globalMonoidStackYaml :: !(First FilePath) -- ^ Override project stack.yaml - } deriving (Show, Generic) + } deriving Generic instance Semigroup GlobalOptsMonoid where (<>) = mappenddefault @@ -504,9 +491,9 @@ data BuildConfig = BuildConfig -- ^ Build plan wanted for this build , bcGHCVariant :: !GHCVariant -- ^ The variant of GHC used to select a GHC bindist. - , bcPackages :: ![PackageLocation Subdirs] + , bcPackages :: ![(ResolvedPath Dir, IO LocalPackageView)] -- ^ Local packages - , bcDependencies :: ![PackageLocationIndex Subdirs] + , bcDependencies :: ![PackageLocation] -- ^ Extra dependencies specified in configuration. -- -- These dependencies will not be installed to a shared location, and @@ -517,15 +504,14 @@ data BuildConfig = BuildConfig -- ^ Location of the stack.yaml file. -- -- Note: if the STACK_YAML environment variable is used, this may be - -- different from projectRootL "stack.yaml" - -- - -- FIXME MSS 2016-12-08: is the above comment still true? 
projectRootL - -- is defined in terms of bcStackYaml + -- different from projectRootL "stack.yaml" if a different file + -- name is used. , bcFlags :: !(Map PackageName (Map FlagName Bool)) -- ^ Per-package flag overrides , bcImplicitGlobal :: !Bool -- ^ Are we loading from the implicit global stack.yaml? This is useful -- for providing better error messages. + , bcCurator :: !(Maybe Curator) } stackYamlL :: HasBuildConfig env => Lens' env (Path Abs File) @@ -545,7 +531,7 @@ data EnvConfig = EnvConfig -- Note that this is not necessarily the same version as the one that stack -- depends on as a library and which is displayed when running -- @stack list-dependencies | grep Cabal@ in the stack project. - ,envConfigCompilerVersion :: !(CompilerVersion 'CVActual) + ,envConfigCompilerVersion :: !ActualCompiler -- ^ The actual version of the compiler to be used, as opposed to -- 'wantedCompilerL', which provides the version specified by the -- build plan. @@ -558,14 +544,14 @@ data EnvConfig = EnvConfig data LocalPackages = LocalPackages { lpProject :: !(Map PackageName LocalPackageView) - , lpDependencies :: !(Map PackageName (GenericPackageDescription, PackageLocationIndex FilePath)) + , lpDependencies :: !(Map PackageName (GenericPackageDescription, PackageLocation)) } -- | A view of a local package needed for resolving components data LocalPackageView = LocalPackageView { lpvCabalFP :: !(Path Abs File) + , lpvResolvedDir :: !(ResolvedPath Dir) , lpvGPD :: !GenericPackageDescription - , lpvLoc :: !(PackageLocation FilePath) } -- | Root directory for the given 'LocalPackageView' @@ -575,11 +561,7 @@ lpvRoot = parent . 
lpvCabalFP -- | Package name for the given 'LocalPackageView lpvName :: LocalPackageView -> PackageName lpvName lpv = - let PackageIdentifier name _version = - fromCabalPackageIdentifier - $ C.package - $ C.packageDescription - $ lpvGPD lpv + let PackageIdentifier name _version = C.package $ C.packageDescription $ lpvGPD lpv in name -- | All components available in the given 'LocalPackageView' @@ -600,95 +582,79 @@ lpvComponents lpv = Set.fromList $ concat -- | Version for the given 'LocalPackageView lpvVersion :: LocalPackageView -> Version lpvVersion lpv = - let PackageIdentifier _name version = - fromCabalPackageIdentifier - $ C.package - $ C.packageDescription - $ lpvGPD lpv + let PackageIdentifier _name version = C.package $ C.packageDescription $ lpvGPD lpv in version -- | Value returned by 'Stack.Config.loadConfig'. data LoadConfig = LoadConfig { lcConfig :: !Config -- ^ Top-level Stack configuration. - , lcLoadBuildConfig :: !(Maybe (CompilerVersion 'CVWanted) -> IO BuildConfig) + , lcLoadBuildConfig :: !(Maybe WantedCompiler -> IO BuildConfig) -- ^ Action to load the remaining 'BuildConfig'. , lcProjectRoot :: !(Maybe (Path Abs Dir)) -- ^ The project root directory, if in a project. } -data PackageEntry = PackageEntry - { peExtraDepMaybe :: !(Maybe TreatLikeExtraDep) - , peLocation :: !(PackageLocation Subdirs) - , peSubdirs :: !Subdirs - } - deriving Show - --- | Should a package be treated just like an extra-dep? --- --- 'True' means, it will only be built as a dependency --- for others, and its test suite/benchmarks will not be run. 
--- --- Useful modifying an upstream package, see: --- https://github.com/commercialhaskell/stack/issues/219 --- https://github.com/commercialhaskell/stack/issues/386 -type TreatLikeExtraDep = Bool - -instance FromJSON (WithJSONWarnings PackageEntry) where - parseJSON (String t) = do - WithJSONWarnings loc _ <- parseJSON $ String t - return $ noJSONWarnings - PackageEntry - { peExtraDepMaybe = Nothing - , peLocation = loc - , peSubdirs = DefaultSubdirs - } - parseJSON v = withObjectWarnings "PackageEntry" (\o -> PackageEntry - <$> o ..:? "extra-dep" - <*> jsonSubWarnings (o ..: "location") - <*> o ..:? "subdirs" ..!= DefaultSubdirs) v - -- | A project is a collection of packages. We can have multiple stack.yaml -- files, but only one of them may contain project information. data Project = Project { projectUserMsg :: !(Maybe String) -- ^ A warning message to display to the user when the auto generated -- config may have issues. - , projectPackages :: ![PackageLocation Subdirs] + , projectPackages :: ![RelFilePath] -- ^ Packages which are actually part of the project (as opposed -- to dependencies). - -- - -- /NOTE/ Stack has always allowed these packages to be any kind - -- of package location, but in reality only @PLFilePath@ really - -- makes sense. We could consider replacing @[PackageLocation]@ - -- with @[FilePath]@ to properly enforce this idea, though it will - -- slightly break backwards compatibility if someone really did - -- want to treat such things as non-deps. - , projectDependencies :: ![PackageLocationIndex Subdirs] + , projectDependencies :: ![PackageLocation] -- ^ Dependencies defined within the stack.yaml file, to be -- applied on top of the snapshot. , projectFlags :: !(Map PackageName (Map FlagName Bool)) -- ^ Flags to be applied on top of the snapshot flags. 
- , projectResolver :: !Resolver + , projectResolver :: !SnapshotLocation -- ^ How we resolve which @SnapshotDef@ to use - , projectCompiler :: !(Maybe (CompilerVersion 'CVWanted)) - -- ^ When specified, overrides which compiler to use + , projectCompiler :: !(Maybe WantedCompiler) + -- ^ Override the compiler in 'projectResolver' , projectExtraPackageDBs :: ![FilePath] + , projectCurator :: !(Maybe Curator) + -- ^ Extra configuration intended exclusively for usage by the + -- curator tool. In other words, this is /not/ part of the + -- documented and exposed Stack API. SUBJECT TO CHANGE. } deriving Show instance ToJSON Project where -- Expanding the constructor fully to ensure we don't miss any fields. - toJSON (Project userMsg packages extraDeps flags resolver compiler extraPackageDBs) = object $ concat - [ maybe [] (\cv -> ["compiler" .= cv]) compiler + toJSON (Project userMsg packages extraDeps flags resolver mcompiler extraPackageDBs mcurator) = object $ concat + [ maybe [] (\cv -> ["compiler" .= cv]) mcompiler , maybe [] (\msg -> ["user-message" .= msg]) userMsg , if null extraPackageDBs then [] else ["extra-package-dbs" .= extraPackageDBs] , if null extraDeps then [] else ["extra-deps" .= extraDeps] - , if Map.null flags then [] else ["flags" .= flags] + , if Map.null flags then [] else ["flags" .= fmap toCabalStringMap (toCabalStringMap flags)] , ["packages" .= packages] , ["resolver" .= resolver] + , maybe [] (\c -> ["curator" .= c]) mcurator ] +-- | Extra configuration intended exclusively for usage by the +-- curator tool. In other words, this is /not/ part of the +-- documented and exposed Stack API. SUBJECT TO CHANGE. 
+data Curator = Curator + { curatorSkipTest :: !(Set PackageName) + , curatorSkipBenchmark :: !(Set PackageName) + , curatorSkipHaddock :: !(Set PackageName) + } + deriving Show +instance ToJSON Curator where + toJSON c = object + [ "skip-test" .= Set.map CabalString (curatorSkipTest c) + , "skip-bench" .= Set.map CabalString (curatorSkipBenchmark c) + , "skip-haddock" .= Set.map CabalString (curatorSkipHaddock c) + ] +instance FromJSON (WithJSONWarnings Curator) where + parseJSON = withObjectWarnings "Curator" $ \o -> Curator + <$> fmap (Set.map unCabalString) (o ..:? "skip-test" ..!= mempty) + <*> fmap (Set.map unCabalString) (o ..:? "skip-bench" ..!= mempty) + <*> fmap (Set.map unCabalString) (o ..:? "skip-haddock" ..!= mempty) + -- An uninterpreted representation of configuration options. -- Configurations may be "cascaded" using mappend (left-biased). data ConfigMonoid = @@ -711,7 +677,7 @@ data ConfigMonoid = -- ^ Deprecated in favour of 'urlsMonoidLatestSnapshot' , configMonoidUrls :: !UrlsMonoid -- ^ See: 'configUrls - , configMonoidPackageIndices :: !(First [PackageIndex]) + , configMonoidPackageIndices :: !(First [HackageSecurityConfig]) -- ^ See: @picIndices@ , configMonoidSystemGHC :: !(First Bool) -- ^ See: 'configSystemGHC' @@ -789,8 +755,6 @@ data ConfigMonoid = -- ^ See 'configSaveHackageCreds' , configMonoidHackageBaseUrl :: !(First Text) -- ^ See 'configHackageBaseUrl' - , configMonoidIgnoreRevisionMismatch :: !(First Bool) - -- ^ See 'configIgnoreRevisionMismatch' , configMonoidColorWhen :: !(First ColorWhen) -- ^ When to use 'ANSI' colors , configMonoidStyles :: !StylesUpdate @@ -890,7 +854,6 @@ parseConfigMonoidObject rootDir obj = do configMonoidDumpLogs <- First <$> obj ..:? configMonoidDumpLogsName configMonoidSaveHackageCreds <- First <$> obj ..:? configMonoidSaveHackageCredsName configMonoidHackageBaseUrl <- First <$> obj ..:? configMonoidHackageBaseUrlName - configMonoidIgnoreRevisionMismatch <- First <$> obj ..:? 
configMonoidIgnoreRevisionMismatchName configMonoidColorWhen <- First <$> obj ..:? configMonoidColorWhenName configMonoidStyles <- fromMaybe mempty <$> obj ..:? configMonoidStylesName @@ -901,9 +864,9 @@ parseConfigMonoidObject rootDir obj = do name <- if name' == "*" then return Nothing - else case parsePackageNameFromString $ T.unpack name' of - Left e -> fail $ show e - Right x -> return $ Just x + else case parsePackageName $ T.unpack name' of + Nothing -> fail $ "Invalid package name: " ++ show name' + Just x -> return $ Just x return (name, b) configMonoidWorkDirName :: Text @@ -1035,9 +998,6 @@ configMonoidSaveHackageCredsName = "save-hackage-creds" configMonoidHackageBaseUrlName :: Text configMonoidHackageBaseUrlName = "hackage-base-url" -configMonoidIgnoreRevisionMismatchName :: Text -configMonoidIgnoreRevisionMismatchName = "ignore-revision-mismatch" - configMonoidColorWhenName :: Text configMonoidColorWhenName = "color" @@ -1059,12 +1019,11 @@ data ConfigException | BadStackRoot (Path Abs Dir) | Won'tCreateStackRootInDirectoryOwnedByDifferentUser (Path Abs Dir) (Path Abs Dir) -- ^ @$STACK_ROOT@, parent dir | UserDoesn'tOwnDirectory (Path Abs Dir) - | FailedToCloneRepo String | ManualGHCVariantSettingsAreIncompatibleWithSystemGHC | NixRequiresSystemGhc | NoResolverWhenUsingNoLocalConfig | InvalidResolverForNoLocalConfig String - | DuplicateLocalPackageNames ![(PackageName, [PackageLocationIndex FilePath])] + | DuplicateLocalPackageNames ![(PackageName, [PackageLocation])] deriving Typeable instance Show ConfigException where show (ParseConfigFileException configFile exception) = concat @@ -1103,7 +1062,7 @@ instance Show ConfigException where ] show (BadStackVersionException requiredRange) = concat [ "The version of stack you are using (" - , show (fromCabalVersion (mkVersion' Meta.version)) + , show (mkVersion' Meta.version) , ") is outside the required\n" ,"version range specified in stack.yaml (" , T.unpack (versionRangeText requiredRange) @@ -1157,13 
+1116,6 @@ instance Show ConfigException where , T.unpack configMonoidAllowDifferentUserName , "' to disable this precaution." ] - show (FailedToCloneRepo commandName) = concat - [ "Failed to use " - , commandName - , " to clone the repo. Please ensure that " - , commandName - , " is installed and available to stack on your PATH environment variable." - ] show ManualGHCVariantSettingsAreIncompatibleWithSystemGHC = T.unpack $ T.concat [ "stack can only control the " , configMonoidGHCVariantName @@ -1245,6 +1197,12 @@ snapshotsDir = do platform <- platformGhcRelDir return $ root $(mkRelDir "snapshots") platform +-- | Cached global hints file +globalHintsFile :: (MonadReader env m, HasConfig env) => m (Path Abs File) +globalHintsFile = do + root <- view stackRootL + pure $ root $(mkRelDir "global-hints") $(mkRelFile "global-hints.yaml") + -- | Installation root for dependencies installationRootDeps :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir) installationRootDeps = do @@ -1295,7 +1253,7 @@ platformSnapAndCompilerRel platformSnapAndCompilerRel = do sd <- view snapshotDefL platform <- platformGhcRelDir - name <- parseRelDir $ sdRawPathName sd + name <- parseRelDir $ T.unpack $ SHA256.toHexText $ sdUniqueHash sd ghc <- compilerVersionDir useShaPathOnWindows (platform name ghc) @@ -1363,8 +1321,8 @@ compilerVersionDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m ( compilerVersionDir = do compilerVersion <- view actualCompilerVersionL parseRelDir $ case compilerVersion of - GhcVersion version -> versionString version - GhcjsVersion {} -> compilerVersionString compilerVersion + ACGhc version -> versionString version + ACGhcjs {} -> compilerVersionString compilerVersion -- | Package database for installing dependencies into packageDatabaseDeps :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir) @@ -1397,7 +1355,7 @@ configLoadedSnapshotCache configLoadedSnapshotCache sd gis = do root <- view stackRootL 
platform <- platformGhcVerOnlyRelDir - file <- parseRelFile $ sdRawPathName sd ++ ".cache" + file <- parseRelFile $ T.unpack (SHA256.toHexText $ sdUniqueHash sd) ++ ".cache" gis' <- parseRelDir $ case gis of GISSnapshotHints -> "__snapshot_hints__" @@ -1410,7 +1368,7 @@ configLoadedSnapshotCache sd gis = do data GlobalInfoSource = GISSnapshotHints -- ^ Accept the hints in the snapshot definition - | GISCompiler (CompilerVersion 'CVActual) + | GISCompiler ActualCompiler -- ^ Look up the actual information in the installed compiler -- | Suffix applied to an installation root to get the bin dir @@ -1496,86 +1454,35 @@ getCompilerPath wc = do data ProjectAndConfigMonoid = ProjectAndConfigMonoid !Project !ConfigMonoid -parseProjectAndConfigMonoid :: Path Abs Dir -> Value -> Yaml.Parser (WithJSONWarnings ProjectAndConfigMonoid) +parseProjectAndConfigMonoid :: Path Abs Dir -> Value -> Yaml.Parser (WithJSONWarnings (IO ProjectAndConfigMonoid)) parseProjectAndConfigMonoid rootDir = withObjectWarnings "ProjectAndConfigMonoid" $ \o -> do - dirs <- jsonSubWarningsTT (o ..:? "packages") ..!= [packageEntryCurrDir] - extraDeps <- jsonSubWarningsTT (o ..:? "extra-deps") ..!= [] - flags <- o ..:? "flags" ..!= mempty - - -- Convert the packages/extra-deps/flags approach we use in - -- the stack.yaml into the internal representation. - let (packages, deps) = convert dirs extraDeps - - resolver <- (o ..: "resolver") - >>= either (fail . show) return - . parseCustomLocation (Just rootDir) - compiler <- o ..:? "compiler" + packages <- o ..:? "packages" ..!= [RelFilePath "."] + deps <- jsonSubWarningsTT (o ..:? "extra-deps") ..!= [] + flags' <- o ..:? "flags" ..!= mempty + let flags = unCabalStringMap <$> unCabalStringMap + (flags' :: Map (CabalString PackageName) (Map (CabalString FlagName) Bool)) + + resolver <- jsonSubWarnings (o ..: "resolver") + mcompiler <- o ..:? "compiler" msg <- o ..:? "user-message" config <- parseConfigMonoidObject rootDir o extraPackageDBs <- o ..:? 
"extra-package-dbs" ..!= [] - let project = Project - { projectUserMsg = msg - , projectResolver = resolver - , projectCompiler = compiler - , projectExtraPackageDBs = extraPackageDBs - , projectPackages = packages - , projectDependencies = deps - , projectFlags = flags - } - return $ ProjectAndConfigMonoid project config - where - convert :: [PackageEntry] - -> [PackageLocationIndex Subdirs] -- extra-deps - -> ( [PackageLocation Subdirs] -- project - , [PackageLocationIndex Subdirs] -- dependencies - ) - convert entries extraDeps = - partitionEithers $ concatMap goEntry entries ++ map Right extraDeps - where - goEntry :: PackageEntry -> [Either (PackageLocation Subdirs) (PackageLocationIndex Subdirs)] - goEntry (PackageEntry Nothing pl@(PLFilePath _) subdirs) = goEntry' False pl subdirs - goEntry (PackageEntry Nothing pl _) = fail $ concat - [ "Refusing to implicitly treat package location as an extra-dep:\n" - , show pl - , "\nRecommendation: either move to 'extra-deps' or set 'extra-dep: true'." - ] - goEntry (PackageEntry (Just extraDep) pl subdirs) = goEntry' extraDep pl subdirs - - goEntry' :: Bool -- ^ extra dep? - -> PackageLocation Subdirs - -> Subdirs - -> [Either (PackageLocation Subdirs) (PackageLocationIndex Subdirs)] - goEntry' extraDep pl subdirs = - map (if extraDep then Right . PLOther else Left) (addSubdirs pl subdirs) - - combineSubdirs :: [FilePath] -> Subdirs -> Subdirs - combineSubdirs paths DefaultSubdirs = ExplicitSubdirs paths - -- this could be considered an error condition, but we'll - -- just try and make it work - combineSubdirs paths (ExplicitSubdirs paths') = ExplicitSubdirs (paths ++ paths') - - -- We do the toList/fromList bit as an efficient nub, and - -- to avoid having duplicate subdir names (especially for - -- the "." case, where parsing gets wonky). 
- addSubdirs :: PackageLocation Subdirs - -> Subdirs - -> [PackageLocation Subdirs] - addSubdirs pl DefaultSubdirs = [pl] - addSubdirs (PLRepo repo) (ExplicitSubdirs subdirs) = - [PLRepo repo { repoSubdirs = combineSubdirs subdirs $ repoSubdirs repo }] - addSubdirs (PLArchive arch) (ExplicitSubdirs subdirs) = - [PLArchive arch { archiveSubdirs = combineSubdirs subdirs $ archiveSubdirs arch }] - addSubdirs (PLFilePath fp) (ExplicitSubdirs subdirs) = - map (\subdir -> PLFilePath $ fp FilePath. subdir) subdirs - --- | A PackageEntry for the current directory, used as a default -packageEntryCurrDir :: PackageEntry -packageEntryCurrDir = PackageEntry - { peExtraDepMaybe = Nothing - , peLocation = PLFilePath "." - , peSubdirs = DefaultSubdirs - } + mcurator <- jsonSubWarningsT (o ..:? "curator") + return $ do + deps' <- mapM (resolvePaths (Just rootDir)) deps + resolver' <- resolvePaths (Just rootDir) resolver + let project = Project + { projectUserMsg = msg + , projectResolver = resolver' + , projectCompiler = mcompiler -- FIXME make sure resolver' isn't SLCompiler + , projectExtraPackageDBs = extraPackageDBs + , projectPackages = packages + , projectDependencies = concatMap toList (deps' :: [NonEmpty PackageLocation]) + , projectFlags = flags + , projectCurator = mcurator + } + pure $ ProjectAndConfigMonoid project config -- | A software control system. 
data SCM = Git @@ -1672,7 +1579,7 @@ data VersionedDownloadInfo = VersionedDownloadInfo instance FromJSON (WithJSONWarnings VersionedDownloadInfo) where parseJSON = withObjectWarnings "VersionedDownloadInfo" $ \o -> do - version <- o ..: "version" + CabalString version <- o ..: "version" downloadInfo <- parseDownloadInfoFromObject o return VersionedDownloadInfo { vdiVersion = version @@ -1702,7 +1609,7 @@ data SetupInfo = SetupInfo , siSevenzDll :: Maybe DownloadInfo , siMsys2 :: Map Text VersionedDownloadInfo , siGHCs :: Map Text (Map Version GHCDownloadInfo) - , siGHCJSs :: Map Text (Map (CompilerVersion 'CVActual) DownloadInfo) + , siGHCJSs :: Map Text (Map ActualCompiler DownloadInfo) , siStack :: Map Text (Map Version DownloadInfo) } deriving Show @@ -1712,9 +1619,9 @@ instance FromJSON (WithJSONWarnings SetupInfo) where siSevenzExe <- jsonSubWarningsT (o ..:? "sevenzexe-info") siSevenzDll <- jsonSubWarningsT (o ..:? "sevenzdll-info") siMsys2 <- jsonSubWarningsT (o ..:? "msys2" ..!= mempty) - siGHCs <- jsonSubWarningsTT (o ..:? "ghc" ..!= mempty) + (fmap unCabalStringMap -> siGHCs) <- jsonSubWarningsTT (o ..:? "ghc" ..!= mempty) siGHCJSs <- jsonSubWarningsTT (o ..:? "ghcjs" ..!= mempty) - siStack <- jsonSubWarningsTT (o ..:? "stack" ..!= mempty) + (fmap unCabalStringMap -> siStack) <- jsonSubWarningsTT (o ..:? "stack" ..!= mempty) return SetupInfo {..} -- | For @siGHCs@ and @siGHCJSs@ fields maps are deeply merged. 
@@ -1843,9 +1750,9 @@ instance FromJSONKey GhcOptionKey where "$locals" -> return GOKLocals "$targets" -> return GOKTargets _ -> - case parsePackageName t of - Left e -> fail $ show e - Right x -> return $ GOKPackage x + case parsePackageName $ T.unpack t of + Nothing -> fail $ "Invalid package name: " ++ show t + Just x -> return $ GOKPackage x fromJSONKeyList = FromJSONKeyTextParser $ \_ -> fail "GhcOptionKey.fromJSONKeyList" newtype GhcOptions = GhcOptions { unGhcOptions :: [Text] } @@ -1879,7 +1786,7 @@ class HasGHCVariant env where {-# INLINE ghcVariantL #-} -- | Class for environment values that can provide a 'Config'. -class (HasPlatform env, HasProcessContext env, HasCabalLoader env) => HasConfig env where +class (HasPlatform env, HasProcessContext env, HasPantryConfig env, HasLogFunc env, HasRunner env) => HasConfig env where configL :: Lens' env Config default configL :: HasBuildConfig env => Lens' env Config configL = buildConfigL.lens bcConfig (\x y -> x { bcConfig = y }) @@ -1925,14 +1832,14 @@ instance HasProcessContext BuildConfig where instance HasProcessContext EnvConfig where processContextL = configL.processContextL -instance HasCabalLoader Config where - cabalLoaderL = lens configCabalLoader (\x y -> x { configCabalLoader = y }) -instance HasCabalLoader LoadConfig where - cabalLoaderL = configL.cabalLoaderL -instance HasCabalLoader BuildConfig where - cabalLoaderL = configL.cabalLoaderL -instance HasCabalLoader EnvConfig where - cabalLoaderL = configL.cabalLoaderL +instance HasPantryConfig Config where + pantryConfigL = lens configPantryConfig (\x y -> x { configPantryConfig = y }) +instance HasPantryConfig LoadConfig where + pantryConfigL = configL.pantryConfigL +instance HasPantryConfig BuildConfig where + pantryConfigL = configL.pantryConfigL +instance HasPantryConfig EnvConfig where + pantryConfigL = configL.pantryConfigL instance HasConfig Config where configL = id @@ -1974,18 +1881,18 @@ instance HasLogFunc EnvConfig where -- Helper lenses 
----------------------------------- -stackRootL :: HasCabalLoader s => Lens' s (Path Abs Dir) -stackRootL = cabalLoaderL.lens clStackRoot (\x y -> x { clStackRoot = y }) +stackRootL :: HasConfig s => Lens' s (Path Abs Dir) +stackRootL = configL.lens configStackRoot (\x y -> x { configStackRoot = y }) -- | The compiler specified by the @SnapshotDef@. This may be -- different from the actual compiler used! -wantedCompilerVersionL :: HasBuildConfig s => Getting r s (CompilerVersion 'CVWanted) +wantedCompilerVersionL :: HasBuildConfig s => Getting r s WantedCompiler wantedCompilerVersionL = snapshotDefL.to sdWantedCompilerVersion -- | The version of the compiler which will actually be used. May be -- different than that specified in the 'SnapshotDef' and returned -- by 'wantedCompilerVersionL'. -actualCompilerVersionL :: HasEnvConfig s => Lens' s (CompilerVersion 'CVActual) +actualCompilerVersionL :: HasEnvConfig s => Lens' s ActualCompiler actualCompilerVersionL = envConfigL.lens envConfigCompilerVersion (\x y -> x { envConfigCompilerVersion = y }) @@ -2048,7 +1955,7 @@ loadedSnapshotL = envConfigL.lens envConfigLoadedSnapshot (\x y -> x { envConfigLoadedSnapshot = y }) -whichCompilerL :: Getting r (CompilerVersion a) WhichCompiler +whichCompilerL :: Getting r ActualCompiler WhichCompiler whichCompilerL = to whichCompiler envOverrideSettingsL :: HasConfig env => Lens' env (EnvSettings -> IO ProcessContext) @@ -2056,13 +1963,10 @@ envOverrideSettingsL = configL.lens configProcessContextSettings (\x y -> x { configProcessContextSettings = y }) -globalHintsL :: HasBuildConfig s => Getting r s (Map PackageName (Maybe Version)) -globalHintsL = snapshotDefL.to sdGlobalHints - shouldForceGhcColorFlag :: (HasRunner env, HasEnvConfig env) => RIO env Bool shouldForceGhcColorFlag = do - canDoColor <- (>= $(mkVersion "8.2.1")) . getGhcVersion + canDoColor <- (>= mkVersion [8, 2, 1]) . 
getGhcVersion <$> view actualCompilerVersionL shouldDoColor <- view useColorL return $ canDoColor && shouldDoColor diff --git a/src/Stack/Types/Config/Build.hs b/src/Stack/Types/Config/Build.hs index 296379d225..56b75210b6 100644 --- a/src/Stack/Types/Config/Build.hs +++ b/src/Stack/Types/Config/Build.hs @@ -32,8 +32,6 @@ import Data.Aeson.Extended import qualified Data.Map.Strict as Map import Generics.Deriving.Monoid (memptydefault, mappenddefault) import Stack.Prelude -import Stack.Types.FlagName -import Stack.Types.PackageName -- | Build options that is interpreted by the build command. -- This is built up from BuildOptsCLI and BuildOptsMonoid diff --git a/src/Stack/Types/Docker.hs b/src/Stack/Types/Docker.hs index 730b70e0b6..d87bbc1a23 100644 --- a/src/Stack/Types/Docker.hs +++ b/src/Stack/Types/Docker.hs @@ -26,7 +26,7 @@ import Text.Read (Read (..)) data DockerOpts = DockerOpts {dockerEnable :: !Bool -- ^ Is using Docker enabled? - ,dockerImage :: !String + ,dockerImage :: !(Either SomeException String) -- ^ Exact Docker image tag or ID. Overrides docker-repo-*/tag. ,dockerRegistryLogin :: !Bool -- ^ Does registry require login for pulls? diff --git a/src/Stack/Types/FlagName.hs b/src/Stack/Types/FlagName.hs deleted file mode 100644 index f891ec6891..0000000000 --- a/src/Stack/Types/FlagName.hs +++ /dev/null @@ -1,118 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE TemplateHaskell #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE TupleSections #-} - --- | Names for flags. - -module Stack.Types.FlagName - (FlagName - ,FlagNameParseFail(..) 
- ,flagNameParser - ,parseFlagName - ,parseFlagNameFromString - ,flagNameString - ,flagNameText - ,fromCabalFlagName - ,toCabalFlagName - ,mkFlagName) - where - -import Stack.Prelude -import Data.Aeson.Extended -import Data.Attoparsec.Text as A -import Data.Char (isLetter, isDigit, toLower) -import qualified Data.Text as T -import qualified Distribution.PackageDescription as Cabal -import Language.Haskell.TH -import Language.Haskell.TH.Syntax - --- | A parse fail. -newtype FlagNameParseFail - = FlagNameParseFail Text - deriving (Typeable) -instance Exception FlagNameParseFail -instance Show FlagNameParseFail where - show (FlagNameParseFail bs) = "Invalid flag name: " ++ show bs - --- | A flag name. -newtype FlagName = - FlagName Text - deriving (Typeable,Data,Generic,Hashable,Store,NFData,ToJSONKey) -instance Eq FlagName where - x == y = compare x y == EQ -instance Ord FlagName where - compare (FlagName x) (FlagName y) = - compare (T.map toLower x) (T.map toLower y) - -instance Lift FlagName where - lift (FlagName n) = - appE (conE 'FlagName) - (stringE (T.unpack n)) - -instance Show FlagName where - show (FlagName n) = T.unpack n - -instance FromJSON FlagName where - parseJSON j = - do s <- parseJSON j - case parseFlagNameFromString s of - Nothing -> - fail ("Couldn't parse flag name: " ++ s) - Just ver -> return ver - -instance FromJSONKey FlagName where - fromJSONKey = FromJSONKeyTextParser $ \k -> - either (fail . show) return $ parseFlagName k - --- | Attoparsec parser for a flag name -flagNameParser :: Parser FlagName -flagNameParser = fmap FlagName $ do - t <- A.takeWhile1 (\c -> isAlphaNum c || separator c) - when (T.head t == '-') $ fail "flag name cannot start with dash" - return t - where separator c = c == '-' || c == '_' - isAlphaNum c = isLetter c || isDigit c - --- | Make a flag name. 
-mkFlagName :: String -> Q Exp -mkFlagName s = - case parseFlagNameFromString s of - Nothing -> qRunIO $ throwString ("Invalid flag name: " ++ show s) - Just pn -> [|pn|] - --- | Convenient way to parse a flag name from a 'Text'. -parseFlagName :: MonadThrow m => Text -> m FlagName -parseFlagName x = go x - where go = - either (const (throwM (FlagNameParseFail x))) return . - parseOnly (flagNameParser <* endOfInput) - --- | Convenience function for parsing from a 'String' -parseFlagNameFromString :: MonadThrow m => String -> m FlagName -parseFlagNameFromString = - parseFlagName . T.pack - --- | Produce a string representation of a flag name. -flagNameString :: FlagName -> String -flagNameString (FlagName n) = T.unpack n - --- | Produce a string representation of a flag name. -flagNameText :: FlagName -> Text -flagNameText (FlagName n) = n - --- | Convert from a Cabal flag name. -fromCabalFlagName :: Cabal.FlagName -> FlagName -fromCabalFlagName name = - let !x = T.pack $ Cabal.unFlagName name - in FlagName x - --- | Convert to a Cabal flag name. -toCabalFlagName :: FlagName -> Cabal.FlagName -toCabalFlagName (FlagName name) = - let !x = T.unpack name - in Cabal.mkFlagName x diff --git a/src/Stack/Types/NamedComponent.hs b/src/Stack/Types/NamedComponent.hs index 09bdc3fbf3..3b360f0a12 100644 --- a/src/Stack/Types/NamedComponent.hs +++ b/src/Stack/Types/NamedComponent.hs @@ -16,8 +16,8 @@ module Stack.Types.NamedComponent , isCBench ) where +import Pantry import Stack.Prelude -import Stack.Types.PackageName import qualified Data.Set as Set import qualified Data.Text as T @@ -41,7 +41,7 @@ renderPkgComponents :: [(PackageName, NamedComponent)] -> Text renderPkgComponents = T.intercalate " " . 
map renderPkgComponent renderPkgComponent :: (PackageName, NamedComponent) -> Text -renderPkgComponent (pkg, comp) = packageNameText pkg <> ":" <> renderComponent comp +renderPkgComponent (pkg, comp) = fromString (packageNameString pkg) <> ":" <> renderComponent comp exeComponents :: Set NamedComponent -> Set Text exeComponents = Set.fromList . mapMaybe mExeName . Set.toList diff --git a/src/Stack/Types/Package.hs b/src/Stack/Types/Package.hs index 7587d8b404..73656b60bd 100644 --- a/src/Stack/Types/Package.hs +++ b/src/Stack/Types/Package.hs @@ -10,8 +10,7 @@ module Stack.Types.Package where import Stack.Prelude -import qualified Data.ByteString as S -import Data.List +import qualified RIO.Text as T import qualified Data.Map as M import qualified Data.Set as Set import Data.IORef.RunOnce @@ -23,15 +22,11 @@ import Distribution.License (License) import Distribution.ModuleName (ModuleName) import Distribution.PackageDescription (TestSuiteInterface, BuildType) import Distribution.System (Platform (..)) -import Path as FL -import Stack.Types.BuildPlan (PackageLocation, PackageLocationIndex (..), ExeName) +import Stack.Types.BuildPlan (ExeName) import Stack.Types.Compiler import Stack.Types.Config -import Stack.Types.FlagName import Stack.Types.GhcPkgId import Stack.Types.NamedComponent -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName import Stack.Types.Version -- | All exceptions thrown by the library. 
@@ -41,9 +36,6 @@ data PackageException !(Maybe Version) ![PError] ![PWarning] - | PackageNoCabalFileFound (Path Abs Dir) - | PackageMultipleCabalFilesFound (Path Abs Dir) [Path Abs File] - | MismatchedCabalName (Path Abs File) PackageName | MismatchedCabalIdentifier !PackageIdentifierRevision !PackageIdentifier deriving Typeable instance Exception PackageException @@ -51,7 +43,7 @@ instance Show PackageException where show (PackageInvalidCabalFile loc _mversion errs warnings) = concat [ "Unable to parse cabal file " , case loc of - Left pir -> "for " ++ packageIdentifierRevisionString pir + Left pir -> "for " ++ T.unpack (utf8BuilderToText (display pir)) Right fp -> toFilePath fp {- @@ -81,33 +73,12 @@ instance Show PackageException where ]) warnings ] - show (PackageNoCabalFileFound dir) = concat - [ "Stack looks for packages in the directories configured in" - , " the 'packages' and 'extra-deps' fields defined in your stack.yaml\n" - , "The current entry points to " - , toFilePath dir - , " but no .cabal or package.yaml file could be found there." - ] - show (PackageMultipleCabalFilesFound dir files) = - "Multiple .cabal files found in directory " ++ - toFilePath dir ++ - ": " ++ - intercalate ", " (map (toFilePath . filename) files) - show (MismatchedCabalName fp name) = concat - [ "cabal file path " - , toFilePath fp - , " does not match the package name it defines.\n" - , "Please rename the file to: " - , packageNameString name - , ".cabal\n" - , "For more information, see: https://github.com/commercialhaskell/stack/issues/317" - ] show (MismatchedCabalIdentifier pir ident) = concat [ "Mismatched package identifier." , "\nFound: " , packageIdentifierString ident , "\nExpected: " - , packageIdentifierRevisionString pir + , T.unpack $ utf8BuilderToText $ display pir ] -- | Libraries in a package. 
Since Cabal 2.0, internal libraries are a @@ -142,6 +113,9 @@ data Package = } deriving (Show,Typeable) +packageIdent :: Package -> PackageIdentifier +packageIdent p = PackageIdentifier (packageName p) (packageVersion p) + -- | The value for a map from dependency name. This contains both the -- version range and the type of dependency, and provides a semigroup -- instance. @@ -235,8 +209,7 @@ data PackageConfig = ,packageConfigEnableBenchmarks :: !Bool -- ^ Are benchmarks enabled? ,packageConfigFlags :: !(Map FlagName Bool) -- ^ Configured flags. ,packageConfigGhcOptions :: ![Text] -- ^ Configured ghc options. - ,packageConfigCompilerVersion - :: !(CompilerVersion 'CVActual) -- ^ GHC version + ,packageConfigCompilerVersion :: ActualCompiler -- ^ GHC version ,packageConfigPlatform :: !Platform -- ^ host platform } deriving (Show,Typeable) @@ -253,24 +226,19 @@ type SourceMap = Map PackageName PackageSource -- | Where the package's source is located: local directory or package index data PackageSource - = PSFiles LocalPackage InstallLocation - -- ^ Package which exist on the filesystem (as opposed to an index tarball) - | PSIndex InstallLocation (Map FlagName Bool) [Text] PackageIdentifierRevision - -- ^ Package which is in an index, and the files do not exist on the - -- filesystem yet. + = PSFilePath LocalPackage InstallLocation + -- ^ Package which exists on the filesystem + | PSRemote InstallLocation (Map FlagName Bool) [Text] PackageLocationImmutable PackageIdentifier + -- ^ Package which is downloaded remotely. 
deriving Show piiVersion :: PackageSource -> Version -piiVersion (PSFiles lp _) = packageVersion $ lpPackage lp -piiVersion (PSIndex _ _ _ (PackageIdentifierRevision (PackageIdentifier _ v) _)) = v +piiVersion (PSFilePath lp _) = packageVersion $ lpPackage lp +piiVersion (PSRemote _ _ _ _ (PackageIdentifier _ v)) = v piiLocation :: PackageSource -> InstallLocation -piiLocation (PSFiles _ loc) = loc -piiLocation (PSIndex loc _ _ _) = loc - -piiPackageLocation :: PackageSource -> PackageLocationIndex FilePath -piiPackageLocation (PSFiles lp _) = PLOther (lpLocation lp) -piiPackageLocation (PSIndex _ _ _ pir) = PLIndex pir +piiLocation (PSFilePath _ loc) = loc +piiLocation (PSRemote loc _ _ _ _) = loc -- | Information on a locally available package of source code data LocalPackage = LocalPackage @@ -292,8 +260,6 @@ data LocalPackage = LocalPackage , lpTestBench :: !(Maybe Package) -- ^ This stores the 'Package' with tests and benchmarks enabled, if -- either is asked for by the user. - , lpDir :: !(Path Abs Dir) - -- ^ Directory of the package. , lpCabalFile :: !(Path Abs File) -- ^ The .cabal file , lpForceDirty :: !Bool @@ -305,8 +271,6 @@ data LocalPackage = LocalPackage -- ^ current state of the files , lpComponentFiles :: !(IOThunk (Map NamedComponent (Set (Path Abs File)))) -- ^ all files used by this package - , lpLocation :: !(PackageLocation FilePath) - -- ^ Where this source code came from } deriving Show @@ -341,7 +305,7 @@ data InstalledPackageLocation = InstalledTo InstallLocation | ExtraGlobal data FileCacheInfo = FileCacheInfo { fciModTime :: !ModTime , fciSize :: !Word64 - , fciHash :: !S.ByteString + , fciHash :: !SHA256 } deriving (Generic, Show, Eq, Data, Typeable) instance Store FileCacheInfo @@ -427,4 +391,6 @@ installedPackageIdentifier (Executable pid) = pid -- | Get the installed Version. installedVersion :: Installed -> Version -installedVersion = packageIdentifierVersion . 
installedPackageIdentifier +installedVersion i = + let PackageIdentifier _ version = installedPackageIdentifier i + in version diff --git a/src/Stack/Types/PackageDump.hs b/src/Stack/Types/PackageDump.hs index 8e96ad1387..84a106e1a8 100644 --- a/src/Stack/Types/PackageDump.hs +++ b/src/Stack/Types/PackageDump.hs @@ -14,7 +14,6 @@ import Data.Store.Version import Data.Store.VersionTagged import Stack.Prelude import Stack.Types.GhcPkgId -import Stack.Types.PackageIdentifier -- | Cached information on whether package have profiling libraries and haddocks. newtype InstalledCache = InstalledCache (IORef InstalledCacheInner) @@ -31,4 +30,4 @@ data InstalledCacheEntry = InstalledCacheEntry instance Store InstalledCacheEntry installedCacheVC :: VersionConfig InstalledCacheInner -installedCacheVC = storeVersionConfig "installed-v1" "GGyaE6qY9FOqeWtozuadKqS7_QM=" +installedCacheVC = storeVersionConfig "installed-v2" "eHLVmgbOWvPSm1X3wLfclM-XiXc=" diff --git a/src/Stack/Types/PackageIdentifier.hs b/src/Stack/Types/PackageIdentifier.hs deleted file mode 100644 index dc2a6f7553..0000000000 --- a/src/Stack/Types/PackageIdentifier.hs +++ /dev/null @@ -1,289 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# OPTIONS -fno-warn-unused-do-bind #-} - --- | Package identifier (name-version). - -module Stack.Types.PackageIdentifier - ( PackageIdentifier(..) - , PackageIdentifierRevision(..) - , CabalHash - , mkCabalHashFromSHA256 - , computeCabalHash - , showCabalHash - , CabalFileInfo(..) 
- , toTuple - , fromTuple - , parsePackageIdentifier - , parsePackageIdentifierFromString - , parsePackageIdentifierRevision - , packageIdentifierParser - , packageIdentifierString - , packageIdentifierRevisionString - , packageIdentifierText - , toCabalPackageIdentifier - , fromCabalPackageIdentifier - , StaticSHA256 - , mkStaticSHA256FromText - , mkStaticSHA256FromFile - , mkStaticSHA256FromDigest - , staticSHA256ToText - , staticSHA256ToBase16 - , staticSHA256ToRaw - ) - where - -import Stack.Prelude -import Crypto.Hash.Conduit (hashFile) -import Crypto.Hash as Hash (hashlazy, Digest, SHA256) -import Data.Aeson.Extended -import Data.Attoparsec.Text as A -import qualified Data.ByteArray -import qualified Data.ByteArray.Encoding as Mem -import qualified Data.ByteString.Lazy as L -import qualified Data.Text as T -import Data.Text.Encoding (decodeUtf8, encodeUtf8) -import qualified Distribution.Package as C -import Stack.StaticBytes -import Stack.Types.PackageName -import Stack.Types.Version - --- | A parse fail. -data PackageIdentifierParseFail - = PackageIdentifierParseFail Text - | PackageIdentifierRevisionParseFail Text - deriving (Typeable) -instance Show PackageIdentifierParseFail where - show (PackageIdentifierParseFail bs) = "Invalid package identifier: " ++ show bs - show (PackageIdentifierRevisionParseFail bs) = "Invalid package identifier (with optional revision): " ++ show bs -instance Exception PackageIdentifierParseFail - --- | A pkg-ver combination. -data PackageIdentifier = PackageIdentifier - { -- | Get the name part of the identifier. - packageIdentifierName :: !PackageName - -- | Get the version part of the identifier. - , packageIdentifierVersion :: !Version - } deriving (Eq,Ord,Generic,Data,Typeable) - -instance NFData PackageIdentifier where - rnf (PackageIdentifier !p !v) = - seq (rnf p) (rnf v) - -instance Hashable PackageIdentifier -instance Store PackageIdentifier - -instance Show PackageIdentifier where - show = show . 
packageIdentifierString -instance Display PackageIdentifier where - display = fromString . packageIdentifierString - -instance ToJSON PackageIdentifier where - toJSON = toJSON . packageIdentifierString -instance FromJSON PackageIdentifier where - parseJSON = withText "PackageIdentifier" $ \t -> - case parsePackageIdentifier t of - Left e -> fail $ show (e, t) - Right x -> return x - --- | A 'PackageIdentifier' combined with optionally specified Hackage --- cabal file revision. -data PackageIdentifierRevision = PackageIdentifierRevision - { pirIdent :: !PackageIdentifier - , pirRevision :: !CabalFileInfo - } deriving (Eq,Ord,Generic,Data,Typeable) - -instance NFData PackageIdentifierRevision where - rnf (PackageIdentifierRevision !i !c) = - seq (rnf i) (rnf c) - -instance Hashable PackageIdentifierRevision -instance Store PackageIdentifierRevision - -instance Show PackageIdentifierRevision where - show = show . packageIdentifierRevisionString - -instance ToJSON PackageIdentifierRevision where - toJSON = toJSON . packageIdentifierRevisionString -instance FromJSON PackageIdentifierRevision where - parseJSON = withText "PackageIdentifierRevision" $ \t -> - case parsePackageIdentifierRevision t of - Left e -> fail $ show (e, t) - Right x -> return x - --- | A cryptographic hash of a Cabal file. -newtype CabalHash = CabalHash { unCabalHash :: StaticSHA256 } - deriving (Generic, Show, Eq, NFData, Data, Typeable, Ord, Store, Hashable) - --- | A SHA256 hash, stored in a static size for more efficient --- serialization with store. -newtype StaticSHA256 = StaticSHA256 Bytes32 - deriving (Generic, Show, Eq, NFData, Data, Typeable, Ord, Hashable, Store) - --- | Generate a 'StaticSHA256' value from a base16-encoded SHA256 hash. -mkStaticSHA256FromText :: Text -> Either SomeException StaticSHA256 -mkStaticSHA256FromText t = - mapLeft (toException . stringException) (Mem.convertFromBase Mem.Base16 (encodeUtf8 t)) - >>= either (Left . toE) (Right . StaticSHA256) - . 
toStaticExact - . (id :: ByteString -> ByteString) - where - toE e = toException $ stringException $ concat - [ "Unable to convert " - , show t - , " into SHA256: " - , show e - ] - --- | Generate a 'StaticSHA256' value from the contents of a file. -mkStaticSHA256FromFile :: MonadIO m => Path Abs File -> m StaticSHA256 -mkStaticSHA256FromFile fp = liftIO $ mkStaticSHA256FromDigest <$> hashFile (toFilePath fp) - -mkStaticSHA256FromDigest :: Hash.Digest Hash.SHA256 -> StaticSHA256 -mkStaticSHA256FromDigest digest - = StaticSHA256 - $ either impureThrow id - $ toStaticExact - (Data.ByteArray.convert digest :: ByteString) - --- | Convert a 'StaticSHA256' into a base16-encoded SHA256 hash. -staticSHA256ToText :: StaticSHA256 -> Text -staticSHA256ToText = decodeUtf8 . staticSHA256ToBase16 - --- | Convert a 'StaticSHA256' into a base16-encoded SHA256 hash. -staticSHA256ToBase16 :: StaticSHA256 -> ByteString -staticSHA256ToBase16 (StaticSHA256 x) = Mem.convertToBase Mem.Base16 x - -staticSHA256ToRaw :: StaticSHA256 -> ByteString -staticSHA256ToRaw (StaticSHA256 x) = Data.ByteArray.convert x - --- | Generate a 'CabalHash' value from a base16-encoded SHA256 hash. -mkCabalHashFromSHA256 :: Text -> Either SomeException CabalHash -mkCabalHashFromSHA256 = fmap CabalHash . mkStaticSHA256FromText - --- | Convert a 'CabalHash' into a base16-encoded SHA256 hash. -cabalHashToText :: CabalHash -> Text -cabalHashToText = staticSHA256ToText . unCabalHash - --- | Compute a 'CabalHash' value from a cabal file's contents. -computeCabalHash :: L.ByteString -> CabalHash -computeCabalHash = CabalHash . mkStaticSHA256FromDigest . Hash.hashlazy - -showCabalHash :: CabalHash -> Text -showCabalHash = T.append (T.pack "sha256:") . cabalHashToText - --- | Information on the contents of a cabal file -data CabalFileInfo - = CFILatest - -- ^ Take the latest revision of the cabal file available. 
This - -- isn't reproducible at all, but the running assumption (not - -- necessarily true) is that cabal file revisions do not change - -- semantics of the build. - | CFIHash - !(Maybe Int) -- file size in bytes - !CabalHash - -- ^ Identify by contents of the cabal file itself - | CFIRevision !Word - -- ^ Identify by revision number, with 0 being the original and - -- counting upward. - deriving (Generic, Show, Eq, Ord, Data, Typeable) -instance Store CabalFileInfo -instance NFData CabalFileInfo -instance Hashable CabalFileInfo - --- | Convert from a package identifier to a tuple. -toTuple :: PackageIdentifier -> (PackageName,Version) -toTuple (PackageIdentifier n v) = (n,v) - --- | Convert from a tuple to a package identifier. -fromTuple :: (PackageName,Version) -> PackageIdentifier -fromTuple (n,v) = PackageIdentifier n v - --- | A parser for a package-version pair. -packageIdentifierParser :: Parser PackageIdentifier -packageIdentifierParser = - do name <- packageNameParser - char '-' - PackageIdentifier name <$> versionParser - --- | Convenient way to parse a package identifier from a 'Text'. -parsePackageIdentifier :: MonadThrow m => Text -> m PackageIdentifier -parsePackageIdentifier x = go x - where go = - either (const (throwM (PackageIdentifierParseFail x))) return . - parseOnly (packageIdentifierParser <* endOfInput) - --- | Convenience function for parsing from a 'String'. -parsePackageIdentifierFromString :: MonadThrow m => String -> m PackageIdentifier -parsePackageIdentifierFromString = - parsePackageIdentifier . T.pack - --- | Parse a 'PackageIdentifierRevision' -parsePackageIdentifierRevision :: MonadThrow m => Text -> m PackageIdentifierRevision -parsePackageIdentifierRevision x = go x - where - go = - either (const (throwM (PackageIdentifierRevisionParseFail x))) return . 
- parseOnly (parser <* endOfInput) - - parser = PackageIdentifierRevision - <$> packageIdentifierParser - <*> (cfiHash <|> cfiRevision <|> pure CFILatest) - - cfiHash = do - _ <- string $ T.pack "@sha256:" - hash' <- A.takeWhile (/= ',') - hash'' <- either (\e -> fail $ "Invalid SHA256: " ++ show e) return - $ mkCabalHashFromSHA256 hash' - msize <- optional $ do - _ <- A.char ',' - A.decimal - A.endOfInput - return $ CFIHash msize hash'' - - cfiRevision = do - _ <- string $ T.pack "@rev:" - y <- A.decimal - A.endOfInput - return $ CFIRevision y --- | Get a string representation of the package identifier; name-ver. -packageIdentifierString :: PackageIdentifier -> String -packageIdentifierString (PackageIdentifier n v) = show n ++ "-" ++ show v - --- | Get a string representation of the package identifier with revision; name-ver[@hashtype:hash[,size]]. -packageIdentifierRevisionString :: PackageIdentifierRevision -> String -packageIdentifierRevisionString (PackageIdentifierRevision ident cfi) = - concat $ packageIdentifierString ident : rest - where - rest = - case cfi of - CFILatest -> [] - CFIHash msize hash' -> - "@sha256:" - : T.unpack (cabalHashToText hash') - : showSize msize - CFIRevision rev -> ["@rev:", show rev] - - showSize Nothing = [] - showSize (Just int) = [',' : show int] - --- | Get a Text representation of the package identifier; name-ver. -packageIdentifierText :: PackageIdentifier -> Text -packageIdentifierText = T.pack . 
packageIdentifierString - -toCabalPackageIdentifier :: PackageIdentifier -> C.PackageIdentifier -toCabalPackageIdentifier x = - C.PackageIdentifier - (toCabalPackageName (packageIdentifierName x)) - (toCabalVersion (packageIdentifierVersion x)) - -fromCabalPackageIdentifier :: C.PackageIdentifier -> PackageIdentifier -fromCabalPackageIdentifier (C.PackageIdentifier name version) = - PackageIdentifier - (fromCabalPackageName name) - (fromCabalVersion version) diff --git a/src/Stack/Types/PackageIndex.hs b/src/Stack/Types/PackageIndex.hs deleted file mode 100644 index 4320ef2297..0000000000 --- a/src/Stack/Types/PackageIndex.hs +++ /dev/null @@ -1,183 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE OverloadedStrings #-} - -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveDataTypeable #-} - -module Stack.Types.PackageIndex - ( PackageDownload (..) - , HSPackageDownload (..) - , PackageCache (..) - , OffsetSize (..) - -- ** PackageIndex, IndexName & IndexLocation - , PackageIndex(..) - , IndexName(..) - , indexNameText - , IndexType (..) - , HackageSecurity (..) - ) where - -import Data.Aeson.Extended -import qualified Data.Foldable as F -import qualified Data.HashMap.Strict as HashMap -import qualified Data.Map.Strict as Map -import qualified Data.Text as T -import Data.Text.Encoding (encodeUtf8, decodeUtf8) -import Path -import Stack.Prelude -import Stack.Types.PackageName -import Stack.Types.PackageIdentifier -import Stack.Types.Version -import Data.List.NonEmpty (NonEmpty) - --- | Cached information about packages in an index. We have a mapping --- from package name to a version map. Within the version map, we map --- from the version to information on an individual version. Each --- version has optional download information (about the package's --- tarball itself), and cabal file information. 
The cabal file --- information is a non-empty list of all cabal file revisions. Each --- file revision indicates the hash of the contents of the cabal file, --- and the offset into the index tarball. --- --- The reason for each 'Version' mapping to a two element list of --- 'CabalHash'es is because some older Stackage snapshots have CRs in --- their cabal files. For compatibility with these older snapshots, --- both hashes are stored: the first element of the two element list --- being the original hash, and the (potential) second element with --- the CRs stripped. [Note: This is was initially stored as a two --- element list, and cannot be easily packed into more explict ADT or --- newtype because of some template-haskell that would need to be --- modified as well: the 'versionedDecodeOrLoad' function call found --- in the 'getPackageCaches' function in 'Stack.PackageIndex'.] --- --- It's assumed that cabal files appear in the index tarball in the --- correct revision order. -newtype PackageCache index = PackageCache - (HashMap PackageName - (HashMap Version - (index, Maybe PackageDownload, NonEmpty ([CabalHash], OffsetSize)))) - deriving (Generic, Eq, Show, Data, Typeable, Store, NFData) - -instance Semigroup (PackageCache index) where - PackageCache x <> PackageCache y = PackageCache (HashMap.unionWith HashMap.union x y) - -instance Monoid (PackageCache index) where - mempty = PackageCache HashMap.empty - mappend = (<>) - --- | offset in bytes into the 01-index.tar file for the .cabal file --- contents, and size in bytes of the .cabal file -data OffsetSize = OffsetSize !Int64 !Int64 - deriving (Generic, Eq, Show, Data, Typeable) - -instance Store OffsetSize -instance NFData OffsetSize - -data PackageDownload = PackageDownload - { pdSHA256 :: !StaticSHA256 - , pdUrl :: !ByteString - , pdSize :: !Word64 - } - deriving (Show, Generic, Eq, Data, Typeable) - -instance Store PackageDownload -instance NFData PackageDownload -instance FromJSON PackageDownload where - 
parseJSON = withObject "PackageDownload" $ \o -> do - hashes <- o .: "package-hashes" - sha256' <- maybe mzero return (Map.lookup ("SHA256" :: Text) hashes) - sha256 <- - case mkStaticSHA256FromText sha256' of - Left e -> fail $ "Invalid sha256: " ++ show e - Right x -> return x - locs <- o .: "package-locations" - url <- - case reverse locs of - [] -> mzero - x:_ -> return x - size <- o .: "package-size" - return PackageDownload - { pdSHA256 = sha256 - , pdUrl = encodeUtf8 url - , pdSize = size - } - --- | Hackage Security provides a different JSON format, we'll have our --- own JSON parser for it. -newtype HSPackageDownload = HSPackageDownload { unHSPackageDownload :: PackageDownload } -instance FromJSON HSPackageDownload where - parseJSON = withObject "HSPackageDownload" $ \o1 -> do - o2 <- o1 .: "signed" - Object o3 <- o2 .: "targets" - Object o4:_ <- return $ F.toList o3 - len <- o4 .: "length" - hashes <- o4 .: "hashes" - sha256' <- hashes .: "sha256" - sha256 <- - case mkStaticSHA256FromText sha256' of - Left e -> fail $ "Invalid sha256: " ++ show e - Right x -> return x - return $ HSPackageDownload PackageDownload - { pdSHA256 = sha256 - , pdSize = len - , pdUrl = "" - } - --- | Unique name for a package index -newtype IndexName = IndexName { unIndexName :: ByteString } - deriving (Show, Eq, Ord, Hashable, Store) -indexNameText :: IndexName -> Text -indexNameText = decodeUtf8 . unIndexName -instance ToJSON IndexName where - toJSON = toJSON . 
indexNameText - -instance FromJSON IndexName where - parseJSON = withText "IndexName" $ \t -> - case parseRelDir (T.unpack t) of - Left e -> fail $ "Invalid index name: " ++ show e - Right _ -> return $ IndexName $ encodeUtf8 t - -data IndexType = ITHackageSecurity !HackageSecurity | ITVanilla - deriving (Show, Eq, Ord) - -data HackageSecurity = HackageSecurity - { hsKeyIds :: ![Text] - , hsKeyThreshold :: !Int - } - deriving (Show, Eq, Ord) -instance FromJSON HackageSecurity where - parseJSON = withObject "HackageSecurity" $ \o -> HackageSecurity - <$> o .: "keyids" - <*> o .: "key-threshold" - --- | Information on a single package index -data PackageIndex = PackageIndex - { indexName :: !IndexName - , indexLocation :: !Text - -- ^ URL for the tarball or, in the case of Hackage Security, the - -- root of the directory - , indexType :: !IndexType - , indexDownloadPrefix :: !Text - -- ^ URL prefix for downloading packages - , indexRequireHashes :: !Bool - -- ^ Require that hashes and package size information be available for packages in this index - } - deriving Show -instance FromJSON (WithJSONWarnings PackageIndex) where - parseJSON = withObjectWarnings "PackageIndex" $ \o -> do - name <- o ..: "name" - prefix <- o ..: "download-prefix" - http <- o ..: "http" - mhackageSecurity <- o ..:? "hackage-security" - let indexType' = maybe ITVanilla ITHackageSecurity mhackageSecurity - reqHashes <- o ..:? 
"require-hashes" ..!= False - return PackageIndex - { indexName = name - , indexLocation = http - , indexType = indexType' - , indexDownloadPrefix = prefix - , indexRequireHashes = reqHashes - } diff --git a/src/Stack/Types/PackageName.hs b/src/Stack/Types/PackageName.hs index 9fb345e5de..f0a26c85c7 100644 --- a/src/Stack/Types/PackageName.hs +++ b/src/Stack/Types/PackageName.hs @@ -1,139 +1,15 @@ {-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE TemplateHaskell #-} -{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE TupleSections #-} -- | Names for packages. module Stack.Types.PackageName - (PackageName - ,PackageNameParseFail(..) - ,packageNameParser - ,parsePackageName - ,parsePackageNameFromString - ,packageNameString - ,packageNameText - ,fromCabalPackageName - ,toCabalPackageName - ,parsePackageNameFromFilePath - ,mkPackageName - ,packageNameArgument) - where + ( packageNameArgument + ) where import Stack.Prelude -import Data.Aeson.Extended -import Data.Attoparsec.Combinators -import Data.Attoparsec.Text -import Data.List (intercalate) -import qualified Data.Text as T -import qualified Distribution.Package as Cabal -import Language.Haskell.TH -import Language.Haskell.TH.Syntax import qualified Options.Applicative as O -import Path - --- | A parse fail. 
-data PackageNameParseFail - = PackageNameParseFail Text - | CabalFileNameParseFail FilePath - | CabalFileNameInvalidPackageName FilePath - deriving (Typeable) -instance Exception PackageNameParseFail -instance Show PackageNameParseFail where - show (PackageNameParseFail bs) = "Invalid package name: " ++ show bs - show (CabalFileNameParseFail fp) = "Invalid file path for cabal file, must have a .cabal extension: " ++ fp - show (CabalFileNameInvalidPackageName fp) = "cabal file names must use valid package names followed by a .cabal extension, the following is invalid: " ++ fp - --- | A package name. -newtype PackageName = - PackageName Text - deriving (Eq,Ord,Typeable,Data,Generic,Hashable,NFData,Store,ToJSON,ToJSONKey) - -instance Lift PackageName where - lift (PackageName n) = - appE (conE 'PackageName) - (stringE (T.unpack n)) - -instance Show PackageName where - show (PackageName n) = T.unpack n -instance Display PackageName where - display (PackageName n) = display n - -instance FromJSON PackageName where - parseJSON j = - do s <- parseJSON j - case parsePackageNameFromString s of - Nothing -> - fail ("Couldn't parse package name: " ++ s) - Just ver -> return ver - -instance FromJSONKey PackageName where - fromJSONKey = FromJSONKeyTextParser $ \k -> - either (fail . show) return $ parsePackageName k - --- | Attoparsec parser for a package name -packageNameParser :: Parser PackageName -packageNameParser = - fmap (PackageName . T.pack . intercalate "-") - (sepBy1 word (char '-')) - where - word = concat <$> sequence [many digit, - pured letter, - many (alternating letter digit)] - --- | Make a package name. -mkPackageName :: String -> Q Exp -mkPackageName s = - case parsePackageNameFromString s of - Nothing -> qRunIO $ throwString ("Invalid package name: " ++ show s) - Just pn -> [|pn|] - --- | Parse a package name from a 'Text'. 
-parsePackageName :: MonadThrow m => Text -> m PackageName -parsePackageName x = go x - where go = - either (const (throwM (PackageNameParseFail x))) return . - parseOnly (packageNameParser <* endOfInput) - --- | Parse a package name from a 'String'. -parsePackageNameFromString :: MonadThrow m => String -> m PackageName -parsePackageNameFromString = - parsePackageName . T.pack - --- | Produce a string representation of a package name. -packageNameString :: PackageName -> String -packageNameString (PackageName n) = T.unpack n - --- | Produce a string representation of a package name. -packageNameText :: PackageName -> Text -packageNameText (PackageName n) = n - --- | Convert from a Cabal package name. -fromCabalPackageName :: Cabal.PackageName -> PackageName -fromCabalPackageName name = - let !x = T.pack $ Cabal.unPackageName name - in PackageName x - --- | Convert to a Cabal package name. -toCabalPackageName :: PackageName -> Cabal.PackageName -toCabalPackageName (PackageName name) = - let !x = T.unpack name - in Cabal.mkPackageName x --- | Parse a package name from a file path. -parsePackageNameFromFilePath :: MonadThrow m => Path a File -> m PackageName -parsePackageNameFromFilePath fp = do - base <- clean $ toFilePath $ filename fp - case parsePackageNameFromString base of - Nothing -> throwM $ CabalFileNameInvalidPackageName $ toFilePath fp - Just x -> return x - where clean = liftM reverse . strip . reverse - strip ('l':'a':'b':'a':'c':'.':xs) = return xs - strip _ = throwM (CabalFileNameParseFail (toFilePath fp)) -- | An argument which accepts a template name of the format -- @foo.hsfiles@. 
@@ -145,7 +21,7 @@ packageNameArgument = either O.readerError return (p s)) where p s = - case parsePackageNameFromString s of + case parsePackageName s of Just x -> Right x Nothing -> Left $ unlines [ "Expected valid package name, but got: " ++ s diff --git a/src/Stack/Types/Resolver.hs b/src/Stack/Types/Resolver.hs index 0425972472..aaaf4b41ea 100644 --- a/src/Stack/Types/Resolver.hs +++ b/src/Stack/Types/Resolver.hs @@ -2,10 +2,7 @@ {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE CPP #-} {-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveFoldable #-} -{-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveTraversable #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiWayIf #-} @@ -17,122 +14,25 @@ {-# LANGUAGE UndecidableInstances #-} module Stack.Types.Resolver - (Resolver - ,IsLoaded(..) - ,LoadedResolver - ,ResolverWith(..) - ,parseResolverText - ,AbstractResolver(..) + (AbstractResolver(..) ,readAbstractResolver - ,resolverRawName ,SnapName(..) ,Snapshots (..) 
,renderSnapName ,parseSnapName - ,SnapshotHash - ,trimmedSnapshotHash - ,snapshotHashToBS - ,snapshotHashFromBS - ,snapshotHashFromDigest - ,parseCustomLocation ) where -import Crypto.Hash as Hash (hash, Digest, SHA256) import Data.Aeson.Extended - (ToJSON, toJSON, FromJSON, parseJSON, + (FromJSON, parseJSON, withObject, (.:), withText) -import qualified Data.ByteString as B -import qualified Data.ByteString.Base64.URL as B64URL import qualified Data.HashMap.Strict as HashMap import qualified Data.IntMap.Strict as IntMap import qualified Data.Text as T -import Data.Text.Encoding (decodeUtf8) import Data.Text.Read (decimal) import Data.Time (Day) -import Network.HTTP.StackClient (Request, parseUrlThrow) import Options.Applicative (ReadM) import qualified Options.Applicative.Types as OA -import Path import Stack.Prelude -import Stack.Types.Compiler -import Stack.Types.PackageIdentifier -import qualified System.FilePath as FP - -data IsLoaded = Loaded | NotLoaded - -type LoadedResolver = ResolverWith SnapshotHash -type Resolver = ResolverWith (Either Request FilePath) - --- TODO: once GHC 8.0 is the lowest version we support, make these into --- actual haddock comments... - --- | How we resolve which dependencies to install given a set of packages. -data ResolverWith customContents - = ResolverStackage !SnapName - -- ^ Use an official snapshot from the Stackage project, either an - -- LTS Haskell or Stackage Nightly. - - | ResolverCompiler !(CompilerVersion 'CVWanted) - -- ^ Require a specific compiler version, but otherwise provide no - -- build plan. Intended for use cases where end user wishes to - -- specify all upstream dependencies manually, such as using a - -- dependency solver. - - | ResolverCustom !Text !customContents - -- ^ A custom resolver based on the given location (as a raw URL - -- or filepath). 
If @customContents@ is a @Either Request - -- FilePath@, it represents the parsed location value (with - -- filepaths resolved relative to the directory containing the - -- file referring to the custom snapshot). Once it has been loaded - -- from disk, it will be replaced with a @SnapshotHash@ value, - -- which is used to store cached files. - deriving (Generic, Typeable, Show, Data, Eq, Functor, Foldable, Traversable) -instance Store LoadedResolver -instance NFData LoadedResolver - -instance ToJSON (ResolverWith a) where - toJSON x = case x of - ResolverStackage name -> toJSON $ renderSnapName name - ResolverCompiler version -> toJSON $ compilerVersionText version - ResolverCustom loc _ -> toJSON loc -instance a ~ () => FromJSON (ResolverWith a) where - parseJSON = withText "ResolverWith ()" $ return . parseResolverText - --- | Convert a Resolver into its @Text@ representation for human --- presentation. When possible, you should prefer @sdResolverName@, as --- it will handle the human-friendly name inside a custom snapshot. -resolverRawName :: ResolverWith a -> Text -resolverRawName (ResolverStackage name) = renderSnapName name -resolverRawName (ResolverCompiler v) = compilerVersionText v -resolverRawName (ResolverCustom loc _ ) = "custom: " <> loc - -parseCustomLocation - :: MonadThrow m - => Maybe (Path Abs Dir) -- ^ directory config value was read from - -> ResolverWith () -- could technically be any type parameter, restricting to help with type safety - -> m Resolver -parseCustomLocation mdir (ResolverCustom t ()) = - ResolverCustom t <$> case parseUrlThrow $ T.unpack t of - Nothing -> Right <$> do - dir <- - case mdir of - Nothing -> throwM $ FilepathInDownloadedSnapshot t - Just x -> return x - let rel = - T.unpack - $ fromMaybe t - $ T.stripPrefix "file://" t <|> T.stripPrefix "file:" t - return $ toFilePath dir FP. 
rel - Just req -> return $ Left req -parseCustomLocation _ (ResolverStackage name) = return $ ResolverStackage name -parseCustomLocation _ (ResolverCompiler cv) = return $ ResolverCompiler cv - --- | Parse a @Resolver@ from a @Text@ -parseResolverText :: Text -> ResolverWith () -parseResolverText t - | Right x <- parseSnapName t = ResolverStackage x - | Just v <- parseCompilerVersion t = ResolverCompiler v - | otherwise = ResolverCustom t () -- | Either an actual resolver value, or an abstract description of one (e.g., -- latest nightly). @@ -140,20 +40,29 @@ data AbstractResolver = ARLatestNightly | ARLatestLTS | ARLatestLTSMajor !Int - | ARResolver !(ResolverWith ()) + | ARResolver !SnapshotLocation | ARGlobal - deriving Show -readAbstractResolver :: ReadM AbstractResolver +instance Show AbstractResolver where + show = T.unpack . utf8BuilderToText . display + +instance Display AbstractResolver where + display ARLatestNightly = "nightly" + display ARLatestLTS = "lts" + display (ARLatestLTSMajor x) = "lts-" <> display x + display (ARResolver usl) = display usl + display ARGlobal = "global" + +readAbstractResolver :: ReadM (Unresolved AbstractResolver) readAbstractResolver = do s <- OA.readerAsk case s of - "global" -> return ARGlobal - "nightly" -> return ARLatestNightly - "lts" -> return ARLatestLTS + "global" -> pure $ pure ARGlobal + "nightly" -> pure $ pure ARLatestNightly + "lts" -> pure $ pure ARLatestLTS 'l':'t':'s':'-':x | Right (x', "") <- decimal $ T.pack x -> - return $ ARLatestLTSMajor x' - _ -> return $ ARResolver $ parseResolverText $ T.pack s + pure $ pure $ ARLatestLTSMajor x' + _ -> pure $ ARResolver <$> parseSnapshotLocation (T.pack s) -- | The name of an LTS Haskell or Stackage Nightly snapshot. 
data SnapName @@ -234,25 +143,3 @@ instance FromJSON Snapshots where Left e -> fail $ show e Right (LTS x y) -> return $ IntMap.singleton x y Right (Nightly _) -> fail "Unexpected nightly value" - -newtype SnapshotHash = SnapshotHash { unSnapshotHash :: StaticSHA256 } - deriving (Generic, Typeable, Show, Data, Eq) -instance Store SnapshotHash -instance NFData SnapshotHash - --- | Return the first 12 characters of the hash as a B64URL-encoded --- string. -trimmedSnapshotHash :: SnapshotHash -> Text -trimmedSnapshotHash = decodeUtf8 . B.take 12 . B64URL.encode . staticSHA256ToRaw . unSnapshotHash - --- | Return the raw bytes in the hash -snapshotHashToBS :: SnapshotHash -> ByteString -snapshotHashToBS = staticSHA256ToRaw . unSnapshotHash - --- | Create a new SnapshotHash by SHA256 hashing the given contents -snapshotHashFromBS :: ByteString -> SnapshotHash -snapshotHashFromBS = snapshotHashFromDigest . Hash.hash - --- | Create a new SnapshotHash from the given digest -snapshotHashFromDigest :: Digest SHA256 -> SnapshotHash -snapshotHashFromDigest = SnapshotHash . 
mkStaticSHA256FromDigest diff --git a/src/Stack/Types/Runner.hs b/src/Stack/Types/Runner.hs index afc3e960ee..8af10b48ff 100644 --- a/src/Stack/Types/Runner.hs +++ b/src/Stack/Types/Runner.hs @@ -22,11 +22,9 @@ module Stack.Types.Runner ) where import Data.Aeson (FromJSON (parseJSON)) -import Distribution.PackageDescription (GenericPackageDescription) import Lens.Micro import Stack.Prelude hiding (lift) import Stack.Constants -import Stack.Types.PackageIdentifier (PackageIdentifierRevision) import Stack.Types.StylesUpdate (StylesUpdate) import System.Console.ANSI import RIO.Process (HasProcessContext (..), ProcessContext, mkDefaultProcessContext) @@ -41,18 +39,6 @@ data Runner = Runner , runnerLogFunc :: !LogFunc , runnerTermWidth :: !Int , runnerProcessContext :: !ProcessContext - , runnerParsedCabalFiles :: !(IORef - ( Map PackageIdentifierRevision GenericPackageDescription - , Map (Path Abs Dir) (GenericPackageDescription, Path Abs File) - )) - -- ^ Cache of previously parsed cabal files. - -- - -- TODO: This is really an ugly hack to avoid spamming the user with - -- warnings when we parse cabal files multiple times and bypass - -- performance issues. Ideally: we would just design the system such - -- that it only ever parses a cabal file once. But for now, this is - -- a decent workaround. See: - -- . 
} class (HasProcessContext env, HasLogFunc env) => HasRunner env where @@ -99,7 +85,6 @@ withRunner logLevel useTime terminal colorWhen stylesUpdate widthOverride reExec termWidth <- clipWidth <$> maybe (fromMaybe defaultTerminalWidth <$> liftIO getTerminalWidth) pure widthOverride - ref <- newIORef mempty menv <- mkDefaultProcessContext logOptions0 <- logOptionsHandle stderr False let logOptions @@ -116,7 +101,6 @@ withRunner logLevel useTime terminal colorWhen stylesUpdate widthOverride reExec , runnerStylesUpdate = stylesUpdate , runnerLogFunc = logFunc , runnerTermWidth = termWidth - , runnerParsedCabalFiles = ref , runnerProcessContext = menv } where clipWidth w diff --git a/src/Stack/Types/Sig.hs b/src/Stack/Types/Sig.hs index 3df612599b..97fa524ddf 100644 --- a/src/Stack/Types/Sig.hs +++ b/src/Stack/Types/Sig.hs @@ -21,7 +21,6 @@ import Data.Aeson (Value(..), ToJSON(..), FromJSON(..)) import qualified Data.ByteString as SB import Data.Char (isHexDigit) import qualified Data.Text as T -import Stack.Types.PackageName -- | A GPG signature. newtype Signature = @@ -63,7 +62,7 @@ instance FromJSON (Aeson PackageName) where s <- parseJSON j case parsePackageName s of Just name -> return (Aeson name) - Nothing -> fail ("Invalid package name: " <> T.unpack s) + Nothing -> fail ("Invalid package name: " <> s) -- | Handy wrapper for orphan instances. newtype Aeson a = Aeson diff --git a/src/Stack/Types/Version.hs b/src/Stack/Types/Version.hs index 147a2df299..1483a4242d 100644 --- a/src/Stack/Types/Version.hs +++ b/src/Stack/Types/Version.hs @@ -1,11 +1,7 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE ViewPatterns #-} -- | Versions for packages. 
@@ -14,14 +10,6 @@ module Stack.Types.Version ,Cabal.VersionRange -- TODO in the future should have a newtype wrapper ,IntersectingVersionRange(..) ,VersionCheck(..) - ,versionParser - ,parseVersion - ,parseVersionFromString - ,versionString - ,versionText - ,toCabalVersion - ,fromCabalVersion - ,mkVersion ,versionRangeText ,withinRange ,Stack.Types.Version.intersectVersionRanges @@ -37,66 +25,18 @@ module Stack.Types.Version import Stack.Prelude hiding (Vector) import Data.Aeson.Extended -import Data.Attoparsec.Text -import Data.Hashable (Hashable (..)) import Data.List import qualified Data.Set as Set import qualified Data.Text as T -import Data.Vector.Unboxed (Vector) -import qualified Data.Vector.Unboxed as V import Distribution.Text (disp) import qualified Distribution.Version as Cabal -import Language.Haskell.TH -import Language.Haskell.TH.Syntax +import Distribution.Version (Version, versionNumbers, withinRange) import qualified Paths_stack as Meta import Text.PrettyPrint (render) --- | A parse fail. -newtype VersionParseFail = - VersionParseFail Text - deriving (Typeable) -instance Exception VersionParseFail -instance Show VersionParseFail where - show (VersionParseFail bs) = "Invalid version: " ++ show bs - -- | A Package upgrade; Latest or a specific version. data UpgradeTo = Specific Version | Latest deriving (Show) --- | A package version. -newtype Version = - Version {unVersion :: Vector Word} - deriving (Eq,Ord,Typeable,Data,Generic,Store,NFData) - -instance Hashable Version where - hashWithSalt i = hashWithSalt i . V.toList . unVersion - -instance Lift Version where - lift (Version n) = - appE (conE 'Version) - (appE (varE 'V.fromList) - (listE (map (litE . IntegerL . fromIntegral) - (V.toList n)))) - -instance Show Version where - show (Version v) = - intercalate "." - (map show (V.toList v)) -instance Display Version where - display = display . versionText - -instance ToJSON Version where - toJSON = toJSON . 
versionText -instance FromJSON Version where - parseJSON j = - do s <- parseJSON j - case parseVersionFromString s of - Nothing -> - fail ("Couldn't parse package version: " ++ s) - Just ver -> return ver -instance FromJSONKey Version where - fromJSONKey = FromJSONKeyTextParser $ \k -> - either (fail . show) return $ parseVersion k - newtype IntersectingVersionRange = IntersectingVersionRange { getIntersectingVersionRange :: Cabal.VersionRange } deriving Show @@ -109,79 +49,21 @@ instance Monoid IntersectingVersionRange where mempty = IntersectingVersionRange Cabal.anyVersion mappend = (<>) --- | Attoparsec parser for a package version. -versionParser :: Parser Version -versionParser = - do ls <- (:) <$> num <*> many num' - let !v = V.fromList ls - return (Version v) - where num = decimal - num' = point *> num - point = satisfy (== '.') - --- | Convenient way to parse a package version from a 'Text'. -parseVersion :: MonadThrow m => Text -> m Version -parseVersion x = go x - where go = - either (const (throwM (VersionParseFail x))) return . - parseOnly (versionParser <* endOfInput) - --- | Migration function. -parseVersionFromString :: MonadThrow m => String -> m Version -parseVersionFromString = - parseVersion . T.pack - --- | Get a string representation of a package version. -versionString :: Version -> String -versionString (Version v) = - intercalate "." - (map show (V.toList v)) - --- | Get a string representation of a package version. -versionText :: Version -> Text -versionText (Version v) = - T.intercalate - "." - (map (T.pack . show) - (V.toList v)) - --- | Convert to a Cabal version. -toCabalVersion :: Version -> Cabal.Version -toCabalVersion (Version v) = - Cabal.mkVersion (map fromIntegral (V.toList v)) - --- | Convert from a Cabal version. -fromCabalVersion :: Cabal.Version -> Version -fromCabalVersion vs = - let !v = V.fromList (map fromIntegral (Cabal.versionNumbers vs)) - in Version v - --- | Make a package version. 
-mkVersion :: String -> Q Exp -mkVersion s = - case parseVersionFromString s of - Nothing -> qRunIO $ throwString ("Invalid package version: " ++ show s) - Just pn -> [|pn|] - -- | Display a version range versionRangeText :: Cabal.VersionRange -> Text versionRangeText = T.pack . render . disp --- | Check if a version is within a version range. -withinRange :: Version -> Cabal.VersionRange -> Bool -withinRange v r = toCabalVersion v `Cabal.withinRange` r - -- | A modified intersection which also simplifies, for better display. intersectVersionRanges :: Cabal.VersionRange -> Cabal.VersionRange -> Cabal.VersionRange intersectVersionRanges x y = Cabal.simplifyVersionRange $ Cabal.intersectVersionRanges x y -- | Returns the first two components, defaulting to 0 if not present toMajorVersion :: Version -> Version -toMajorVersion (Version v) = - case V.length v of - 0 -> Version (V.fromList [0, 0]) - 1 -> Version (V.fromList [V.head v, 0]) - _ -> Version (V.fromList [V.head v, v V.! 1]) +toMajorVersion v = + case versionNumbers v of + [] -> Cabal.mkVersion [0, 0] + [a] -> Cabal.mkVersion [a, 0] + a:b:_ -> Cabal.mkVersion [a, b] -- | Given a version range and a set of versions, find the latest version from -- the set that is within the range. @@ -190,11 +72,11 @@ latestApplicableVersion r = listToMaybe . filter (`withinRange` r) . Set.toDescL -- | Get the next major version number for the given version nextMajorVersion :: Version -> Version -nextMajorVersion (Version v) = - case V.length v of - 0 -> Version (V.fromList [0, 1]) - 1 -> Version (V.fromList [V.head v, 1]) - _ -> Version (V.fromList [V.head v, (v V.! 
1) + 1]) +nextMajorVersion v = + case versionNumbers v of + [] -> Cabal.mkVersion [0, 1] + [a] -> Cabal.mkVersion [a, 1] + a:b:_ -> Cabal.mkVersion [a, b + 1] data VersionCheck = MatchMinor @@ -216,26 +98,30 @@ instance FromJSON VersionCheck where expected = "VersionCheck value (match-minor, match-exact, or newer-minor)" checkVersion :: VersionCheck -> Version -> Version -> Bool -checkVersion check (Version wanted) (Version actual) = +checkVersion check (versionNumbers -> wanted) (versionNumbers -> actual) = case check of - MatchMinor -> V.and (V.take 3 matching) - MatchExact -> V.length wanted == V.length actual && V.and matching - NewerMinor -> V.and (V.take 2 matching) && newerMinor + MatchMinor -> and (take 3 matching) + MatchExact -> length wanted == length actual && and matching + NewerMinor -> and (take 2 matching) && newerMinor where - matching = V.zipWith (==) wanted actual + matching = zipWith (==) wanted actual + + getMinor (_a:_b:c:_) = Just c + getMinor _ = Nothing + newerMinor = - case (wanted V.!? 2, actual V.!? 2) of + case (getMinor wanted, getMinor actual) of (Nothing, _) -> True (Just _, Nothing) -> False (Just w, Just a) -> a >= w -- | Get minor version (excludes any patchlevel) minorVersion :: Version -> Version -minorVersion (Version v) = Version (V.take 3 v) +minorVersion = Cabal.mkVersion . take 3 . 
versionNumbers -- | Current Stack version stackVersion :: Version -stackVersion = fromCabalVersion (Cabal.mkVersion' Meta.version) +stackVersion = Cabal.mkVersion' Meta.version -- | Current Stack minor version (excludes patchlevel) stackMinorVersion :: Version diff --git a/src/Stack/Types/VersionIntervals.hs b/src/Stack/Types/VersionIntervals.hs index 97bdc0cb78..f6e3080135 100644 --- a/src/Stack/Types/VersionIntervals.hs +++ b/src/Stack/Types/VersionIntervals.hs @@ -1,7 +1,7 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DeriveDataTypeable #-} -module Stack.Types.VersionIntervals +module Stack.Types.VersionIntervals -- to be removed with https://github.com/commercialhaskell/stack/issues/4213 ( VersionIntervals , toVersionRange , fromVersionRange @@ -40,7 +40,7 @@ fromVersionRange :: C.VersionRange -> VersionIntervals fromVersionRange = fromCabal . C.toVersionIntervals withinIntervals :: Version -> VersionIntervals -> Bool -withinIntervals v vi = C.withinIntervals (toCabalVersion v) (toCabal vi) +withinIntervals v vi = C.withinIntervals v (toCabal vi) unionVersionIntervals :: VersionIntervals -> VersionIntervals -> VersionIntervals unionVersionIntervals x y = fromCabal $ C.unionVersionIntervals @@ -57,10 +57,10 @@ toCabal (VersionIntervals vi) = C.mkVersionIntervals $ map go vi where go (VersionInterval lowerV lowerB mupper) = - ( C.LowerBound (toCabalVersion lowerV) (toCabalBound lowerB) + ( C.LowerBound lowerV (toCabalBound lowerB) , case mupper of Nothing -> C.NoUpperBound - Just (v, b) -> C.UpperBound (toCabalVersion v) (toCabalBound b) + Just (v, b) -> C.UpperBound v (toCabalBound b) ) fromCabal :: C.VersionIntervals -> VersionIntervals @@ -68,12 +68,12 @@ fromCabal = VersionIntervals . map go . 
C.versionIntervals where go (C.LowerBound lowerV lowerB, upper) = VersionInterval - { viLowerVersion = fromCabalVersion lowerV + { viLowerVersion = lowerV , viLowerBound = fromCabalBound lowerB , viUpper = case upper of C.NoUpperBound -> Nothing - C.UpperBound v b -> Just (fromCabalVersion v, fromCabalBound b) + C.UpperBound v b -> Just (v, fromCabalBound b) } toCabalBound :: Bound -> C.Bound diff --git a/src/Stack/Unpack.hs b/src/Stack/Unpack.hs new file mode 100644 index 0000000000..50d861b9cf --- /dev/null +++ b/src/Stack/Unpack.hs @@ -0,0 +1,116 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} +module Stack.Unpack + ( unpackPackages + ) where + +import Stack.Prelude +import Stack.Types.BuildPlan +import qualified RIO.Text as T +import qualified RIO.Map as Map +import qualified RIO.Set as Set +import RIO.List (intercalate) +import RIO.Process (HasProcessContext) +import Path ((), parseRelDir) +import Path.IO (doesDirExist) + +data UnpackException + = UnpackDirectoryAlreadyExists (Set (Path Abs Dir)) + | CouldNotParsePackageSelectors [String] + deriving Typeable +instance Exception UnpackException +instance Show UnpackException where + show (UnpackDirectoryAlreadyExists dirs) = unlines + $ "Unable to unpack due to already present directories:" + : map ((" " ++) . toFilePath) (Set.toList dirs) + show (CouldNotParsePackageSelectors strs) = unlines + $ "The following package selectors are not valid package names or identifiers:" + : map ("- " ++) strs + +-- | Intended to work for the command line command. +unpackPackages + :: forall env. 
(HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => Maybe SnapshotDef -- ^ when looking up by name, take from this build plan + -> Path Abs Dir -- ^ destination + -> [String] -- ^ names or identifiers + -> RIO env () +unpackPackages mSnapshotDef dest input = do + let (errs1, (names, pirs1)) = + fmap partitionEithers $ partitionEithers $ map parse input + (errs2, locs2) <- partitionEithers <$> traverse toLoc names + case errs1 ++ errs2 of + [] -> pure () + errs -> throwM $ CouldNotParsePackageSelectors errs + locs <- Map.fromList <$> mapM + (\(pir, ident) -> do + suffix <- parseRelDir $ packageIdentifierString ident + pure (pir, dest suffix) + ) + (map (\pir@(PackageIdentifierRevision name ver _) -> + (PLIHackage pir Nothing, PackageIdentifier name ver)) pirs1 ++ + locs2) + + alreadyUnpacked <- filterM doesDirExist $ Map.elems locs + + unless (null alreadyUnpacked) $ + throwM $ UnpackDirectoryAlreadyExists $ Set.fromList alreadyUnpacked + + forM_ (Map.toList locs) $ \(loc, dest') -> do + unpackPackageLocation dest' loc + logInfo $ + "Unpacked " <> + display loc <> + " to " <> + fromString (toFilePath dest') + where + toLoc = maybe toLocNoSnapshot toLocSnapshot mSnapshotDef + + toLocNoSnapshot :: PackageName -> RIO env (Either String (PackageLocationImmutable, PackageIdentifier)) + toLocNoSnapshot name = do + mver1 <- getLatestHackageVersion name UsePreferredVersions + mver <- + case mver1 of + Just _ -> pure mver1 + Nothing -> do + updated <- updateHackageIndex $ Just $ "Could not find package " <> fromString (packageNameString name) <> ", updating" + case updated of + UpdateOccurred -> getLatestHackageVersion name UsePreferredVersions + NoUpdateOccurred -> pure Nothing + case mver of + Nothing -> do + candidates <- getHackageTypoCorrections name + pure $ Left $ concat + [ "Could not find package " + , packageNameString name + , " on Hackage" + , if null candidates + then "" + else ". 
Perhaps you meant: " ++ intercalate ", " (map packageNameString candidates) + ] + Just pir@(PackageIdentifierRevision _ ver _) -> pure $ Right + ( PLIHackage pir Nothing + , PackageIdentifier name ver + ) + + toLocSnapshot :: SnapshotDef -> PackageName -> RIO env (Either String (PackageLocationImmutable, PackageIdentifier)) + toLocSnapshot sd name = + go $ concatMap snapshotLocations $ sdSnapshots sd + where + go [] = pure $ Left $ "Package does not appear in snapshot: " ++ packageNameString name + go (loc:locs) = do + ident@(PackageIdentifier name' _) <- getPackageLocationIdent loc + if name == name' + then pure $ Right (loc, ident) + else go locs + + -- Possible future enhancement: parse names as name + version range + parse s = + case parsePackageName (T.unpack t) of + Just x -> Right $ Left x + Nothing -> + case parsePackageIdentifierRevision t of + Right x -> Right $ Right x + Left _ -> Left $ "Could not parse as package name or identifier: " ++ s + where + t = T.pack s diff --git a/src/Stack/Upgrade.hs b/src/Stack/Upgrade.hs index 0346c0a2e0..9086800e0f 100644 --- a/src/Stack/Upgrade.hs +++ b/src/Stack/Upgrade.hs @@ -13,9 +13,6 @@ module Stack.Upgrade ) where import Stack.Prelude hiding (force, Display (..)) -import qualified Data.HashMap.Strict as HashMap -import qualified Data.List -import qualified Data.Map as Map import qualified Data.Text as T import Distribution.Version (mkVersion') import Lens.Micro (set) @@ -28,14 +25,8 @@ import Stack.Config #ifdef WINDOWS import Stack.DefaultColorWhen (defaultColorWhen) #endif -import Stack.Fetch -import Stack.PackageIndex import Stack.PrettyPrint import Stack.Setup -import Stack.Types.PackageIdentifier -import Stack.Types.PackageIndex -import Stack.Types.PackageName -import Stack.Types.Version import Stack.Types.Config import Stack.Types.Resolver import System.Exit (ExitCode (ExitSuccess)) @@ -157,9 +148,9 @@ binaryUpgrade (BinaryOpts mplatform force' mver morg mrepo) = do Just downloadVersion -> do prettyInfoL [ 
flow "Current Stack version:" - , display stackVersion <> "," + , fromString (versionString stackVersion) <> "," , flow "available download version:" - , display downloadVersion + , fromString (versionString downloadVersion) ] return $ downloadVersion > stackVersion @@ -225,34 +216,29 @@ sourceUpgrade gConfigMonoid mresolver builtHash (SourceOpts gitRepo) = #endif return $ Just $ tmp $(mkRelDir "stack") Nothing -> do - updateAllIndices - PackageCache caches <- getPackageCaches - let versions - = filter (/= $(mkVersion "9.9.9")) -- Mistaken upload to Hackage, just ignore it - $ maybe [] HashMap.keys - $ HashMap.lookup $(mkPackageName "stack") caches - - when (null versions) (throwString "No stack found in package indices") - - let version = Data.List.maximum versions - if version <= fromCabalVersion (mkVersion' Paths.version) + void $ updateHackageIndex + $ Just "Updating index to make sure we find the latest Stack version" + mversion <- getLatestHackageVersion "stack" UsePreferredVersions + pir@(PackageIdentifierRevision _ version _) <- + case mversion of + Nothing -> throwString "No stack found in package indices" + Just version -> pure version + + if version <= mkVersion' Paths.version then do prettyInfoS "Already at latest version, no upgrade required" return Nothing else do - let ident = PackageIdentifier $(mkPackageName "stack") version - paths <- unpackPackageIdents tmp Nothing - -- accept latest cabal revision - [PackageIdentifierRevision ident CFILatest] - case Map.lookup ident paths of - Nothing -> error "Stack.Upgrade.upgrade: invariant violated, unpacked directory not found" - Just path -> return $ Just path - - forM_ mdir $ \dir -> do - lc <- loadConfig - gConfigMonoid - mresolver - (SYLOverride $ dir $(mkRelFile "stack.yaml")) + suffix <- parseRelDir $ "stack-" ++ versionString version + let dir = tmp suffix + unpackPackageLocation dir $ PLIHackage pir Nothing + pure $ Just dir + + forM_ mdir $ \dir -> + loadConfig + gConfigMonoid + mresolver + 
(SYLOverride $ dir $(mkRelFile "stack.yaml")) $ \lc -> do bconfig <- liftIO $ lcLoadBuildConfig lc Nothing envConfig1 <- runRIO bconfig $ setupEnv $ Just $ "Try rerunning with --install-ghc to install the correct GHC into " <> diff --git a/src/Stack/Upload.hs b/src/Stack/Upload.hs index d942808f07..db0e5ca25e 100644 --- a/src/Stack/Upload.hs +++ b/src/Stack/Upload.hs @@ -34,9 +34,6 @@ import Network.HTTP.StackClient (Request, RequestBody(Req applyDigestAuth, displayDigestAuthException) import Stack.Types.Config -import Stack.Types.PackageIdentifier (PackageIdentifier, packageIdentifierString, - packageIdentifierName) -import Stack.Types.PackageName (packageNameString) import System.Directory (createDirectoryIfMissing, removeFile) import System.FilePath ((), takeFileName) @@ -178,13 +175,13 @@ uploadRevision :: String -- ^ Hackage base URL -> PackageIdentifier -> L.ByteString -> IO () -uploadRevision baseUrl creds ident cabalFile = do +uploadRevision baseUrl creds ident@(PackageIdentifier name _) cabalFile = do req0 <- parseRequest $ concat [ baseUrl , "package/" , packageIdentifierString ident , "/" - , packageNameString $ packageIdentifierName ident + , packageNameString name , ".cabal/edit" ] req1 <- formDataBody diff --git a/src/main/Main.hs b/src/main/Main.hs index d8587e3466..3badcde9c4 100644 --- a/src/main/Main.hs +++ b/src/main/Main.hs @@ -61,8 +61,8 @@ import qualified Stack.Docker as Docker import Stack.Dot import Stack.GhcPkg (findGhcPkgField) import qualified Stack.Nix as Nix -import Stack.Fetch import Stack.FileWatch +import Stack.Freeze import Stack.Ghci import Stack.Hoogle import Stack.Ls @@ -77,6 +77,7 @@ import Stack.Options.DotParser import Stack.Options.ExecParser import Stack.Options.GhciParser import Stack.Options.GlobalParser +import Stack.Options.FreezeParser import Stack.Options.HpcReportParser import Stack.Options.NewParser @@ -85,7 +86,6 @@ import Stack.Options.ScriptParser import Stack.Options.SDistParser import Stack.Options.SolverParser 
import Stack.Options.Utils -import qualified Stack.PackageIndex import qualified Stack.Path import Stack.PrettyPrint import qualified Stack.PrettyPrint as PP (style) @@ -101,6 +101,7 @@ import Stack.Types.Config import Stack.Types.Compiler import Stack.Types.NamedComponent import Stack.Types.Nix +import Stack.Unpack import Stack.Upgrade import qualified Stack.Upload as Upload import qualified System.Directory as D @@ -189,12 +190,12 @@ main = do Left (exitCode :: ExitCode) -> throwIO exitCode Right (globalMonoid,run) -> do - let global = globalOptsFromMonoid isTerminal globalMonoid + global <- globalOptsFromMonoid isTerminal globalMonoid when (globalLogLevel global == LevelDebug) $ hPutStrLn stderr versionString' case globalReExecVersion global of Just expectVersion -> do - expectVersion' <- parseVersionFromString expectVersion - unless (checkVersion MatchMinor expectVersion' (fromCabalVersion (mkVersion' Meta.version))) + expectVersion' <- parseVersionThrowing expectVersion + unless (checkVersion MatchMinor expectVersion' (mkVersion' Meta.version)) $ throwIO $ InvalidReExecVersion expectVersion (showVersion Meta.version) _ -> return () run global `catch` \e -> @@ -217,7 +218,7 @@ commandLineHandler -> Bool -> IO (GlobalOptsMonoid, GlobalOpts -> IO ()) commandLineHandler currentDir progName isInterpreter = complicatedOptions - Meta.version + (mkVersion' Meta.version) (Just versionString') VERSION_hpack "stack - The Haskell Tool Stack" @@ -387,6 +388,10 @@ commandLineHandler currentDir progName isInterpreter = complicatedOptions "Run a Stack Script" scriptCmd scriptOptsParser + addCommand' "freeze" + "Show project or snapshot with pinned dependencies if there are any such" + freezeCmd + freezeOptsParser unless isInterpreter (do addCommand' "eval" @@ -665,12 +670,13 @@ uninstallCmd _ go = withConfigAndLock go $ unpackCmd :: ([String], Maybe Text) -> GlobalOpts -> IO () unpackCmd (names, Nothing) go = unpackCmd (names, Just ".") go unpackCmd (names, Just dstPath) go = 
withConfigAndLock go $ do - mSnapshotDef <- mapM (makeConcreteResolver Nothing >=> loadResolver) (globalResolver go) - Stack.Fetch.unpackPackages mSnapshotDef (T.unpack dstPath) names + mSnapshotDef <- mapM (makeConcreteResolver >=> flip loadResolver Nothing) (globalResolver go) + dstPath' <- resolveDir' $ T.unpack dstPath + unpackPackages mSnapshotDef dstPath' names -- | Update the package index updateCmd :: () -> GlobalOpts -> IO () -updateCmd () go = withConfigAndLock go Stack.PackageIndex.updateAllIndices +updateCmd () go = withConfigAndLock go (void (updateHackageIndex Nothing)) upgradeCmd :: UpgradeOpts -> GlobalOpts -> IO () upgradeCmd upgradeOpts' go = withGlobalConfigAndLock go $ @@ -1002,6 +1008,10 @@ queryCmd selectors go = withBuildConfig go $ queryBuildInfo $ map T.pack selecto hpcReportCmd :: HpcReportOpts -> GlobalOpts -> IO () hpcReportCmd hropts go = withBuildConfig go $ generateHpcReportForTargets hropts +freezeCmd :: FreezeOpts -> GlobalOpts -> IO () +freezeCmd freezeOpts go = + withBuildConfig go $ freeze freezeOpts + data MainException = InvalidReExecVersion String String | UpgradeCabalUnusable | InvalidPathForExec FilePath diff --git a/src/test/Network/HTTP/Download/VerifiedSpec.hs b/src/test/Network/HTTP/Download/VerifiedSpec.hs index 8e2fcb2327..fbe8a06825 100644 --- a/src/test/Network/HTTP/Download/VerifiedSpec.hs +++ b/src/test/Network/HTTP/Download/VerifiedSpec.hs @@ -25,7 +25,7 @@ getExamplePath dir = do -- | An example DownloadRequest that uses a SHA1 exampleReq :: DownloadRequest exampleReq = fromMaybe (error "exampleReq") $ do - let req = parseRequest_ "http://download.fpcomplete.com/stackage-cli/linux64/cabal-install-1.22.4.0.tar.gz" + req <- parseRequest "http://download.fpcomplete.com/stackage-cli/linux64/cabal-install-1.22.4.0.tar.gz" return DownloadRequest { drRequest = req , drHashChecks = [exampleHashCheck] @@ -120,7 +120,7 @@ spec = do -- https://github.com/commercialhaskell/stack/issues/240 it "can download hackage tarballs" 
$ withTempDir' $ \dir -> do dest <- (dir ) <$> parseRelFile "acme-missiles-0.3.tar.gz" - let req = parseRequest_ "http://hackage.haskell.org/package/acme-missiles-0.3/acme-missiles-0.3.tar.gz" + req <- parseRequest "http://hackage.haskell.org/package/acme-missiles-0.3/acme-missiles-0.3.tar.gz" let dReq = DownloadRequest { drRequest = req , drHashChecks = [] diff --git a/src/test/Stack/Build/TargetSpec.hs b/src/test/Stack/Build/TargetSpec.hs index 4715020ba5..90bdc9cb0c 100644 --- a/src/test/Stack/Build/TargetSpec.hs +++ b/src/test/Stack/Build/TargetSpec.hs @@ -1,15 +1,13 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TemplateHaskell #-} module Stack.Build.TargetSpec (main, spec) where import qualified Data.Text as T +import Distribution.Types.PackageName (mkPackageName) +import Distribution.Version (mkVersion) import Stack.Build.Target import Stack.Prelude import Stack.Types.NamedComponent -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName -import Stack.Types.Version import Test.Hspec main :: IO () @@ -19,13 +17,13 @@ spec :: Spec spec = do describe "parseRawTarget" $ do let test s e = it s $ parseRawTarget (T.pack s) `shouldBe` e - test "foobar" $ Just $ RTPackage $(mkPackageName "foobar") + test "foobar" $ Just $ RTPackage (mkPackageName "foobar") test "foobar-1.2.3" $ Just $ RTPackageIdentifier $ PackageIdentifier - $(mkPackageName "foobar") $(mkVersion "1.2.3") + (mkPackageName "foobar") (mkVersion [1, 2, 3]) test "./foobar" Nothing test "foobar/" Nothing test "/foobar" Nothing test ":some-exe" $ Just $ RTComponent "some-exe" - test "foobar:some-exe" $ Just $ RTPackageComponent $(mkPackageName "foobar") $ UnresolvedComponent "some-exe" - test "foobar:exe:some-exe" $ Just $ RTPackageComponent $(mkPackageName "foobar") + test "foobar:some-exe" $ Just $ RTPackageComponent (mkPackageName "foobar") $ UnresolvedComponent "some-exe" + test "foobar:exe:some-exe" $ Just $ RTPackageComponent (mkPackageName 
"foobar") $ ResolvedComponent $ CExe "some-exe" diff --git a/src/test/Stack/Config/DockerSpec.hs b/src/test/Stack/Config/DockerSpec.hs new file mode 100644 index 0000000000..27d2fec8e0 --- /dev/null +++ b/src/test/Stack/Config/DockerSpec.hs @@ -0,0 +1,37 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} +module Stack.Config.DockerSpec (spec) where + +import Test.Hspec +import Test.Hspec.QuickCheck +import Stack.Prelude +import Stack.Types.Resolver +import RIO.Time (fromGregorian) +import Stack.Config.Docker (parseLtsName, addDefaultTag) + +spec :: Spec +spec = do + prop "parseLtsName" $ \(abs -> x) (abs -> y) -> do + case ltsSnapshotLocation x y of + SLUrl url _ -> + case parseLtsName url of + Just (x', y') -> do + x `shouldBe` x' + y `shouldBe` y' + Nothing -> error "parseLtsName failed" + loc -> error $ show loc + describe "addDefaultTag" $ do + it "fails with no resolver" $ addDefaultTag "foo/bar" Nothing Nothing `shouldBe` Nothing + it "succeeds on LTS" $ + addDefaultTag + "foo/bar" + Nothing + (Just $ ARResolver $ ltsSnapshotLocation 1 2) + `shouldBe` Just "foo/bar:lts-1.2" + it "fails on nightly" $ + addDefaultTag + "foo/bar" + Nothing + (Just $ ARResolver $ nightlySnapshotLocation $ fromGregorian 2018 1 1) + `shouldBe` Nothing diff --git a/src/test/Stack/ConfigSpec.hs b/src/test/Stack/ConfigSpec.hs index 1f1cc5136b..4149aafa9d 100644 --- a/src/test/Stack/ConfigSpec.hs +++ b/src/test/Stack/ConfigSpec.hs @@ -7,6 +7,7 @@ module Stack.ConfigSpec where import Control.Arrow import Data.Aeson.Extended import Data.Yaml +import Pantry.Internal (pcHpackExecutable) import Path import Path.IO hiding (withSystemTempDir) import Stack.Config @@ -84,9 +85,8 @@ spec = beforeAll setup $ do describe "loadConfig" $ do let loadConfig' inner = - withRunner logLevel True False ColorAuto mempty Nothing False $ \runner -> do - lc <- runRIO runner $ loadConfig mempty Nothing SYLDefault - inner lc + withRunner logLevel True
False ColorAuto mempty Nothing False $ \runner -> + runRIO runner $ loadConfig mempty Nothing SYLDefault inner -- TODO(danburton): make sure parent dirs also don't have config file it "works even if no config file exists" $ example $ loadConfig' $ const $ return () @@ -96,22 +96,23 @@ spec = beforeAll setup $ do -- TODO(danburton): more specific test for exception loadConfig' (const (return ())) `shouldThrow` anyException + let configOverrideHpack = pcHpackExecutable . view pantryConfigL + it "parses config option with-hpack" $ inTempDir $ do writeFile (toFilePath stackDotYaml) hpackConfig - loadConfig' $ \lc -> do - let Config{..} = lcConfig lc - configOverrideHpack `shouldBe` HpackCommand "/usr/local/bin/hpack" + loadConfig' $ \lc -> + liftIO $ configOverrideHpack (lcConfig lc) `shouldBe` + HpackCommand "/usr/local/bin/hpack" it "parses config bundled hpack" $ inTempDir $ do writeFile (toFilePath stackDotYaml) sampleConfig - loadConfig' $ \lc -> do - let Config{..} = lcConfig lc - configOverrideHpack `shouldBe` HpackBundled + loadConfig' $ \lc -> + liftIO $ configOverrideHpack (lcConfig lc) `shouldBe` HpackBundled it "parses build config options" $ inTempDir $ do writeFile (toFilePath stackDotYaml) buildOptsConfig - loadConfig' $ \lc -> do - let BuildOpts{..} = configBuild $ lcConfig lc + loadConfig' $ \lc -> liftIO $ do + let BuildOpts{..} = configBuild $ lcConfig lc boptsLibProfile `shouldBe` True boptsExeProfile `shouldBe` True boptsHaddock `shouldBe` True @@ -138,15 +139,15 @@ spec = beforeAll setup $ do let childDir = "child" createDirectory childDir setCurrentDirectory childDir - loadConfig' $ \LoadConfig{..} -> do - bc <- liftIO (lcLoadBuildConfig Nothing) + loadConfig' $ \LoadConfig{..} -> liftIO $ do + bc <- lcLoadBuildConfig Nothing view projectRootL bc `shouldBe` parentDir it "respects the STACK_YAML env variable" $ inTempDir $ do withSystemTempDir "config-is-here" $ \dir -> do let stackYamlFp = toFilePath (dir stackDotYaml) writeFile stackYamlFp 
sampleConfig - withEnvVar "STACK_YAML" stackYamlFp $ loadConfig' $ \LoadConfig{..} -> do + withEnvVar "STACK_YAML" stackYamlFp $ loadConfig' $ \LoadConfig{..} -> liftIO $ do BuildConfig{..} <- lcLoadBuildConfig Nothing bcStackYaml `shouldBe` dir stackDotYaml parent bcStackYaml `shouldBe` dir @@ -158,7 +159,7 @@ spec = beforeAll setup $ do yamlAbs = parentDir yamlRel createDirectoryIfMissing True $ toFilePath $ parent yamlAbs writeFile (toFilePath yamlAbs) "resolver: ghc-7.8" - withEnvVar "STACK_YAML" (toFilePath yamlRel) $ loadConfig' $ \LoadConfig{..} -> do + withEnvVar "STACK_YAML" (toFilePath yamlRel) $ loadConfig' $ \LoadConfig{..} -> liftIO $ do BuildConfig{..} <- lcLoadBuildConfig Nothing bcStackYaml `shouldBe` yamlAbs diff --git a/src/test/Stack/DotSpec.hs b/src/test/Stack/DotSpec.hs index 480a99fbd2..ce3cea51ab 100644 --- a/src/test/Stack/DotSpec.hs +++ b/src/test/Stack/DotSpec.hs @@ -10,9 +10,8 @@ import Data.List ((\\)) import qualified Data.Map as Map import qualified Data.Set as Set import Distribution.License (License (BSD3)) -import Stack.Prelude -import Stack.Types.PackageName -import Stack.Types.Version +import qualified RIO.Text as T +import Stack.Prelude hiding (pkgName) import Test.Hspec import Test.Hspec.QuickCheck (prop) import Test.QuickCheck (forAll,choose,Gen) @@ -20,7 +19,7 @@ import Test.QuickCheck (forAll,choose,Gen) import Stack.Dot dummyPayload :: DotPayload -dummyPayload = DotPayload (parseVersionFromString "0.0.0.0") (Just (Right BSD3)) +dummyPayload = DotPayload (parseVersion "0.0.0.0") (Just (Right BSD3)) spec :: Spec spec = do @@ -74,7 +73,7 @@ sublistOf = filterM (\_ -> choose (False, True)) -- Unsafe internal helper to create a package name pkgName :: Text -> PackageName -pkgName = fromMaybe failure . parsePackageName +pkgName = fromMaybe failure . parsePackageName . 
T.unpack where failure = error "Internal error during package name creation in DotSpec.pkgName" diff --git a/src/test/Stack/NixSpec.hs b/src/test/Stack/NixSpec.hs index 1ef5f24096..0256609c5e 100644 --- a/src/test/Stack/NixSpec.hs +++ b/src/test/Stack/NixSpec.hs @@ -12,11 +12,9 @@ import Stack.Config.Nix import Stack.Constants import Stack.Options.NixParser import Stack.Prelude -import Stack.Types.Compiler import Stack.Types.Config import Stack.Types.Nix import Stack.Types.Runner -import Stack.Types.Version import System.Directory import System.Environment import Test.Hspec @@ -42,9 +40,10 @@ setup = unsetEnv "STACK_YAML" spec :: Spec spec = beforeAll setup $ do - let loadConfig' cmdLineArgs = + let loadConfig' :: ConfigMonoid -> (LoadConfig -> IO ()) -> IO () + loadConfig' cmdLineArgs inner = withRunner LevelDebug True False ColorAuto mempty Nothing False $ \runner -> - runRIO runner $ loadConfig cmdLineArgs Nothing SYLDefault + runRIO runner $ loadConfig cmdLineArgs Nothing SYLDefault (liftIO . 
inner) inTempDir test = do currentDirectory <- getCurrentDirectory withSystemTempDirectory "Stack_ConfigSpec" $ \tempDir -> do @@ -62,45 +61,43 @@ spec = beforeAll setup $ do let trueOnNonWindows = not osIsWindows describe "nix disabled in config file" $ around_ (withStackDotYaml sampleConfigNixDisabled) $ do - it "sees that the nix shell is not enabled" $ do - lc <- loadConfig' mempty + it "sees that the nix shell is not enabled" $ loadConfig' mempty $ \lc -> nixEnable (configNix $ lcConfig lc) `shouldBe` False describe "--nix given on command line" $ - it "sees that the nix shell is enabled" $ do - lc <- loadConfig' (parseOpts ["--nix"]) + it "sees that the nix shell is enabled" $ + loadConfig' (parseOpts ["--nix"]) $ \lc -> nixEnable (configNix $ lcConfig lc) `shouldBe` trueOnNonWindows describe "--nix-pure given on command line" $ - it "sees that the nix shell is enabled" $ do - lc <- loadConfig' (parseOpts ["--nix-pure"]) + it "sees that the nix shell is enabled" $ + loadConfig' (parseOpts ["--nix-pure"]) $ \lc -> nixEnable (configNix $ lcConfig lc) `shouldBe` trueOnNonWindows describe "--no-nix given on command line" $ - it "sees that the nix shell is not enabled" $ do - lc <- loadConfig' (parseOpts ["--no-nix"]) + it "sees that the nix shell is not enabled" $ + loadConfig' (parseOpts ["--no-nix"]) $ \lc -> nixEnable (configNix $ lcConfig lc) `shouldBe` False describe "--no-nix-pure given on command line" $ - it "sees that the nix shell is not enabled" $ do - lc <- loadConfig' (parseOpts ["--no-nix-pure"]) + it "sees that the nix shell is not enabled" $ + loadConfig' (parseOpts ["--no-nix-pure"]) $ \lc -> nixEnable (configNix $ lcConfig lc) `shouldBe` False describe "nix enabled in config file" $ around_ (withStackDotYaml sampleConfigNixEnabled) $ do - it "sees that the nix shell is enabled" $ do - lc <- loadConfig' mempty + it "sees that the nix shell is enabled" $ + loadConfig' mempty $ \lc -> nixEnable (configNix $ lcConfig lc) `shouldBe` trueOnNonWindows 
describe "--no-nix given on command line" $ - it "sees that the nix shell is not enabled" $ do - lc <- loadConfig' (parseOpts ["--no-nix"]) + it "sees that the nix shell is not enabled" $ + loadConfig' (parseOpts ["--no-nix"]) $ \lc -> nixEnable (configNix $ lcConfig lc) `shouldBe` False describe "--nix-pure given on command line" $ - it "sees that the nix shell is enabled" $ do - lc <- loadConfig' (parseOpts ["--nix-pure"]) + it "sees that the nix shell is enabled" $ + loadConfig' (parseOpts ["--nix-pure"]) $ \lc -> nixEnable (configNix $ lcConfig lc) `shouldBe` trueOnNonWindows describe "--no-nix-pure given on command line" $ - it "sees that the nix shell is enabled" $ do - lc <- loadConfig' (parseOpts ["--no-nix-pure"]) + it "sees that the nix shell is enabled" $ + loadConfig' (parseOpts ["--no-nix-pure"]) $ \lc -> nixEnable (configNix $ lcConfig lc) `shouldBe` trueOnNonWindows - it "sees that the only package asked for is glpk and asks for the correct GHC derivation" $ do - lc <- loadConfig' mempty + it "sees that the only package asked for is glpk and asks for the correct GHC derivation" $ loadConfig' mempty $ \lc -> do nixPackages (configNix $ lcConfig lc) `shouldBe` ["glpk"] - v <- parseVersion "7.10.3" - ghc <- either throwIO return $ nixCompiler (GhcVersion v) + v <- parseVersionThrowing "7.10.3" + ghc <- either throwIO return $ nixCompiler (WCGhc v) ghc `shouldBe` "haskell.compiler.ghc7103" diff --git a/src/test/Stack/PackageDumpSpec.hs b/src/test/Stack/PackageDumpSpec.hs index 69e99a164a..2cb1809a8d 100644 --- a/src/test/Stack/PackageDumpSpec.hs +++ b/src/test/Stack/PackageDumpSpec.hs @@ -1,22 +1,20 @@ {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TupleSections #-} module Stack.PackageDumpSpec where -import Data.Conduit +import Conduit import qualified Data.Conduit.List as CL import Data.Conduit.Text (decodeUtf8) import qualified Data.Map as Map import qualified Data.Set as Set import 
Distribution.License (License(..)) +import Distribution.Types.PackageName (mkPackageName) +import Distribution.Version (mkVersion) import Stack.PackageDump import Stack.Prelude import Stack.Types.Compiler import Stack.Types.GhcPkgId -import Stack.Types.PackageIdentifier -import Stack.Types.PackageName -import Stack.Types.Version import RIO.Process import Test.Hspec import Test.Hspec.QuickCheck @@ -72,13 +70,14 @@ spec = do .| conduitDumpPackage .| CL.consume ghcPkgId <- parseGhcPkgId "haskell2010-1.1.2.0-05c8dd51009e08c6371c82972d40f55a" - packageIdent <- parsePackageIdentifier "haskell2010-1.1.2.0" + packageIdent <- maybe (fail "Not parsable package id") return $ + parsePackageIdentifier "haskell2010-1.1.2.0" depends <- mapM parseGhcPkgId [ "array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b" , "base-4.7.0.2-bfd89587617e381ae01b8dd7b6c7f1c1" , "ghc-prim-0.3.1.0-a24f9c14c632d75b683d0f93283aea37" ] - haskell2010 { dpExposedModules = [] } `shouldBe` DumpPackage + haskell2010 { dpExposedModules = mempty } `shouldBe` DumpPackage { dpGhcPkgId = ghcPkgId , dpPackageIdent = packageIdent , dpParentLibIdent = Nothing @@ -93,7 +92,7 @@ spec = do , dpHaddock = () , dpSymbols = () , dpIsExposed = False - , dpExposedModules = [] + , dpExposedModules = mempty } it "ghc 7.10" $ do @@ -105,7 +104,8 @@ spec = do .| conduitDumpPackage .| CL.consume ghcPkgId <- parseGhcPkgId "ghc-7.10.1-325809317787a897b7a97d646ceaa3a3" - pkgIdent <- parsePackageIdentifier "ghc-7.10.1" + pkgIdent <- maybe (fail "Not parsable package id") return $ + parsePackageIdentifier "ghc-7.10.1" depends <- mapM parseGhcPkgId [ "array-0.5.1.0-e29cdbe82692341ebb7ce6e2798294f9" , "base-4.8.0.0-1b689eb8d72c4d4cc88f445839c1f01a" @@ -122,7 +122,7 @@ spec = do , "transformers-0.4.2.0-c1a7bb855a176fe475d7b665301cd48f" , "unix-2.7.1.0-e5915eb989e568b732bc7286b0d0817f" ] - haskell2010 { dpExposedModules = [] } `shouldBe` DumpPackage + haskell2010 { dpExposedModules = mempty } `shouldBe` DumpPackage { dpGhcPkgId = ghcPkgId , 
dpPackageIdent = pkgIdent , dpParentLibIdent = Nothing @@ -137,7 +137,7 @@ spec = do , dpHaddock = () , dpSymbols = () , dpIsExposed = False - , dpExposedModules = [] + , dpExposedModules = mempty } it "ghc 7.8.4 (osx)" $ do hmatrix:_ <- @@ -148,7 +148,8 @@ spec = do .| conduitDumpPackage .| CL.consume ghcPkgId <- parseGhcPkgId "hmatrix-0.16.1.5-12d5d21f26aa98774cdd8edbc343fbfe" - pkgId <- parsePackageIdentifier "hmatrix-0.16.1.5" + pkgId <- maybe (fail "Not parsable package id") return $ + parsePackageIdentifier "hmatrix-0.16.1.5" depends <- mapM parseGhcPkgId [ "array-0.5.0.0-470385a50d2b78598af85cfe9d988e1b" , "base-4.7.0.2-918c7ac27f65a87103264a9f51652d63" @@ -178,7 +179,7 @@ spec = do , dpHaddock = () , dpSymbols = () , dpIsExposed = True - , dpExposedModules = ["Data.Packed","Data.Packed.Vector","Data.Packed.Matrix","Data.Packed.Foreign","Data.Packed.ST","Data.Packed.Development","Numeric.LinearAlgebra","Numeric.LinearAlgebra.LAPACK","Numeric.LinearAlgebra.Algorithms","Numeric.Container","Numeric.LinearAlgebra.Util","Numeric.LinearAlgebra.Devel","Numeric.LinearAlgebra.Data","Numeric.LinearAlgebra.HMatrix","Numeric.LinearAlgebra.Static"] + , dpExposedModules = Set.fromList ["Data.Packed","Data.Packed.Vector","Data.Packed.Matrix","Data.Packed.Foreign","Data.Packed.ST","Data.Packed.Development","Numeric.LinearAlgebra","Numeric.LinearAlgebra.LAPACK","Numeric.LinearAlgebra.Algorithms","Numeric.Container","Numeric.LinearAlgebra.Util","Numeric.LinearAlgebra.Devel","Numeric.LinearAlgebra.Data","Numeric.LinearAlgebra.HMatrix","Numeric.LinearAlgebra.Static"] } it "ghc HEAD" $ do ghcBoot:_ <- @@ -189,7 +190,8 @@ spec = do .| conduitDumpPackage .| CL.consume ghcPkgId <- parseGhcPkgId "ghc-boot-0.0.0.0" - pkgId <- parsePackageIdentifier "ghc-boot-0.0.0.0" + pkgId <- maybe (fail "Not parsable package id") return $ + parsePackageIdentifier "ghc-boot-0.0.0.0" depends <- mapM parseGhcPkgId [ "base-4.9.0.0" , "binary-0.7.5.0" @@ -213,7 +215,7 @@ spec = do , dpHaddock = () , 
dpSymbols = () , dpIsExposed = True - , dpExposedModules = ["GHC.Lexeme", "GHC.PackageDb"] + , dpExposedModules = Set.fromList ["GHC.Lexeme", "GHC.PackageDb"] } @@ -233,13 +235,13 @@ spec = do .| addProfiling icache .| addHaddock icache .| fakeAddSymbols - .| sinkMatching False False False (Map.singleton $(mkPackageName "transformers") $(mkVersion "0.0.0.0.0.0.1")) - case Map.lookup $(mkPackageName "base") m of + .| sinkMatching False False False (Map.singleton (mkPackageName "transformers") (mkVersion [0, 0, 0, 0, 0, 0, 1])) + case Map.lookup (mkPackageName "base") m of Nothing -> error "base not present" Just _ -> return () liftIO $ do - Map.lookup $(mkPackageName "transformers") m `shouldBe` Nothing - Map.lookup $(mkPackageName "ghc") m `shouldBe` Nothing + Map.lookup (mkPackageName "transformers") m `shouldBe` Nothing + Map.lookup (mkPackageName "ghc") m `shouldBe` Nothing describe "pruneDeps" $ do it "sanity check" $ do diff --git a/src/test/Stack/SnapshotSpec.hs b/src/test/Stack/SnapshotSpec.hs new file mode 100644 index 0000000000..fdf3a589cc --- /dev/null +++ b/src/test/Stack/SnapshotSpec.hs @@ -0,0 +1,44 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE OverloadedStrings #-} +module Stack.SnapshotSpec (spec) where + +import Distribution.Types.PackageName (mkPackageName) +import Distribution.Version (mkVersion) +import Stack.Prelude +import Stack.Snapshot (loadGlobalHints) +import Stack.Types.Runner (withRunner, ColorWhen (ColorNever)) +import Test.Hspec +import qualified RIO.Map as Map +import RIO.ByteString (hPut) +import Path.IO (resolveFile') + +spec :: Spec +spec = do + describe "loadGlobalHints" $ do + let it' name inner = it name $ withSystemTempFile "global-hints.yaml" $ \fp h -> do + hPut h "this should be ignored" + hClose h :: IO () + abs' <- resolveFile' fp + withRunner LevelError False False ColorNever mempty Nothing False $ \runner -> + runRIO runner $ inner abs' + it' "unknown compiler" $ \fp -> do + mmap <- loadGlobalHints fp $ WCGhc 
(mkVersion [0, 0, 0, 0, 0, 0, 0]) + liftIO $ mmap `shouldBe` Nothing + it' "known compiler" $ \fp -> do + mmap <- loadGlobalHints fp $ WCGhc (mkVersion [8, 4, 3]) + case mmap of + Nothing -> error "not found" + Just m -> liftIO $ do + Map.lookup (mkPackageName "ghc") m `shouldBe` Just (mkVersion [8, 4, 3]) + Map.lookup (mkPackageName "base") m `shouldBe` Just (mkVersion [4, 11, 1, 0]) + Map.lookup (mkPackageName "bytestring") m `shouldBe` Just (mkVersion [0, 10, 8, 2]) + Map.lookup (mkPackageName "acme-missiles") m `shouldBe` Nothing + it' "older known compiler" $ \fp -> do + mmap <- loadGlobalHints fp $ WCGhc (mkVersion [7, 8, 4]) + case mmap of + Nothing -> error "not found" + Just m -> liftIO $ do + Map.lookup (mkPackageName "ghc") m `shouldBe` Just (mkVersion [7, 8, 4]) + Map.lookup (mkPackageName "base") m `shouldBe` Just (mkVersion [4, 7, 0, 2]) + Map.lookup (mkPackageName "Cabal") m `shouldBe` Just (mkVersion [1, 18, 1, 5]) + Map.lookup (mkPackageName "acme-missiles") m `shouldBe` Nothing diff --git a/src/test/Stack/SolverSpec.hs b/src/test/Stack/SolverSpec.hs index a8a64b0296..6615099935 100644 --- a/src/test/Stack/SolverSpec.hs +++ b/src/test/Stack/SolverSpec.hs @@ -1,14 +1,13 @@ {-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE OverloadedStrings #-} -- | Test suite for "Stack.Solver" module Stack.SolverSpec where import Data.Text (unpack) +import Distribution.PackageDescription (mkFlagName) +import Distribution.Types.PackageName (mkPackageName) +import Distribution.Version (mkVersion) import Stack.Prelude -import Stack.Types.FlagName -import Stack.Types.PackageName -import Stack.Types.Version import Test.Hspec import qualified Data.Map as Map @@ -19,28 +18,28 @@ spec = describe "Stack.Solver" $ do successfulExample "text-1.2.1.1 (latest: 1.2.2.0) -integer-simple (via: parsec-3.1.9) (new package)" - $(mkPackageName "text") - $(mkVersion "1.2.1.1") - [ ($(mkFlagName "integer-simple"), False) + (mkPackageName "text") + 
(mkVersion [1, 2, 1, 1]) + [ (mkFlagName "integer-simple", False) ] successfulExample "hspec-snap-1.0.0.0 *test (via: servant-snap-0.5) (new package)" - $(mkPackageName "hspec-snap") - $(mkVersion "1.0.0.0") + (mkPackageName "hspec-snap") + (mkVersion [1, 0, 0, 0]) [] successfulExample "time-locale-compat-0.1.1.1 -old-locale (via: http-api-data-0.2.2) (new package)" - $(mkPackageName "time-locale-compat") - $(mkVersion "0.1.1.1") - [ ($(mkFlagName "old-locale"), False) + (mkPackageName "time-locale-compat") + (mkVersion [0, 1, 1, 1]) + [ (mkFlagName "old-locale", False) ] successfulExample "flowdock-rest-0.2.0.0 -aeson-compat *test (via: haxl-fxtra-0.0.0.0) (new package)" - $(mkPackageName "flowdock-rest") - $(mkVersion "0.2.0.0") - [ ($(mkFlagName "aeson-compat"), False) + (mkPackageName "flowdock-rest") + (mkVersion [0, 2, 0, 0]) + [ (mkFlagName "aeson-compat", False) ] where - successfulExample input pkgName pkgVersion flags = + successfulExample input pkgName' pkgVersion' flags = it ("parses " ++ unpack input) $ - parseCabalOutputLine input `shouldBe` Right (pkgName, (pkgVersion, Map.fromList flags)) + parseCabalOutputLine input `shouldBe` Right (pkgName', (pkgVersion', Map.fromList flags)) diff --git a/src/test/Stack/StoreSpec.hs b/src/test/Stack/StoreSpec.hs index 61da61e590..4751494e79 100644 --- a/src/test/Stack/StoreSpec.hs +++ b/src/test/Stack/StoreSpec.hs @@ -9,6 +9,7 @@ module Stack.StoreSpec where import qualified Data.ByteString as BS +import qualified Data.ByteString.Short as SBS import Data.Containers (mapFromList, setFromList) import Data.Sequences (fromList) import Data.Store.Internal (StaticSize (..)) @@ -41,6 +42,9 @@ instance (Monad m, Serial m a, UV.Unbox a) => Serial m (UV.Vector a) where instance Monad m => Serial m BS.ByteString where series = fmap BS.pack series +instance Monad m => Serial m ShortByteString where + series = fmap SBS.pack series + instance (Monad m, Serial m a, Ord a) => Serial m (Set a) where series = fmap setFromList 
series diff --git a/src/test/Stack/Types/BuildPlanSpec.hs b/src/test/Stack/Types/BuildPlanSpec.hs deleted file mode 100644 index 1b95f8458b..0000000000 --- a/src/test/Stack/Types/BuildPlanSpec.hs +++ /dev/null @@ -1,94 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - -module Stack.Types.BuildPlanSpec where - -import Data.Aeson.Extended (WithJSONWarnings(..)) -import Data.ByteString (ByteString) -import qualified Data.ByteString.Char8 as S8 -import Data.Yaml (decodeThrow) -import Stack.Types.BuildPlan -import Test.Hspec - -spec :: Spec -spec = - describe "PackageLocation" $ do - describe "Archive" $ do - describe "github" $ do - let decode' :: ByteString -> Maybe (WithJSONWarnings (PackageLocation Subdirs)) - decode' = decodeThrow - - it "'github' and 'commit' keys" $ do - let contents :: ByteString - contents = - S8.pack - (unlines - [ "github: oink/town" - , "commit: abc123" - ]) - let expected :: PackageLocation Subdirs - expected = - PLArchive Archive - { archiveUrl = "https://github.com/oink/town/archive/abc123.tar.gz" - , archiveSubdirs = DefaultSubdirs - , archiveHash = Nothing - } - decode' contents `shouldBe` Just (WithJSONWarnings expected []) - - it "'github', 'commit', and 'subdirs' keys" $ do - let contents :: ByteString - contents = - S8.pack - (unlines - [ "github: oink/town" - , "commit: abc123" - , "subdirs:" - , " - foo" - ]) - let expected :: PackageLocation Subdirs - expected = - PLArchive Archive - { archiveUrl = "https://github.com/oink/town/archive/abc123.tar.gz" - , archiveSubdirs = ExplicitSubdirs ["foo"] - , archiveHash = Nothing - } - decode' contents `shouldBe` Just (WithJSONWarnings expected []) - - it "does not parse GitHub repo with no slash" $ do - let contents :: ByteString - contents = - S8.pack - (unlines - [ "github: oink" - , "commit: abc123" - ]) - decode' contents `shouldBe` Nothing - - it "does not parse GitHub repo with leading slash" $ do - let contents :: ByteString - contents = - S8.pack - (unlines - [ "github: /oink" - , 
"commit: abc123" - ]) - decode' contents `shouldBe` Nothing - - it "does not parse GitHub repo with trailing slash" $ do - let contents :: ByteString - contents = - S8.pack - (unlines - [ "github: oink/" - , "commit: abc123" - ]) - decode' contents `shouldBe` Nothing - - it "does not parse GitHub repo with more than one slash" $ do - let contents :: ByteString - contents = - S8.pack - (unlines - [ "github: oink/town/here" - , "commit: abc123" - ]) - decode' contents `shouldBe` Nothing diff --git a/src/test/Stack/Untar/UntarSpec.hs b/src/test/Stack/Untar/UntarSpec.hs deleted file mode 100644 index 3b71f7fd80..0000000000 --- a/src/test/Stack/Untar/UntarSpec.hs +++ /dev/null @@ -1,49 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TemplateHaskell #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} - -module Stack.Untar.UntarSpec where - -import Data.List (sort) -import Path -import Path.IO (removeDirRecur) -import qualified System.FilePath as FP -import Stack.Fetch (untar) -import Stack.Prelude -import Test.Hspec - -spec :: Spec -spec = do - describe "Untarring ignores strange entries" $ - mapM_ testTarFile tarFiles - where - -- XXX tests are run in the project root folder, but data files are next to - -- this source data. - currentFolder = $(mkRelDir $ "src" FP. "test" FP. "Stack" FP. "Untar") - - -- Pairs test tarball names + list of unexpected entries contained: for each - -- entry, a tar pathname + description. - tarFilesBase = [ ("test1", []) - , ("test2", [ ("bar", "named pipe") - , ("devB", "block device") - , ("devC", "character device")])] - -- Prepend tarball name to tar pathnames: - tarFiles = - [ (name, - [ (name FP. 
entryName, d) - | (entryName, d) <- entries]) - | (name, entries) <- tarFilesBase ] - - testTarFile (name, expected) = - it ("works on test " ++ name) $ - getEntries name `shouldReturn` sort expected - - getEntries name = do - tarballName <- parseRelFile $ name ++ ".tar.gz" - expectedTarFolder <- parseRelDir name - - entries <- untar (currentFolder tarballName) expectedTarFolder currentFolder - removeDirRecur $ currentFolder expectedTarFolder - return $ sort entries diff --git a/stack-nightly.yaml b/stack-nightly.yaml index 7309fee50e..3d2c845b4b 100644 --- a/stack-nightly.yaml +++ b/stack-nightly.yaml @@ -1,5 +1,21 @@ resolver: nightly-2018-08-04 +packages: +- . +- subs/pantry +- subs/curator + +extra-deps: +- persistent-sqlite-2.8.2@sha256:6874958eb2943c4567c30bc0069ce4868b2813c490402c22bb2e0efa5b4c4c71,3873 +# Ugly, temporary hack +- github: fpco/store + commit: 8ff486ea5a16665c7fd279963344ac8ef99b6e2a + subdirs: + - store +# Switch to Hackage version when released +- github: snoyberg/tar-conduit + commit: fd03a66110f7d0feff6fe7eb1cc9ca1a56b38fea + # docker: # enable: true # repo: fpco/stack-full diff --git a/stack.yaml b/stack.yaml index 43d1dda3a9..29083b2df7 100644 --- a/stack.yaml +++ b/stack.yaml @@ -1,5 +1,20 @@ resolver: snapshot.yaml +packages: +- . 
+- subs/pantry +- subs/curator + +extra-deps: +# Ugly, temporary hack +- github: fpco/store + commit: 8ff486ea5a16665c7fd279963344ac8ef99b6e2a + subdirs: + - store +# Switch to Hackage version when released +- github: snoyberg/tar-conduit + commit: fd03a66110f7d0feff6fe7eb1cc9ca1a56b38fea + # docker: # enable: true # repo: fpco/stack-full diff --git a/subs/convert/.gitignore b/subs/convert/.gitignore new file mode 100644 index 0000000000..4f7e0ac13c --- /dev/null +++ b/subs/convert/.gitignore @@ -0,0 +1,3 @@ +lts-haskell/ +stackage-nightly/ +stackage-snapshots/ diff --git a/subs/convert/convert-old-stackage.sh b/subs/convert/convert-old-stackage.sh new file mode 100755 index 0000000000..7dddf1752a --- /dev/null +++ b/subs/convert/convert-old-stackage.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +set -eux + +cd $(dirname ${BASH_SOURCE[0]}) + +for d in lts-haskell stackage-nightly stackage-snapshots +do + if [[ ! -d "$d" ]] + then + git clone https://github.com/commercialhaskell/$d + else + (cd "$d" && git pull || echo "Git pull failed, ignoring") + fi +done + +stack build --flag pantry:convert-old-stackage pantry:convert-old-stackage --exec convert-old-stackage diff --git a/subs/convert/make-global-hints.hs b/subs/convert/make-global-hints.hs new file mode 100755 index 0000000000..f6ad703d9e --- /dev/null +++ b/subs/convert/make-global-hints.hs @@ -0,0 +1,37 @@ +#!/usr/bin/env stack +-- stack --resolver lts-12.0 script +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +import RIO +import qualified RIO.Map as Map +import Conduit +import Data.Yaml + +main :: IO () +main = runSimpleApp $ do + m <- runConduitRes $ allFiles .| foldMC addFile mempty + liftIO $ encodeFile "global-hints.yaml" m + +allFiles = + sourceDirectoryDeep True "stackage-snapshots/lts" *> + sourceDirectoryDeep True "stackage-snapshots/nightly" + +addFile m fp = do + GlobalHints ghc packages <- liftIO $ decodeFileThrow fp + evaluate $ Map.insert ghc + (case Map.lookup ghc m of + 
Nothing -> packages + Just packages' -> Map.unionWith + (\x y -> + if x == y + then x + else error $ show (ghc, fp, x, y)) + packages + packages') m + +data GlobalHints = GlobalHints !Text !(Map Text Text) + +instance FromJSON GlobalHints where + parseJSON = withObject "GlobalHints" $ \o -> GlobalHints + <$> o .: "compiler" + <*> o .: "global-hints" diff --git a/subs/curator/.gitignore b/subs/curator/.gitignore new file mode 100644 index 0000000000..76e5f5f0a7 --- /dev/null +++ b/subs/curator/.gitignore @@ -0,0 +1,6 @@ +build-constraints.yaml +constraints.yaml +curator.cabal +snapshot-incomplete.yaml +snapshot.yaml +unpack-dir/ diff --git a/subs/curator/README.md b/subs/curator/README.md new file mode 100644 index 0000000000..242a033caf --- /dev/null +++ b/subs/curator/README.md @@ -0,0 +1,80 @@ +# curator + +Snapshot curator tool for, e.g., creating Stackage snapshots. + +This is the "curator 2.0", replacing +https://github.com/fpco/stackage-curator. It relies on pantry for +finding appropriate packages, and Stack for performing the builds. It +is intended to be much simpler to maintain than the old +stackage-curator tool. + +## Incomplete! + +This tool is not yet complete. Here's a (likely incomplete) list of +things that still need to be handled to replace `stackage-curator`: + +* Collect the Haddocks in a way that stackage-server can handle them +* Proper CLI, right now the `app/Main.hs` just runs through a bunch of + steps. We need to have individual commands like the current tool, so + each command can be called in an appropriately locked-down Docker + container. +* Logic for uploading generated snapshots and other info to Github, + S3, etc. +* Ability to roll an LTS minor version bump. +* Ability to specify package locations from Git. +* External, but: stackage-server needs to be updated to support the + new snapshot format/location +* No support for custom configure arguments from `build-constraints.yaml`. 
I'd + like to see if we can get rid of them entirely and instead just customize the + Docker build image. + +## Basic workflow + +Here's a rundown of how this tool is intended to be used. + +We update the Hackage index to get a list of all of the most recent +package versions. This is pantry's `updateHackageIndex` command. + +We start with `build-constraints.yaml`, the configuration file in +commercialhaskell/stackage. This specifies all of the packages we want +to include in a snapshot, along with a bunch of configuration. + +We parse `build-constraints.yaml` and convert it into the +`constraints.yaml` file, which contains a more properly structured set +of constraints. We'll continue to let users edit the +`build-constraints.yaml` file, since it's more user-friendly. But +`constraints.yaml` gives us more flexibility. + +* For LTS minor bumps, instead of generating `constraints.yaml` from + `build-constraints.yaml`, we'll take the `constraints.yaml` used for + the last LTS release in the series. Details still need to be worked + out on how upper bounds are added and where this file is stored. + +Curator team: at this point, you can edit `constraints.yaml` to make +tweaks to the build plan. This replaces the old `CONSTRAINTS` +environment variable. + +We combine the `constraints.yaml` file and the information from +Hackage to produce `snapshot-incomplete.yaml`. This has a concrete +list of all of the packages we intend to include in the +snapshot. Again, this file can be manually modified if desired. + +* When we support Git repos, we'll also be checking those repos to + find the latest appropriate release. We'll need to figure out + exactly how that plays in with LTS upper bounds; I'm thinking we'll + have logic like "use commit X, or the latest if it meets version + range Y." + +The `snapshot-incomplete.yaml` file does not have all of the +cryptographic hashes necessary for fully reproducible builds. We next +generate `snapshot.yaml` with all of this information.
This file +should _never be manually edited_, instead edits should occur at the +`snapshot-incomplete.yaml` and `constraints.yaml` phases. + +We unpack all of the package specified by `snapshot.yaml` into a local +directory, and generate a `stack.yaml` that gives instructions to +build all of those packages. + +We build the packages, run test suites, and generate Haddocks. + +__TODO__ Grab artifacts and upload them to the right place. diff --git a/subs/curator/app/Main.hs b/subs/curator/app/Main.hs new file mode 100644 index 0000000000..ed6638d3ce --- /dev/null +++ b/subs/curator/app/Main.hs @@ -0,0 +1,84 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +import Curator hiding (Snapshot) +import Data.Yaml (encodeFile, decodeFileThrow) +import Options.Generic (ParseRecord, getRecord) +import Path.IO (resolveFile', resolveDir') +import RIO.Process +import qualified Curator + +data CuratorOptions + = Update + | Constraints + | SnapshotIncomplete + | Snapshot + | Unpack + | Build + deriving (Eq, Show, Generic) + +instance ParseRecord CuratorOptions + +main :: IO () +main = runPantryApp $ + getRecord "curator" >>= \case + Update -> + update + Constraints -> + constraints + SnapshotIncomplete -> + snapshotIncomplete + Snapshot -> + snapshot + Unpack -> + unpackFiles + Build -> + build + +update :: RIO PantryApp () +update = do + void $ updateHackageIndex $ Just "Updating hackage index" + +constraints :: RIO PantryApp () +constraints = do + logInfo "Writing constraints.yaml" + loadStackageConstraints "build-constraints.yaml" >>= liftIO . encodeFile "constraints.yaml" + +snapshotIncomplete :: RIO PantryApp () +snapshotIncomplete = do + logInfo "Writing snapshot-incomplete.yaml" + decodeFileThrow "constraints.yaml" >>= \constraints' -> + makeSnapshot constraints' "my-test-snapshot-2" >>= + liftIO . 
encodeFile "snapshot-incomplete.yaml" + +snapshot :: RIO PantryApp () +snapshot = do + logInfo "Writing snapshot.yaml" + incomplete <- loadPantrySnapshotFile "snapshot-incomplete.yaml" + complete <- completeSnapshot incomplete + liftIO $ encodeFile "snapshot.yaml" complete + +unpackFiles :: RIO PantryApp () +unpackFiles = do + logInfo "Unpacking files" + snapshot' <- loadPantrySnapshotFile "snapshot.yaml" + constraints' <- decodeFileThrow "constraints.yaml" + dest <- resolveDir' "unpack-dir" + unpackSnapshot constraints' snapshot' dest + +build :: RIO PantryApp () +build = do + logInfo "Building" + withWorkingDir "unpack-dir" $ proc + "stack" + (words "build --test --bench --no-rerun-tests --no-run-benchmarks --haddock") + runProcess_ + +loadPantrySnapshotFile :: FilePath -> RIO PantryApp Curator.Snapshot +loadPantrySnapshotFile fp = do + abs' <- resolveFile' fp + eres <- loadSnapshot $ SLFilePath (ResolvedPath (RelFilePath (fromString fp)) abs') + case eres of + Left x -> error $ "should not happen: " ++ show (fp, x) + Right (x, _) -> pure x diff --git a/subs/curator/package.yaml b/subs/curator/package.yaml new file mode 100644 index 0000000000..a57677fda9 --- /dev/null +++ b/subs/curator/package.yaml @@ -0,0 +1,24 @@ +name: curator +version: 2.0.0.0 + +dependencies: +- base +- rio +- pantry +- Cabal +- yaml +- path +- path-io +- optparse-generic + +library: + source-dirs: src + exposed-modules: + - Curator + +executables: + curator: + source-dirs: app + main: Main.hs + dependencies: + - curator diff --git a/subs/curator/src/Curator.hs b/subs/curator/src/Curator.hs new file mode 100644 index 0000000000..3770682274 --- /dev/null +++ b/subs/curator/src/Curator.hs @@ -0,0 +1,9 @@ +module Curator + ( module Export + ) where + +import Curator.StackageConstraints as Export +import Curator.Snapshot as Export +import Curator.Unpack as Export +import Pantry as Export +import RIO as Export \ No newline at end of file diff --git a/subs/curator/src/Curator/Snapshot.hs 
b/subs/curator/src/Curator/Snapshot.hs new file mode 100644 index 0000000000..131d5fe79d --- /dev/null +++ b/subs/curator/src/Curator/Snapshot.hs @@ -0,0 +1,94 @@ +module Curator.Snapshot + ( makeSnapshot + ) where + +import RIO +import RIO.Process +import Curator.Types +import Pantry +import qualified RIO.Map as Map +import Distribution.Types.VersionRange (withinRange) + +makeSnapshot + :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => Constraints + -> Text -- ^ name + -> RIO env Snapshot +makeSnapshot cons name = do + locs <- traverseValidate (uncurry toLoc) $ Map.toList $ consPackages cons + pure Snapshot + { snapshotParent = SLCompiler $ WCGhc $ consGhcVersion cons + , snapshotCompiler = Nothing + , snapshotName = name + , snapshotLocations = catMaybes locs + , snapshotDropPackages = mempty + , snapshotFlags = Map.mapMaybe getFlags (consPackages cons) + , snapshotHidden = Map.filter id (pcHide <$> consPackages cons) + , snapshotGhcOptions = mempty + } + +getFlags :: PackageConstraints -> Maybe (Map FlagName Bool) +getFlags pc + | Map.null (pcFlags pc) = Nothing + | otherwise = Just (pcFlags pc) + +toLoc + :: (HasPantryConfig env, HasLogFunc env) + => PackageName + -> PackageConstraints + -> RIO env (Maybe PackageLocationImmutable) +toLoc name pc = + case pcSource pc of + PSHackage (HackageSource mrange mrequiredLatest revisions) -> do + versions <- getHackagePackageVersions IgnorePreferredVersions name -- don't follow the preferred versions on Hackage, give curators more control + when (Map.null versions) $ error $ "Package not found on Hackage: " ++ packageNameString name + for_ mrequiredLatest $ \required -> + case Map.maxViewWithKey versions of + Nothing -> error $ "No versions found for " ++ packageNameString name + Just ((version, _), _) + | version == required -> pure () + | otherwise -> error $ concat + [ "For package " + , fromString (packageNameString name) + , ", required latest version to be " + , fromString (versionString 
required) + , ", but actual latest is " + , fromString (versionString version) + ] + let versions' = + case mrange of + Nothing -> versions + Just range -> Map.filterWithKey (\v _ -> v `withinRange` range) versions + case Map.maxViewWithKey versions' of + Nothing -> pure Nothing -- argument could be made for erroring out, but currently used by curators to mean "don't include this"... + Just ((version, revs), _) -> do + let viewer = + case revisions of + NoRevisions -> Map.minView + UseRevisions -> Map.maxView + cfi <- + case viewer revs of + Nothing -> error $ "Impossible! No revisions found for " ++ show (name, version) + Just (BlobKey sha size, _) -> pure $ CFIHash sha $ Just size + pure $ Just $ PLIHackage (PackageIdentifierRevision name version cfi) Nothing + +traverseValidate + :: (MonadUnliftIO m, Traversable t) + => (a -> m b) + -> t a + -> m (t b) +traverseValidate f t = do + errsRef <- newIORef id + let f' a = f a `catchAny` \e -> do + modifyIORef' errsRef $ (. (e:)) + pure $ impureThrow e -- should never be called + res <- traverse f' t + errs <- ($ []) <$> readIORef errsRef + case errs of + [] -> pure res + [x] -> throwIO x + _ -> throwIO $ TraverseValidateExceptions errs + +newtype TraverseValidateExceptions = TraverseValidateExceptions [SomeException] + deriving (Show, Typeable) +instance Exception TraverseValidateExceptions diff --git a/subs/curator/src/Curator/StackageConstraints.hs b/subs/curator/src/Curator/StackageConstraints.hs new file mode 100644 index 0000000000..43a00d8511 --- /dev/null +++ b/subs/curator/src/Curator/StackageConstraints.hs @@ -0,0 +1,207 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} +-- | Deal with the @build-constraints.yaml@ format used by +-- @commercialhaskell/stackage@. 
+module Curator.StackageConstraints + ( loadStackageConstraints + ) where + +import Pantry +import Curator.Types +import RIO +import qualified RIO.Text as T +import qualified RIO.Map as Map +import qualified RIO.Set as Set +import Distribution.Types.VersionRange (VersionRange, intersectVersionRanges) +import Data.Yaml +import Distribution.Text (simpleParse) + +data SC = SC + { scGhcVersion :: !Version + -- ^ GHC version to use + + , scPackages :: !(Map PackageName (Set Maintainer, [VersionRange])) + -- ^ Packages to include + + , scFlags :: !(Map PackageName (Map FlagName Bool)) + -- ^ Flags for those packages + + -- FIXME let's see if we can work around this with changes to the Docker image + -- , scConfigureArgs :: !(Map PackageName [Text]) + + , scSkippedBuilds :: !(Set PackageName) + -- ^ Include the package in the snapshot, but don't build + -- it. Intended for Windows-specific packages. + + , scSkippedTests :: !(Set PackageName) + -- ^ Don't even try to build the tests, for out-of-bounds dependencies + + , scExpectedTestFailures :: !(Set PackageName) + -- ^ Test suites which are expected to fail. Run them, but don't + -- error out if they fail. 
+ + , scSkippedBenchmarks :: !(Set PackageName) + -- ^ Like 'scSkippedTests' + + , scExpectedBenchmarkFailures :: !(Set PackageName) + -- ^ Like 'scExpectedTestFailures' + + , scExpectedHaddockFailures :: !(Set PackageName) + -- ^ Haddocks don't build successfully + + , scSkippedHaddocks :: !(Set PackageName) + -- ^ Sometimes Haddock is really flaky + + -- FIXME deal with all of the github-users and ping logic + + , scTellMeWhenItsReleased :: !(Map PackageName Version) + + , scHide :: !(Set PackageName) + + , scNoRevisions :: !(Set PackageName) + + , scNonParallelBuilds :: !(Set PackageName) + } + deriving Show + +instance FromJSON SC where + parseJSON = withObject "StackageConstraints" $ \o -> do + CabalString scGhcVersion <- o .: "ghc-version" + scPackages <- convertPackages <$> o .: "packages" + scFlags <- fmap unCabalStringMap . unCabalStringMap <$> o .: "package-flags" + + scSkippedBuilds <- Set.map unCabalString <$> o .: "skipped-builds" + + scSkippedTests <- Set.map unCabalString <$> o .: "skipped-tests" + scSkippedBenchmarks <- Set.map unCabalString <$> o .: "skipped-benchmarks" + scSkippedHaddocks <- Set.map unCabalString <$> o .: "skipped-haddocks" + + scExpectedTestFailures <- Set.map unCabalString <$> o .: "expected-test-failures" + scExpectedBenchmarkFailures <- Set.map unCabalString <$> o .: "expected-benchmark-failures" + scExpectedHaddockFailures <- Set.map unCabalString <$> o .: "expected-haddock-failures" + + scHide <- Set.map unCabalString <$> o .: "hide" + scNoRevisions <- Set.map unCabalString <$> o .: "no-revisions" + scTellMeWhenItsReleased <- + mconcat + .
map (\(CabalString (PackageIdentifier name version)) -> Map.singleton name version) + <$> o .: "tell-me-when-its-released" + scNonParallelBuilds <- Set.map unCabalString <$> o .: "non-parallel-builds" + + pure SC {..} + +data PackageRange = PackageRange !PackageName !(Maybe VersionRange) +instance FromJSON PackageRange where + parseJSON = withText "PackageRange" $ \t -> do + let s = T.unpack t + maybe (fail $ "Invalid PackageRange: " ++ s) pure $ do + let (nameT, T.strip -> rangeT) = T.break (== ' ') t + name <- simpleParse $ T.unpack nameT + mrange <- + if T.null rangeT + then Just Nothing + else fmap Just $ simpleParse $ T.unpack rangeT + pure $ PackageRange name mrange + +convertPackages + :: Map Maintainer [PackageRange] + -> Map PackageName (Set Maintainer, [VersionRange]) +convertPackages = + Map.fromListWith combine . concatMap go . Map.toList + where + go (maintainer, prs) = map + (\(PackageRange name mrange) -> + ( name + , ( Set.singleton maintainer + , maybeToList mrange + ) + ) + ) + prs + + combine (a, x) (b, y) = (a <> b, x <> y) + +loadStackageConstraints :: FilePath -> RIO env Constraints +loadStackageConstraints = decodeFileThrow >=> convert + +convert :: SC -> RIO env Constraints +convert sc0 = do + let (sc1, packages, errs) = + foldl' + go + (sc0, mempty, []) + (Map.toList (scPackages sc0)) + unless (null errs) $ error $ unlines errs + -- check that all of the fields are empty now + pure Constraints + { consGhcVersion = scGhcVersion sc1 + , consPackages = packages + } + where + go :: (SC, Map PackageName PackageConstraints, [String]) + -> (PackageName, (Set Maintainer, [VersionRange])) + -> (SC, Map PackageName PackageConstraints, [String]) + go (sc1, m, errs) (name, (maintainers, ranges)) = + case res of + Left e -> (sc2, m, e : errs) + Right pc -> (sc2, Map.insert name pc m, errs) + where + sc2 = sc1 + { scTellMeWhenItsReleased = Map.delete name $ scTellMeWhenItsReleased sc1 + , scNoRevisions = Set.delete name $ scNoRevisions sc1 + , scFlags = 
Map.delete name $ scFlags sc1 + , scSkippedBuilds = Set.delete name $ scSkippedBuilds sc1 + , scNonParallelBuilds = Set.delete name $ scNonParallelBuilds sc1 + , scExpectedTestFailures = Set.delete name $ scExpectedTestFailures sc1 + , scSkippedTests = Set.delete name $ scSkippedTests sc1 + , scExpectedBenchmarkFailures = Set.delete name $ scExpectedBenchmarkFailures sc1 + , scSkippedBenchmarks = Set.delete name $ scSkippedBenchmarks sc1 + , scExpectedHaddockFailures = Set.delete name $ scExpectedHaddockFailures sc1 + , scSkippedHaddocks = Set.delete name $ scSkippedHaddocks sc1 + , scHide = Set.delete name $ scHide sc1 + } + res = do + tests <- + case (Set.member name $ scExpectedTestFailures sc1, Set.member name $ scSkippedTests sc1) of + (False, False) -> Right CAExpectSuccess + (True, False) -> Right CAExpectFailure + (False, True) -> Right CASkip + (True, True) -> Right CASkip -- Left $ "Cannot skip and expect test failure: " ++ displayC name + + benchmarks <- + case (Set.member name $ scExpectedBenchmarkFailures sc1, Set.member name $ scSkippedBenchmarks sc1) of + (False, False) -> Right CAExpectSuccess + (True, False) -> Right CAExpectFailure + (False, True) -> Right CASkip + (True, True) -> Right CASkip -- Left $ "Cannot skip and expect benchmark failure: " ++ displayC name + + haddock <- + case (Set.member name $ scExpectedHaddockFailures sc1, Set.member name $ scSkippedHaddocks sc1) of + (False, False) -> Right CAExpectSuccess + (True, False) -> Right CAExpectFailure + (False, True) -> Right CASkip + (True, True) -> Right CASkip -- Left $ "Cannot skip and expect haddock failure: " ++ displayC name + + Right PackageConstraints + { pcMaintainers = maintainers + , pcSource = PSHackage $ HackageSource + { hsRange = + case ranges of + [] -> Nothing + r:rs -> Just $ foldl' intersectVersionRanges r rs + , hsRequiredLatest = Map.lookup name (scTellMeWhenItsReleased sc1) + , hsRevisions = + if Set.member name (scNoRevisions sc1) + then NoRevisions + else 
UseRevisions + } + , pcFlags = fromMaybe mempty $ Map.lookup name $ scFlags sc1 + , pcSkipBuild = Set.member name $ scSkippedBuilds sc1 + , pcNonParallelBuild = Set.member name $ scNonParallelBuilds sc1 + , pcTests = tests + , pcBenchmarks = benchmarks + , pcHaddock = haddock + , pcHide = Set.member name $ scHide sc1 + } diff --git a/subs/curator/src/Curator/Types.hs b/subs/curator/src/Curator/Types.hs new file mode 100644 index 0000000000..8cfec62629 --- /dev/null +++ b/subs/curator/src/Curator/Types.hs @@ -0,0 +1,153 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +module Curator.Types + ( Constraints (..) + , PackageConstraints (..) + , PackageSource (..) + , HackageSource (..) + , Maintainer + , Revisions (..) + , ComponentAction (..) + ) where + +import RIO +import Pantry +import Distribution.Types.VersionRange (VersionRange) +import Data.Yaml +import qualified RIO.Map as Map +import qualified RIO.Set as Set + +type Maintainer = Text + +data Constraints = Constraints + { consGhcVersion :: !Version + , consPackages :: !(Map PackageName PackageConstraints) + } + deriving Show + +instance ToJSON Constraints where + toJSON c = object + [ "ghc-version" .= CabalString (consGhcVersion c) + , "packages" .= toCabalStringMap (consPackages c) + ] +instance FromJSON Constraints where + parseJSON = withObject "Constraints" $ \o -> Constraints + <$> fmap unCabalString (o .: "ghc-version") + <*> fmap unCabalStringMap (o .: "packages") + +data PackageConstraints = PackageConstraints + { pcMaintainers :: !(Set Maintainer) + , pcSource :: !PackageSource + , pcFlags :: !(Map FlagName Bool) + , pcSkipBuild :: !Bool + , pcTests :: !ComponentAction + , pcBenchmarks :: !ComponentAction + , pcHaddock :: !ComponentAction + , pcNonParallelBuild :: !Bool + , pcHide :: !Bool + } + deriving Show + +instance ToJSON PackageConstraints where + toJSON pc = object $ concat + [ if Set.null (pcMaintainers pc) + then [] + else ["maintainers" .= pcMaintainers pc] + , 
["source" .= pcSource pc] + , if Map.null (pcFlags pc) + then [] + else ["flags" .= toCabalStringMap (pcFlags pc)] + , if pcSkipBuild pc then ["skip-build" .= True] else [] + , case pcTests pc of + CAExpectSuccess -> [] + x -> ["tests" .= x] + , case pcBenchmarks pc of + CAExpectSuccess -> [] + x -> ["benchmarks" .= x] + , case pcHaddock pc of + CAExpectSuccess -> [] + x -> ["haddock" .= x] + , if pcNonParallelBuild pc + then ["non-parallel-build" .= True] + else [] + , if pcHide pc + then ["hide" .= True] + else [] + ] +instance FromJSON PackageConstraints where + parseJSON = withObject "PackageConstraints" $ \o -> PackageConstraints + <$> o .:? "maintainers" .!= mempty + <*> o .: "source" + <*> fmap unCabalStringMap (o .:? "flags" .!= mempty) + <*> o .:? "skip-build" .!= False + <*> o .:? "tests" .!= CAExpectSuccess + <*> o .:? "benchmarks" .!= CAExpectSuccess + <*> o .:? "haddock" .!= CAExpectSuccess + <*> o .:? "non-parallel-build" .!= False + <*> o .:? "hide" .!= False + +data PackageSource + = PSHackage !HackageSource + deriving Show +instance ToJSON PackageSource where + toJSON (PSHackage hs) = object $ ("type" .= ("hackage" :: Text)) : hsToPairs hs +instance FromJSON PackageSource where + parseJSON = withObject "PackageSource" $ \o -> do + typ <- o .: "type" + case typ :: Text of + "hackage" -> PSHackage <$> hackage o + _ -> fail $ "Invalid type: " ++ show typ + where + hackage o = HackageSource + <$> fmap (fmap unCabalString) (o .:? "range") + <*> fmap (fmap unCabalString) (o .:? "required-latest") + <*> o .:? 
"revisions" .!= NoRevisions + +data HackageSource = HackageSource + { hsRange :: !(Maybe VersionRange) + , hsRequiredLatest :: !(Maybe Version) + -- ^ required latest version, for tell-me-when-its-released + , hsRevisions :: !Revisions + } + deriving Show + +hsToPairs :: HackageSource -> [(Text, Value)] +hsToPairs hs = concat + [ maybe [] (\range -> ["range" .= CabalString range]) (hsRange hs) + , maybe [] (\v -> ["required-latest" .= CabalString v]) (hsRequiredLatest hs) + , case hsRevisions hs of + NoRevisions -> [] -- the only sane default, of course + UseRevisions -> ["revisions" .= UseRevisions] + ] + +data ComponentAction + = CAExpectSuccess + | CAExpectFailure + | CASkip + deriving Show +instance ToJSON ComponentAction where + toJSON CAExpectSuccess = toJSON ("expect-success" :: Text) + toJSON CAExpectFailure = toJSON ("expect-failure" :: Text) + toJSON CASkip = toJSON ("skip" :: Text) +instance FromJSON ComponentAction where + parseJSON = withText "ComponentAction" $ \t -> + case t of + "expect-success" -> pure CAExpectSuccess + "expect-failure" -> pure CAExpectFailure + "skip" -> pure CASkip + _ -> fail $ "Invalid component action: " ++ show t + +data Revisions + = UseRevisions + | NoRevisions + deriving Show + +instance ToJSON Revisions where + toJSON UseRevisions = toJSON ("use-revisions" :: Text) + toJSON NoRevisions = toJSON ("no-revisions" :: Text) +instance FromJSON Revisions where + parseJSON = withText "Revisions" $ \t -> + case t of + "use-revisions" -> pure UseRevisions + "no-revisions" -> pure NoRevisions + _ -> fail $ "Invalid revisions: " ++ show t \ No newline at end of file diff --git a/subs/curator/src/Curator/Unpack.hs b/subs/curator/src/Curator/Unpack.hs new file mode 100644 index 0000000000..19bcc5ed72 --- /dev/null +++ b/subs/curator/src/Curator/Unpack.hs @@ -0,0 +1,78 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +-- | Unpack packages and write out a stack.yaml +module Curator.Unpack + ( unpackSnapshot + ) 
where + +import RIO +import RIO.Process (HasProcessContext) +import Pantry +import Curator.Types +import Path +import Path.IO +import qualified RIO.Text as T +import Data.Yaml +import qualified RIO.Map as Map +import qualified RIO.Set as Set + +unpackSnapshot + :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => Constraints + -> Snapshot + -> Path Abs Dir + -> RIO env () +unpackSnapshot cons snap root = do + unpacked <- parseRelDir "unpacked" + (suffixes, flags, skipTest, skipBench, skipHaddock) <- fmap fold $ for (snapshotLocations snap) $ \pl -> do + TreeKey (BlobKey sha _size) <- getPackageLocationTreeKey pl + PackageIdentifier name version <- getPackageLocationIdent pl + pc <- + case Map.lookup name $ consPackages cons of + Nothing -> error $ "Package not found in constraints: " ++ packageNameString name + Just pc -> pure pc + if pcSkipBuild pc + then pure mempty + else do + let suffixBuilder = + fromString (packageNameString name) <> + "-" <> + fromString (versionString version) <> + "@" <> + display sha + suffixTmp <- parseRelDir $ T.unpack $ utf8BuilderToText $ suffixBuilder <> ".tmp" + let destTmp = root unpacked suffixTmp + suffix <- parseRelDir $ T.unpack $ utf8BuilderToText suffixBuilder + let dest = root unpacked suffix + exists <- doesDirExist dest + unless exists $ do + ignoringAbsence $ removeDirRecur destTmp + ensureDir destTmp + logInfo $ "Unpacking " <> display pl + unpackPackageLocation destTmp pl + renameDir destTmp dest + pure + ( Set.singleton suffix + , if Map.null (pcFlags pc) then Map.empty else Map.singleton name (pcFlags pc) + , case pcTests pc of + CAExpectSuccess -> mempty + _ -> Set.singleton name -- FIXME this and others, want to differentiate skip and expect failure + , case pcBenchmarks pc of + CAExpectSuccess -> mempty + _ -> Set.singleton name + , case pcHaddock pc of + CAExpectSuccess -> mempty + _ -> Set.singleton name + ) + stackYaml <- parseRelFile "stack.yaml" + let stackYamlFP = toFilePath $ root stackYaml + 
liftIO $ encodeFile stackYamlFP $ object + [ "resolver" .= ("ghc-" ++ versionString (consGhcVersion cons)) + , "packages" .= Set.map (\suffix -> toFilePath (unpacked suffix)) suffixes + , "flags" .= fmap toCabalStringMap (toCabalStringMap flags) + , "curator" .= object + [ "skip-test" .= Set.map CabalString skipTest + , "skip-bench" .= Set.map CabalString skipBench + , "skip-haddock" .= Set.map CabalString skipHaddock + ] + ] diff --git a/subs/pantry/ChangeLog.md b/subs/pantry/ChangeLog.md new file mode 100644 index 0000000000..56de604439 --- /dev/null +++ b/subs/pantry/ChangeLog.md @@ -0,0 +1,5 @@ +# Changelog for pantry + +## 0.1.0.0 + +* Initial release diff --git a/subs/pantry/README.md b/subs/pantry/README.md new file mode 100644 index 0000000000..4049a13d56 --- /dev/null +++ b/subs/pantry/README.md @@ -0,0 +1,218 @@ +# pantry + +TODO: Add Travis and AppVeyor badges + +Content addressable Haskell package management, providing for secure, +reproducible acquisition of Haskell package contents and metadata. + +## What is Pantry + +* A Haskell library, command line executable, storage specification, and + network protocol +* Intended for content-addressable storage of Haskell packages +* Allows non-centralized package storage +* Primarily for use by Stackage and Stack, hopefully other tools as well + +## Goals + +* Efficient, distributed package storage for Haskell +* Superset of existing storage mechanisms +* Security via content addressable storage +* Allow more Stackage-style snapshots to exist +* Allow authors to bypass Hackage for uploads +* Allow Stackage to create forks of packages on Hackage + +__TODO__ + +Content below needs to be updated. + +* Support for hpack in PackageLocationImmutable? + +## Package definition + +Pantry defines the following concepts: + +* __Blob__: a raw byte sequence, identified by its key (SHA256 of the + contents) +* __Tree entry__: contents of a single file (identified by blob key) + and whether or not it is executable. 
+ * NOTE: existing package formats like tarballs support more + sophisticated options. We explicitly do not support those. If + such functionality is needed, fallback to those mechanisms is + required. +* __Tree__: mapping from relative path to a tree entry. Some basic + sanity rules apply to the paths: no `.` or `..` directory + components, no newlines in filepaths, does not begin with `/`, no + `\\` (we normalize to POSIX-style paths). A tree is identified by a + tree key (SHA256 of the tree's serialized format). +* __Package__: a tree key for the package contents, package name, + version number, and cabal file blob key. Requirements: there must be + a single file with a `.cabal` file extension at the root of the + tree, and it must match the cabal file blob key. The cabal file must + be located at `pkgname.cabal`. Each tree can be in at most one + package, and therefore tree keys work as package keys too. + +Note that with the above, a tree key is all the information necessary +to uniquely identify a package. However, including additional +information (package name, version, cabal key) in config files may be +useful for optimizations or user friendliness. If such extra +information is ever included, it must be validated to concur with the +package contents itself. + +### Package location + +Packages will optionally be sourced from some location: + +* __Hackage__ requires the package name, version number, and revision + number. Each revision of a package will end up with a different tree + key. +* __Archive__ takes a URL pointing to a tarball (gzipped or not) or a + ZIP file. An implicit assumption is that archives remain immutable + over time. Use tree keys to verify this assumption. (Same applies to + Hackage for that matter.) +* __Repository__ takes a repo type (Git or Mercurial), URL, and + commit. Assuming the veracity of the cryptographic hashes on the + repos, this should guarantee a unique set of files.
+ +In order to deal with _megarepos_ (repos and archives containing more +than one package), there is also a subdirectory for the archive and +repository cases. An empty subdir `""` would be the case for a +standard repo/archive. + +In order to meet the rules of a package listed above, the following +logic is applied to all three types above: + +* Find all of the files in the raw location, and represent as `Map + FilePath TreeEntry` (or equivalent). +* Remove a wrapper directory. If _all_ filepaths in that `Map` are + contained within the same directory, strip it from all of the + paths. For example, if the paths are `foo/bar` and `foo/baz`, the + paths will be reduced to `bar` and `baz`. +* After this wrapper is removed, then subdirectory logic is applied, + essentially applying `stripPrefix` to the filepaths. If the subdir + is `yesod-bin` and files exist called `yesod-core/yesod-core.cabal` + and `yesod-bin/yesod-bin.cabal`, the only file remaining after + subdir stripping would be `yesod-bin.cabal`. Note that trailing + slashes must be handled appropriately, and that an empty subdir + string results in this step being a noop. + +The result of all of this is that, given one of the three package +locations above, we can receive a tree key which will provide an +installable package. That tree key will remain immutable. + +### How tooling refers to packages + +We'll get to the caching mechanism for Pantry below. However, the +recommended approach for tooling is to support some kind of composite +of the Pantry keys, parsed info, and raw package location. This allows +for more efficient lookups when available, with a fallback when +mirrors don't have the needed information. 
+ +An example: + +```yaml +extra-deps: +- name: foobar + version: 1.2.3.4 + pantry: deadbeef # tree key + cabal-file: 12345678 # blob key + archive: https://example.com/foobar-1.2.3.4.tar.gz +``` + +It is also recommended that tooling provide an easy way to generate +such complete information from, e.g., just the URL of the tarball, and +that upon reading information, hashes, package names, and version +numbers are all checked for correctness. + +## Pantry caching + +One simplistic option for Pantry would be that, every time a piece of +data is needed, Pantry downloads the necessary tarball/Git +repo/etc. However, this would in practice be highly wasteful, since +downloading Git repos and archives just to get a single cabal file +(for plan construction purposes) is overkill. Instead, here's the +basic idea for how caching works: + +* All data for Pantry can be stored in a SQL database. Local tools + like Stack will use an SQLite database. Servers will use PostgreSQL. +* We'll define a network protocol (initially just HTTP, maybe + extending to something more efficient if desired) for querying blobs + and trees. +* When a blob or tree is needed, it is first checked for in the local + SQLite cache. If it's not available there, a request to the Pantry + mirrors (configurable) will be made for the data. Since everything + is content addressable, it is safe to use untrusted mirrors. +* If the data is not available in a mirror, and a location is + provided, the location will be downloaded and cached locally. + +We may also allow these Pantry mirrors to provide some kind of query +interface to find out, e.g., the latest version of a package on +Hackage. That's still TBD. 
+
+## Example: resolving a package location
+
+To work through a full example, the following three stanzas are intended to
+have equivalent behavior:
+
+```yaml
+- archive: https://example.com/foobar-1.2.3.4.tar.gz
+
+- name: foobar
+  version: 1.2.3.4
+  pantry: deadbeef # tree key
+  cabal-file: 12345678 # blob key
+  archive: https://example.com/foobar-1.2.3.4.tar.gz
+
+- pantry: deadbeef
+
+```
+
+The question is: how is the first one (presumably what a user would want to
+enter) resolved into the second and third? Pantry would follow this set of
+steps:
+
+* Download the tarball from the given URL
+* Place each file in the tarball into its store as a blob, getting a blob key
+  for each. The tarball is now represented as `Map FilePath BlobKey`
+* Perform the root directory stripping step, removing a shared path
+* Since there's no subdirectory: no subdirectory stripping would be performed
+* Serialize the `Map FilePath BlobKey` to a binary format and take its hash to
+  get a tree key
+* Store the tree in the store referenced by its tree key. In our example: the
+  tree key is `deadbeef`.
+* Ensure that the tree is a valid package by checking for a single cabal file
+  at the root. In our example, that's found in `foobar.cabal` with blob key
+  `12345678`.
+* Parse the cabal file and ensure that it is a valid cabal file, and that its
+  package name is `foobar`. Grab the version number (1.2.3.4).
+* We now know that tree key `deadbeef` is a valid package, and can refer to it
+  by tree key exclusively. However, including the other information allows us
+  to verify our assumptions, provide user-friendly readable data, and provide a
+  fallback if the package isn't in the Pantry cache.
+
+## More advanced content discovery
+
+There are three more advanced cases to consider:
+
+* Providing fall-back locations for content, such as out of concern for a
+  single URL being removed in the future
+* Closed corporate setups, where access to the general internet may either be
+  impossible or undesirable
+* Automatic discovery of missing content by hash
+
+The following extensions are possible to address these cases:
+
+* Instead of a single package location, provide a list of package locations
+  with fallback semantics.
+* Corporate environments will be encouraged to run a local Pantry mirror, and
+  configure clients like Stack to speak to these mirrors instead of the default
+  ones (or in addition to).
+* Provide some kind of federation protocol for Pantry where servers can
+  register with each other and requests for content can be pinged to each
+  other.
+
+Providing override at the client level for Pantry mirror locations is a
+__MUST__. Making it easy to run in a corporate environment is a __SHOULD__.
+Providing the fallback package locations seems easy enough that we should
+include it initially, but falls under a __SHOULD__. The federated protocol
+should be added on-demand.
diff --git a/subs/pantry/attic/package-0.1.2.3.tar.gz b/subs/pantry/attic/package-0.1.2.3.tar.gz
new file mode 100644
index 0000000000..a28a03742b
Binary files /dev/null and b/subs/pantry/attic/package-0.1.2.3.tar.gz differ
diff --git a/subs/pantry/package.yaml b/subs/pantry/package.yaml
new file mode 100644
index 0000000000..354a6a60c6
--- /dev/null
+++ b/subs/pantry/package.yaml
@@ -0,0 +1,85 @@
+name: pantry
+version: 0.1.0.0
+synopsis: Content addressable Haskell package management
+description: Please see the README and documentation at
+category: Development
+author: Michael Snoyman
+maintainer: michael@snoyman.com
+copyright: 2018 FP Complete
+license: MIT
+github: commercialhaskell/pantry # TODO move to this repo!
+
+default-extensions:
+- MonadFailDesugaring
+
+extra-source-files:
+- README.md
+- ChangeLog.md
+# Merged into the single key above: a duplicate extra-source-files key
+- attic/package-0.1.2.3.tar.gz
+
+dependencies:
+- base
+- digest
+- rio
+- aeson
+- text
+- unordered-containers
+- containers
+- path
+- transformers
+- generic-deriving
+- unliftio
+- http-conduit
+- http-client-tls
+- http-types
+- http-client
+- conduit
+- bytestring
+- network-uri
+- hackage-security
+- primitive
+- vector
+- memory
+- store # TODO remove
+- cryptonite
+- cryptonite-conduit
+- persistent
+- persistent-sqlite >= 2.8.2
+- persistent-template
+- resource-pool
+- Cabal
+- path-io
+- rio-orphans
+- conduit-extra
+- tar-conduit
+- time
+- unix-compat
+- hpack >= 0.29.6
+- yaml
+- zip-archive
+- text-metrics
+- resourcet
+
+library:
+  source-dirs: src/
+  exposed-modules:
+  - Pantry
+  - Pantry.SHA256
+  - Data.Aeson.Extended
+
+  # For testing
+  - Pantry.Internal
+  - Pantry.Internal.StaticBytes
+
+tests:
+  spec:
+    source-dirs: test
+    main: Spec.hs
+    dependencies:
+    - pantry
+    - hspec
+    - exceptions
+    - hedgehog
+    - QuickCheck
diff --git a/subs/pantry/pantry.cabal b/subs/pantry/pantry.cabal
new file mode 100644
index 0000000000..f91f3d2d34
--- /dev/null
+++ b/subs/pantry/pantry.cabal
@@ -0,0 +1,155 @@
+cabal-version: >= 1.10
+
+-- This file has been generated from package.yaml by hpack version 0.29.6.
+-- +-- see: https://github.com/sol/hpack +-- +-- hash: eba6c9bbaefbe2f054bed34aa2f27e1055f41fa29b37db14a5f70ff84151c739 + +name: pantry +version: 0.1.0.0 +synopsis: Content addressable Haskell package management +description: Please see the README and documentation at +category: Development +homepage: https://github.com/commercialhaskell/pantry#readme +bug-reports: https://github.com/commercialhaskell/pantry/issues +author: Michael Snoyman +maintainer: michael@snoyman.com +copyright: 2018 FP Complete +license: MIT +build-type: Simple +extra-source-files: + attic/package-0.1.2.3.tar.gz + +source-repository head + type: git + location: https://github.com/commercialhaskell/pantry + +library + exposed-modules: + Pantry + Pantry.SHA256 + Data.Aeson.Extended + Pantry.Internal + Pantry.Internal.StaticBytes + other-modules: + Hackage.Security.Client.Repository.HttpLib.HttpClient + Pantry.Archive + Pantry.Hackage + Pantry.HTTP + Pantry.Repo + Pantry.Storage + Pantry.Tree + Pantry.Types + Paths_pantry + hs-source-dirs: + src/ + default-extensions: MonadFailDesugaring + build-depends: + Cabal + , aeson + , base + , bytestring + , conduit + , conduit-extra + , containers + , cryptonite + , cryptonite-conduit + , digest + , generic-deriving + , hackage-security + , hpack >=0.29.6 + , http-client + , http-client-tls + , http-conduit + , http-types + , memory + , network-uri + , path + , path-io + , persistent + , persistent-sqlite >=2.8.2 + , persistent-template + , primitive + , resource-pool + , resourcet + , rio + , rio-orphans + , store + , tar-conduit + , text + , text-metrics + , time + , transformers + , unix-compat + , unliftio + , unordered-containers + , vector + , yaml + , zip-archive + default-language: Haskell2010 + +test-suite spec + type: exitcode-stdio-1.0 + main-is: Spec.hs + other-modules: + Pantry.ArchiveSpec + Pantry.BuildPlanSpec + Pantry.CabalSpec + Pantry.HackageSpec + Pantry.Internal.StaticBytesSpec + Pantry.InternalSpec + Pantry.TreeSpec + 
Pantry.TypesSpec + Paths_pantry + hs-source-dirs: + test + default-extensions: MonadFailDesugaring + build-depends: + Cabal + , QuickCheck + , aeson + , base + , bytestring + , conduit + , conduit-extra + , containers + , cryptonite + , cryptonite-conduit + , digest + , exceptions + , generic-deriving + , hackage-security + , hedgehog + , hpack >=0.29.6 + , hspec + , http-client + , http-client-tls + , http-conduit + , http-types + , memory + , network-uri + , pantry + , path + , path-io + , persistent + , persistent-sqlite >=2.8.2 + , persistent-template + , primitive + , resource-pool + , resourcet + , rio + , rio-orphans + , store + , tar-conduit + , text + , text-metrics + , time + , transformers + , unix-compat + , unliftio + , unordered-containers + , vector + , yaml + , zip-archive + default-language: Haskell2010 diff --git a/src/Data/Aeson/Extended.hs b/subs/pantry/src/Data/Aeson/Extended.hs similarity index 93% rename from src/Data/Aeson/Extended.hs rename to subs/pantry/src/Data/Aeson/Extended.hs index a767d03775..17d6af6e01 100644 --- a/src/Data/Aeson/Extended.hs +++ b/subs/pantry/src/Data/Aeson/Extended.hs @@ -35,7 +35,7 @@ import qualified Data.Set as Set import Data.Text (unpack) import qualified Data.Text as T import Generics.Deriving.Monoid (mappenddefault, memptydefault) -import Stack.Prelude +import RIO -- | Extends @.:@ warning to include field name. (.:) :: FromJSON a => Object -> Text -> Parser a @@ -166,10 +166,13 @@ data JSONWarning = JSONUnrecognizedFields String [Text] | JSONGeneralWarning !Text deriving Eq instance Show JSONWarning where - show (JSONUnrecognizedFields obj [field]) = - "Unrecognized field in " <> obj <> ": " <> T.unpack field - show (JSONUnrecognizedFields obj fields) = - "Unrecognized fields in " <> obj <> ": " <> T.unpack (T.intercalate ", " fields) - show (JSONGeneralWarning t) = T.unpack t + show = T.unpack . utf8BuilderToText . 
display +instance Display JSONWarning where + display (JSONUnrecognizedFields obj [field]) = + "Unrecognized field in " <> fromString obj <> ": " <> display field + display (JSONUnrecognizedFields obj fields) = + "Unrecognized fields in " <> fromString obj <> ": " <> display (T.intercalate ", " fields) + display (JSONGeneralWarning t) = display t + instance IsString JSONWarning where fromString = JSONGeneralWarning . T.pack diff --git a/src/Hackage/Security/Client/Repository/HttpLib/HttpClient.hs b/subs/pantry/src/Hackage/Security/Client/Repository/HttpLib/HttpClient.hs similarity index 58% rename from src/Hackage/Security/Client/Repository/HttpLib/HttpClient.hs rename to subs/pantry/src/Hackage/Security/Client/Repository/HttpLib/HttpClient.hs index 258613e13f..9a2a002f54 100644 --- a/src/Hackage/Security/Client/Repository/HttpLib/HttpClient.hs +++ b/subs/pantry/src/Hackage/Security/Client/Repository/HttpLib/HttpClient.hs @@ -3,13 +3,12 @@ {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE OverloadedStrings #-} -- Taken from -- https://github.com/well-typed/hackage-security/tree/master/hackage-security-http-client -- to avoid extra dependencies module Hackage.Security.Client.Repository.HttpLib.HttpClient ( - makeHttpLib - -- ** Re-exports - , Manager -- opaque + httpLib ) where import Control.Exception @@ -18,8 +17,7 @@ import Data.ByteString (ByteString) import Network.URI import qualified Data.ByteString as BS import qualified Data.ByteString.Char8 as BS.C8 -import Network.HTTP.StackClient (Manager) -import qualified Network.HTTP.StackClient as StackClient +import qualified Pantry.HTTP as HTTP import Hackage.Security.Client hiding (Header) import Hackage.Security.Client.Repository.HttpLib @@ -30,11 +28,11 @@ import qualified Hackage.Security.Util.Lens as Lens Top-level API -------------------------------------------------------------------------------} --- | Create an 'HttpLib' value from a 
preexisting 'Manager'. -makeHttpLib :: Manager -> HttpLib -makeHttpLib manager = HttpLib - { httpGet = get manager - , httpGetRange = getRange manager +-- | An 'HttpLib' value using the default global manager +httpLib :: HttpLib +httpLib = HttpLib + { httpGet = get + , httpGetRange = getRange } {------------------------------------------------------------------------------- @@ -42,69 +40,65 @@ makeHttpLib manager = HttpLib -------------------------------------------------------------------------------} get :: Throws SomeRemoteError - => Manager - -> [HttpRequestHeader] -> URI + => [HttpRequestHeader] -> URI -> ([HttpResponseHeader] -> BodyReader -> IO a) -> IO a -get manager reqHeaders uri callback = wrapCustomEx $ do +get reqHeaders uri callback = wrapCustomEx $ do -- TODO: setUri fails under certain circumstances; in particular, when -- the URI contains URL auth. Not sure if this is a concern. - request' <- StackClient.setUri StackClient.defaultRequest uri + request' <- HTTP.setUri HTTP.defaultRequest uri let request = setRequestHeaders reqHeaders request' - checkHttpException $ StackClient.withResponseByManager request manager $ \response -> do - let br = wrapCustomEx $ StackClient.responseBody response + checkHttpException $ HTTP.withResponse request $ \response -> do + let br = wrapCustomEx $ HTTP.getResponseBody response callback (getResponseHeaders response) br getRange :: Throws SomeRemoteError - => Manager - -> [HttpRequestHeader] -> URI -> (Int, Int) + => [HttpRequestHeader] -> URI -> (Int, Int) -> (HttpStatus -> [HttpResponseHeader] -> BodyReader -> IO a) -> IO a -getRange manager reqHeaders uri (from, to) callback = wrapCustomEx $ do - request' <- StackClient.setUri StackClient.defaultRequest uri +getRange reqHeaders uri (from, to) callback = wrapCustomEx $ do + request' <- HTTP.setUri HTTP.defaultRequest uri let request = setRange from to $ setRequestHeaders reqHeaders request' - checkHttpException $ StackClient.withResponseByManager request manager $ 
\response -> do - let br = wrapCustomEx $ StackClient.responseBody response + checkHttpException $ HTTP.withResponse request $ \response -> do + let br = wrapCustomEx $ HTTP.getResponseBody response case () of - () | StackClient.responseStatus response == StackClient.partialContent206 -> + () | HTTP.getResponseStatus response == HTTP.partialContent206 -> callback HttpStatus206PartialContent (getResponseHeaders response) br - () | StackClient.responseStatus response == StackClient.ok200 -> + () | HTTP.getResponseStatus response == HTTP.ok200 -> callback HttpStatus200OK (getResponseHeaders response) br _otherwise -> - throwChecked $ StackClient.HttpExceptionRequest request - $ StackClient.StatusCodeException (void response) "" + throwChecked $ HTTP.HttpExceptionRequest request + $ HTTP.StatusCodeException (void response) "" -- | Wrap custom exceptions -- -- NOTE: The only other exception defined in @http-client@ is @TimeoutTriggered@ -- but it is currently disabled -wrapCustomEx :: (Throws StackClient.HttpException => IO a) +wrapCustomEx :: (Throws HTTP.HttpException => IO a) -> (Throws SomeRemoteError => IO a) -wrapCustomEx act = handleChecked (\(ex :: StackClient.HttpException) -> go ex) act +wrapCustomEx act = handleChecked (\(ex :: HTTP.HttpException) -> go ex) act where go ex = throwChecked (SomeRemoteError ex) -checkHttpException :: Throws StackClient.HttpException => IO a -> IO a -checkHttpException = handle $ \(ex :: StackClient.HttpException) -> +checkHttpException :: Throws HTTP.HttpException => IO a -> IO a +checkHttpException = handle $ \(ex :: HTTP.HttpException) -> throwChecked ex {------------------------------------------------------------------------------- http-client auxiliary -------------------------------------------------------------------------------} -hAcceptRanges :: StackClient.HeaderName +hAcceptRanges :: HTTP.HeaderName hAcceptRanges = "Accept-Ranges" -hAcceptEncoding :: StackClient.HeaderName +hAcceptEncoding :: HTTP.HeaderName 
hAcceptEncoding = "Accept-Encoding" setRange :: Int -> Int - -> StackClient.Request -> StackClient.Request -setRange from to req = req { - StackClient.requestHeaders = (StackClient.hRange, rangeHeader) - : StackClient.requestHeaders req - } + -> HTTP.Request -> HTTP.Request +setRange from to = + HTTP.addRequestHeader HTTP.hRange rangeHeader where -- Content-Range header uses inclusive rather than exclusive bounds -- See @@ -112,42 +106,41 @@ setRange from to req = req { -- | Set request headers setRequestHeaders :: [HttpRequestHeader] - -> StackClient.Request -> StackClient.Request -setRequestHeaders opts req = req { - StackClient.requestHeaders = trOpt disallowCompressionByDefault opts - } + -> HTTP.Request -> HTTP.Request +setRequestHeaders opts = + HTTP.setRequestHeaders (trOpt disallowCompressionByDefault opts) where - trOpt :: [(StackClient.HeaderName, [ByteString])] + trOpt :: [(HTTP.HeaderName, [ByteString])] -> [HttpRequestHeader] - -> [StackClient.Header] + -> [HTTP.Header] trOpt acc [] = concatMap finalizeHeader acc trOpt acc (HttpRequestMaxAge0:os) = - trOpt (insert StackClient.hCacheControl ["max-age=0"] acc) os + trOpt (insert HTTP.hCacheControl ["max-age=0"] acc) os trOpt acc (HttpRequestNoTransform:os) = - trOpt (insert StackClient.hCacheControl ["no-transform"] acc) os + trOpt (insert HTTP.hCacheControl ["no-transform"] acc) os -- disable content compression (potential security issue) - disallowCompressionByDefault :: [(StackClient.HeaderName, [ByteString])] + disallowCompressionByDefault :: [(HTTP.HeaderName, [ByteString])] disallowCompressionByDefault = [(hAcceptEncoding, [])] -- Some headers are comma-separated, others need multiple headers for -- multiple options. -- -- TODO: Right we we just comma-separate all of them. 
- finalizeHeader :: (StackClient.HeaderName, [ByteString]) - -> [StackClient.Header] + finalizeHeader :: (HTTP.HeaderName, [ByteString]) + -> [HTTP.Header] finalizeHeader (name, strs) = [(name, BS.intercalate ", " (reverse strs))] insert :: Eq a => a -> [b] -> [(a, [b])] -> [(a, [b])] insert x y = Lens.modify (Lens.lookupM x) (++ y) -- | Extract the response headers -getResponseHeaders :: StackClient.Response a -> [HttpResponseHeader] +getResponseHeaders :: HTTP.Response a -> [HttpResponseHeader] getResponseHeaders response = concat [ [ HttpResponseAcceptRangesBytes | (hAcceptRanges, "bytes") `elem` headers ] ] where - headers = StackClient.responseHeaders response + headers = HTTP.getResponseHeaders response diff --git a/subs/pantry/src/Pantry.hs b/subs/pantry/src/Pantry.hs new file mode 100644 index 0000000000..dcd2682ffa --- /dev/null +++ b/subs/pantry/src/Pantry.hs @@ -0,0 +1,859 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE ViewPatterns #-} +-- | Content addressable Haskell package management, providing for +-- secure, reproducible acquisition of Haskell package contents and +-- metadata. +-- +-- @since 0.1.0.0 +module Pantry + ( -- * Running + PantryConfig + , HackageSecurityConfig (..) + , defaultHackageSecurityConfig + , HasPantryConfig (..) + , withPantryConfig + , HpackExecutable (..) + + -- ** Convenience + , PantryApp + , runPantryApp + , runPantryAppClean + + -- * Types + + -- ** Exceptions + , PantryException (..) + + -- ** Cabal types + , PackageName + , Version + , FlagName + , PackageIdentifier (..) + + -- ** Files + , FileSize (..) + , RelFilePath (..) + , ResolvedPath (..) + , Unresolved + + -- ** Cryptography + , SHA256 + , TreeKey (..) + , BlobKey (..) + + -- ** Packages + , PackageMetadata (..) + , Package (..) + + -- ** Hackage + , CabalFileInfo (..) + , Revision (..) + , PackageIdentifierRevision (..) + , UsePreferredVersions (..) 
+ + -- ** Archives + , Archive (..) + , ArchiveLocation (..) + + -- ** Repos + , Repo (..) + , RepoType (..) + + -- ** Package location + , PackageLocation (..) + , PackageLocationImmutable (..) + + -- ** Snapshots + , SnapshotLocation (..) + , Snapshot (..) + , WantedCompiler (..) + + -- * Loading values + , resolvePaths + , loadPackage + , loadSnapshot + + -- * Completion functions + , completePackageLocation + , completeSnapshot + , completeSnapshotLocation + + -- * Parsers + , parseWantedCompiler + , parseSnapshotLocation + , parsePackageIdentifierRevision + + -- ** Cabal values + , parsePackageIdentifier + , parsePackageName + , parsePackageNameThrowing + , parseFlagName + , parseVersion + , parseVersionThrowing + + -- * Stackage snapshots + , ltsSnapshotLocation + , nightlySnapshotLocation + + -- * Cabal helpers + , packageIdentifierString + , packageNameString + , flagNameString + , versionString + , moduleNameString + , CabalString (..) + , toCabalStringMap + , unCabalStringMap + , gpdPackageIdentifier + , gpdPackageName + , gpdVersion + + -- * Package location + , fetchPackages + , unpackPackageLocation + , getPackageLocationIdent + , getPackageLocationTreeKey + + -- * Cabal files + , loadCabalFile + , loadCabalFileImmutable + , loadCabalFilePath + , PrintWarnings (..) + + -- * Hackage index + , updateHackageIndex + , DidUpdateOccur (..) 
+ , hackageIndexTarballL + , getHackagePackageVersions + , getLatestHackageVersion + , getHackageTypoCorrections + ) where + +import RIO +import Conduit +import qualified RIO.Map as Map +import qualified RIO.ByteString as B +import qualified RIO.Text as T +import qualified RIO.List as List +import qualified RIO.FilePath as FilePath +import Pantry.Archive +import Pantry.Repo +import qualified Pantry.SHA256 as SHA256 +import Pantry.Storage +import Pantry.Tree +import Pantry.Types +import Pantry.Hackage +import Path (Path, Abs, File, toFilePath, Dir, (), filename, parseAbsDir, parent, parseRelFile) +import Path.IO (doesFileExist, resolveDir', listDir) +import Distribution.PackageDescription (GenericPackageDescription, FlagName) +import qualified Distribution.PackageDescription as D +import Distribution.Parsec.Common (PWarning (..), showPos) +import qualified Hpack +import qualified Hpack.Config as Hpack +import RIO.Process +import RIO.Directory (getAppUserDataDirectory) +import qualified Data.Yaml as Yaml +import Data.Aeson.Extended (WithJSONWarnings (..), Value) +import Data.Aeson.Types (parseEither) +import Data.Monoid (Endo (..)) +import Pantry.HTTP +import Data.Char (isHexDigit) + +-- | Create a new 'PantryConfig' with the given settings. +-- +-- For something easier to use in simple cases, see 'runPantryApp'. +-- +-- @since 0.1.0.0 +withPantryConfig + :: HasLogFunc env + => Path Abs Dir + -- ^ pantry root directory, where the SQLite database and Hackage + -- downloads are kept. + -> HackageSecurityConfig + -- ^ Hackage configuration. You probably want + -- 'defaultHackageSecurityConfig'. + -> HpackExecutable + -- ^ When converting an hpack @package.yaml@ file to a cabal file, + -- what version of hpack should we use? 
+ -> Int + -- ^ Maximum connection count + -> (PantryConfig -> RIO env a) + -- ^ What to do with the config + -> RIO env a +withPantryConfig root hsc he count inner = do + env <- ask + pantryRelFile <- parseRelFile "pantry.sqlite3" + -- Silence persistent's logging output, which is really noisy + runRIO (mempty :: LogFunc) $ initStorage (root pantryRelFile) $ \storage -> runRIO env $ do + ur <- newMVar True + ref1 <- newIORef mempty + ref2 <- newIORef mempty + inner PantryConfig + { pcHackageSecurity = hsc + , pcHpackExecutable = he + , pcRootDir = root + , pcStorage = storage + , pcUpdateRef = ur + , pcConnectionCount = count + , pcParsedCabalFilesImmutable = ref1 + , pcParsedCabalFilesMutable = ref2 + } + +-- | Default 'HackageSecurityConfig' value using the official Hackage server. +-- +-- @since 0.1.0.0 +defaultHackageSecurityConfig :: HackageSecurityConfig +defaultHackageSecurityConfig = HackageSecurityConfig + { hscKeyIds = + [ "0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d" + , "1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42" + , "280b10153a522681163658cb49f632cde3f38d768b736ddbc901d99a1a772833" + , "2a96b1889dc221c17296fcc2bb34b908ca9734376f0f361660200935916ef201" + , "2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3" + , "51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921" + , "772e9f4c7db33d251d5c6e357199c819e569d130857dc225549b40845ff0890d" + , "aa315286e6ad281ad61182235533c41e806e5a787e0b6d1e7eef3f09d137d2e9" + , "fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0" + ] + , hscKeyThreshold = 3 + , hscDownloadPrefix = "https://hackage.haskell.org/" + } + +-- | Returns the latest version of the given package available from +-- Hackage. 
+-- +-- @since 0.1.0.0 +getLatestHackageVersion + :: (HasPantryConfig env, HasLogFunc env) + => PackageName -- ^ package name + -> UsePreferredVersions + -> RIO env (Maybe PackageIdentifierRevision) +getLatestHackageVersion name preferred = + ((fmap fst . Map.maxViewWithKey) >=> go) <$> getHackagePackageVersions preferred name + where + go (version, m) = do + (_rev, BlobKey sha size) <- fst <$> Map.maxViewWithKey m + pure $ PackageIdentifierRevision name version $ CFIHash sha $ Just size + +fetchTreeKeys + :: (HasPantryConfig env, HasLogFunc env, Foldable f) + => f TreeKey + -> RIO env () +fetchTreeKeys _ = + logWarn "Network caching not yet implemented!" -- TODO pantry wire + +-- | Download all of the packages provided into the local cache +-- without performing any unpacking. Can be useful for build tools +-- wanting to prefetch or provide an offline mode. +-- +-- @since 0.1.0.0 +fetchPackages + :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env, Foldable f) + => f PackageLocationImmutable + -> RIO env () +fetchPackages pls = do + fetchTreeKeys $ mapMaybe getTreeKey $ toList pls + traverseConcurrently_ (void . uncurry getHackageTarball) hackages + -- TODO in the future, be concurrent in these as well + fetchArchives archives + fetchRepos repos + where + s x = Endo (x:) + run (Endo f) = f [] + (hackagesE, archivesE, reposE) = foldMap go pls + hackages = run hackagesE + archives = run archivesE + repos = run reposE + + go (PLIHackage pir mtree) = (s (pir, mtree), mempty, mempty) + go (PLIArchive archive pm) = (mempty, s (archive, pm), mempty) + go (PLIRepo repo pm) = (mempty, mempty, s (repo, pm)) + +-- | Unpack a given 'PackageLocationImmutable' into the given +-- directory. Does not generate any extra subdirectories. 
+-- +-- @since 0.1.0.0 +unpackPackageLocation + :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => Path Abs Dir -- ^ unpack directory + -> PackageLocationImmutable + -> RIO env () +unpackPackageLocation fp loc = loadPackage loc >>= unpackTree loc fp . packageTree + +-- | Load the cabal file for the given 'PackageLocationImmutable'. +-- +-- This function ignores all warnings. +-- +-- Note that, for now, this will not allow support for hpack files in +-- these package locations. Instead, all @PackageLocationImmutable@s +-- will require a .cabal file. This may be relaxed in the future. +-- +-- @since 0.1.0.0 +loadCabalFileImmutable + :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => PackageLocationImmutable + -> RIO env GenericPackageDescription +loadCabalFileImmutable loc = withCache $ do + logDebug $ "Parsing cabal file for " <> display loc + bs <- loadCabalFileBytes loc + let foundCabalKey = BlobKey (SHA256.hashBytes bs) (FileSize (fromIntegral (B.length bs))) + (_warnings, gpd) <- rawParseGPD (Left loc) bs + let pm = + case loc of + PLIHackage (PackageIdentifierRevision name version cfi) mtree -> PackageMetadata + { pmName = Just name + , pmVersion = Just version + , pmTreeKey = mtree + , pmCabal = + case cfi of + CFIHash sha (Just size) -> Just $ BlobKey sha size + _ -> Nothing + } + PLIArchive _ pm' -> pm' + PLIRepo _ pm' -> pm' + let exc = MismatchedPackageMetadata loc pm Nothing foundCabalKey (gpdPackageIdentifier gpd) + maybe (throwIO exc) pure $ do + guard $ maybe True (== gpdPackageName gpd) (pmName pm) + guard $ maybe True (== gpdVersion gpd) (pmVersion pm) + guard $ maybe True (== foundCabalKey) (pmCabal pm) + pure gpd + where + withCache inner = do + ref <- view $ pantryConfigL.to pcParsedCabalFilesImmutable + m0 <- readIORef ref + case Map.lookup loc m0 of + Just x -> pure x + Nothing -> do + x <- inner + atomicModifyIORef' ref $ \m -> (Map.insert loc x m, x) + +-- | Same as 'loadCabalFileImmutable', but takes a +-- 
'PackageLocation'. Never prints warnings, see 'loadCabalFilePath' +-- for that. +-- +-- @since 0.1.0.0 +loadCabalFile + :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => PackageLocation + -> RIO env GenericPackageDescription +loadCabalFile (PLImmutable loc) = loadCabalFileImmutable loc +loadCabalFile (PLMutable rfp) = fst <$> loadCabalFilePath (resolvedAbsolute rfp) NoPrintWarnings + +-- | Should we print warnings when loading a cabal file? +-- +-- @since 0.1.0.0 +data PrintWarnings = YesPrintWarnings | NoPrintWarnings + +-- | Parse the cabal file for the package inside the given +-- directory. Performs various sanity checks, such as the file name +-- being correct and having only a single cabal file. +-- +-- @since 0.1.0.0 +loadCabalFilePath + :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => Path Abs Dir -- ^ project directory, with a cabal file or hpack file + -> PrintWarnings + -> RIO env (GenericPackageDescription, Path Abs File) +loadCabalFilePath dir printWarnings = do + ref <- view $ pantryConfigL.to pcParsedCabalFilesMutable + mcached <- atomicModifyIORef' ref $ \m -> + case (Map.lookup dir m, printWarnings) of + (Nothing, _) -> (m, Nothing) + (Just (gpd, file, warnings@(_:_)), YesPrintWarnings) -> + -- There are warnings and we're going to print them, so remove + -- from the cache. + (Map.insert dir (gpd, file, []) m, Just (gpd, file, warnings)) + (Just triple, _) -> (m, Just triple) + case mcached of + Just (gpd, cabalfp, warnings) -> do + mapM_ (logWarn . toPretty cabalfp) warnings + pure (gpd, cabalfp) + Nothing -> do + cabalfp <- findOrGenerateCabalFile dir + bs <- liftIO $ B.readFile $ toFilePath cabalfp + (warnings0, gpd) <- rawParseGPD (Right cabalfp) bs + warnings <- + case printWarnings of + YesPrintWarnings -> mapM_ (logWarn . 
toPretty cabalfp) warnings0 $> warnings0 + NoPrintWarnings -> pure warnings0 + checkCabalFileName (gpdPackageName gpd) cabalfp + atomicModifyIORef' ref $ \m -> (Map.insert dir (gpd, cabalfp, warnings) m, (gpd, cabalfp)) + where + toPretty :: Path Abs File -> PWarning -> Utf8Builder + toPretty src (PWarning _type pos msg) = + "Cabal file warning in" <> + fromString (toFilePath src) <> "@" <> + fromString (showPos pos) <> ": " <> + fromString msg + + -- | Check if the given name in the @Package@ matches the name of the .cabal file + checkCabalFileName :: MonadThrow m => PackageName -> Path Abs File -> m () + checkCabalFileName name cabalfp = do + -- Previously, we just use parsePackageNameFromFilePath. However, that can + -- lead to confusing error messages. See: + -- https://github.com/commercialhaskell/stack/issues/895 + let expected = T.unpack $ unSafeFilePath $ cabalFileName name + when (expected /= toFilePath (filename cabalfp)) + $ throwM $ MismatchedCabalName cabalfp name + +-- | Get the filename for the cabal file in the given directory. +-- +-- If no .cabal file is present, or more than one is present, an exception is +-- thrown via 'throwM'. +-- +-- If the directory contains a file named package.yaml, hpack is used to +-- generate a .cabal file from it. +findOrGenerateCabalFile + :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => Path Abs Dir -- ^ package directory + -> RIO env (Path Abs File) +findOrGenerateCabalFile pkgDir = do + hpack pkgDir + findCabalFile1 + where + findCabalFile1 :: RIO env (Path Abs File) + findCabalFile1 = findCabalFile2 >>= either throwIO return + + findCabalFile2 :: RIO env (Either PantryException (Path Abs File)) + findCabalFile2 = do + files <- filter (flip hasExtension "cabal" . toFilePath) . snd + <$> listDir pkgDir + return $ case files of + [] -> Left $ NoCabalFileFound pkgDir + [x] -> Right x + -- If there are multiple files, ignore files that start with + -- ".". 
On unixlike environments these are hidden, and this + -- character is not valid in package names. The main goal is + -- to ignore emacs lock files - see + -- https://github.com/commercialhaskell/stack/issues/1897. + (filter (not . ("." `List.isPrefixOf`) . toFilePath . filename) -> [x]) -> Right x + _:_ -> Left $ MultipleCabalFilesFound pkgDir files + where hasExtension fp x = FilePath.takeExtension fp == "." ++ x + +-- | Generate .cabal file from package.yaml, if necessary. +hpack + :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => Path Abs Dir + -> RIO env () +hpack pkgDir = do + packageConfigRelFile <- parseRelFile Hpack.packageConfig + let hpackFile = pkgDir packageConfigRelFile + exists <- liftIO $ doesFileExist hpackFile + when exists $ do + logDebug $ "Running hpack on " <> fromString (toFilePath hpackFile) + + he <- view $ pantryConfigL.to pcHpackExecutable + case he of + HpackBundled -> do + r <- liftIO $ Hpack.hpackResult $ Hpack.setProgramName "stack" $ Hpack.setTarget (toFilePath hpackFile) Hpack.defaultOptions + forM_ (Hpack.resultWarnings r) (logWarn . fromString) + let cabalFile = fromString . Hpack.resultCabalFile $ r + case Hpack.resultStatus r of + Hpack.Generated -> logDebug $ "hpack generated a modified version of " <> cabalFile + Hpack.OutputUnchanged -> logDebug $ "hpack output unchanged in " <> cabalFile + Hpack.AlreadyGeneratedByNewerHpack -> logWarn $ + cabalFile <> + " was generated with a newer version of hpack,\n" <> + "please upgrade and try again." + Hpack.ExistingCabalFileWasModifiedManually -> logWarn $ + cabalFile <> + " was modified manually. Ignoring " <> + fromString (toFilePath hpackFile) <> + " in favor of the cabal file.\nIf you want to use the " <> + fromString (toFilePath (filename hpackFile)) <> + " file instead of the cabal file,\n" <> + "then please delete the cabal file." 
+ HpackCommand command -> + withWorkingDir (toFilePath pkgDir) $ + proc command [] runProcess_ + +-- | Get the 'PackageIdentifier' from a 'GenericPackageDescription'. +-- +-- @since 0.1.0.0 +gpdPackageIdentifier :: GenericPackageDescription -> PackageIdentifier +gpdPackageIdentifier = D.package . D.packageDescription + +-- | Get the 'PackageName' from a 'GenericPackageDescription'. +-- +-- @since 0.1.0.0 +gpdPackageName :: GenericPackageDescription -> PackageName +gpdPackageName = pkgName . gpdPackageIdentifier + +-- | Get the 'Version' from a 'GenericPackageDescription'. +-- +-- @since 0.1.0.0 +gpdVersion :: GenericPackageDescription -> Version +gpdVersion = pkgVersion . gpdPackageIdentifier + +loadCabalFileBytes + :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => PackageLocationImmutable + -> RIO env ByteString + +-- Just ignore the mtree for this. Safe assumption: someone who filled +-- in the TreeKey also filled in the cabal file hash, and that's a +-- more efficient lookup mechanism. +loadCabalFileBytes (PLIHackage pir _mtree) = getHackageCabalFile pir + +loadCabalFileBytes pl = do + package <- loadPackage pl + let sfp = cabalFileName $ pkgName $ packageIdent package + TreeEntry cabalBlobKey _ft = packageCabalEntry package + mbs <- withStorage $ loadBlob cabalBlobKey + case mbs of + Nothing -> do + -- TODO when we have pantry wire, try downloading + throwIO $ TreeReferencesMissingBlob pl sfp cabalBlobKey + Just bs -> pure bs + +-- | Load a 'Package' from a 'PackageLocationImmutable'. +-- +-- @since 0.1.0.0 +loadPackage + :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => PackageLocationImmutable + -> RIO env Package +loadPackage (PLIHackage pir mtree) = getHackageTarball pir mtree +loadPackage pli@(PLIArchive archive pm) = getArchive pli archive pm +loadPackage (PLIRepo repo pm) = getRepo repo pm + +-- | Fill in optional fields in a 'PackageLocationImmutable' for more reproducible builds. 
--
-- @since 0.1.0.0
completePackageLocation
  :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env)
  => PackageLocationImmutable
  -> RIO env PackageLocationImmutable
-- Already fully pinned: a hashed cabal file plus a tree key leaves
-- nothing to fill in, so return the location untouched.
completePackageLocation orig@(PLIHackage (PackageIdentifierRevision _ _ CFIHash{}) (Just _)) = pure orig
completePackageLocation (PLIHackage rev0@(PackageIdentifierRevision name version cfi0) _) = do
  logDebug $ "Completing package location information from " <> display rev0
  rev <-
    case cfi0 of
      CFIHash{} -> pure rev0
      _ -> do
        -- Pin the revision by hashing the actual cabal file contents.
        cabalBytes <- getHackageCabalFile rev0
        let hashed = CFIHash (SHA256.hashBytes cabalBytes) (Just (FileSize (fromIntegral (B.length cabalBytes))))
            rev = PackageIdentifierRevision name version hashed
        logDebug $ "Added in cabal file hash: " <> display rev
        pure rev
  treeKey <- getHackageTarballKey rev
  pure $ PLIHackage rev (Just treeKey)
completePackageLocation pl@(PLIArchive archive pm) =
  PLIArchive <$> completeArchive archive <*> completePM pl pm
completePackageLocation pl@(PLIRepo repo pm) = do
  -- Only an immutable commit (a full SHA1) can be considered complete;
  -- branch or tag names may move over time.
  unless (isSHA1 (repoCommit repo)) $ throwIO $ CannotCompleteRepoNonSHA1 repo
  PLIRepo repo <$> completePM pl pm
  where
    isSHA1 t = T.length t == 40 && T.all isHexDigit t

-- | Fill in the hash and size of an archive when either is missing,
-- by fetching (or opening) the archive once.
completeArchive
  :: (HasPantryConfig env, HasLogFunc env)
  => Archive
  -> RIO env Archive
completeArchive archive@(Archive _ (Just _) (Just _) _) = pure archive
completeArchive archive@(Archive loc _ _ subdir) =
  withArchiveLoc archive $ \_fp sha size ->
    pure $ Archive loc (Just sha) (Just size) subdir

-- | Fill in any missing fields of a 'PackageMetadata' from the actual
-- package contents, failing if user-supplied fields disagree with
-- what was loaded.
completePM
  :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env)
  => PackageLocationImmutable
  -> PackageMetadata
  -> RIO env PackageMetadata
completePM plOrig pm
  | isCompletePM pm = pure pm
  | otherwise = do
      package <- loadPackage plOrig
      let ident = packageIdent package
          completed = PackageMetadata
            { pmName = Just $ pkgName ident
            , pmVersion = Just $ pkgVersion ident
            , pmTreeKey = Just $ packageTreeKey package
            , pmCabal = Just $ teBlob $ packageCabalEntry package
            }

          -- A missing user-supplied field matches anything.
          matches (Just x) (Just y) = x == y
          matches _ _ = True

          consistent = and
            [ matches (pmName completed) (pmName pm)
            , matches (pmVersion completed) (pmVersion pm)
            , matches (pmTreeKey completed) (pmTreeKey pm)
            , matches (pmCabal completed) (pmCabal pm)
            ]
      if consistent
        then pure completed
        else throwIO $ CompletePackageMetadataMismatch plOrig completed
  where
    isCompletePM (PackageMetadata (Just _) (Just _) (Just _) (Just _)) = True
    isCompletePM _ = False

-- | Add in hashes to make a 'SnapshotLocation' reproducible.
--
-- @since 0.1.0.0
completeSnapshotLocation
  :: (HasPantryConfig env, HasLogFunc env)
  => SnapshotLocation
  -> RIO env SnapshotLocation
completeSnapshotLocation sl@SLCompiler{} = pure sl
completeSnapshotLocation sl@SLFilePath{} = pure sl
completeSnapshotLocation sl@(SLUrl _ (Just _)) = pure sl
completeSnapshotLocation (SLUrl url Nothing) = do
  -- Download once to compute the pinning blob key (hash + size).
  bs <- loadFromURL url Nothing
  let blobKey = BlobKey (SHA256.hashBytes bs) (FileSize $ fromIntegral $ B.length bs)
  pure $ SLUrl url (Just blobKey)

-- | Fill in optional fields in a 'Snapshot' for more reproducible builds.
--
-- @since 0.1.0.0
completeSnapshot
  :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env)
  => Snapshot
  -> RIO env Snapshot
completeSnapshot snapshot = do
  completedParent <- completeSnapshotLocation $ snapshotParent snapshot
  -- Package locations are independent, so complete them concurrently.
  completedLocations <- traverseConcurrently completePackageLocation $ snapshotLocations snapshot
  pure snapshot
    { snapshotParent = completedParent
    , snapshotLocations = completedLocations
    }

-- | Like 'traverse_', but runs actions concurrently, bounded by the
-- pantry connection count.
traverseConcurrently_
  :: (Foldable f, HasPantryConfig env)
  => (a -> RIO env ()) -- ^ action to perform
  -> f a -- ^ input values
  -> RIO env ()
traverseConcurrently_ f t0 = do
  workers <- view $ pantryConfigL.to pcConnectionCount
  traverseConcurrentlyWith_ workers f t0

-- | Worker-pool version of 'traverse_': a shared queue of inputs is
-- drained by the given number of concurrent workers.
traverseConcurrentlyWith_
  :: (MonadUnliftIO m, Foldable f)
  => Int -- ^ concurrent workers
  -> (a -> m ()) -- ^ action to perform
  -> f a -- ^ input values
  -> m ()
traverseConcurrentlyWith_ count f t0 = do
  remaining <- newTVarIO $ toList t0
  let worker = join $ atomically $ do
        pending <- readTVar remaining
        case pending of
          [] -> pure (pure ())
          x:rest -> do
            writeTVar remaining rest
            pure $ do
              f x
              worker
  replicateConcurrently_ count worker

-- | Like 'traverse', but runs actions concurrently, bounded by the
-- pantry connection count.
traverseConcurrently
  :: (HasPantryConfig env, Traversable t)
  => (a -> RIO env b) -- ^ action to perform
  -> t a -- ^ input values
  -> RIO env (t b)
traverseConcurrently f t0 = do
  workers <- view $ pantryConfigL.to pcConnectionCount
  traverseConcurrentlyWith workers f t0

-- | Like 'traverse', but does things on
-- up to N separate threads at once.
traverseConcurrentlyWith
  :: (MonadUnliftIO m, Traversable t)
  => Int -- ^ concurrent workers
  -> (a -> m b) -- ^ action to perform
  -> t a -- ^ input values
  -> m (t b)
traverseConcurrentlyWith count f t0 = do
  -- Pair each input with a TMVar for its result, preserving the shape
  -- of the traversable; the work queue is built via a difference list.
  (workQueue, results) <- atomically $ do
    pairsDL <- newTVar id
    results <- for t0 $ \x -> do
      var <- newEmptyTMVar
      modifyTVar pairsDL (. ((x, var):))
      pure $ atomically $ takeTMVar var
    pairs <- readTVar pairsDL
    queue <- newTVar $ pairs []
    pure (queue, results)

  let worker = join $ atomically $ do
        pending <- readTVar workQueue
        case pending of
          [] -> pure (pure ())
          (x, var):rest -> do
            writeTVar workQueue rest
            pure $ do
              y <- f x
              atomically $ putTMVar var y
              worker
  replicateConcurrently_ count worker
  -- Collect results in the original traversal order.
  sequence results

-- | Parse a snapshot value from a 'SnapshotLocation'.
--
-- Returns a 'Left' value if provided an 'SLCompiler'
-- constructor. Otherwise, returns a 'Right' value providing both the
-- 'Snapshot' and a hash of the input configuration file.
--
-- @since 0.1.0.0
loadSnapshot
  :: (HasPantryConfig env, HasLogFunc env)
  => SnapshotLocation
  -> RIO env (Either WantedCompiler (Snapshot, SHA256))
loadSnapshot (SLCompiler compiler) = pure $ Left compiler
loadSnapshot sl@(SLUrl url mblob) =
  handleAny (throwIO . InvalidSnapshot sl) $ do
    bs <- loadFromURL url mblob
    yamlValue <- Yaml.decodeThrow bs
    snap <- warningsParserHelper sl yamlValue Nothing
    pure $ Right (snap, SHA256.hashBytes bs)
loadSnapshot sl@(SLFilePath fp) =
  handleAny (throwIO . InvalidSnapshot sl) $ do
    let absPath = resolvedAbsolute fp
    yamlValue <- Yaml.decodeFileThrow $ toFilePath absPath
    sha <- SHA256.hashFile $ toFilePath absPath
    snap <- warningsParserHelper sl yamlValue $ Just $ parent absPath
    pure $ Right (snap, sha)

-- | Load the contents of a URL, preferring the local blob cache and
-- falling back to a checked download.
loadFromURL
  :: (HasPantryConfig env, HasLogFunc env)
  => Text -- ^ url
  -> Maybe BlobKey
  -> RIO env ByteString
loadFromURL url mbkey = do
  -- With a blob key we can look up by hash; otherwise by URL.
  mcached <- withStorage $ maybe (loadURLBlob url) loadBlob mbkey
  case mcached of
    Just bs -> pure bs
    Nothing -> loadWithCheck url mbkey

-- | Download a URL, validating against the blob key (if any), and
-- store the result in the blob cache.
loadWithCheck
  :: (HasPantryConfig env, HasLogFunc env)
  => Text -- ^ url
  -> Maybe BlobKey
  -> RIO env ByteString
loadWithCheck url mblobkey = do
  let (msha, msize) =
        maybe (Nothing, Nothing) (\(BlobKey sha size) -> (Just sha, Just size)) mblobkey
  (_, _, bss) <- httpSinkChecked url msha msize sinkList
  let bs = B.concat bss
  withStorage $ storeURLBlob url bs
  pure bs

-- | Parse a 'Snapshot' from an already-decoded YAML 'Value', logging
-- any JSON warnings and resolving relative paths against @mdir@.
warningsParserHelper
  :: HasLogFunc env
  => SnapshotLocation
  -> Value
  -> Maybe (Path Abs Dir)
  -> RIO env Snapshot
warningsParserHelper sl val mdir =
  case parseEither Yaml.parseJSON val of
    Left e -> throwIO $ Couldn'tParseSnapshot sl e
    Right (WithJSONWarnings x ws) -> do
      unless (null ws) $ do
        logWarn $ "Warnings when parsing snapshot " <> display sl
        for_ ws $ logWarn . display
      resolvePaths mdir x

-- | Get the 'PackageIdentifier' of the package at the given location.
--
-- @since 0.1.0.0
getPackageLocationIdent
  :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env)
  => PackageLocationImmutable
  -> RIO env PackageIdentifier
-- Cheap paths first: the identifier is already present in the
-- location metadata, so no package contents need to be loaded.
getPackageLocationIdent (PLIHackage (PackageIdentifierRevision name version _) _) = pure $ PackageIdentifier name version
getPackageLocationIdent (PLIRepo _ PackageMetadata { pmName = Just name, pmVersion = Just version }) = pure $ PackageIdentifier name version
getPackageLocationIdent (PLIArchive _ PackageMetadata { pmName = Just name, pmVersion = Just version }) = pure $ PackageIdentifier name version
-- Otherwise fall back to loading the package itself.
getPackageLocationIdent loc = packageIdent <$> loadPackage loc

-- | Get the 'TreeKey' of the package at the given location.
--
-- @since 0.1.0.0
getPackageLocationTreeKey
  :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env)
  => PackageLocationImmutable
  -> RIO env TreeKey
getPackageLocationTreeKey pl =
  -- Use the embedded tree key when present; compute it otherwise.
  maybe computeKey pure (getTreeKey pl)
  where
    computeKey =
      case pl of
        PLIHackage pir _ -> getHackageTarballKey pir
        PLIArchive archive pm -> getArchiveKey pl archive pm
        PLIRepo repo pm -> getRepoKey repo pm

-- | Extract the tree key already stored in a location, if any.
getTreeKey :: PackageLocationImmutable -> Maybe TreeKey
getTreeKey (PLIHackage _ mtree) = mtree
getTreeKey (PLIArchive _ pm) = pmTreeKey pm
getTreeKey (PLIRepo _ pm) = pmTreeKey pm

-- | Convenient data type that allows you to work with pantry more
-- easily than using 'withPantryConfig' directly. Uses basically sane
-- settings, like sharing a pantry directory with Stack.
--
-- You can use 'runPantryApp' to use this.
--
-- @since 0.1.0.0
data PantryApp = PantryApp
  { paSimpleApp :: !SimpleApp
  , paPantryConfig :: !PantryConfig
  }

-- | Lens into the wrapped 'SimpleApp', used to delegate the RIO
-- environment instances below.
simpleAppL :: Lens' PantryApp SimpleApp
simpleAppL = lens paSimpleApp (\x y -> x { paSimpleApp = y })

instance HasLogFunc PantryApp where
  logFuncL = simpleAppL.logFuncL
instance HasPantryConfig PantryApp where
  pantryConfigL = lens paPantryConfig (\x y -> x { paPantryConfig = y })
instance HasProcessContext PantryApp where
  processContextL = simpleAppL.processContextL

-- | Run some code against pantry using basic sane settings.
--
-- For testing, see 'runPantryAppClean'.
--
-- @since 0.1.0.0
runPantryApp :: MonadIO m => RIO PantryApp a -> m a
runPantryApp f = runSimpleApp $ do
  sa <- ask
  -- Share the pantry root with Stack (~/.stack/pantry).
  stack <- getAppUserDataDirectory "stack"
  -- NOTE(review): the qualified </> operator had been stripped from this
  -- line in transit ("stack FilePath. \"pantry\""); restored here.
  root <- parseAbsDir $ stack FilePath.</> "pantry"
  withPantryConfig
    root
    defaultHackageSecurityConfig
    HpackBundled
    8 -- connection count
    $ \pc ->
      runRIO
        PantryApp
          { paSimpleApp = sa
          , paPantryConfig = pc
          }
        f

-- | Like 'runPantryApp', but uses an empty pantry directory instead
-- of sharing with Stack. Useful for testing.
--
-- @since 0.1.0.0
runPantryAppClean :: MonadIO m => RIO PantryApp a -> m a
runPantryAppClean f = liftIO $ withSystemTempDirectory "pantry-clean" $ \dir -> runSimpleApp $ do
  sa <- ask
  root <- resolveDir' dir
  withPantryConfig
    root
    defaultHackageSecurityConfig
    HpackBundled
    8 -- connection count
    $ \pc ->
      runRIO
        PantryApp
          { paSimpleApp = sa
          , paPantryConfig = pc
          }
        f
diff --git a/subs/pantry/src/Pantry/Archive.hs b/subs/pantry/src/Pantry/Archive.hs
new file mode 100644
index 0000000000..c1ef4f6edb
--- /dev/null
+++ b/subs/pantry/src/Pantry/Archive.hs
@@ -0,0 +1,441 @@
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Logic for loading up trees from HTTPS archives.
+module Pantry.Archive + ( getArchive + , getArchiveKey + , fetchArchives + , withArchiveLoc + ) where + +import RIO +import qualified Pantry.SHA256 as SHA256 +import Pantry.Storage +import Pantry.Tree +import Pantry.Types +import Pantry.Internal (normalizeParents, makeTarRelative) +import qualified RIO.Text as T +import qualified RIO.List as List +import qualified RIO.ByteString.Lazy as BL +import qualified RIO.Map as Map +import qualified RIO.Set as Set +import Data.Bits ((.&.), shiftR) +import Path (toFilePath) +import qualified Codec.Archive.Zip as Zip +import qualified Data.Digest.CRC32 as CRC32 +import Distribution.PackageDescription (packageDescription, package) + +import Conduit +import Data.Conduit.Zlib (ungzip) +import qualified Data.Conduit.Tar as Tar +import Pantry.HTTP + +fetchArchives + :: (HasPantryConfig env, HasLogFunc env) + => [(Archive, PackageMetadata)] + -> RIO env () +fetchArchives pairs = do + -- TODO be more efficient, group together shared archives + for_ pairs $ \(a, pm) -> getArchive (PLIArchive a pm) a pm + +getArchiveKey + :: forall env. (HasPantryConfig env, HasLogFunc env) + => PackageLocationImmutable -- ^ for exceptions + -> Archive + -> PackageMetadata + -> RIO env TreeKey +getArchiveKey pli archive pm = packageTreeKey <$> getArchive pli archive pm -- potential optimization + +getArchive + :: forall env. (HasPantryConfig env, HasLogFunc env) + => PackageLocationImmutable -- ^ for exceptions + -> Archive + -> PackageMetadata + -> RIO env Package +getArchive pli archive pm = do + -- Check if the value is in the archive, and use it if possible + mpa <- loadCache archive + pa <- + case mpa of + Just pa -> pure pa + -- Not in the archive. Load the archive. Completely ignore the + -- PackageMetadata for now, we'll check that the Package + -- info matches next. 
+ Nothing -> withArchiveLoc archive $ \fp sha size -> do + pa <- parseArchive pli archive fp + -- Storing in the cache exclusively uses information we have + -- about the archive itself, not metadata from the user. + storeCache archive sha size pa + pure pa + + either throwIO pure $ checkPackageMetadata pli pm pa + +storeCache + :: forall env. (HasPantryConfig env, HasLogFunc env) + => Archive + -> SHA256 + -> FileSize + -> Package + -> RIO env () +storeCache archive sha size pa = + case archiveLocation archive of + ALUrl url -> withStorage $ storeArchiveCache url (archiveSubdir archive) sha size (packageTreeKey pa) + ALFilePath _ -> pure () -- TODO cache local as well + +loadCache + :: forall env. (HasPantryConfig env, HasLogFunc env) + => Archive + -> RIO env (Maybe Package) +loadCache archive = + case loc of + ALFilePath _ -> pure Nothing -- TODO can we do something intelligent here? + ALUrl url -> withStorage (loadArchiveCache url (archiveSubdir archive)) >>= loop + where + loc = archiveLocation archive + msha = archiveHash archive + msize = archiveSize archive + + loadFromCache :: TreeId -> RIO env (Maybe Package) + loadFromCache tid = fmap Just $ withStorage $ loadPackageById tid + + loop [] = pure Nothing + loop ((sha, size, tid):rest) = + case msha of + Nothing -> do + case msize of + Just size' | size /= size' -> loop rest + _ -> do + case loc of + ALUrl url -> do + logWarn $ "Using archive from " <> display url <> " without a specified cryptographic hash" + logWarn $ "Cached hash is " <> display sha <> ", file size " <> display size + logWarn "For security and reproducibility, please add a hash and file size to your configuration" + ALFilePath _ -> pure () + loadFromCache tid + Just sha' + | sha == sha' -> + case msize of + Nothing -> do + case loc of + ALUrl url -> do + logWarn $ "Archive from " <> display url <> " does not specify a size" + logWarn $ "To avoid an overflow attack, please add the file size to your configuration: " <> display size + 
ALFilePath _ -> pure () + loadFromCache tid + Just size' + | size == size' -> loadFromCache tid + | otherwise -> do + + logWarn $ "Archive from " <> display loc <> " has a matching hash but mismatched size" + logWarn "Please verify that your configuration provides the correct size" + loop rest + | otherwise -> loop rest + +-- ensure name, version, etc are correct +checkPackageMetadata + :: PackageLocationImmutable + -> PackageMetadata + -> Package + -> Either PantryException Package +checkPackageMetadata pl pm pa = do + let err = MismatchedPackageMetadata + pl + pm + (Just (packageTreeKey pa)) + (teBlob $ packageCabalEntry pa) + (packageIdent pa) + test (Just x) y = x == y + test Nothing _ = True + + tests = + [ test (pmTreeKey pm) (packageTreeKey pa) + , test (pmName pm) (pkgName $ packageIdent pa) + , test (pmVersion pm) (pkgVersion $ packageIdent pa) + , test (pmCabal pm) (teBlob $ packageCabalEntry pa) + ] + + in if and tests then Right pa else Left err + +-- | Provide a local file with the contents of the archive, regardless +-- of where it comes from. Perform SHA256 and file size validation if +-- downloading. +withArchiveLoc + :: HasLogFunc env + => Archive + -> (FilePath -> SHA256 -> FileSize -> RIO env a) + -> RIO env a +withArchiveLoc (Archive (ALFilePath resolved) msha msize _subdir) f = do + let abs' = resolvedAbsolute resolved + fp = toFilePath abs' + (sha, size) <- withBinaryFile fp ReadMode $ \h -> do + size <- FileSize . 
fromIntegral <$> hFileSize h + for_ msize $ \size' -> when (size /= size') $ throwIO $ LocalInvalidSize abs' Mismatch + { mismatchExpected = size' + , mismatchActual = size + } + + sha <- runConduit (sourceHandle h .| SHA256.sinkHash) + for_ msha $ \sha' -> when (sha /= sha') $ throwIO $ LocalInvalidSHA256 abs' Mismatch + { mismatchExpected = sha' + , mismatchActual = sha + } + + pure (sha, size) + f fp sha size +withArchiveLoc (Archive (ALUrl url) msha msize _subdir) f = + withSystemTempFile "archive" $ \fp hout -> do + logDebug $ "Downloading archive from " <> display url + (sha, size, ()) <- httpSinkChecked url msha msize (sinkHandle hout) + hClose hout + f fp sha size + +data ArchiveType = ATTarGz | ATTar | ATZip + deriving (Enum, Bounded) + +instance Display ArchiveType where + display ATTarGz = "GZIP-ed tar file" + display ATTar = "Uncompressed tar file" + display ATZip = "Zip file" + +data METype + = METNormal + | METExecutable + | METLink !FilePath + deriving Show + +data MetaEntry = MetaEntry + { mePath :: !FilePath + , meType :: !METype + } + deriving Show + +foldArchive + :: (HasPantryConfig env, HasLogFunc env) + => ArchiveLocation -- ^ for error reporting + -> FilePath + -> ArchiveType + -> a + -> (a -> MetaEntry -> ConduitT ByteString Void (RIO env) a) + -> RIO env a +foldArchive loc fp ATTarGz accum f = + withSourceFile fp $ \src -> runConduit $ src .| ungzip .| foldTar loc accum f +foldArchive loc fp ATTar accum f = + withSourceFile fp $ \src -> runConduit $ src .| foldTar loc accum f +foldArchive loc fp ATZip accum0 f = withBinaryFile fp ReadMode $ \h -> do + let go accum entry = do + let me = MetaEntry (Zip.eRelativePath entry) met + met = fromMaybe METNormal $ do + let modes = shiftR (Zip.eExternalFileAttributes entry) 16 + guard $ Zip.eVersionMadeBy entry .&. 0xFF00 == 0x0300 + guard $ modes /= 0 + Just $ + if (modes .&. 
0o100) == 0 + then METNormal + else METExecutable + lbs = Zip.fromEntry entry + let crcExpected = Zip.eCRC32 entry + crcActual = CRC32.crc32 lbs + when (crcExpected /= crcActual) + $ throwIO $ CRC32Mismatch loc (Zip.eRelativePath entry) Mismatch + { mismatchExpected = crcExpected + , mismatchActual = crcActual + } + runConduit $ sourceLazy lbs .| f accum me + isDir entry = + case reverse $ Zip.eRelativePath entry of + '/':_ -> True + _ -> False + -- We're entering lazy I/O land thanks to zip-archive. + lbs <- BL.hGetContents h + foldM go accum0 (filter (not . isDir) $ Zip.zEntries $ Zip.toArchive lbs) + +foldTar + :: (HasPantryConfig env, HasLogFunc env) + => ArchiveLocation -- ^ for exceptions + -> a + -> (a -> MetaEntry -> ConduitT ByteString o (RIO env) a) + -> ConduitT ByteString o (RIO env) a +foldTar loc accum0 f = do + ref <- newIORef accum0 + Tar.untar $ \fi -> toME fi >>= traverse_ (\me -> do + accum <- readIORef ref + accum' <- f accum me + writeIORef ref $! accum') + readIORef ref + where + toME :: MonadIO m => Tar.FileInfo -> m (Maybe MetaEntry) + toME fi = do + let exc = InvalidTarFileType loc (Tar.getFileInfoPath fi) (Tar.fileType fi) + mmet <- + case Tar.fileType fi of + Tar.FTSymbolicLink bs -> + case decodeUtf8' bs of + Left _ -> throwIO exc + Right text -> pure $ Just $ METLink $ T.unpack text + Tar.FTNormal -> pure $ Just $ + if Tar.fileMode fi .&. 0o100 /= 0 + then METExecutable + else METNormal + Tar.FTDirectory -> pure Nothing + _ -> throwIO exc + pure $ + (\met -> MetaEntry + { mePath = Tar.getFileInfoPath fi + , meType = met + }) + <$> mmet + +data SimpleEntry = SimpleEntry + { seSource :: !FilePath + , seType :: !FileType + } + deriving Show + +-- | Attempt to parse the contents of the given archive in the given +-- subdir into a 'Tree'. This will not consult any caches. 
It will +-- ensure that: +-- +-- * The cabal file exists +-- +-- * The cabal file can be parsed +-- +-- * The name inside the cabal file matches the name of the cabal file itself +parseArchive + :: (HasPantryConfig env, HasLogFunc env) + => PackageLocationImmutable + -> Archive + -> FilePath -- ^ file holding the archive + -> RIO env Package +parseArchive pli archive fp = do + let loc = archiveLocation archive + getFiles [] = throwIO $ UnknownArchiveType loc + getFiles (at:ats) = do + eres <- tryAny $ foldArchive loc fp at id $ \m me -> pure $ m . (me:) + case eres of + Left e -> do + logDebug $ "parseArchive of " <> display at <> ": " <> displayShow e + getFiles ats + Right files -> pure (at, Map.fromList $ map (mePath &&& id) $ files []) + (at, files) <- getFiles [minBound..maxBound] + + let toSimple :: MetaEntry -> Either String SimpleEntry + toSimple me = + case meType me of + METNormal -> Right $ SimpleEntry (mePath me) FTNormal + METExecutable -> Right $ SimpleEntry (mePath me) FTExecutable + METLink relDest -> do + case relDest of + '/':_ -> Left $ "Cannot have an absolute relative dest: " ++ relDest + _ -> Right () + dest0 <- + case makeTarRelative (mePath me) relDest of + Left e -> Left $ concat + [ "Error resolving relative path " + , relDest + , " from symlink at " + , mePath me + , ": " + , e + ] + Right x -> Right x + dest <- + case normalizeParents dest0 of + Left e -> Left $ concat + [ "Invalid symbolic link from " + , mePath me + , " to " + , relDest + , ", tried parsing " + , dest0 + , ": " + , e + ] + Right x -> Right x + case Map.lookup dest files of + Nothing -> Left $ "Symbolic link dest not found from " ++ mePath me ++ " to " ++ relDest ++ ", looking for " ++ dest + Just me' -> + case meType me' of + METNormal -> Right $ SimpleEntry dest FTNormal + METExecutable -> Right $ SimpleEntry dest FTExecutable + METLink _ -> Left $ "Symbolic link dest cannot be a symbolic link, from " ++ mePath me ++ " to " ++ relDest + + case traverse toSimple files 
of + Left e -> throwIO $ UnsupportedTarball loc $ T.pack e + Right files1 -> do + let files2 = stripCommonPrefix $ Map.toList files1 + files3 = takeSubdir (archiveSubdir archive) files2 + toSafe (fp', a) = + case mkSafeFilePath fp' of + Nothing -> Left $ "Not a safe file path: " ++ show fp' + Just sfp -> Right (sfp, a) + case traverse toSafe files3 of + Left e -> throwIO $ UnsupportedTarball loc $ T.pack e + Right safeFiles -> do + let toSave = Set.fromList $ map (seSource . snd) safeFiles + blobs <- + foldArchive loc fp at mempty $ \m me -> + if mePath me `Set.member` toSave + then do + bs <- mconcat <$> sinkList + (_, blobKey) <- lift $ withStorage $ storeBlob bs + pure $ Map.insert (mePath me) blobKey m + else pure m + tree <- fmap (TreeMap . Map.fromList) $ for safeFiles $ \(sfp, se) -> + case Map.lookup (seSource se) blobs of + Nothing -> error $ "Impossible: blob not found for: " ++ seSource se + Just blobKey -> pure (sfp, TreeEntry blobKey (seType se)) + + -- parse the cabal file and ensure it has the right name + (cabalPath, cabalEntry@(TreeEntry cabalBlobKey _)) <- findCabalFile pli tree + mbs <- withStorage $ loadBlob cabalBlobKey + bs <- + case mbs of + Nothing -> throwIO $ TreeReferencesMissingBlob pli cabalPath cabalBlobKey + Just bs -> pure bs + (_warnings, gpd) <- rawParseGPD (Left pli) bs + let ident@(PackageIdentifier name _) = package $ packageDescription gpd + when (cabalPath /= cabalFileName name) $ + throwIO $ WrongCabalFileName pli cabalPath name + + -- It's good! 
Store the tree, let's bounce + (_tid, treeKey) <- withStorage $ storeTree ident tree cabalEntry + pure Package + { packageTreeKey = treeKey + , packageTree = tree + , packageCabalEntry = cabalEntry + , packageIdent = ident + } + +findCabalFile + :: MonadThrow m + => PackageLocationImmutable -- ^ for exceptions + -> Tree + -> m (SafeFilePath, TreeEntry) +findCabalFile loc (TreeMap m) = do + let isCabalFile (sfp, _) = + let txt = unSafeFilePath sfp + in not ("/" `T.isInfixOf` txt) && ".cabal" `T.isSuffixOf` txt + case filter isCabalFile $ Map.toList m of + [] -> throwM $ TreeWithoutCabalFile loc + [(key, te)] -> pure (key, te) + xs -> throwM $ TreeWithMultipleCabalFiles loc $ map fst xs + +-- | If all files have a shared prefix, strip it off +stripCommonPrefix :: [(FilePath, a)] -> [(FilePath, a)] +stripCommonPrefix [] = [] +stripCommonPrefix pairs@((firstFP, _):_) = fromMaybe pairs $ do + let firstDir = takeWhile (/= '/') firstFP + guard $ not $ null firstDir + let strip (fp, a) = (, a) <$> List.stripPrefix (firstDir ++ "/") fp + stripCommonPrefix <$> traverse strip pairs + +-- | Take us down to the specified subdirectory +takeSubdir + :: Text -- ^ subdir + -> [(FilePath, a)] -- ^ files after stripping common prefix + -> [(Text, a)] +takeSubdir subdir = mapMaybe $ \(fp, a) -> do + stripped <- T.stripPrefix subdir $ T.pack fp + Just (T.dropWhile (== '/') stripped, a) diff --git a/subs/pantry/src/Pantry/HTTP.hs b/subs/pantry/src/Pantry/HTTP.hs new file mode 100644 index 0000000000..a3ebb6b1fb --- /dev/null +++ b/subs/pantry/src/Pantry/HTTP.hs @@ -0,0 +1,102 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +module Pantry.HTTP + ( module Export + , withResponse + , httpSink + , httpSinkChecked + ) where + +import Conduit +import Network.HTTP.Client as Export (parseRequest) +import Network.HTTP.Client as Export (parseUrlThrow) +import Network.HTTP.Client as Export (BodyReader, HttpExceptionContent (StatusCodeException)) +import qualified 
Network.HTTP.Client as HTTP (withResponse) +import Network.HTTP.Client.Internal as Export (setUri) +import Network.HTTP.Client.TLS (getGlobalManager) +import Network.HTTP.Simple as Export (HttpException (..), + Request, Response, + addRequestHeader, + defaultRequest, + getResponseBody, + getResponseHeaders, + getResponseStatus, + setRequestHeader, + setRequestHeaders) +import qualified Network.HTTP.Simple as HTTP hiding (withResponse) +import Network.HTTP.Types as Export (Header, HeaderName, + Status, hCacheControl, + hRange, ok200, + partialContent206, + statusCode) +import qualified Pantry.SHA256 as SHA256 +import Pantry.Types +import RIO +import qualified RIO.ByteString as B +import qualified RIO.Text as T + +setUserAgent :: Request -> Request +setUserAgent = setRequestHeader "User-Agent" ["Haskell pantry package"] + +withResponse + :: MonadUnliftIO m + => HTTP.Request + -> (Response BodyReader -> m a) + -> m a +withResponse req inner = withRunInIO $ \run -> do + manager <- getGlobalManager + HTTP.withResponse (setUserAgent req) manager (run . 
inner) + +httpSink + :: MonadUnliftIO m + => Request + -> (Response () -> ConduitT ByteString Void m a) + -> m a +httpSink req inner = HTTP.httpSink (setUserAgent req) inner + +httpSinkChecked + :: MonadUnliftIO m + => Text + -> Maybe SHA256 + -> Maybe FileSize + -> ConduitT ByteString Void m a + -> m (SHA256, FileSize, a) +httpSinkChecked url msha msize sink = do + req <- liftIO $ parseUrlThrow $ T.unpack url + httpSink req $ const $ getZipSink $ (,,) + <$> ZipSink (checkSha msha) + <*> ZipSink (checkSize msize) + <*> ZipSink sink + where + checkSha mexpected = do + actual <- SHA256.sinkHash + for_ mexpected $ \expected -> unless (actual == expected) $ + throwIO $ DownloadInvalidSHA256 url Mismatch + { mismatchExpected = expected + , mismatchActual = actual + } + pure actual + checkSize mexpected = + loop 0 + where + loop accum = do + mbs <- await + case mbs of + Nothing -> + case mexpected of + Just (FileSize expected) | expected /= accum -> + throwIO $ DownloadInvalidSize url Mismatch + { mismatchExpected = FileSize expected + , mismatchActual = FileSize accum + } + _ -> pure (FileSize accum) + Just bs -> do + let accum' = accum + fromIntegral (B.length bs) + case mexpected of + Just (FileSize expected) + | accum' > expected -> + throwIO $ DownloadTooLarge url Mismatch + { mismatchExpected = FileSize expected + , mismatchActual = FileSize accum' + } + _ -> loop accum' diff --git a/subs/pantry/src/Pantry/Hackage.hs b/subs/pantry/src/Pantry/Hackage.hs new file mode 100644 index 0000000000..2e5c5796b8 --- /dev/null +++ b/subs/pantry/src/Pantry/Hackage.hs @@ -0,0 +1,505 @@ +{-# LANGUAGE MultiWayIf #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} +module Pantry.Hackage + ( updateHackageIndex + , DidUpdateOccur (..) + , hackageIndexTarballL + , getHackageTarball + , getHackageTarballKey + , getHackageCabalFile + , getHackagePackageVersions + , getHackageTypoCorrections + , UsePreferredVersions (..) 
+ ) where + +import RIO +import Data.Aeson +import Conduit +import Data.Conduit.Tar +import qualified RIO.Text as T +import qualified RIO.Map as Map +import Data.Text.Unsafe (unsafeTail) +import qualified RIO.ByteString as B +import qualified RIO.ByteString.Lazy as BL +import Pantry.Archive +import Pantry.Types hiding (FileType (..)) +import Pantry.Storage +import Pantry.Tree +import qualified Pantry.SHA256 as SHA256 +import Network.URI (parseURI) +import Data.Time (getCurrentTime) +import Path ((), Path, Abs, Rel, Dir, File, toFilePath, parseRelDir, parseRelFile) +import qualified Distribution.Text +import qualified Distribution.PackageDescription as Cabal +import System.IO (SeekMode (..)) +import qualified Data.List.NonEmpty as NE +import Data.Text.Metrics (damerauLevenshtein) +import Distribution.Types.Version (versionNumbers) +import Distribution.Types.VersionRange (withinRange) + +import qualified Hackage.Security.Client as HS +import qualified Hackage.Security.Client.Repository.Cache as HS +import qualified Hackage.Security.Client.Repository.Remote as HS +import qualified Hackage.Security.Client.Repository.HttpLib.HttpClient as HS +import qualified Hackage.Security.Util.Path as HS +import qualified Hackage.Security.Util.Pretty as HS + +hackageRelDir :: Path Rel Dir +hackageRelDir = either impureThrow id $ parseRelDir "hackage" + +hackageDirL :: HasPantryConfig env => SimpleGetter env (Path Abs Dir) +hackageDirL = pantryConfigL.to (( hackageRelDir) . pcRootDir) + +indexRelFile :: Path Rel File +indexRelFile = either impureThrow id $ parseRelFile "00-index.tar" + +-- | Where does pantry download its 01-index.tar file from Hackage? +-- +-- @since 0.1.0.0 +hackageIndexTarballL :: HasPantryConfig env => SimpleGetter env (Path Abs File) +hackageIndexTarballL = hackageDirL.to ( indexRelFile) + +-- | Did an update occur when running 'updateHackageIndex'? 
+-- +-- @since 0.1.0.0 +data DidUpdateOccur = UpdateOccurred | NoUpdateOccurred + +-- | Download the most recent 01-index.tar file from Hackage and +-- update the database tables. +-- +-- This function will only perform an update once per 'PantryConfig' +-- for user sanity. See the return value to find out if it happened. +-- +-- @since 0.1.0.0 +updateHackageIndex + :: (HasPantryConfig env, HasLogFunc env) + => Maybe Utf8Builder -- ^ reason for updating, if any + -> RIO env DidUpdateOccur +updateHackageIndex mreason = gateUpdate $ do + for_ mreason logInfo + pc <- view pantryConfigL + let HackageSecurityConfig keyIds threshold url = pcHackageSecurity pc + root <- view hackageDirL + tarball <- view hackageIndexTarballL + baseURI <- + case parseURI $ T.unpack url of + Nothing -> throwString $ "Invalid Hackage Security base URL: " ++ T.unpack url + Just x -> return x + run <- askRunInIO + let logTUF = run . logInfo . fromString . HS.pretty + withRepo = HS.withRepository + HS.httpLib + [baseURI] + HS.defaultRepoOpts + HS.Cache + { HS.cacheRoot = HS.fromAbsoluteFilePath $ toFilePath root + , HS.cacheLayout = HS.cabalCacheLayout + } + HS.hackageRepoLayout + HS.hackageIndexLayout + logTUF + didUpdate <- liftIO $ withRepo $ \repo -> HS.uncheckClientErrors $ do + needBootstrap <- HS.requiresBootstrap repo + when needBootstrap $ do + HS.bootstrap + repo + (map (HS.KeyId . T.unpack) keyIds) + (HS.KeyThreshold $ fromIntegral threshold) + now <- getCurrentTime + HS.checkForUpdates repo (Just now) + + case didUpdate of + HS.NoUpdates -> logInfo "No package index update available" + HS.HasUpdates -> do + logInfo "Updated package index downloaded" + updateCache tarball + logStickyDone "Package index cache populated" + where + updateCache tarball = withStorage $ do + -- Alright, here's the story. In theory, we only ever append to + -- a tarball. Therefore, we can store the last place we + -- populated our cache from, and fast forward to that point. 
But + -- there are two issues with that: + -- + -- 1. Hackage may rebase, in which case we need to recalculate + -- everything from the beginning. Unfortunately, + -- hackage-security doesn't let us know when that happens. + -- + -- 2. Some paranoia about files on the filesystem getting + -- modified out from under us. + -- + -- Therefore, we store both the last read-to index, _and_ the + -- SHA256 of all of the contents until that point. When updating + -- the cache, we calculate the new SHA256 of the whole file, and + -- the SHA256 of the previous read-to point. If the old hashes + -- match, we can do an efficient fast forward. Otherwise, we + -- clear the old cache and repopulate. + minfo <- loadLatestCacheUpdate + (offset, newHash, newSize) <- lift $ withBinaryFile (toFilePath tarball) ReadMode $ \h -> do + logInfo "Calculating hashes to check for hackage-security rebases or filesystem changes" + + -- The size of the new index tarball, ignoring the required + -- (by the tar spec) 1024 null bytes at the end, which will be + -- mutated in the future by other updates. + newSize :: Word <- (fromIntegral . max 0 . subtract 1024) <$> hFileSize h + let sinkSHA256 len = takeCE (fromIntegral len) .| SHA256.sinkHash + + case minfo of + Nothing -> do + logInfo "No old cache found, populating cache from scratch" + newHash <- runConduit $ sourceHandle h .| sinkSHA256 newSize + pure (0, newHash, newSize) + Just (FileSize oldSize, oldHash) -> do + -- oldSize and oldHash come from the database, and tell + -- us what we cached already. Compare against + -- oldHashCheck, which assuming the tarball has not been + -- rebased will be the same as oldHash. At the same + -- time, calculate newHash, which is the hash of the new + -- content as well. 
+ (oldHashCheck, newHash) <- runConduit $ sourceHandle h .| getZipSink ((,) + <$> ZipSink (sinkSHA256 oldSize) + <*> ZipSink (sinkSHA256 newSize) + ) + offset <- + if oldHash == oldHashCheck + then oldSize <$ logInfo "Updating preexisting cache, should be quick" + else 0 <$ do + logInfo "Package index change detected, that's pretty unusual" + logInfo $ "Old size: " <> display oldSize + logInfo $ "Old hash (orig) : " <> display oldHash + logInfo $ "New hash (check): " <> display oldHashCheck + logInfo "Forcing a recache" + pure (offset, newHash, newSize) + + lift $ logInfo $ "Populating cache from file size " <> display newSize <> ", hash " <> display newHash + when (offset == 0) clearHackageRevisions + populateCache tarball (fromIntegral offset) `onException` + lift (logStickyDone "Failed populating package index cache") + storeCacheUpdate (FileSize newSize) newHash + gateUpdate inner = do + pc <- view pantryConfigL + join $ modifyMVar (pcUpdateRef pc) $ \toUpdate -> pure $ + if toUpdate + then (False, UpdateOccurred <$ inner) + else (False, pure NoUpdateOccurred) + +-- | Populate the SQLite tables with Hackage index information. +populateCache + :: (HasPantryConfig env, HasLogFunc env) + => Path Abs File -- ^ tarball + -> Integer -- ^ where to start processing from + -> ReaderT SqlBackend (RIO env) () +populateCache fp offset = withBinaryFile (toFilePath fp) ReadMode $ \h -> do + lift $ logInfo "Populating package index cache ..." + counter <- newIORef (0 :: Int) + hSeek h AbsoluteSeek offset + runConduit $ sourceHandle h .| untar (perFile counter) + where + + perFile counter fi + | FTNormal <- fileType fi + , Right path <- decodeUtf8' $ filePath fi + , Just (name, version, filename) <- parseNameVersionSuffix path = + if + | filename == "package.json" -> + sinkLazy >>= lift . addJSON name version + | filename == unSafeFilePath (cabalFileName name) -> do + (BL.toStrict <$> sinkLazy) >>= lift . 
addCabal name version + + count <- readIORef counter + let count' = count + 1 + writeIORef counter count' + when (count' `mod` 400 == 0) $ + lift $ lift $ + logSticky $ "Processed " <> display count' <> " cabal files" + | otherwise -> pure () + | FTNormal <- fileType fi + , Right path <- decodeUtf8' $ filePath fi + , (nameT, "/preferred-versions") <- T.break (== '/') path + , Just name <- parsePackageName $ T.unpack nameT = do + lbs <- sinkLazy + case decodeUtf8' $ BL.toStrict lbs of + Left _ -> pure () -- maybe warning + Right p -> lift $ storePreferredVersion name p + | otherwise = pure () + + addJSON name version lbs = + case eitherDecode' lbs of + Left e -> lift $ logError $ + "Error processing Hackage security metadata for " <> + fromString (Distribution.Text.display name) <> "-" <> + fromString (Distribution.Text.display version) <> ": " <> + fromString e + Right (PackageDownload sha size) -> + storeHackageTarballInfo name version sha $ FileSize size + + addCabal name version bs = do + (blobTableId, _blobKey) <- storeBlob bs + + storeHackageRevision name version blobTableId + + breakSlash x + | T.null z = Nothing + | otherwise = Just (y, unsafeTail z) + where + (y, z) = T.break (== '/') x + + parseNameVersionSuffix t1 = do + (name, t2) <- breakSlash t1 + (version, filename) <- breakSlash t2 + + name' <- Distribution.Text.simpleParse $ T.unpack name + version' <- Distribution.Text.simpleParse $ T.unpack version + + Just (name', version', filename) + +-- | Package download info from Hackage +data PackageDownload = PackageDownload !SHA256 !Word +instance FromJSON PackageDownload where + parseJSON = withObject "PackageDownload" $ \o1 -> do + o2 <- o1 .: "signed" + Object o3 <- o2 .: "targets" + Object o4:_ <- return $ toList o3 + len <- o4 .: "length" + hashes <- o4 .: "hashes" + sha256' <- hashes .: "sha256" + sha256 <- + case SHA256.fromHexText sha256' of + Left e -> fail $ "Invalid sha256: " ++ show e + Right x -> return x + return $ PackageDownload sha256 len 

-- | Load the raw bytes of the cabal file identified by the given
-- 'PackageIdentifierRevision', consulting local storage first.
--
-- When the revision is pinned by hash ('CFIHash'), the loaded bytes
-- are re-verified against the expected SHA256 (and size, when given).
--
-- NOTE(review): a hash/size mismatch here calls 'error' rather than
-- throwing a typed exception; it signals corrupted local storage.
getHackageCabalFile
  :: (HasPantryConfig env, HasLogFunc env)
  => PackageIdentifierRevision
  -> RIO env ByteString
getHackageCabalFile pir@(PackageIdentifierRevision _ _ cfi) = do
  bid <- resolveCabalFileInfo pir
  bs <- withStorage $ loadBlobById bid
  case cfi of
    CFIHash sha msize -> do
      let sizeMismatch =
            case msize of
              Nothing -> False
              Just size -> FileSize (fromIntegral (B.length bs)) /= size
          shaMismatch = sha /= SHA256.hashBytes bs
      when (sizeMismatch || shaMismatch)
        $ error $ "getHackageCabalFile: size or SHA mismatch for " ++ show (pir, bs)
    _ -> pure ()
  pure bs

-- | Resolve a 'PackageIdentifierRevision' to the database ID of its
-- cabal file blob.
--
-- On a cache miss the Hackage index is updated (at most once per
-- config, see 'updateHackageIndex') and the lookup retried; if it
-- still fails, an 'UnknownHackagePackage' exception is thrown,
-- enriched with fuzzy name/version suggestions.
resolveCabalFileInfo
  :: (HasPantryConfig env, HasLogFunc env)
  => PackageIdentifierRevision
  -> RIO env BlobId
resolveCabalFileInfo pir@(PackageIdentifierRevision name ver cfi) = do
  mres <- inner
  case mres of
    Just res -> pure res
    Nothing -> do
      updated <- updateHackageIndex $ Just $ "Cabal file info not found for " <> display pir <> ", updating"
      mres' <-
        case updated of
          UpdateOccurred -> inner
          NoUpdateOccurred -> pure Nothing
      case mres' of
        Nothing -> fuzzyLookupCandidates name ver >>= throwIO . UnknownHackagePackage pir
        Just res -> pure res
  where
    -- How to look up the blob depends on how the revision was pinned:
    -- by content hash, by explicit revision number, or "latest".
    inner =
      case cfi of
        CFIHash sha _msize -> withStorage $ loadBlobBySHA sha
        CFIRevision rev -> (fmap fst . Map.lookup rev) <$> withStorage (loadHackagePackageVersion name ver)
        CFILatest -> (fmap (fst . fst) . Map.maxView) <$> withStorage (loadHackagePackageVersion name ver)

-- | Given a package identifier and the package caches, return the
-- list of packages with the same name whose first two version number
-- components match, as found in the caches.
fuzzyLookupCandidates
  :: (HasPantryConfig env, HasLogFunc env)
  => PackageName
  -> Version
  -> RIO env FuzzyResults
fuzzyLookupCandidates name ver0 = do
  m <- getHackagePackageVersions UsePreferredVersions name
  if Map.null m
    -- Name unknown entirely: suggest similarly spelled package names.
    then FRNameNotFound <$> getHackageTypoCorrections name
    else
      case Map.lookup ver0 m of
        Nothing -> do
          -- Name exists but not this version: suggest known versions,
          -- pinning each to its latest revision's blob key. The
          -- 'error' cases are invariant violations: every cached
          -- version must carry at least one revision.
          let withVers vers = pure $ FRVersionNotFound $ flip NE.map vers $ \(ver, revs) ->
                case Map.maxView revs of
                  Nothing -> error "fuzzyLookupCandidates: no revisions"
                  Just (BlobKey sha size, _) -> PackageIdentifierRevision name ver (CFIHash sha (Just size))
          -- Prefer versions sharing the same major version (first two
          -- components); fall back to all known versions.
          case NE.nonEmpty $ filter (sameMajor . fst) $ Map.toList m of
            Just vers -> withVers vers
            Nothing ->
              case NE.nonEmpty $ Map.toList m of
                Nothing -> error "fuzzyLookupCandidates: no versions"
                Just vers -> withVers vers
        Just revisions ->
          -- Name and version exist, so the requested revision itself
          -- was not found: suggest the revisions we do know about.
          let pirs = map
                (\(BlobKey sha size) -> PackageIdentifierRevision name ver0 (CFIHash sha (Just size)))
                (Map.elems revisions)
          in case NE.nonEmpty pirs of
               Nothing -> error "fuzzyLookupCandidates: no revisions"
               Just pirs' -> pure $ FRRevisionNotFound pirs'
  where
    sameMajor v = toMajorVersion v == toMajorVersion ver0

-- | Take the first two components of a version, padding with zeros so
-- the result always has exactly two elements.
toMajorVersion :: Version -> [Int]
toMajorVersion v =
  case versionNumbers v of
    [] -> [0, 0]
    [a] -> [a, 0]
    a:b:_ -> [a, b]

-- | Try to come up with typo corrections for given package identifier
-- using Hackage package names. This can provide more user-friendly
-- information in error messages.
--
-- Returns at most ten names within Damerau-Levenshtein distance 3 of
-- the requested name, streamed from the package-name table.
--
-- @since 0.1.0.0
getHackageTypoCorrections
  :: (HasPantryConfig env, HasLogFunc env)
  => PackageName
  -> RIO env [PackageName]
getHackageTypoCorrections name1 =
  withStorage $ sinkHackagePackageNames
    (\name2 -> name1 `distance` name2 < 4)
    (takeC 10 .| sinkList)
  where
    distance = damerauLevenshtein `on` (T.pack . packageNameString)

-- | Should we pay attention to Hackage's preferred versions?
+-- +-- @since 0.1.0.0 +data UsePreferredVersions = UsePreferredVersions | IgnorePreferredVersions + deriving Show + +-- | Returns the versions of the package available on Hackage. +-- +-- @since 0.1.0.0 +getHackagePackageVersions + :: (HasPantryConfig env, HasLogFunc env) + => UsePreferredVersions + -> PackageName -- ^ package name + -> RIO env (Map Version (Map Revision BlobKey)) +getHackagePackageVersions usePreferred name = withStorage $ do + mpreferred <- + case usePreferred of + UsePreferredVersions -> loadPreferredVersion name + IgnorePreferredVersions -> pure Nothing + let predicate :: Version -> Map Revision BlobKey -> Bool + predicate = fromMaybe (\_ _ -> True) $ do + preferredT1 <- mpreferred + preferredT2 <- T.stripPrefix (T.pack $ packageNameString name) preferredT1 + vr <- Distribution.Text.simpleParse $ T.unpack preferredT2 + Just $ \v _ -> withinRange v vr + Map.filterWithKey predicate <$> loadHackagePackageVersions name + +withCachedTree + :: (HasPantryConfig env, HasLogFunc env) + => PackageName + -> Version + -> BlobId -- ^ cabal file contents + -> RIO env Package + -> RIO env Package +withCachedTree name ver bid inner = do + mres <- withStorage $ loadHackageTree name ver bid + case mres of + Just package -> pure package + Nothing -> do + package <- inner + withStorage $ storeHackageTree name ver bid $ packageTreeKey package + pure package + +getHackageTarballKey + :: (HasPantryConfig env, HasLogFunc env) + => PackageIdentifierRevision + -> RIO env TreeKey +getHackageTarballKey pir@(PackageIdentifierRevision name ver (CFIHash sha _msize)) = do + mres <- withStorage $ loadHackageTreeKey name ver sha + case mres of + Nothing -> packageTreeKey <$> getHackageTarball pir Nothing + Just key -> pure key +getHackageTarballKey pir = packageTreeKey <$> getHackageTarball pir Nothing + +getHackageTarball + :: (HasPantryConfig env, HasLogFunc env) + => PackageIdentifierRevision + -> Maybe TreeKey + -> RIO env Package +getHackageTarball 
pir@(PackageIdentifierRevision name ver _cfi) mtreeKey = do + cabalFile <- resolveCabalFileInfo pir + cabalFileKey <- withStorage $ getBlobKey cabalFile + withCachedTree name ver cabalFile $ do + mpair <- withStorage $ loadHackageTarballInfo name ver + (sha, size) <- + case mpair of + Just pair -> pure pair + Nothing -> do + let exc = NoHackageCryptographicHash $ PackageIdentifier name ver + updated <- updateHackageIndex $ Just $ display exc <> ", updating" + mpair2 <- + case updated of + UpdateOccurred -> withStorage $ loadHackageTarballInfo name ver + NoUpdateOccurred -> pure Nothing + case mpair2 of + Nothing -> throwIO exc + Just pair2 -> pure pair2 + pc <- view pantryConfigL + let urlPrefix = hscDownloadPrefix $ pcHackageSecurity pc + url = mconcat + [ urlPrefix + , "package/" + , T.pack $ Distribution.Text.display name + , "-" + , T.pack $ Distribution.Text.display ver + , ".tar.gz" + ] + package <- getArchive + (PLIHackage pir mtreeKey) + Archive + { archiveLocation = ALUrl url + , archiveHash = Just sha + , archiveSize = Just size + , archiveSubdir = T.empty -- no subdirs on Hackage + } + PackageMetadata + { pmName = Just name + , pmVersion = Just ver + , pmTreeKey = Nothing -- with a revision cabal file will differ giving a different tree + , pmCabal = Nothing -- cabal file in the tarball may be different! 
+ } + + case packageTree package of + TreeMap m -> do + let TreeEntry _ ft = packageCabalEntry package + cabalEntry = TreeEntry cabalFileKey ft + tree' = TreeMap $ Map.insert (cabalFileName name) cabalEntry m + ident = PackageIdentifier name ver + + cabalBS <- withStorage $ do + let BlobKey sha' _ = cabalFileKey + mcabalBS <- loadBlobBySHA sha' + case mcabalBS of + Nothing -> error $ "Invariant violated, cabal file key: " ++ show cabalFileKey + Just bid -> loadBlobById bid + + (_warnings, gpd) <- rawParseGPD (Left (PLIHackage pir mtreeKey)) cabalBS + let gpdIdent = Cabal.package $ Cabal.packageDescription gpd + when (ident /= gpdIdent) $ throwIO $ + MismatchedCabalFileForHackage pir Mismatch + { mismatchExpected = ident + , mismatchActual = gpdIdent + } + + (_tid, treeKey') <- withStorage $ storeTree ident tree' cabalEntry + pure Package + { packageTreeKey = treeKey' + , packageTree = tree' + , packageIdent = ident + , packageCabalEntry = cabalEntry + } diff --git a/subs/pantry/src/Pantry/Internal.hs b/subs/pantry/src/Pantry/Internal.hs new file mode 100644 index 0000000000..be603a94f9 --- /dev/null +++ b/subs/pantry/src/Pantry/Internal.hs @@ -0,0 +1,70 @@ +{-# LANGUAGE OverloadedStrings #-} +-- | Exposed for testing, do not use! +module Pantry.Internal + ( parseTree + , renderTree + , Tree (..) + , TreeEntry (..) 
+ , mkSafeFilePath + , pcHpackExecutable + , normalizeParents + , makeTarRelative + ) where + +import Control.Exception (assert) +import Pantry.Types +import qualified Data.Text as T + +-- | Like @System.FilePath.normalise@, however: +-- +-- * Only works on relative paths, absolute paths fail +-- +-- * May not point to directories +-- +-- * Only works on forward slashes, even on Windows +-- +-- * Normalizes parent dirs @foo/../@ get stripped +-- +-- * Spelled like an American, sorry +normalizeParents + :: FilePath + -> Either String FilePath +normalizeParents "" = Left "empty file path" +normalizeParents ('/':_) = Left "absolute path" +normalizeParents fp = do + let t = T.pack fp + case T.unsnoc t of + Just (_, '/') -> Left "trailing slash" + _ -> Right () + + let c1 = T.split (== '/') t + + case reverse c1 of + ".":_ -> Left "last component is a single dot" + _ -> Right () + + let c2 = filter (\x -> not (T.null x || x == ".")) c1 + + let loop [] = [] + loop (_:"..":rest) = loop rest + loop (x:xs) = x : loop xs + + case loop c2 of + [] -> Left "no non-empty components" + c' -> Right $ T.unpack $ T.intercalate "/" c' + +-- | Following tar file rules (Unix file paths only), make the second +-- file relative to the first file. 
makeTarRelative
  :: FilePath -- ^ base file
  -> FilePath -- ^ relative part
  -> Either String FilePath
makeTarRelative base rel
  | take 1 rel == "/" = Left "absolute path found"
  | otherwise =
      case reverse base of
        [] -> Left "cannot have empty base"
        '/':_ -> Left "base cannot be a directory"
        _:revRest ->
          -- Strip the base's file name; whatever remains (if anything)
          -- is the reversed directory prefix, still starting with '/'.
          let prefix = dropWhile (/= '/') revRest
          in Right $ case prefix of
               '/':revDir -> reverse revDir ++ '/' : rel
               -- A base without a slash lives at the archive root, so
               -- the relative part stands alone. 'prefix' must be
               -- empty here.
               leftover -> assert (null leftover) rel
{- The remainder of this excerpt is patch metadata and context from the
   rename of src/Stack/StaticBytes.hs to
   subs/pantry/src/Pantry/Internal/StaticBytes.hs:

diff --git a/src/Stack/StaticBytes.hs b/subs/pantry/src/Pantry/Internal/StaticBytes.hs
similarity index 97%
rename from src/Stack/StaticBytes.hs
rename to subs/pantry/src/Pantry/Internal/StaticBytes.hs
index 444594b59c..63b36ff4bc 100644
--- a/src/Stack/StaticBytes.hs
+++ b/subs/pantry/src/Pantry/Internal/StaticBytes.hs
@@ -1,12 +1,15 @@
--- This module can (and perhaps should) be separate into its own
--- package, it's generally useful.
 {-# LANGUAGE DeriveDataTypeable #-}
 {-# LANGUAGE DeriveGeneric #-}
 {-# LANGUAGE DeriveAnyClass #-}
 {-# LANGUAGE ScopedTypeVariables #-}
 {-# LANGUAGE GADTs #-}
 {-# LANGUAGE NoImplicitPrelude #-}
-module Stack.StaticBytes
+-- | This is an unstable API, exposed only for testing. Relying on
+-- this may break your code! Caveat emptor.
+--
+-- This module can (and perhaps should) be separate into its own
+-- package, it's generally useful.
+module Pantry.Internal.StaticBytes
   ( Bytes8
   , Bytes16
   , Bytes32
@@ -22,7 +25,8 @@ module Pantry.Internal.StaticBytes
   , fromStatic
   ) where

-import Stack.Prelude hiding (words)
+import RIO hiding (words)
+import Data.Store (Store)
 import qualified Data.ByteString as B
 import qualified Data.ByteString.Internal as B
 import qualified Data.Vector.Primitive as VP
@@ -39,7 +43,7 @@ import Data.ByteArray

 newtype Bytes8 = Bytes8 Word64
   deriving (Eq, Ord, Generic, NFData, Hashable, Data, Store)
-instance Show Bytes8 where -- FIXME good enough?
-}
+instance Show Bytes8 where show (Bytes8 w) = show (fromWordsD 8 [w] :: B.ByteString) data Bytes16 = Bytes16 !Bytes8 !Bytes8 deriving (Show, Eq, Ord, Generic, NFData, Hashable, Data, Store) diff --git a/subs/pantry/src/Pantry/Repo.hs b/subs/pantry/src/Pantry/Repo.hs new file mode 100644 index 0000000000..9c62599781 --- /dev/null +++ b/subs/pantry/src/Pantry/Repo.hs @@ -0,0 +1,110 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} +module Pantry.Repo + ( fetchRepos + , getRepo + , getRepoKey + ) where + +import Pantry.Types +import Pantry.Archive +import Pantry.Storage +import RIO +import Path.IO (resolveFile') +import RIO.FilePath (()) +import RIO.Directory (doesDirectoryExist) +import RIO.Process +import Database.Persist (Entity (..)) +import qualified RIO.Text as T + +fetchRepos + :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => [(Repo, PackageMetadata)] + -> RIO env () +fetchRepos pairs = do + -- TODO be more efficient, group together shared archives + for_ pairs $ uncurry getRepo + +getRepoKey + :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => Repo + -> PackageMetadata + -> RIO env TreeKey +getRepoKey repo pm = packageTreeKey <$> getRepo repo pm -- potential optimization + +getRepo + :: forall env. 
(HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => Repo + -> PackageMetadata + -> RIO env Package +getRepo repo pm = + withCache $ getRepo' repo pm + where + withCache + :: RIO env Package + -> RIO env Package + withCache inner = do + mtid <- withStorage (loadRepoCache repo (repoSubdir repo)) + case mtid of + Just tid -> withStorage $ loadPackageById tid + Nothing -> do + package <- inner + withStorage $ do + ment <- getTreeForKey $ packageTreeKey package + case ment of + Nothing -> error $ "invariant violated, Tree not found: " ++ show (packageTreeKey package) + Just (Entity tid _) -> storeRepoCache repo (repoSubdir repo) tid + pure package + +getRepo' + :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) + => Repo + -> PackageMetadata + -> RIO env Package +getRepo' repo@(Repo url commit repoType' subdir) pm = + withSystemTempDirectory "get-repo" $ + \tmpdir -> withWorkingDir tmpdir $ do + let suffix = "cloned" + dir = tmpdir suffix + tarball = tmpdir "foo.tar" + + let (commandName, cloneArgs, resetArgs, archiveArgs) = + case repoType' of + RepoGit -> + ( "git" + , ["--recursive"] + , ["reset", "--hard", T.unpack commit] + , ["archive", "-o", tarball, "HEAD"] + ) + RepoHg -> + ( "hg" + , [] + , ["update", "-C", T.unpack commit] + , ["archive", tarball, "-X", ".hg_archival.txt"] + ) + + logInfo $ "Cloning " <> display commit <> " from " <> display url + void $ proc + commandName + ("clone" : cloneArgs ++ [T.unpack url, suffix]) + readProcess_ + created <- doesDirectoryExist dir + unless created $ throwIO $ FailedToCloneRepo repo + + withWorkingDir dir $ do + void $ proc commandName resetArgs readProcess_ + void $ proc commandName archiveArgs readProcess_ + abs' <- resolveFile' tarball + getArchive + (PLIRepo repo pm) + Archive + { archiveLocation = ALFilePath $ ResolvedPath + { resolvedRelative = RelFilePath $ T.pack tarball + , resolvedAbsolute = abs' + } + , archiveHash = Nothing + , archiveSize = Nothing + , archiveSubdir = 
subdir + } + pm diff --git a/subs/pantry/src/Pantry/SHA256.hs b/subs/pantry/src/Pantry/SHA256.hs new file mode 100644 index 0000000000..e9dc089a35 --- /dev/null +++ b/subs/pantry/src/Pantry/SHA256.hs @@ -0,0 +1,186 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +-- | Provides a data type ('SHA256') for efficient memory +-- representation of a sha-256 hash value, together with helper +-- functions for converting to and from that value. This module is +-- intended to be imported qualified as @SHA256@. +-- +-- Some nomenclature: +-- +-- * Hashing calculates a new hash value from some input. @from@ takes a value that representats an existing hash. +-- +-- * Raw means a raw binary representation of the hash value, without any hex encoding. +-- +-- * Text always uses lower case hex encoding +-- +-- @since 0.1.0.0 +module Pantry.SHA256 + ( -- * Types + SHA256 + , SHA256Exception (..) + -- * Hashing + , hashFile + , hashBytes + , hashLazyBytes + , sinkHash + -- * Convert from a hash representation + , fromHexText + , fromHexBytes + , fromDigest + , fromRaw + -- * Convert to a hash representation + , toHexText + , toHexBytes + , toRaw + ) where + +import RIO +import Data.Aeson +import Database.Persist.Sql +import Pantry.Internal.StaticBytes +import Data.Store (Store) +import Conduit +import qualified RIO.Text as T + +import qualified Crypto.Hash.Conduit as Hash (hashFile, sinkHash) +import qualified Crypto.Hash as Hash (hash, hashlazy, Digest, SHA256) +import qualified Data.ByteArray +import qualified Data.ByteArray.Encoding as Mem + +-- | A SHA256 hash, stored in a static size for more efficient +-- memory representation. 
+-- +-- @since 0.1.0.0 +newtype SHA256 = SHA256 Bytes32 + deriving (Generic, Eq, NFData, Data, Typeable, Ord, Hashable, Store) + +-- | Exceptions which can occur in this module +-- +-- @since 0.1.0.0 +data SHA256Exception + = InvalidByteCount !ByteString !StaticBytesException + | InvalidHexBytes !ByteString !Text + deriving (Typeable) + +-- | Generate a 'SHA256' value by hashing the contents of a file. +-- +-- @since 0.1.0.0 +hashFile :: MonadIO m => FilePath -> m SHA256 +hashFile fp = fromDigest <$> Hash.hashFile fp + +-- | Generate a 'SHA256' value by hashing a @ByteString@. +-- +-- @since 0.1.0.0 +hashBytes :: ByteString -> SHA256 +hashBytes = fromDigest . Hash.hash + +-- | Generate a 'SHA256' value by hashing a lazy @ByteString@. +-- +-- @since 0.1.0.0 +hashLazyBytes :: LByteString -> SHA256 +hashLazyBytes = fromDigest . Hash.hashlazy + +-- | Generate a 'SHA256' value by hashing the contents of a stream. +-- +-- @since 0.1.0.0 +sinkHash :: Monad m => ConduitT ByteString o m SHA256 +sinkHash = fromDigest <$> Hash.sinkHash + +-- | Convert a base16-encoded 'Text' value containing a hash into a 'SHA256'. +-- +-- @since 0.1.0.0 +fromHexText :: Text -> Either SHA256Exception SHA256 +fromHexText = fromHexBytes . encodeUtf8 + +-- | Convert a base16-encoded 'ByteString' value containing a hash into a 'SHA256'. +-- +-- @since 0.1.0.0 +fromHexBytes :: ByteString -> Either SHA256Exception SHA256 +fromHexBytes hexBS = do + mapLeft (InvalidHexBytes hexBS . T.pack) (Mem.convertFromBase Mem.Base16 hexBS) >>= fromRaw + +-- | Convert a 'Hash.Digest' into a 'SHA256' +-- +-- @since 0.1.0.0 +fromDigest :: Hash.Digest Hash.SHA256 -> SHA256 +fromDigest digest = + case toStaticExact (Data.ByteArray.convert digest :: ByteString) of + Left e -> error $ "Impossible failure in fromDigest: " ++ show (digest, e) + Right x -> SHA256 x + +-- | Convert a raw representation of a hash into a 'SHA256'. 
+-- +-- @since 0.1.0.0 +fromRaw :: ByteString -> Either SHA256Exception SHA256 +fromRaw bs = either (Left . InvalidByteCount bs) (Right . SHA256) (toStaticExact bs) + +-- | Convert a 'SHA256' into a base16-encoded SHA256 hash. +-- +-- @since 0.1.0.0 +toHexText :: SHA256 -> Text +toHexText ss = + case decodeUtf8' $ toHexBytes ss of + Left e -> error $ "Impossible failure in staticSHA256ToText: " ++ show (ss, e) + Right t -> t + +-- | Convert a 'SHA256' into a base16-encoded SHA256 hash. +-- +-- @since 0.1.0.0 +toHexBytes :: SHA256 -> ByteString +toHexBytes (SHA256 x) = Mem.convertToBase Mem.Base16 x + +-- | Convert a 'SHA256' into a raw binary representation. +-- +-- @since 0.1.0.0 +toRaw :: SHA256 -> ByteString +toRaw (SHA256 x) = Data.ByteArray.convert x + +-- Instances + +instance Show SHA256 where + show s = "SHA256 " ++ show (toHexText s) + +instance PersistField SHA256 where + toPersistValue = PersistByteString . toRaw + fromPersistValue (PersistByteString bs) = + case toStaticExact bs of + Left e -> Left $ tshow e + Right ss -> pure $ SHA256 ss + fromPersistValue x = Left $ "Unexpected value: " <> tshow x + +instance PersistFieldSql SHA256 where + sqlType _ = SqlBlob + +instance Display SHA256 where + display = displayBytesUtf8 . toHexBytes + +instance ToJSON SHA256 where + toJSON = toJSON . toHexText +instance FromJSON SHA256 where + parseJSON = withText "SHA256" $ \t -> + case fromHexText t of + Right x -> pure x + Left e -> fail $ concat + [ "Invalid SHA256 " + , show t + , ": " + , show e + ] + +instance Exception SHA256Exception +instance Show SHA256Exception where + show = T.unpack . utf8BuilderToText . 
display +instance Display SHA256Exception where + display (InvalidByteCount bs sbe) = + "Invalid byte count creating a SHA256 from " <> + displayShow bs <> + ": " <> + displayShow sbe + display (InvalidHexBytes bs t) = + "Invalid hex bytes creating a SHA256: " <> + displayShow bs <> + ": " <> + display t diff --git a/subs/pantry/src/Pantry/Storage.hs b/subs/pantry/src/Pantry/Storage.hs new file mode 100644 index 0000000000..d1f8faa004 --- /dev/null +++ b/subs/pantry/src/Pantry/Storage.hs @@ -0,0 +1,729 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} +module Pantry.Storage + ( SqlBackend + , initStorage + , withStorage + , storeBlob + , loadBlob + , loadBlobById + , loadBlobBySHA + , getBlobKey + , loadURLBlob + , storeURLBlob + , clearHackageRevisions + , storeHackageRevision + , loadHackagePackageVersions + , loadHackagePackageVersion + , loadLatestCacheUpdate + , storeCacheUpdate + , storeHackageTarballInfo + , loadHackageTarballInfo + , storeTree + , loadTree + , loadPackageById + , getTreeForKey + , storeHackageTree + , loadHackageTree + , loadHackageTreeKey + , storeArchiveCache + , loadArchiveCache + , storeRepoCache + , loadRepoCache + , storePreferredVersion + , loadPreferredVersion + , sinkHackagePackageNames + + -- avoid warnings + , BlobId + , HackageCabalId + , HackageTarballId + , CacheUpdateId + , FilePathId + , TreeId + , TreeEntryId + , ArchiveCacheId + , RepoCacheId + , PreferredVersionsId + , UrlBlobId + ) where + +import RIO hiding (FilePath) +import qualified RIO.ByteString as B +import qualified Pantry.Types as P +import Database.Persist +import Database.Persist.Sqlite +import Database.Persist.TH +import RIO.Orphans () +import qualified Pantry.SHA256 as SHA256 +import qualified RIO.Map 
as Map +import RIO.Time (UTCTime, getCurrentTime) +import Path (Path, Abs, File, toFilePath, parent) +import Path.IO (ensureDir) +import Data.Pool (destroyAllResources) +import Conduit +import Data.Acquire (with) +import Pantry.Types (PackageNameP (..), VersionP (..), SHA256, FileSize (..), FileType (..), HasPantryConfig, BlobKey, Repo (..), TreeKey, SafeFilePath, Revision (..), Package (..)) + +share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase| +-- Raw blobs +Blob + sha SHA256 + size FileSize + contents ByteString + UniqueBlobSha sha +-- Previously downloaded blobs from given URLs. +-- May change over time, so we keep a time column too. +UrlBlob sql=url_blob + url Text + blob BlobId + time UTCTime + UniqueUrlTime url time + +-- For normalization, and avoiding storing strings in a bunch of +-- tables. +PackageName + name P.PackageNameP + UniquePackageName name +Version + version P.VersionP + UniqueVersion version +FilePath + path P.SafeFilePath + UniqueSfp path + +-- Secure download information for a package on Hackage. This does not +-- contain revision information, since sdist tarballs are (blessedly) +-- unmodified on Hackage. +HackageTarball + name PackageNameId + version VersionId + sha SHA256 + size FileSize + UniqueHackageTarball name version + +-- An individual cabal file from Hackage, representing a specific +-- revision. +HackageCabal + name PackageNameId + version VersionId + revision P.Revision + cabal BlobId + + -- If available: the full tree containing the HackageTarball + -- contents with the cabal file modified. + tree TreeId Maybe + UniqueHackage name version revision + +-- Any preferred-version information from Hackage +PreferredVersions + name PackageNameId + preferred Text + UniquePreferred name + +-- Last time we downloaded a 01-index.tar file from Hackage and +-- updated the three previous tables. +CacheUpdate + -- When did we do the update? 
+ time UTCTime + + -- How big was the file when we updated, ignoring the last two + -- all-null 512-byte blocks. + size FileSize + + -- SHA256 of the first 'size' bytes of the file + sha SHA256 + +-- A tree containing a Haskell package. See associated TreeEntry +-- table. +Tree + key BlobId + cabal BlobId + cabalType FileType + name PackageNameId + version VersionId + UniqueTree key + +-- An individual file within a Tree. +TreeEntry + tree TreeId + path FilePathId + blob BlobId + type FileType + +-- Like UrlBlob, but stores the contents as a Tree. +ArchiveCache + time UTCTime + url Text + subdir Text + sha SHA256 + size FileSize + tree TreeId + +-- Like ArchiveCache, but for a Repo. +RepoCache + time UTCTime + url Text + type P.RepoType + commit Text + subdir Text + tree TreeId +|] + +initStorage + :: HasLogFunc env + => Path Abs File -- ^ storage file + -> (P.Storage -> RIO env a) + -> RIO env a +initStorage fp inner = do + ensureDir $ parent fp + bracket + (createSqlitePoolFromInfo sqinfo 1) + (liftIO . destroyAllResources) $ \pool -> do + + migrates <- runSqlPool (runMigrationSilent migrateAll) pool + forM_ migrates $ \mig -> logDebug $ "Migration output: " <> display mig + inner (P.Storage pool) + where + sqinfo = set extraPragmas ["PRAGMA busy_timeout=2000;"] + $ set fkEnabled True + $ mkSqliteConnectionInfo (fromString $ toFilePath fp) + +withStorage + :: (HasPantryConfig env, HasLogFunc env) + => ReaderT SqlBackend (RIO env) a + -> RIO env a +withStorage action = do + P.Storage pool <- view $ P.pantryConfigL.to P.pcStorage + runSqlPool action pool + +getPackageNameId + :: (HasPantryConfig env, HasLogFunc env) + => P.PackageName + -> ReaderT SqlBackend (RIO env) PackageNameId +getPackageNameId = fmap (either entityKey id) . insertBy . PackageName . PackageNameP + +getVersionId + :: (HasPantryConfig env, HasLogFunc env) + => P.Version + -> ReaderT SqlBackend (RIO env) VersionId +getVersionId = fmap (either entityKey id) . insertBy . Version . 
VersionP + +getFilePathId + :: (HasPantryConfig env, HasLogFunc env) + => SafeFilePath + -> ReaderT SqlBackend (RIO env) FilePathId +getFilePathId = fmap (either entityKey id) . insertBy . FilePath + +storeBlob + :: (HasPantryConfig env, HasLogFunc env) + => ByteString + -> ReaderT SqlBackend (RIO env) (BlobId, BlobKey) +storeBlob bs = do + let sha = SHA256.hashBytes bs + size = FileSize $ fromIntegral $ B.length bs + keys <- selectKeysList [BlobSha ==. sha] [] + key <- + case keys of + [] -> insert Blob + { blobSha = sha + , blobSize = size + , blobContents = bs + } + key:rest -> assert (null rest) (pure key) + pure (key, P.BlobKey sha size) + +loadBlob + :: (HasPantryConfig env, HasLogFunc env) + => BlobKey + -> ReaderT SqlBackend (RIO env) (Maybe ByteString) +loadBlob (P.BlobKey sha size) = do + ment <- getBy $ UniqueBlobSha sha + case ment of + Nothing -> pure Nothing + Just (Entity _ bt) + | blobSize bt == size -> pure $ Just $ blobContents bt + | otherwise -> + Nothing <$ lift (logWarn $ + "Mismatched blob size detected for SHA " <> display sha <> + ". Expected size: " <> display size <> + ". Actual size: " <> display (blobSize bt)) + +loadBlobBySHA + :: (HasPantryConfig env, HasLogFunc env) + => SHA256 + -> ReaderT SqlBackend (RIO env) (Maybe BlobId) +loadBlobBySHA sha = listToMaybe <$> selectKeysList [BlobSha ==. sha] [] + +loadBlobById + :: (HasPantryConfig env, HasLogFunc env) + => BlobId + -> ReaderT SqlBackend (RIO env) ByteString +loadBlobById bid = do + mbt <- get bid + case mbt of + Nothing -> error "loadBlobById: ID doesn't exist in database" + Just bt -> pure $ blobContents bt + +getBlobKey + :: (HasPantryConfig env, HasLogFunc env) + => BlobId + -> ReaderT SqlBackend (RIO env) BlobKey +getBlobKey bid = do + res <- rawSql "SELECT sha, size FROM blob WHERE id=?" 
[toPersistValue bid] + case res of + [] -> error $ "getBlobKey failed due to missing ID: " ++ show bid + [(Single sha, Single size)] -> pure $ P.BlobKey sha size + _ -> error $ "getBlobKey failed due to non-unique ID: " ++ show (bid, res) + +getBlobId + :: (HasPantryConfig env, HasLogFunc env) + => BlobKey + -> ReaderT SqlBackend (RIO env) (Maybe BlobId) +getBlobId (P.BlobKey sha size) = do + res <- rawSql "SELECT id FROM blob WHERE sha=? AND size=?" + [toPersistValue sha, toPersistValue size] + pure $ listToMaybe $ map unSingle res + +loadURLBlob + :: (HasPantryConfig env, HasLogFunc env) + => Text + -> ReaderT SqlBackend (RIO env) (Maybe ByteString) +loadURLBlob url = do + ment <- rawSql + "SELECT blob.contents\n\ + \FROM blob, url_blob\n\ + \WHERE url=?\ + \ AND url_blob.blob=blob.id\n\ + \ ORDER BY url_blob.time DESC" + [toPersistValue url] + case ment of + [] -> pure Nothing + (Single bs) : _ -> pure $ Just bs + +storeURLBlob + :: (HasPantryConfig env, HasLogFunc env) + => Text + -> ByteString + -> ReaderT SqlBackend (RIO env) () +storeURLBlob url blob = do + (blobId, _) <- storeBlob blob + now <- getCurrentTime + insert_ UrlBlob + { urlBlobUrl = url + , urlBlobBlob = blobId + , urlBlobTime = now + } + +clearHackageRevisions + :: (HasPantryConfig env, HasLogFunc env) + => ReaderT SqlBackend (RIO env) () +clearHackageRevisions = deleteWhere ([] :: [Filter HackageCabal]) + +storeHackageRevision + :: (HasPantryConfig env, HasLogFunc env) + => P.PackageName + -> P.Version + -> BlobId + -> ReaderT SqlBackend (RIO env) () +storeHackageRevision name version key = do + nameid <- getPackageNameId name + versionid <- getVersionId version + rev <- count + [ HackageCabalName ==. nameid + , HackageCabalVersion ==. 
versionid + ] + insert_ HackageCabal + { hackageCabalName = nameid + , hackageCabalVersion = versionid + , hackageCabalRevision = Revision (fromIntegral rev) + , hackageCabalCabal = key + , hackageCabalTree = Nothing + } + +loadHackagePackageVersions + :: (HasPantryConfig env, HasLogFunc env) + => P.PackageName + -> ReaderT SqlBackend (RIO env) (Map P.Version (Map Revision BlobKey)) +loadHackagePackageVersions name = do + nameid <- getPackageNameId name + -- would be better with esqueleto + (Map.fromListWith Map.union . map go) <$> rawSql + "SELECT hackage.revision, version.version, blob.sha, blob.size\n\ + \FROM hackage_cabal as hackage, version, blob\n\ + \WHERE hackage.name=?\n\ + \AND hackage.version=version.id\n\ + \AND hackage.cabal=blob.id" + [toPersistValue nameid] + where + go (Single revision, Single (P.VersionP version), Single key, Single size) = + (version, Map.singleton revision (P.BlobKey key size)) + +loadHackagePackageVersion + :: (HasPantryConfig env, HasLogFunc env) + => P.PackageName + -> P.Version + -> ReaderT SqlBackend (RIO env) (Map Revision (BlobId, P.BlobKey)) +loadHackagePackageVersion name version = do + nameid <- getPackageNameId name + versionid <- getVersionId version + -- would be better with esqueleto + (Map.fromList . 
map go) <$> rawSql + "SELECT hackage.revision, blob.sha, blob.size, blob.id\n\ + \FROM hackage_cabal as hackage, version, blob\n\ + \WHERE hackage.name=?\n\ + \AND hackage.version=?\n\ + \AND hackage.cabal=blob.id" + [toPersistValue nameid, toPersistValue versionid] + where + go (Single revision, Single sha, Single size, Single bid) = + (revision, (bid, P.BlobKey sha size)) + +loadLatestCacheUpdate + :: (HasPantryConfig env, HasLogFunc env) + => ReaderT SqlBackend (RIO env) (Maybe (FileSize, SHA256)) +loadLatestCacheUpdate = + fmap go <$> selectFirst [] [Desc CacheUpdateTime] + where + go (Entity _ cu) = (cacheUpdateSize cu, cacheUpdateSha cu) + +storeCacheUpdate + :: (HasPantryConfig env, HasLogFunc env) + => FileSize + -> SHA256 + -> ReaderT SqlBackend (RIO env) () +storeCacheUpdate size sha = do + now <- getCurrentTime + insert_ CacheUpdate + { cacheUpdateTime = now + , cacheUpdateSize = size + , cacheUpdateSha = sha + } + +storeHackageTarballInfo + :: (HasPantryConfig env, HasLogFunc env) + => P.PackageName + -> P.Version + -> SHA256 + -> FileSize + -> ReaderT SqlBackend (RIO env) () +storeHackageTarballInfo name version sha size = do + nameid <- getPackageNameId name + versionid <- getVersionId version + void $ insertBy HackageTarball + { hackageTarballName = nameid + , hackageTarballVersion = versionid + , hackageTarballSha = sha + , hackageTarballSize = size + } + +loadHackageTarballInfo + :: (HasPantryConfig env, HasLogFunc env) + => P.PackageName + -> P.Version + -> ReaderT SqlBackend (RIO env) (Maybe (SHA256, FileSize)) +loadHackageTarballInfo name version = do + nameid <- getPackageNameId name + versionid <- getVersionId version + fmap go <$> getBy (UniqueHackageTarball nameid versionid) + where + go (Entity _ ht) = (hackageTarballSha ht, hackageTarballSize ht) + +storeTree + :: (HasPantryConfig env, HasLogFunc env) + => P.PackageIdentifier + -> P.Tree + -> P.TreeEntry + -- ^ cabal file + -> ReaderT SqlBackend (RIO env) (TreeId, P.TreeKey) +storeTree 
(P.PackageIdentifier name version) tree@(P.TreeMap m) (P.TreeEntry (P.BlobKey cabal _) cabalType) = do + (bid, blobKey) <- storeBlob $ P.renderTree tree + mcabalid <- loadBlobBySHA cabal + cabalid <- + case mcabalid of + Just cabalid -> pure cabalid + Nothing -> error $ "storeTree: cabal BlobKey not found: " ++ show (tree, cabal) + nameid <- getPackageNameId name + versionid <- getVersionId version + etid <- insertBy Tree + { treeKey = bid + , treeCabal = cabalid + , treeCabalType = cabalType + , treeName = nameid + , treeVersion = versionid + } + case etid of + Left (Entity tid _) -> pure (tid, P.TreeKey blobKey) -- already in database, assume it matches + Right tid -> do + for_ (Map.toList m) $ \(sfp, P.TreeEntry blobKey' ft) -> do + sfpid <- getFilePathId sfp + mbid <- getBlobId blobKey' + bid' <- + case mbid of + Nothing -> error $ "Cannot store tree, contains unknown blob: " ++ show blobKey' + Just bid' -> pure bid' + insert_ TreeEntry + { treeEntryTree = tid + , treeEntryPath = sfpid + , treeEntryBlob = bid' + , treeEntryType = ft + } + pure (tid, P.TreeKey blobKey) + +loadTree + :: (HasPantryConfig env, HasLogFunc env) + => P.TreeKey + -> ReaderT SqlBackend (RIO env) (Maybe P.Tree) +loadTree key = do + ment <- getTreeForKey key + case ment of + Nothing -> pure Nothing + Just ent -> Just <$> loadTreeByEnt ent + +getTreeForKey + :: (HasPantryConfig env, HasLogFunc env) + => TreeKey + -> ReaderT SqlBackend (RIO env) (Maybe (Entity Tree)) +getTreeForKey (P.TreeKey key) = do + mbid <- getBlobId key + case mbid of + Nothing -> pure Nothing + Just bid -> getBy $ UniqueTree bid + +loadPackageById + :: (HasPantryConfig env, HasLogFunc env) + => TreeId + -> ReaderT SqlBackend (RIO env) Package +loadPackageById tid = do + mts <- get tid + ts <- + case mts of + Nothing -> error $ "loadPackageById: invalid foreign key " ++ show tid + Just ts -> pure ts + tree <- loadTreeByEnt $ Entity tid ts + key <- getBlobKey $ treeKey ts + + mname <- get $ treeName ts + name <- + case 
mname of + Nothing -> error $ "loadPackageByid: invalid foreign key " ++ show (treeName ts) + Just (PackageName (P.PackageNameP name)) -> pure name + + mversion <- get $ treeVersion ts + version <- + case mversion of + Nothing -> error $ "loadPackageByid: invalid foreign key " ++ show (treeVersion ts) + Just (Version (P.VersionP version)) -> pure version + + cabalKey <- getBlobKey $ treeCabal ts + let ident = P.PackageIdentifier name version + let cabalEntry = P.TreeEntry cabalKey (treeCabalType ts) + pure Package + { packageTreeKey = P.TreeKey key + , packageTree = tree + , packageCabalEntry = cabalEntry + , packageIdent = ident + } + +loadTreeByEnt + :: (HasPantryConfig env, HasLogFunc env) + => Entity Tree + -> ReaderT SqlBackend (RIO env) P.Tree +loadTreeByEnt (Entity tid _t) = do + entries <- rawSql + "SELECT file_path.path, blob.sha, blob.size, tree_entry.type\n\ + \FROM tree_entry, blob, file_path\n\ + \WHERE tree_entry.tree=?\n\ + \AND tree_entry.blob=blob.id\n\ + \AND tree_entry.path=file_path.id" + [toPersistValue tid] + pure $ P.TreeMap $ Map.fromList $ map + (\(Single sfp, Single sha, Single size, Single ft) -> + (sfp, P.TreeEntry (P.BlobKey sha size) ft)) + entries + +storeHackageTree + :: (HasPantryConfig env, HasLogFunc env) + => P.PackageName + -> P.Version + -> BlobId + -> P.TreeKey + -> ReaderT SqlBackend (RIO env) () +storeHackageTree name version cabal treeKey' = do + nameid <- getPackageNameId name + versionid <- getVersionId version + ment <- getTreeForKey treeKey' + for_ ment $ \ent -> updateWhere + [ HackageCabalName ==. nameid + , HackageCabalVersion ==. versionid + , HackageCabalCabal ==. cabal + ] + [HackageCabalTree =. 
Just (entityKey ent)] + +loadHackageTreeKey + :: (HasPantryConfig env, HasLogFunc env) + => P.PackageName + -> P.Version + -> SHA256 + -> ReaderT SqlBackend (RIO env) (Maybe TreeKey) +loadHackageTreeKey name ver sha = do + res <- rawSql + "SELECT treeblob.sha, treeblob.size\n\ + \FROM blob as treeblob, blob as cabalblob, package_name, version, hackage_cabal, tree\n\ + \WHERE package_name.name=?\n\ + \AND version.version=?\n\ + \AND cabalblob.sha=?\n\ + \AND hackage_cabal.name=package_name.id\n\ + \AND hackage_cabal.version=version.id\n\ + \AND hackage_cabal.cabal=cabalblob.id\n\ + \AND hackage_cabal.tree=tree.id\n\ + \AND tree.key=treeblob.id" + [ toPersistValue $ P.PackageNameP name + , toPersistValue $ P.VersionP ver + , toPersistValue sha + ] + case res of + [] -> pure Nothing + (Single treesha, Single size):_ -> + pure $ Just $ P.TreeKey $ P.BlobKey treesha size + +loadHackageTree + :: (HasPantryConfig env, HasLogFunc env) + => P.PackageName + -> P.Version + -> BlobId + -> ReaderT SqlBackend (RIO env) (Maybe Package) +loadHackageTree name ver bid = do + nameid <- getPackageNameId name + versionid <- getVersionId ver + ment <- selectFirst + [ HackageCabalName ==. nameid + , HackageCabalVersion ==. versionid + , HackageCabalCabal ==. bid + , HackageCabalTree !=. 
Nothing + ] + [] + case ment of + Nothing -> pure Nothing + Just (Entity _ hc) -> + case hackageCabalTree hc of + Nothing -> assert False $ pure Nothing + Just tid -> Just <$> loadPackageById tid + +storeArchiveCache + :: (HasPantryConfig env, HasLogFunc env) + => Text -- ^ URL + -> Text -- ^ subdir + -> SHA256 + -> FileSize + -> P.TreeKey + -> ReaderT SqlBackend (RIO env) () +storeArchiveCache url subdir sha size treeKey' = do + now <- getCurrentTime + ment <- getTreeForKey treeKey' + for_ ment $ \ent -> insert_ ArchiveCache + { archiveCacheTime = now + , archiveCacheUrl = url + , archiveCacheSubdir = subdir + , archiveCacheSha = sha + , archiveCacheSize = size + , archiveCacheTree = entityKey ent + } + +loadArchiveCache + :: (HasPantryConfig env, HasLogFunc env) + => Text -- ^ URL + -> Text -- ^ subdir + -> ReaderT SqlBackend (RIO env) [(SHA256, FileSize, TreeId)] +loadArchiveCache url subdir = map go <$> selectList + [ ArchiveCacheUrl ==. url + , ArchiveCacheSubdir ==. subdir + ] + [Desc ArchiveCacheTime] + where + go (Entity _ ac) = (archiveCacheSha ac, archiveCacheSize ac, archiveCacheTree ac) + +storeRepoCache + :: (HasPantryConfig env, HasLogFunc env) + => Repo + -> Text -- ^ subdir + -> TreeId + -> ReaderT SqlBackend (RIO env) () +storeRepoCache repo subdir tid = do + now <- getCurrentTime + insert_ RepoCache + { repoCacheTime = now + , repoCacheUrl = repoUrl repo + , repoCacheType = repoType repo + , repoCacheCommit = repoCommit repo + , repoCacheSubdir = subdir + , repoCacheTree = tid + } + +loadRepoCache + :: (HasPantryConfig env, HasLogFunc env) + => Repo + -> Text -- ^ subdir + -> ReaderT SqlBackend (RIO env) (Maybe TreeId) +loadRepoCache repo subdir = fmap (repoCacheTree . entityVal) <$> selectFirst + [ RepoCacheUrl ==. repoUrl repo + , RepoCacheType ==. repoType repo + , RepoCacheCommit ==. repoCommit repo + , RepoCacheSubdir ==. 
subdir + ] + [Desc RepoCacheTime] + +storePreferredVersion + :: (HasPantryConfig env, HasLogFunc env) + => P.PackageName + -> Text + -> ReaderT SqlBackend (RIO env) () +storePreferredVersion name p = do + nameid <- getPackageNameId name + ment <- getBy $ UniquePreferred nameid + case ment of + Nothing -> insert_ PreferredVersions + { preferredVersionsName = nameid + , preferredVersionsPreferred = p + } + Just (Entity pid _) -> update pid [PreferredVersionsPreferred =. p] + +loadPreferredVersion + :: (HasPantryConfig env, HasLogFunc env) + => P.PackageName + -> ReaderT SqlBackend (RIO env) (Maybe Text) +loadPreferredVersion name = do + nameid <- getPackageNameId name + fmap (preferredVersionsPreferred . entityVal) <$> getBy (UniquePreferred nameid) + +sinkHackagePackageNames + :: (HasPantryConfig env, HasLogFunc env) + => (P.PackageName -> Bool) + -> ConduitT P.PackageName Void (ReaderT SqlBackend (RIO env)) a + -> ReaderT SqlBackend (RIO env) a +sinkHackagePackageNames predicate sink = do + acqSrc <- selectSourceRes [] [] + with acqSrc $ \src -> runConduit + $ src + .| concatMapMC go + .| sink + where + go (Entity nameid (PackageName (PackageNameP name))) + | predicate name = do + -- Make sure it's actually on Hackage. Would be much more + -- efficient with some raw SQL and an inner join, but we + -- don't have a Conduit version of rawSql. + onHackage <- checkOnHackage nameid + pure $ if onHackage then Just name else Nothing + | otherwise = pure Nothing + + checkOnHackage nameid = do + cnt <- count [HackageCabalName ==. 
nameid] + pure $ cnt > 0 diff --git a/subs/pantry/src/Pantry/Tree.hs b/subs/pantry/src/Pantry/Tree.hs new file mode 100644 index 0000000000..b2a9620ea3 --- /dev/null +++ b/subs/pantry/src/Pantry/Tree.hs @@ -0,0 +1,58 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ViewPatterns #-} +module Pantry.Tree + ( unpackTree + , rawParseGPD + ) where + +import RIO +import qualified RIO.Map as Map +import qualified RIO.Text as T +import qualified RIO.ByteString as B +import Pantry.Storage +import Pantry.Types +import RIO.FilePath ((), takeDirectory) +import RIO.Directory (createDirectoryIfMissing, setPermissions, getPermissions, setOwnerExecutable) +import Path (Path, Abs, Dir, toFilePath) +import Distribution.Parsec.Common (PWarning (..)) +import Distribution.PackageDescription (GenericPackageDescription) +import Distribution.PackageDescription.Parsec +import Path (File) + +unpackTree + :: (HasPantryConfig env, HasLogFunc env) + => PackageLocationImmutable -- for exceptions + -> Path Abs Dir -- ^ dest dir, will be created if necessary + -> Tree + -> RIO env () +unpackTree loc (toFilePath -> dir) (TreeMap m) = do + withStorage $ for_ (Map.toList m) $ \(sfp, TreeEntry blobKey ft) -> do + let dest = dir T.unpack (unSafeFilePath sfp) + createDirectoryIfMissing True $ takeDirectory dest + mbs <- loadBlob blobKey + case mbs of + Nothing -> do + -- TODO when we have pantry wire stuff, try downloading + throwIO $ TreeReferencesMissingBlob loc sfp blobKey + Just bs -> do + B.writeFile dest bs + case ft of + FTNormal -> pure () + FTExecutable -> liftIO $ do + perms <- getPermissions dest + setPermissions dest $ setOwnerExecutable True perms + +-- | A helper function that performs the basic character encoding +-- necessary. 
+rawParseGPD + :: MonadThrow m + => Either PackageLocationImmutable (Path Abs File) + -> ByteString + -> m ([PWarning], GenericPackageDescription) +rawParseGPD loc bs = + case eres of + Left (mversion, errs) -> throwM $ InvalidCabalFile loc mversion errs warnings + Right gpkg -> return (warnings, gpkg) + where + (warnings, eres) = runParseResult $ parseGenericPackageDescription bs diff --git a/subs/pantry/src/Pantry/Types.hs b/subs/pantry/src/Pantry/Types.hs new file mode 100644 index 0000000000..72fbeeb1ed --- /dev/null +++ b/subs/pantry/src/Pantry/Types.hs @@ -0,0 +1,1731 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TupleSections #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE MultiWayIf #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} -- TODO REMOVE! +module Pantry.Types + ( PantryConfig (..) + , HackageSecurityConfig (..) + , Storage (..) + , HasPantryConfig (..) + , BlobKey (..) + , PackageName + , Version + , PackageIdentifier (..) + , Revision (..) + , CabalFileInfo (..) + , PackageNameP (..) + , VersionP (..) + , PackageIdentifierRevision (..) + , FileType (..) + , FileSize (..) + , TreeEntry (..) + , SafeFilePath + , unSafeFilePath + , mkSafeFilePath + , TreeKey (..) + , Tree (..) + , renderTree + , parseTree + , SHA256 + , Unresolved + , resolvePaths + , Package (..) + -- , PackageTarball (..) + , PackageLocation (..) + , PackageLocationImmutable (..) + , Archive (..) + , Repo (..) + , RepoType (..) 
+ , parsePackageIdentifier + , parsePackageName + , parsePackageNameThrowing + , parseFlagName + , parseVersion + , parseVersionThrowing + , packageIdentifierString + , packageNameString + , flagNameString + , versionString + , moduleNameString + , OptionalSubdirs (..) + , ArchiveLocation (..) + , RelFilePath (..) + , CabalString (..) + , toCabalStringMap + , unCabalStringMap + , parsePackageIdentifierRevision + , Mismatch (..) + , PantryException (..) + , FuzzyResults (..) + , ResolvedPath (..) + , HpackExecutable (..) + , WantedCompiler (..) + --, resolveSnapshotLocation + , ltsSnapshotLocation + , nightlySnapshotLocation + , SnapshotLocation (..) + , parseSnapshotLocation + , Snapshot (..) + , parseWantedCompiler + , PackageMetadata (..) + , cabalFileName + ) where + +import RIO +import qualified Data.Conduit.Tar as Tar +import qualified RIO.Text as T +import qualified RIO.ByteString as B +import qualified RIO.ByteString.Lazy as BL +import RIO.Char (isSpace) +import RIO.List (intersperse) +import RIO.Time (toGregorian, Day) +import qualified RIO.Map as Map +import qualified RIO.HashMap as HM +import qualified Data.Map.Strict as Map (mapKeysMonotonic) +import qualified RIO.Set as Set +import Data.Aeson (ToJSON (..), FromJSON (..), withText, FromJSONKey (..)) +import Data.Aeson.Types (ToJSONKey (..) 
,toJSONKeyText, Parser) +import Data.Aeson.Extended +import Data.ByteString.Builder (toLazyByteString, byteString, wordDec) +import Database.Persist +import Database.Persist.Sql +import Pantry.SHA256 (SHA256) +import qualified Pantry.SHA256 as SHA256 +import qualified Distribution.Compat.ReadP as Parse +import Distribution.CabalSpecVersion (CabalSpecVersion (..), cabalSpecLatest) +import Distribution.Parsec.Common (PError (..), PWarning (..), showPos) +import Distribution.Types.PackageName (PackageName, unPackageName) +import Distribution.Types.VersionRange (VersionRange) +import Distribution.PackageDescription (FlagName, unFlagName, GenericPackageDescription) +import Distribution.Types.PackageId (PackageIdentifier (..)) +import qualified Distribution.Text +import Distribution.ModuleName (ModuleName) +import qualified Distribution.ModuleName as ModuleName +import Distribution.Types.Version (Version, mkVersion) +import Data.Store (Size (..), Store (..)) +import Network.HTTP.Client (parseRequest) +import Network.HTTP.Types (Status, statusCode) +import Data.Text.Read (decimal) +import Path (Abs, Dir, File, toFilePath, filename) +import Path.Internal (Path (..)) -- TODO don't import this +import Path.IO (resolveFile, resolveDir) +import Data.Pool (Pool) +import Data.List.NonEmpty (NonEmpty) +import qualified Data.List.NonEmpty as NE + +-- | Parsed tree with more information on the Haskell package it contains. +-- +-- @since 0.1.0.0 +data Package = Package + { packageTreeKey :: !TreeKey + -- ^ The 'TreeKey' containing this package. + -- + -- This is a hash of the binary representation of 'packageTree'. + -- + -- @since 0.1.0.0 + , packageTree :: !Tree + -- ^ The 'Tree' containing this package. + -- + -- @since 0.1.0.0 + , packageCabalEntry :: !TreeEntry + -- ^ Information on the cabal file inside this package. + -- + -- @since 0.1.0.0 + , packageIdent :: !PackageIdentifier + -- ^ The package name and version in this package. 
+ -- + -- @since 0.1.0.0 + } + deriving (Show, Eq) + +cabalFileName :: PackageName -> SafeFilePath +cabalFileName name = + case mkSafeFilePath $ T.pack (packageNameString name) <> ".cabal" of + Nothing -> error $ "cabalFileName: failed for " ++ show name + Just sfp -> sfp + +-- | The revision number of a package from Hackage, counting upwards +-- from 0 (the original cabal file). +-- +-- See caveats on 'CFIRevision'. +-- +-- @since 0.1.0.0 +newtype Revision = Revision Word + deriving (Generic, Show, Eq, NFData, Data, Typeable, Ord, Hashable, Store, Display, PersistField, PersistFieldSql) + +newtype Storage = Storage (Pool SqlBackend) + +-- | Configuration value used by the entire pantry package. Create one +-- using @withPantryConfig@. See also @PantryApp@ for a convenience +-- approach to using pantry. +-- +-- @since 0.1.0.0 +data PantryConfig = PantryConfig + { pcHackageSecurity :: !HackageSecurityConfig + , pcHpackExecutable :: !HpackExecutable + , pcRootDir :: !(Path Abs Dir) + , pcStorage :: !Storage + , pcUpdateRef :: !(MVar Bool) + -- ^ Want to try updating the index once during a single run for missing + -- package identifiers. We also want to ensure we only update once at a + -- time. Start at @True@. + , pcParsedCabalFilesImmutable :: !(IORef (Map PackageLocationImmutable GenericPackageDescription)) + -- ^ Cache of previously parsed cabal files, to save on slow parsing time. + , pcParsedCabalFilesMutable :: + !(IORef + (Map + (Path Abs Dir) + (GenericPackageDescription, Path Abs File, [PWarning]) + ) + ) + -- ^ Same. We also keep a list of warnings which haven't been + -- printed yet, so that if a file is first loaded with warnings + -- turned off, and then again with warnings turned on, we print the + -- warnings. + , pcConnectionCount :: !Int + -- ^ concurrently open downloads + } + +-- | Wraps a value which potentially contains relative paths. Needs to +-- be provided with a base directory to resolve these paths. 
+-- +-- Unwrap this using 'resolvePaths'. +-- +-- @since 0.1.0.0 +newtype Unresolved a = Unresolved (Maybe (Path Abs Dir) -> IO a) + deriving Functor +instance Applicative Unresolved where + pure = Unresolved . const . pure + Unresolved f <*> Unresolved x = Unresolved $ \mdir -> f mdir <*> x mdir + +-- | Resolve all of the file paths in an 'Unresolved' relative to the +-- given directory. +-- +-- @since 0.1.0.0 +resolvePaths + :: MonadIO m + => Maybe (Path Abs Dir) -- ^ directory to use for relative paths + -> Unresolved a + -> m a +resolvePaths mdir (Unresolved f) = liftIO (f mdir) + +-- | A combination of the relative path provided in a config file, +-- together with the resolved absolute path. +-- +-- @since 0.1.0.0 +data ResolvedPath t = ResolvedPath + { resolvedRelative :: !RelFilePath + -- ^ Original value parsed from a config file. + , resolvedAbsolute :: !(Path Abs t) + -- ^ Absolute path resolved against base directory loaded from. + } + deriving (Show, Eq, Data, Generic, Ord) +instance NFData (ResolvedPath t) +instance (Generic t, Store t) => Store (ResolvedPath t) + +-- | Location to load a package from. Can either be immutable (see +-- 'PackageLocationImmutable') or a local directory which is expected +-- to change over time. +-- +-- @since 0.1.0.0 +data PackageLocation + = PLImmutable !PackageLocationImmutable + | PLMutable !(ResolvedPath Dir) + deriving (Show, Eq, Data, Generic) +instance NFData PackageLocation +instance Store PackageLocation + +instance Display PackageLocation where + display (PLImmutable loc) = display loc + display (PLMutable fp) = fromString $ toFilePath $ resolvedAbsolute fp + +-- | Location for remote packages or archives assumed to be immutable. 
+-- +-- @since 0.1.0.0 +data PackageLocationImmutable + = PLIHackage !PackageIdentifierRevision !(Maybe TreeKey) + | PLIArchive !Archive !PackageMetadata + | PLIRepo !Repo !PackageMetadata + deriving (Generic, Show, Eq, Ord, Data, Typeable) +instance NFData PackageLocationImmutable +instance Store PackageLocationImmutable + +instance Display PackageLocationImmutable where + display (PLIHackage pir _tree) = display pir <> " (from Hackage)" + display (PLIArchive archive _pm) = + "Archive from " <> display (archiveLocation archive) <> + (if T.null $ archiveSubdir archive + then mempty + else " in subdir " <> display (archiveSubdir archive)) + display (PLIRepo repo _pm) = + "Repo from " <> display (repoUrl repo) <> + ", commit " <> display (repoCommit repo) <> + (if T.null $ repoSubdir repo + then mempty + else " in subdir " <> display (repoSubdir repo)) + +-- | A package archive, could be from a URL or a local file +-- path. Local file path archives are assumed to be unchanging +-- over time, and so are allowed in custom snapshots. +-- +-- @since 0.1.0.0 +data Archive = Archive + { archiveLocation :: !ArchiveLocation + -- ^ Location of the archive + -- + -- @since 0.1.0.0 + , archiveHash :: !(Maybe SHA256) + -- ^ Cryptographic hash of the archive file + -- + -- @since 0.1.0.0 + , archiveSize :: !(Maybe FileSize) + -- ^ Size of the archive file + -- + -- @since 0.1.0.0 + , archiveSubdir :: !Text + -- ^ Subdirectory within the archive to get the package from. + -- + -- @since 0.1.0.0 + } + deriving (Generic, Show, Eq, Ord, Data, Typeable) +instance Store Archive +instance NFData Archive + +-- | The type of a source control repository. 
+-- +-- @since 0.1.0.0 +data RepoType = RepoGit | RepoHg + deriving (Generic, Show, Eq, Ord, Data, Typeable) +instance Store RepoType +instance NFData RepoType +instance PersistField RepoType where + toPersistValue RepoGit = toPersistValue (1 :: Int32) + toPersistValue RepoHg = toPersistValue (2 :: Int32) + fromPersistValue v = do + i <- fromPersistValue v + case i :: Int32 of + 1 -> pure RepoGit + 2 -> pure RepoHg + _ -> fail $ "Invalid RepoType: " ++ show i +instance PersistFieldSql RepoType where + sqlType _ = SqlInt32 + +-- | Information on packages stored in a source control repository. +-- +-- @since 0.1.0.0 +data Repo = Repo + { repoUrl :: !Text + -- ^ Location of the repo + -- + -- @since 0.1.0.0 + , repoCommit :: !Text + -- ^ Commit to use from the repo. It's strongly recommended to use + -- a hash instead of a tag or branch name. + -- + -- @since 0.1.0.0 + , repoType :: !RepoType + -- ^ The type of the repo + -- + -- @since 0.1.0.0 + , repoSubdir :: !Text + -- ^ Subdirectory within the archive to get the package from. + -- + -- @since 0.1.0.0 + } + deriving (Generic, Eq, Ord, Data, Typeable) +instance Store Repo +instance NFData Repo +instance Show Repo where + show = T.unpack . utf8BuilderToText . display +instance Display Repo where + display (Repo url commit typ subdir) = + (case typ of + RepoGit -> "Git" + RepoHg -> "Mercurial") <> + " repo at " <> + display url <> + ", commit " <> + display commit <> + (if T.null subdir + then mempty + else " in subdirectory " <> display subdir) + +-- An unexported newtype wrapper to hang a 'FromJSON' instance off of. Contains +-- a GitHub user and repo name separated by a forward slash, e.g. "foo/bar". 
+newtype GitHubRepo = GitHubRepo Text + +instance FromJSON GitHubRepo where + parseJSON = withText "GitHubRepo" $ \s -> do + case T.split (== '/') s of + [x, y] | not (T.null x || T.null y) -> return (GitHubRepo s) + _ -> fail "expecting \"user/repo\"" + +-- | Configuration for Hackage Security to securely download package +-- metadata and contents from Hackage. For most purposes, you'll want +-- to use the default Hackage settings via +-- @defaultHackageSecurityConfig@. +-- +-- /NOTE/ It's highly recommended to only use the official Hackage +-- server or a mirror. See +-- . +-- +-- @since 0.1.0.0 +data HackageSecurityConfig = HackageSecurityConfig + { hscKeyIds :: ![Text] + , hscKeyThreshold :: !Int + , hscDownloadPrefix :: !Text + } + deriving Show +instance FromJSON (WithJSONWarnings HackageSecurityConfig) where + parseJSON = withObjectWarnings "HackageSecurityConfig" $ \o' -> do + hscDownloadPrefix <- o' ..: "download-prefix" + Object o <- o' ..: "hackage-security" + hscKeyIds <- o ..: "keyids" + hscKeyThreshold <- o ..: "key-threshold" + pure HackageSecurityConfig {..} + +-- | An environment which contains a 'PantryConfig'. +-- +-- @since 0.1.0.0 +class HasPantryConfig env where + -- | Lens to get or set the 'PantryConfig' + -- + -- @since 0.1.0.0 + pantryConfigL :: Lens' env PantryConfig + +-- | File size in bytes +-- +-- @since 0.1.0.0 +newtype FileSize = FileSize Word + deriving (Show, Eq, Ord, Data, Typeable, Generic, Display, Hashable, NFData, Store, PersistField, PersistFieldSql, ToJSON, FromJSON) + +-- | A key for looking up a blob, which combines the SHA256 hash of +-- the contents and the file size. +-- +-- The file size may seem redundant with the hash. However, it is +-- necessary for safely downloading blobs from an untrusted +-- source. See +-- . 
+-- +-- @since 0.1.0.0 +data BlobKey = BlobKey !SHA256 !FileSize + deriving (Eq, Ord, Data, Typeable, Generic) +instance Store BlobKey +instance NFData BlobKey + +instance Show BlobKey where + show = T.unpack . utf8BuilderToText . display +instance Display BlobKey where + display (BlobKey sha size') = display sha <> "," <> display size' + +blobKeyPairs :: BlobKey -> [(Text, Value)] +blobKeyPairs (BlobKey sha size') = + [ "sha256" .= sha + , "size" .= size' + ] + +instance ToJSON BlobKey where + toJSON = object . blobKeyPairs +instance FromJSON BlobKey where + parseJSON = withObject "BlobKey" $ \o -> BlobKey + <$> o .: "sha256" + <*> o .: "size" + +newtype PackageNameP = PackageNameP { unPackageNameP :: PackageName } +instance PersistField PackageNameP where + toPersistValue (PackageNameP pn) = PersistText $ T.pack $ packageNameString pn + fromPersistValue v = do + str <- fromPersistValue v + case parsePackageName str of + Nothing -> Left $ "Invalid package name: " <> T.pack str + Just pn -> Right $ PackageNameP pn +instance PersistFieldSql PackageNameP where + sqlType _ = SqlString + +newtype VersionP = VersionP Version +instance PersistField VersionP where + toPersistValue (VersionP v) = PersistText $ T.pack $ versionString v + fromPersistValue v = do + str <- fromPersistValue v + case parseVersion str of + Nothing -> Left $ "Invalid version number: " <> T.pack str + Just ver -> Right $ VersionP ver +instance PersistFieldSql VersionP where + sqlType _ = SqlString + +-- | How to choose a cabal file for a package from Hackage. This is to +-- work with Hackage cabal file revisions, which makes +-- @PackageIdentifier@ insufficient for specifying a package from +-- Hackage. +-- +-- @since 0.1.0.0 +data CabalFileInfo + = CFILatest + -- ^ Take the latest revision of the cabal file available. This + -- isn't reproducible at all, but the running assumption (not + -- necessarily true) is that cabal file revisions do not change + -- semantics of the build. 
+ -- + -- @since 0.1.0.0 + | CFIHash !SHA256 !(Maybe FileSize) + -- ^ Identify by contents of the cabal file itself. Only reason for + -- @Maybe@ on @FileSize@ is for compatibility with input that + -- doesn't include the file size. + -- + -- @since 0.1.0.0 + | CFIRevision !Revision + -- ^ Identify by revision number, with 0 being the original and + -- counting upward. This relies on Hackage providing consistent + -- versioning. @CFIHash@ should be preferred wherever possible for + -- reproducibility. + -- + -- @since 0.1.0.0 + deriving (Generic, Show, Eq, Ord, Data, Typeable) +instance Store CabalFileInfo +instance NFData CabalFileInfo +instance Hashable CabalFileInfo + +instance Display CabalFileInfo where + display CFILatest = mempty + display (CFIHash hash' msize) = + "@sha256:" <> display hash' <> maybe mempty (\i -> "," <> display i) msize + display (CFIRevision rev) = "@rev:" <> display rev + +-- | A full specification for a package from Hackage, including the +-- package name, version, and how to load up the correct cabal file +-- revision. +-- +-- @since 0.1.0.0 +data PackageIdentifierRevision = PackageIdentifierRevision !PackageName !Version !CabalFileInfo + deriving (Generic, Eq, Ord, Data, Typeable) +instance NFData PackageIdentifierRevision + +instance Show PackageIdentifierRevision where + show = T.unpack . utf8BuilderToText . display + +instance Display PackageIdentifierRevision where + display (PackageIdentifierRevision name version cfi) = + fromString (packageNameString name) <> "-" <> fromString (versionString version) <> display cfi + +instance ToJSON PackageIdentifierRevision where + toJSON = toJSON . utf8BuilderToText . 
display +instance FromJSON PackageIdentifierRevision where + parseJSON = withText "PackageIdentifierRevision" $ \t -> + case parsePackageIdentifierRevision t of + Left e -> fail $ show e + Right pir -> pure pir + +-- | Parse a 'PackageIdentifierRevision' +-- +-- @since 0.1.0.0 +parsePackageIdentifierRevision :: Text -> Either PantryException PackageIdentifierRevision +parsePackageIdentifierRevision t = maybe (Left $ PackageIdentifierRevisionParseFail t) Right $ do + let (identT, cfiT) = T.break (== '@') t + PackageIdentifier name version <- parsePackageIdentifier $ T.unpack identT + cfi <- + case splitColon cfiT of + Just ("@sha256", shaSizeT) -> do + let (shaT, sizeT) = T.break (== ',') shaSizeT + sha <- either (const Nothing) Just $ SHA256.fromHexText shaT + msize <- + case T.stripPrefix "," sizeT of + Nothing -> Just Nothing + Just sizeT' -> + case decimal sizeT' of + Right (size', "") -> Just $ Just $ FileSize size' + _ -> Nothing + pure $ CFIHash sha msize + Just ("@rev", revT) -> + case decimal revT of + Right (rev, "") -> pure $ CFIRevision $ Revision rev + _ -> Nothing + Nothing -> pure CFILatest + _ -> Nothing + pure $ PackageIdentifierRevision name version cfi + where + splitColon t' = + let (x, y) = T.break (== ':') t' + in (x, ) <$> T.stripPrefix ":" y + +data Mismatch a = Mismatch + { mismatchExpected :: !a + , mismatchActual :: !a + } + +-- | Things that can go wrong in pantry. Note two things: +-- +-- * Many other exception types may be thrown from underlying +-- libraries. Pantry does not attempt to wrap these underlying +-- exceptions. +-- +-- * We may add more constructors to this data type in minor version +-- bumps of pantry. This technically breaks the PVP. You should not +-- be writing pattern matches against this type that expect total +-- matching. 
+-- +-- @since 0.1.0.0 +data PantryException + = PackageIdentifierRevisionParseFail !Text + | InvalidCabalFile + !(Either PackageLocationImmutable (Path Abs File)) + !(Maybe Version) + ![PError] + ![PWarning] + | TreeWithoutCabalFile !PackageLocationImmutable + | TreeWithMultipleCabalFiles !PackageLocationImmutable ![SafeFilePath] + | MismatchedCabalName !(Path Abs File) !PackageName + | NoCabalFileFound !(Path Abs Dir) + | MultipleCabalFilesFound !(Path Abs Dir) ![Path Abs File] + | InvalidWantedCompiler !Text + | InvalidSnapshotLocation !(Path Abs Dir) !Text + | InvalidOverrideCompiler !WantedCompiler !WantedCompiler + | InvalidFilePathSnapshot !Text + | InvalidSnapshot !SnapshotLocation !SomeException + | MismatchedPackageMetadata + !PackageLocationImmutable + !PackageMetadata + !(Maybe TreeKey) + !BlobKey -- cabal file found + !PackageIdentifier + | Non200ResponseStatus !Status + | InvalidBlobKey !(Mismatch BlobKey) + | Couldn'tParseSnapshot !SnapshotLocation !String + | WrongCabalFileName !PackageLocationImmutable !SafeFilePath !PackageName + | DownloadInvalidSHA256 !Text !(Mismatch SHA256) + | DownloadInvalidSize !Text !(Mismatch FileSize) + | DownloadTooLarge !Text !(Mismatch FileSize) + -- ^ Different from 'DownloadInvalidSize' since 'mismatchActual' is + -- a lower bound on the size from the server. 
+ | LocalInvalidSHA256 !(Path Abs File) !(Mismatch SHA256) + | LocalInvalidSize !(Path Abs File) !(Mismatch FileSize) + | UnknownArchiveType !ArchiveLocation + | InvalidTarFileType !ArchiveLocation !FilePath !Tar.FileType + | UnsupportedTarball !ArchiveLocation !Text + | NoHackageCryptographicHash !PackageIdentifier + | FailedToCloneRepo !Repo + | TreeReferencesMissingBlob !PackageLocationImmutable !SafeFilePath !BlobKey + | CompletePackageMetadataMismatch !PackageLocationImmutable !PackageMetadata + | CRC32Mismatch !ArchiveLocation !FilePath !(Mismatch Word32) + | UnknownHackagePackage !PackageIdentifierRevision !FuzzyResults + | CannotCompleteRepoNonSHA1 !Repo + | MutablePackageLocationFromUrl !Text + | MismatchedCabalFileForHackage !PackageIdentifierRevision !(Mismatch PackageIdentifier) + | PackageNameParseFail !Text + | PackageVersionParseFail !Text + + deriving Typeable +instance Exception PantryException where +instance Show PantryException where + show = T.unpack . utf8BuilderToText . display +instance Display PantryException where + display (PackageIdentifierRevisionParseFail text) = + "Invalid package identifier (with optional revision): " <> + display text + display (InvalidCabalFile loc mversion errs warnings) = + "Unable to parse cabal file from package " <> + either display (fromString . toFilePath) loc <> + "\n\n" <> + foldMap + (\(PError pos msg) -> + "- " <> + fromString (showPos pos) <> + ": " <> + fromString msg <> + "\n") + errs <> + foldMap + (\(PWarning _ pos msg) -> + "- " <> + fromString (showPos pos) <> + ": " <> + fromString msg <> + "\n") + warnings <> + + (case mversion of + Just version + | version > cabalSpecLatestVersion -> + "\n\nThe cabal file uses the cabal specification version " <> + fromString (versionString version) <> + ", but we only support up to version " <> + fromString (versionString cabalSpecLatestVersion) <> + ".\nRecommended action: upgrade your build tool (e.g., `stack upgrade`)." 
+ _ -> mempty) + display (TreeWithoutCabalFile pl) = "No cabal file found for " <> display pl + display (TreeWithMultipleCabalFiles pl sfps) = + "Multiple cabal files found for " <> display pl <> ": " <> + fold (intersperse ", " (map display sfps)) + display (MismatchedCabalName fp name) = + "cabal file path " <> + fromString (toFilePath fp) <> + " does not match the package name it defines.\n" <> + "Please rename the file to: " <> + fromString (packageNameString name) <> + ".cabal\n" <> + "For more information, see: https://github.com/commercialhaskell/stack/issues/317" + display (NoCabalFileFound dir) = + "Stack looks for packages in the directories configured in\n" <> + "the 'packages' and 'extra-deps' fields defined in your stack.yaml\n" <> + "The current entry points to " <> + fromString (toFilePath dir) <> + ",\nbut no .cabal or package.yaml file could be found there." + display (MultipleCabalFilesFound dir files) = + "Multiple .cabal files found in directory " <> + fromString (toFilePath dir) <> + ":\n" <> + fold (intersperse "\n" (map (\x -> "- " <> fromString (toFilePath (filename x))) files)) + display (InvalidWantedCompiler t) = "Invalid wanted compiler: " <> display t + display (InvalidSnapshotLocation dir t) = + "Invalid snapshot location " <> + displayShow t <> + " relative to directory " <> + displayShow (toFilePath dir) + display (InvalidOverrideCompiler x y) = + "Specified compiler for a resolver (" <> + display x <> + "), but also specified an override compiler (" <> + display y <> + ")" + display (InvalidFilePathSnapshot t) = + "Specified snapshot as file path with " <> + displayShow t <> + ", but not reading from a local file" + display (InvalidSnapshot loc e) = + "Exception while reading snapshot from " <> + display loc <> + ":\n" <> + displayShow e + display (MismatchedPackageMetadata loc pm mtreeKey foundCabal foundIdent) = + "Mismatched package metadata for " <> display loc <> + "\nFound: " <> fromString (packageIdentifierString foundIdent) 
<> " with cabal file " <>
+    display foundCabal <>
+    (case mtreeKey of
+       Nothing -> mempty
+       Just treeKey -> " and tree " <> display treeKey) <>
+    "\nExpected: " <> display pm
+  display (Non200ResponseStatus status) =
+    "Unexpected non-200 HTTP status code: " <>
+    displayShow (statusCode status)
+  display (InvalidBlobKey Mismatch{..}) =
+    "Invalid blob key found, expected: " <>
+    display mismatchExpected <>
+    ", actual: " <>
+    display mismatchActual
+  display (Couldn'tParseSnapshot sl e) =
+    "Couldn't parse snapshot from " <> display sl <> ": " <> fromString e
+  display (WrongCabalFileName pl sfp name) =
+    "Wrong cabal file name for package " <> display pl <>
+    "\nCabal file is named " <> display sfp <>
+    ", but package name is " <> fromString (packageNameString name) <>
+    "\nFor more information, see:\n - https://github.com/commercialhaskell/stack/issues/317\n - https://github.com/commercialhaskell/stack/issues/895"
+  display (DownloadInvalidSHA256 url Mismatch {..}) =
+    "Mismatched SHA256 hash from " <> display url <>
+    "\nExpected: " <> display mismatchExpected <>
+    "\nActual: " <> display mismatchActual
+  display (DownloadInvalidSize url Mismatch {..}) =
+    "Mismatched download size from " <> display url <>
+    "\nExpected: " <> display mismatchExpected <>
+    "\nActual: " <> display mismatchActual
+  display (DownloadTooLarge url Mismatch {..}) =
+    "Download from " <> display url <> " was too large.\n" <>
+    "Expected: " <> display mismatchExpected <> ", stopped after receiving: " <>
+    display mismatchActual
+  display (LocalInvalidSHA256 path Mismatch {..}) =
+    "Mismatched SHA256 hash from " <> fromString (toFilePath path) <>
+    "\nExpected: " <> display mismatchExpected <>
+    "\nActual: " <> display mismatchActual
+  display (LocalInvalidSize path Mismatch {..}) =
+    "Mismatched file size from " <> fromString (toFilePath path) <>
+    "\nExpected: " <> display mismatchExpected <>
+    "\nActual: " <> display mismatchActual
+  display (UnknownArchiveType loc) = "Unable to
determine archive type of: " <> display loc
+  display (InvalidTarFileType loc fp x) =
+    "Unsupported tar filetype in archive " <> display loc <> " at file " <> fromString fp <> ": " <> displayShow x
+  display (UnsupportedTarball loc e) =
+    "Unsupported tarball from " <> display loc <> ": " <> display e
+  display (NoHackageCryptographicHash ident) =
+    "No cryptographic hash found for Hackage package " <> fromString (packageIdentifierString ident)
+  display (FailedToCloneRepo repo) = "Failed to clone repo " <> display repo
+  display (TreeReferencesMissingBlob loc sfp key) =
+    "The package " <> display loc <>
+    " needs blob " <> display key <>
+    " for file path " <> display sfp <>
+    ", but the blob is not available"
+  display (CompletePackageMetadataMismatch loc pm) =
+    "When completing package metadata for " <> display loc <>
+    ", some values changed in the new package metadata: " <>
+    display pm
+  display (CRC32Mismatch loc fp Mismatch {..}) =
+    "CRC32 mismatch in ZIP file from " <> display loc <>
+    " on internal file " <> fromString fp <>
+    "\nExpected: " <> display mismatchExpected <>
+    "\nActual: " <> display mismatchActual
+  display (UnknownHackagePackage pir fuzzy) =
+    "Could not find " <> display pir <> " on Hackage" <>
+    displayFuzzy fuzzy
+  display (CannotCompleteRepoNonSHA1 repo) =
+    "Cannot complete repo information for a non SHA1 commit due to non-reproducibility: " <>
+    display repo
+  display (MutablePackageLocationFromUrl t) =
+    "Cannot refer to a mutable package location from a URL: " <> display t
+  display (MismatchedCabalFileForHackage pir Mismatch{..}) =
+    "When processing cabal file for Hackage package " <> display pir <>
+    ":\nMismatched package identifier."
<> + "\nExpected: " <> fromString (packageIdentifierString mismatchExpected) <> + "\nActual: " <> fromString (packageIdentifierString mismatchActual) + display (PackageNameParseFail t) = + "Invalid package name: " <> display t + display (PackageVersionParseFail t) = + "Invalid version: " <> display t + +data FuzzyResults + = FRNameNotFound ![PackageName] + | FRVersionNotFound !(NonEmpty PackageIdentifierRevision) + | FRRevisionNotFound !(NonEmpty PackageIdentifierRevision) + +displayFuzzy :: FuzzyResults -> Utf8Builder +displayFuzzy (FRNameNotFound names) = + case NE.nonEmpty names of + Nothing -> "" + Just names' -> + "\nPerhaps you meant " <> + orSeparated (NE.map (fromString . packageNameString) names') <> + "?" +displayFuzzy (FRVersionNotFound pirs) = + "\nPossible candidates: " <> + commaSeparated (NE.map display pirs) <> + "." +displayFuzzy (FRRevisionNotFound pirs) = + "The specified revision was not found.\nPossible candidates: " <> + commaSeparated (NE.map display pirs) <> + "." + +orSeparated :: NonEmpty Utf8Builder -> Utf8Builder +orSeparated xs + | NE.length xs == 1 = NE.head xs + | NE.length xs == 2 = NE.head xs <> " or " <> NE.last xs + | otherwise = fold (intersperse ", " (NE.init xs)) <> ", or " <> NE.last xs + +commaSeparated :: NonEmpty Utf8Builder -> Utf8Builder +commaSeparated = fold . NE.intersperse ", " + +-- You'd really think there'd be a better way to do this in Cabal. 
+cabalSpecLatestVersion :: Version +cabalSpecLatestVersion = + case cabalSpecLatest of + CabalSpecOld -> error "this cannot happen" + CabalSpecV1_22 -> error "this cannot happen" + CabalSpecV1_24 -> error "this cannot happen" + CabalSpecV2_0 -> error "this cannot happen" + CabalSpecV2_2 -> mkVersion [2, 2] + +data FileType = FTNormal | FTExecutable + deriving (Show, Eq, Enum, Bounded) +instance PersistField FileType where + toPersistValue FTNormal = PersistInt64 1 + toPersistValue FTExecutable = PersistInt64 2 + + fromPersistValue v = do + i <- fromPersistValue v + case i :: Int64 of + 1 -> Right FTNormal + 2 -> Right FTExecutable + _ -> Left $ "Invalid FileType: " <> tshow i +instance PersistFieldSql FileType where + sqlType _ = SqlInt32 + +data TreeEntry = TreeEntry + { teBlob :: !BlobKey + , teType :: !FileType + } + deriving (Show, Eq) + +newtype SafeFilePath = SafeFilePath Text + deriving (Show, Eq, Ord, Display) + +instance PersistField SafeFilePath where + toPersistValue = toPersistValue . unSafeFilePath + fromPersistValue v = do + t <- fromPersistValue v + maybe (Left $ "Invalid SafeFilePath: " <> t) Right $ mkSafeFilePath t +instance PersistFieldSql SafeFilePath where + sqlType _ = SqlString + +unSafeFilePath :: SafeFilePath -> Text +unSafeFilePath (SafeFilePath t) = t + +mkSafeFilePath :: Text -> Maybe SafeFilePath +mkSafeFilePath t = do + guard $ not $ "\\" `T.isInfixOf` t + guard $ not $ "//" `T.isInfixOf` t + guard $ not $ "\n" `T.isInfixOf` t + guard $ not $ "\0" `T.isInfixOf` t + + (c, _) <- T.uncons t + guard $ c /= '/' + + guard $ all (not . T.all (== '.')) $ T.split (== '/') t + + Just $ SafeFilePath t + +-- | The hash of the binary representation of a 'Tree'. +-- +-- @since 0.1.0.0 +newtype TreeKey = TreeKey BlobKey + deriving (Show, Eq, Ord, Generic, Data, Typeable, ToJSON, FromJSON, NFData, Store, Display) + +-- | Represents the contents of a tree, which is a mapping from +-- relative file paths to 'TreeEntry's. 
+-- +-- @since 0.1.0.0 +newtype Tree + = TreeMap (Map SafeFilePath TreeEntry) + -- In the future, consider allowing more lax parsing + -- See: https://www.fpcomplete.com/blog/2018/07/pantry-part-2-trees-keys + -- TreeTarball !PackageTarball + deriving (Show, Eq) + +renderTree :: Tree -> ByteString +renderTree = BL.toStrict . toLazyByteString . go + where + go :: Tree -> Builder + go (TreeMap m) = "map:" <> Map.foldMapWithKey goEntry m + + goEntry sfp (TreeEntry (BlobKey sha (FileSize size')) ft) = + netstring (unSafeFilePath sfp) <> + byteString (SHA256.toRaw sha) <> + netword size' <> + (case ft of + FTNormal -> "N" + FTExecutable -> "X") + +netstring :: Text -> Builder +netstring t = + let bs = encodeUtf8 t + in netword (fromIntegral (B.length bs)) <> byteString bs + +netword :: Word -> Builder +netword w = wordDec w <> ":" + +parseTree :: ByteString -> Maybe Tree +parseTree bs1 = do + tree <- parseTree' bs1 + let bs2 = renderTree tree + guard $ bs1 == bs2 + Just tree + +parseTree' :: ByteString -> Maybe Tree +parseTree' bs0 = do + entriesBS <- B.stripPrefix "map:" bs0 + TreeMap <$> loop Map.empty entriesBS + where + loop !m bs1 + | B.null bs1 = pure m + | otherwise = do + (sfpBS, bs2) <- takeNetstring bs1 + sfp <- + case decodeUtf8' sfpBS of + Left _ -> Nothing + Right sfpT -> mkSafeFilePath sfpT + (sha, bs3) <- takeSha bs2 + (size', bs4) <- takeNetword bs3 + (typeW, bs5) <- B.uncons bs4 + ft <- + case typeW of + 78 -> Just FTNormal -- 'N' + 88 -> Just FTExecutable -- 'X' + _ -> Nothing + let entry = TreeEntry (BlobKey sha (FileSize (fromIntegral size'))) ft + loop (Map.insert sfp entry m) bs5 + + takeNetstring bs1 = do + (size', bs2) <- takeNetword bs1 + guard $ B.length bs2 >= size' + Just $ B.splitAt size' bs2 + + takeSha bs = do + let (x, y) = B.splitAt 32 bs + x' <- either (const Nothing) Just (SHA256.fromRaw x) + Just (x', y) + + takeNetword = + go 0 + where + go !accum bs = do + (next, rest) <- B.uncons bs + if + | next == 58 -> pure (accum, rest) -- ':' 
+          | next >= 48 && next <= 57 ->
+              go
+                (accum * 10 + fromIntegral (next - 48))
+                rest
+          | otherwise -> Nothing
+
+  {-
+data PackageTarball = PackageTarball
+  { ptBlob :: !BlobKey
+  -- ^ Contains the tarball itself
+  , ptCabal :: !BlobKey
+  -- ^ Contains the cabal file contents
+  , ptSubdir :: !FilePath
+  -- ^ Subdir containing the files we want for this package.
+  --
+  -- There must be precisely one file with a @.cabal@ file extension
+  -- located there. Thanks to Hackage revisions, its contents will be
+  -- overwritten by the value of @ptCabal@.
+  }
+  deriving Show
+  -}
+
+-- | This is almost a copy of Cabal's parser for package identifiers,
+-- the main difference is in the fact that Stack requires version to be
+-- present while Cabal uses "null version" as a default value
+--
+-- @since 0.1.0.0
+parsePackageIdentifier :: String -> Maybe PackageIdentifier
+parsePackageIdentifier str =
+    case [p | (p, s) <- Parse.readP_to_S parser str, all isSpace s] of
+        [] -> Nothing
+        (p:_) -> Just p
+  where
+    parser = do
+        n <- Distribution.Text.parse
+        -- version is a required component of a package identifier for Stack
+        v <- Parse.char '-' >> Distribution.Text.parse
+        return (PackageIdentifier n v)
+
+-- | Parse a package name from a 'String'.
+--
+-- @since 0.1.0.0
+parsePackageName :: String -> Maybe PackageName
+parsePackageName = Distribution.Text.simpleParse
+
+-- | Parse a package name from a 'String' throwing on failure
+--
+-- @since 0.1.0.0
+parsePackageNameThrowing :: MonadThrow m => String -> m PackageName
+parsePackageNameThrowing str =
+  case parsePackageName str of
+    Nothing -> throwM $ PackageNameParseFail $ T.pack str
+    Just pn -> pure pn
+
+-- | Parse a version from a 'String'.
+-- +-- @since 0.1.0.0 +parseVersion :: String -> Maybe Version +parseVersion = Distribution.Text.simpleParse + +-- | Parse a package version from a 'String' throwing on failure +-- +-- @since 0.1.0.0 +parseVersionThrowing :: MonadThrow m => String -> m Version +parseVersionThrowing str = + case parseVersion str of + Nothing -> throwM $ PackageVersionParseFail $ T.pack str + Just v -> pure v + +-- | Parse a version range from a 'String'. +-- +-- @since 0.1.0.0 +parseVersionRange :: String -> Maybe VersionRange +parseVersionRange = Distribution.Text.simpleParse + +-- | Parse a flag name from a 'String'. +-- +-- @since 0.1.0.0 +parseFlagName :: String -> Maybe FlagName +parseFlagName = Distribution.Text.simpleParse + +-- | Render a package name as a 'String'. +-- +-- @since 0.1.0.0 +packageNameString :: PackageName -> String +packageNameString = unPackageName + +-- | Render a package identifier as a 'String'. +-- +-- @since 0.1.0.0 +packageIdentifierString :: PackageIdentifier -> String +packageIdentifierString = Distribution.Text.display + +-- | Render a version as a 'String'. +-- +-- @since 0.1.0.0 +versionString :: Version -> String +versionString = Distribution.Text.display + +-- | Render a flag name as a 'String'. +-- +-- @since 0.1.0.0 +flagNameString :: FlagName -> String +flagNameString = unFlagName + +-- | Render a module name as a 'String'. +-- +-- @since 0.1.0.0 +moduleNameString :: ModuleName -> String +moduleNameString = Distribution.Text.display + +data OptionalSubdirs + = OSSubdirs !(NonEmpty Text) + | OSPackageMetadata !Text !PackageMetadata + -- ^ subdirectory and package metadata + deriving (Show, Eq, Data, Generic) +instance NFData OptionalSubdirs +instance Store OptionalSubdirs + +-- | Metadata provided by a config file for archives and repos. This +-- information can be used for optimized lookups of information like +-- package identifiers, or for validating that the user configuration +-- has the expected information. 
+-- +-- @since 0.1.0.0 +data PackageMetadata = PackageMetadata + { pmName :: !(Maybe PackageName) + -- ^ Package name in the cabal file + -- + -- @since 0.1.0.0 + , pmVersion :: !(Maybe Version) + -- ^ Package version in the cabal file + -- + -- @since 0.1.0.0 + , pmTreeKey :: !(Maybe TreeKey) + -- ^ Tree key of the loaded up package + -- + -- @since 0.1.0.0 + , pmCabal :: !(Maybe BlobKey) + -- ^ Blob key containing the cabal file + -- + -- @since 0.1.0.0 + } + deriving (Show, Eq, Ord, Generic, Data, Typeable) +instance Store PackageMetadata +instance NFData PackageMetadata + +instance Display PackageMetadata where + display pm = fold $ intersperse ", " $ catMaybes + [ (\name -> "name == " <> fromString (packageNameString name)) <$> pmName pm + , (\version -> "version == " <> fromString (versionString version)) <$> pmVersion pm + , (\tree -> "tree == " <> display tree) <$> pmTreeKey pm + , (\cabal -> "cabal file == " <> display cabal) <$> pmCabal pm + ] + +-- | File path relative to the configuration file it was parsed from +-- +-- @since 0.1.0.0 +newtype RelFilePath = RelFilePath Text + deriving (Show, ToJSON, FromJSON, Eq, Ord, Generic, Data, Typeable, Store, NFData, Display) + +-- | Location that an archive is stored at +-- +-- @since 0.1.0.0 +data ArchiveLocation + = ALUrl !Text + -- ^ Archive stored at an HTTP(S) URL + -- + -- @since 0.1.0.0 + | ALFilePath !(ResolvedPath File) + -- ^ Archive stored at a local file path + -- + -- @since 0.1.0.0 + deriving (Show, Eq, Ord, Generic, Data, Typeable) +instance Store ArchiveLocation +instance NFData ArchiveLocation + +instance Display ArchiveLocation where + display (ALUrl url) = display url + display (ALFilePath resolved) = fromString $ toFilePath $ resolvedAbsolute resolved + +parseArchiveLocationObject :: Object -> WarningParser (Unresolved ArchiveLocation) +parseArchiveLocationObject o = + ((o ..: "url") >>= validateUrl) <|> + ((o ..: "filepath") >>= validateFilePath) <|> + ((o ..: "archive") >>= 
parseArchiveLocationText) <|> + ((o ..: "location") >>= parseArchiveLocationText) + +-- Forgive me my father, for I have sinned (bad fail, bad!) +parseArchiveLocationText :: (Monad m, Alternative m) => Text -> m (Unresolved ArchiveLocation) +parseArchiveLocationText t = validateUrl t <|> validateFilePath t + +validateUrl :: Monad m => Text -> m (Unresolved ArchiveLocation) +validateUrl t = + case parseRequest $ T.unpack t of + Left _ -> fail $ "Could not parse URL: " ++ T.unpack t + Right _ -> pure $ pure $ ALUrl t + +validateFilePath :: Monad m => Text -> m (Unresolved ArchiveLocation) +validateFilePath t = + if any (\ext -> ext `T.isSuffixOf` t) (T.words ".zip .tar .tar.gz") + then pure $ Unresolved $ \mdir -> + case mdir of + Nothing -> throwIO $ InvalidFilePathSnapshot t + Just dir -> do + abs' <- resolveFile dir $ T.unpack t + pure $ ALFilePath $ ResolvedPath (RelFilePath t) abs' + else fail $ "Does not have an archive file extension: " ++ T.unpack t + +instance ToJSON PackageLocation where + toJSON (PLImmutable pli) = toJSON pli + toJSON (PLMutable resolved) = toJSON (resolvedRelative resolved) +instance FromJSON (WithJSONWarnings (Unresolved (NonEmpty PackageLocation))) where + parseJSON v = + ((fmap.fmap.fmap.fmap) PLImmutable (parseJSON v)) <|> + ((noJSONWarnings . 
mkMutable) <$> parseJSON v) + where + mkMutable :: Text -> Unresolved (NonEmpty PackageLocation) + mkMutable t = Unresolved $ \mdir -> do + case mdir of + Nothing -> throwIO $ MutablePackageLocationFromUrl t + Just dir -> do + abs' <- resolveDir dir $ T.unpack t + pure $ pure $ PLMutable $ ResolvedPath (RelFilePath t) abs' + +instance ToJSON PackageLocationImmutable where + toJSON (PLIHackage pir mtree) = object $ concat + [ ["hackage" .= pir] + , maybe [] (\tree -> ["pantry-tree" .= tree]) mtree + ] + toJSON (PLIArchive (Archive loc msha msize subdir) pm) = object $ concat + [ case loc of + ALUrl url -> ["url" .= url] + ALFilePath resolved -> ["filepath" .= resolvedRelative resolved] + , maybe [] (\sha -> ["sha256" .= sha]) msha + , maybe [] (\size' -> ["size" .= size']) msize + , if T.null subdir then [] else ["subdir" .= subdir] + , pmToPairs pm + ] + toJSON (PLIRepo (Repo url commit typ subdir) pm) = object $ concat + [ [ urlKey .= url + , "commit" .= commit + ] + , if T.null subdir then [] else ["subdir" .= subdir] + , pmToPairs pm + ] + where + urlKey = + case typ of + RepoGit -> "git" + RepoHg -> "hg" + +pmToPairs :: PackageMetadata -> [(Text, Value)] +pmToPairs (PackageMetadata mname mversion mtree mcabal) = concat + [ maybe [] (\name -> ["name" .= CabalString name]) mname + , maybe [] (\version -> ["version" .= CabalString version]) mversion + , maybe [] (\tree -> ["pantry-tree" .= tree]) mtree + , maybe [] (\cabal -> ["cabal-file" .= cabal]) mcabal + ] + +instance FromJSON (WithJSONWarnings (Unresolved (NonEmpty PackageLocationImmutable))) where + parseJSON v + = http v + <|> hackageText v + <|> hackageObject v + <|> repo v + <|> archiveObject v + <|> github v + <|> fail ("Could not parse a UnresolvedPackageLocationImmutable from: " ++ show v) + where + http :: Value -> Parser (WithJSONWarnings (Unresolved (NonEmpty PackageLocationImmutable))) + http = withText "UnresolvedPackageLocationImmutable.UPLIArchive (Text)" $ \t -> + case parseArchiveLocationText 
t of + Nothing -> fail $ "Invalid archive location: " ++ T.unpack t + Just (Unresolved mkArchiveLocation) -> + pure $ noJSONWarnings $ Unresolved $ \mdir -> do + archiveLocation <- mkArchiveLocation mdir + let archiveHash = Nothing + archiveSize = Nothing + archiveSubdir = T.empty + pure $ pure $ PLIArchive Archive {..} pmEmpty + + hackageText = withText "UnresolvedPackageLocationImmutable.UPLIHackage (Text)" $ \t -> + case parsePackageIdentifierRevision t of + Left e -> fail $ show e + Right pir -> pure $ noJSONWarnings $ pure $ pure $ PLIHackage pir Nothing + + hackageObject = withObjectWarnings "UnresolvedPackageLocationImmutable.UPLIHackage" $ \o -> (pure.pure) <$> (PLIHackage + <$> o ..: "hackage" + <*> o ..:? "pantry-tree") + + optionalSubdirs :: Object -> WarningParser OptionalSubdirs + optionalSubdirs o = + -- if subdirs exists, it needs to be valid + case HM.lookup "subdirs" o of + Just v' -> do + tellJSONField "subdirs" + subdirs <- lift $ parseJSON v' + case NE.nonEmpty subdirs of + Nothing -> fail "Invalid empty subdirs" + Just x -> pure $ OSSubdirs x + Nothing -> OSPackageMetadata + <$> o ..:? "subdir" ..!= T.empty + <*> (PackageMetadata + <$> (fmap unCabalString <$> (o ..:? "name")) + <*> (fmap unCabalString <$> (o ..:? "version")) + <*> o ..:? "pantry-tree" + <*> o ..:? "cabal-file") + + repo = withObjectWarnings "UnresolvedPackageLocationImmutable.UPLIRepo" $ \o -> do + (repoType, repoUrl) <- + ((RepoGit, ) <$> o ..: "git") <|> + ((RepoHg, ) <$> o ..: "hg") + repoCommit <- o ..: "commit" + os <- optionalSubdirs o + pure $ pure $ NE.map (\(repoSubdir, pm) -> PLIRepo Repo {..} pm) (osToPms os) + + archiveObject = withObjectWarnings "UnresolvedPackageLocationImmutable.UPLIArchive" $ \o -> do + Unresolved mkArchiveLocation <- parseArchiveLocationObject o + archiveHash <- o ..:? "sha256" + archiveSize <- o ..:? 
"size" + os <- optionalSubdirs o + pure $ Unresolved $ \mdir -> do + archiveLocation <- mkArchiveLocation mdir + pure $ NE.map (\(archiveSubdir, pm) -> PLIArchive Archive {..} pm) (osToPms os) + + github = withObjectWarnings "PLArchive:github" $ \o -> do + GitHubRepo ghRepo <- o ..: "github" + commit <- o ..: "commit" + let archiveLocation = ALUrl $ T.concat + [ "https://github.com/" + , ghRepo + , "/archive/" + , commit + , ".tar.gz" + ] + archiveHash <- o ..:? "sha256" + archiveSize <- o ..:? "size" + os <- optionalSubdirs o + pure $ pure $ NE.map (\(archiveSubdir, pm) -> PLIArchive Archive {..} pm) (osToPms os) + +-- | Returns pairs of subdirectory and 'PackageMetadata'. +osToPms :: OptionalSubdirs -> NonEmpty (Text, PackageMetadata) +osToPms (OSSubdirs subdirs) = NE.map (, pmEmpty) subdirs +osToPms (OSPackageMetadata subdir pm) = pure (subdir, pm) + +pmEmpty :: PackageMetadata +pmEmpty = PackageMetadata Nothing Nothing Nothing Nothing + +-- | Newtype wrapper for easier JSON integration with Cabal types. +-- +-- @since 0.1.0.0 +newtype CabalString a = CabalString { unCabalString :: a } + deriving (Show, Eq, Ord, Typeable) + +-- I'd like to use coerce here, but can't due to roles. unsafeCoerce +-- could work, but let's avoid unsafe code. + +-- | Wrap the keys in a 'Map' with a 'CabalString' to get a 'ToJSON' +-- instance. +-- +-- @since 0.1.0.0 +toCabalStringMap :: Map a v -> Map (CabalString a) v +toCabalStringMap = Map.mapKeysMonotonic CabalString + +-- | Unwrap the 'CabalString' from the keys in a 'Map' to use a +-- 'FromJSON' instance. +-- +-- @since 0.1.0.0 +unCabalStringMap :: Map (CabalString a) v -> Map a v +unCabalStringMap = Map.mapKeysMonotonic unCabalString + +instance Distribution.Text.Text a => ToJSON (CabalString a) where + toJSON = toJSON . Distribution.Text.display . unCabalString +instance Distribution.Text.Text a => ToJSONKey (CabalString a) where + toJSONKey = toJSONKeyText $ T.pack . Distribution.Text.display . 
unCabalString + +instance forall a. IsCabalString a => FromJSON (CabalString a) where + parseJSON = withText name $ \t -> + case cabalStringParser $ T.unpack t of + Nothing -> fail $ "Invalid " ++ name ++ ": " ++ T.unpack t + Just x -> pure $ CabalString x + where + name = cabalStringName (Nothing :: Maybe a) +instance forall a. IsCabalString a => FromJSONKey (CabalString a) where + fromJSONKey = + FromJSONKeyTextParser $ \t -> + case cabalStringParser $ T.unpack t of + Nothing -> fail $ "Invalid " ++ name ++ ": " ++ T.unpack t + Just x -> pure $ CabalString x + where + name = cabalStringName (Nothing :: Maybe a) + +class IsCabalString a where + cabalStringName :: proxy a -> String + cabalStringParser :: String -> Maybe a +instance IsCabalString PackageName where + cabalStringName _ = "package name" + cabalStringParser = parsePackageName +instance IsCabalString Version where + cabalStringName _ = "version" + cabalStringParser = parseVersion +instance IsCabalString VersionRange where + cabalStringName _ = "version range" + cabalStringParser = parseVersionRange +instance IsCabalString PackageIdentifier where + cabalStringName _ = "package identifier" + cabalStringParser = parsePackageIdentifier +instance IsCabalString FlagName where + cabalStringName _ = "flag name" + cabalStringParser = parseFlagName + +-- | What to use for running hpack +-- +-- @since 0.1.0.0 +data HpackExecutable + = HpackBundled + -- ^ Compiled in library + | HpackCommand !FilePath + -- ^ Executable at the provided path + deriving (Show, Read, Eq, Ord) + +-- | Which compiler a snapshot wants to use. The build tool may elect +-- to do some fuzzy matching of versions (e.g., allowing different +-- patch versions). 
+-- +-- @since 0.1.0.0 +data WantedCompiler + = WCGhc !Version + | WCGhcjs + !Version + !Version + -- ^ GHCJS version followed by GHC version + deriving (Show, Eq, Ord, Data, Generic) +instance NFData WantedCompiler +instance Store WantedCompiler +instance Display WantedCompiler where + display (WCGhc vghc) = "ghc-" <> fromString (versionString vghc) + display (WCGhcjs vghcjs vghc) = + "ghcjs-" <> fromString (versionString vghcjs) <> "_ghc-" <> fromString (versionString vghc) +instance ToJSON WantedCompiler where + toJSON = toJSON . utf8BuilderToText . display +instance FromJSON WantedCompiler where + parseJSON = withText "WantedCompiler" $ either (fail . show) pure . parseWantedCompiler +instance FromJSONKey WantedCompiler where + fromJSONKey = + FromJSONKeyTextParser $ \t -> + case parseWantedCompiler t of + Left e -> fail $ "Invalid WantedComiler " ++ show t ++ ": " ++ show e + Right x -> pure x + +-- | Parse a 'Text' into a 'WantedCompiler' value. +-- +-- @since 0.1.0.0 +parseWantedCompiler :: Text -> Either PantryException WantedCompiler +parseWantedCompiler t0 = maybe (Left $ InvalidWantedCompiler t0) Right $ + case T.stripPrefix "ghcjs-" t0 of + Just t1 -> parseGhcjs t1 + Nothing -> T.stripPrefix "ghc-" t0 >>= parseGhc + where + parseGhcjs t1 = do + let (ghcjsVT, t2) = T.break (== '_') t1 + ghcjsV <- parseVersion $ T.unpack ghcjsVT + ghcVT <- T.stripPrefix "_ghc-" t2 + ghcV <- parseVersion $ T.unpack ghcVT + pure $ WCGhcjs ghcjsV ghcV + parseGhc = fmap WCGhc . parseVersion . T.unpack + +instance FromJSON (WithJSONWarnings (Unresolved SnapshotLocation)) where + parseJSON v = text v <|> obj v + where + text :: Value -> Parser (WithJSONWarnings (Unresolved SnapshotLocation)) + text = withText "UnresolvedSnapshotLocation (Text)" $ pure . noJSONWarnings . parseSnapshotLocation + + obj :: Value -> Parser (WithJSONWarnings (Unresolved SnapshotLocation)) + obj = withObjectWarnings "UnresolvedSnapshotLocation (Object)" $ \o -> + ((pure . 
SLCompiler) <$> o ..: "compiler") <|> + ((\x y -> pure $ SLUrl x y) <$> o ..: "url" <*> blobKey o) <|> + (parseSnapshotLocationPath <$> o ..: "filepath") + + blobKey o = do + msha <- o ..:? "sha256" + msize <- o ..:? "size" + case (msha, msize) of + (Nothing, Nothing) -> pure Nothing + (Just sha, Just size') -> pure $ Just $ BlobKey sha size' + (Just _sha, Nothing) -> fail "You must also specify the file size" + (Nothing, Just _) -> fail "You must also specify the file's SHA256" + +instance Display SnapshotLocation where + display (SLCompiler compiler) = display compiler + display (SLUrl url Nothing) = display url + display (SLUrl url (Just blob)) = display url <> " (" <> display blob <> ")" + display (SLFilePath resolved) = display (resolvedRelative resolved) + +-- | Parse a 'Text' into an 'Unresolved' 'SnapshotLocation'. +-- +-- @since 0.1.0.0 +parseSnapshotLocation :: Text -> Unresolved SnapshotLocation +parseSnapshotLocation t0 = fromMaybe (parseSnapshotLocationPath t0) $ + (either (const Nothing) (Just . pure . SLCompiler) (parseWantedCompiler t0)) <|> + parseLts <|> + parseNightly <|> + parseGithub <|> + parseUrl + where + parseLts = do + t1 <- T.stripPrefix "lts-" t0 + Right (x, t2) <- Just $ decimal t1 + t3 <- T.stripPrefix "." 
t2 + Right (y, "") <- Just $ decimal t3 + Just $ pure $ ltsSnapshotLocation x y + parseNightly = do + t1 <- T.stripPrefix "nightly-" t0 + date <- readMaybe (T.unpack t1) + Just $ pure $ nightlySnapshotLocation date + + parseGithub = do + t1 <- T.stripPrefix "github:" t0 + let (user, t2) = T.break (== '/') t1 + t3 <- T.stripPrefix "/" t2 + let (repo, t4) = T.break (== ':') t3 + path <- T.stripPrefix ":" t4 + Just $ pure $ githubSnapshotLocation user repo path + + parseUrl = parseRequest (T.unpack t0) $> pure (SLUrl t0 Nothing) + +parseSnapshotLocationPath :: Text -> Unresolved SnapshotLocation +parseSnapshotLocationPath t = + Unresolved $ \mdir -> + case mdir of + Nothing -> throwIO $ InvalidFilePathSnapshot t + Just dir -> do + abs' <- resolveFile dir (T.unpack t) `catchAny` \_ -> throwIO (InvalidSnapshotLocation dir t) + pure $ SLFilePath $ ResolvedPath (RelFilePath t) abs' + +githubSnapshotLocation :: Text -> Text -> Text -> SnapshotLocation +githubSnapshotLocation user repo path = + let url = T.concat + [ "https://raw.githubusercontent.com/" + , user + , "/" + , repo + , "/master/" + , path + ] + in SLUrl url Nothing + +defUser :: Text +defUser = "commercialhaskell" + +defRepo :: Text +defRepo = "stackage-snapshots" + +-- | Location of an LTS snapshot +-- +-- @since 0.1.0.0 +ltsSnapshotLocation + :: Int -- ^ major version + -> Int -- ^ minor version + -> SnapshotLocation +ltsSnapshotLocation x y = + githubSnapshotLocation defUser defRepo $ + utf8BuilderToText $ + "lts/" <> display x <> "/" <> display y <> ".yaml" + +-- | Location of a Stackage Nightly snapshot +-- +-- @since 0.1.0.0 +nightlySnapshotLocation :: Day -> SnapshotLocation +nightlySnapshotLocation date = + githubSnapshotLocation defUser defRepo $ + utf8BuilderToText $ + "nightly/" <> display year <> "/" <> display month <> "/" <> display day <> ".yaml" + where + (year, month, day) = toGregorian date + +-- | Where to load a snapshot from. 
+-- +-- @since 0.1.0.0 +data SnapshotLocation + = SLCompiler !WantedCompiler + -- ^ Don't use an actual snapshot, just a version of the compiler + -- with its shipped packages. + -- + -- @since 0.1.0.0 + | SLUrl !Text !(Maybe BlobKey) + -- ^ Download the snapshot from the given URL. The optional + -- 'BlobKey' is used for reproducibility. + -- + -- @since 0.1.0.0 + | SLFilePath !(ResolvedPath File) + -- ^ Snapshot at a local file path. + -- + -- @since 0.1.0.0 + deriving (Show, Eq, Data, Ord, Generic) +instance Store SnapshotLocation +instance NFData SnapshotLocation + +instance ToJSON SnapshotLocation where + toJSON (SLCompiler compiler) = object ["compiler" .= compiler] + toJSON (SLUrl url mblob) = object + $ "url" .= url + : maybe [] blobKeyPairs mblob + toJSON (SLFilePath resolved) = object ["filepath" .= resolvedRelative resolved] + +-- | Specification of a snapshot, such as LTS Haskell. +-- +-- @since 0.1.0.0 +data Snapshot = Snapshot + { snapshotParent :: !SnapshotLocation + -- ^ The snapshot to extend from. This is either a specific + -- compiler, or a @SnapshotLocation@ which gives us more information + -- (like packages). Ultimately, we'll end up with a + -- @CompilerVersion@. + -- + -- @since 0.1.0.0 + , snapshotCompiler :: !(Maybe WantedCompiler) + -- ^ Override the compiler specified in 'snapshotParent'. Must be + -- 'Nothing' if using 'SLCompiler'. + -- + -- @since 0.1.0.0 + , snapshotName :: !Text + -- ^ A user-friendly way of referring to this resolver. + -- + -- @since 0.1.0.0 + , snapshotLocations :: ![PackageLocationImmutable] + -- ^ Where to grab all of the packages from. + -- + -- @since 0.1.0.0 + , snapshotDropPackages :: !(Set PackageName) + -- ^ Packages present in the parent which should not be included + -- here. 
+ -- + -- @since 0.1.0.0 + , snapshotFlags :: !(Map PackageName (Map FlagName Bool)) + -- ^ Flag values to override from the defaults + -- + -- @since 0.1.0.0 + , snapshotHidden :: !(Map PackageName Bool) + -- ^ Packages which should be hidden when registering. This will + -- affect, for example, the import parser in the script + -- command. We use a 'Map' instead of just a 'Set' to allow + -- overriding the hidden settings in a parent snapshot. + -- + -- @since 0.1.0.0 + , snapshotGhcOptions :: !(Map PackageName [Text]) + -- ^ GHC options per package + -- + -- @since 0.1.0.0 + } + deriving (Show, Eq, Data, Generic) +instance Store Snapshot +instance NFData Snapshot +instance ToJSON Snapshot where + toJSON snap = object $ concat + [ ["resolver" .= snapshotParent snap] + , maybe [] (\compiler -> ["compiler" .= compiler]) (snapshotCompiler snap) + , ["name" .= snapshotName snap] + , ["packages" .= snapshotLocations snap] + , if Set.null (snapshotDropPackages snap) then [] else ["drop-packages" .= Set.map CabalString (snapshotDropPackages snap)] + , if Map.null (snapshotFlags snap) then [] else ["flags" .= fmap toCabalStringMap (toCabalStringMap (snapshotFlags snap))] + , if Map.null (snapshotHidden snap) then [] else ["hidden" .= toCabalStringMap (snapshotHidden snap)] + , if Map.null (snapshotGhcOptions snap) then [] else ["ghc-options" .= toCabalStringMap (snapshotGhcOptions snap)] + ] + +instance FromJSON (WithJSONWarnings (Unresolved Snapshot)) where + parseJSON = withObjectWarnings "Snapshot" $ \o -> do + mcompiler <- o ..:? "compiler" + mresolver <- jsonSubWarningsT $ o ..:? 
"resolver" + unresolvedSnapshotParent <- + case (mcompiler, mresolver) of + (Nothing, Nothing) -> fail "Snapshot must have either resolver or compiler" + (Just compiler, Nothing) -> pure $ pure (SLCompiler compiler, Nothing) + (_, Just (Unresolved usl)) -> pure $ Unresolved $ \mdir -> do + sl <- usl mdir + case (sl, mcompiler) of + (SLCompiler c1, Just c2) -> throwIO $ InvalidOverrideCompiler c1 c2 + _ -> pure (sl, mcompiler) + + snapshotName <- o ..: "name" + unresolvedLocs <- jsonSubWarningsT (o ..:? "packages" ..!= []) + snapshotDropPackages <- Set.map unCabalString <$> (o ..:? "drop-packages" ..!= Set.empty) + snapshotFlags <- (unCabalStringMap . fmap unCabalStringMap) <$> (o ..:? "flags" ..!= Map.empty) + snapshotHidden <- unCabalStringMap <$> (o ..:? "hidden" ..!= Map.empty) + snapshotGhcOptions <- unCabalStringMap <$> (o ..:? "ghc-options" ..!= Map.empty) + pure $ (\snapshotLocations (snapshotParent, snapshotCompiler) -> Snapshot {..}) + <$> ((concat . map NE.toList) <$> sequenceA unresolvedLocs) + <*> unresolvedSnapshotParent + +-- TODO ORPHANS remove + +instance Store PackageIdentifier where + size = + VarSize $ \(PackageIdentifier name version) -> + (case size of + ConstSize x -> x + VarSize f -> f name) + + (case size of + ConstSize x -> x + VarSize f -> f version) + peek = PackageIdentifier <$> peek <*> peek + poke (PackageIdentifier name version) = poke name *> poke version +instance Store PackageName where + size = + VarSize $ \name -> + case size of + ConstSize x -> x + VarSize f -> f (packageNameString name) + peek = peek >>= maybe (fail "Invalid package name") pure . parsePackageName + poke name = poke (packageNameString name) +instance Store Version where + size = + VarSize $ \version -> + case size of + ConstSize x -> x + VarSize f -> f (versionString version) + peek = peek >>= maybe (fail "Invalid version") pure . 
parseVersion + poke version = poke (versionString version) +instance Store FlagName where + size = + VarSize $ \fname -> + case size of + ConstSize x -> x + VarSize f -> f (flagNameString fname) + peek = peek >>= maybe (fail "Invalid flag name") pure . parseFlagName + poke fname = poke (flagNameString fname) +instance Store ModuleName where + size = + VarSize $ \mname -> + case size of + ConstSize x -> x + VarSize f -> f $ ModuleName.components mname + peek = ModuleName.fromComponents <$> peek + poke = poke . ModuleName.components +instance Store PackageIdentifierRevision where + size = + VarSize $ \(PackageIdentifierRevision name version cfi) -> + (case size of + ConstSize x -> x + VarSize f -> f name) + + (case size of + ConstSize x -> x + VarSize f -> f version) + + (case size of + ConstSize x -> x + VarSize f -> f cfi) + peek = PackageIdentifierRevision <$> peek <*> peek <*> peek + poke (PackageIdentifierRevision name version cfi) = poke name *> poke version *> poke cfi + +deriving instance Data Abs +deriving instance Data Dir +deriving instance Data File +deriving instance (Data a, Data t) => Data (Path a t) + +deriving instance Generic Abs +deriving instance Generic Dir +deriving instance Generic File +deriving instance (Generic a, Generic t) => Generic (Path a t) + +instance Store Abs +instance Store Dir +instance Store File +instance (Generic a, Generic t, Store a, Store t) => Store (Path a t) diff --git a/subs/pantry/test/Pantry/ArchiveSpec.hs b/subs/pantry/test/Pantry/ArchiveSpec.hs new file mode 100644 index 0000000000..e4491e29da --- /dev/null +++ b/subs/pantry/test/Pantry/ArchiveSpec.hs @@ -0,0 +1,50 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +module Pantry.ArchiveSpec (spec) where + +import Test.Hspec +import RIO +import Pantry +import Path.IO (resolveFile') + +spec :: Spec +spec = do + it "cabal file from tarball" $ asIO $ runPantryApp $ do + let rel = "attic/package-0.1.2.3.tar.gz" + abs' <- resolveFile' rel + ident 
<- getPackageLocationIdent $ PLIArchive + Archive + { archiveLocation = ALFilePath ResolvedPath + { resolvedRelative = RelFilePath $ fromString rel + , resolvedAbsolute = abs' + } + , archiveHash = Nothing + , archiveSize = Nothing + , archiveSubdir = "" + } + PackageMetadata + { pmName = Nothing + , pmVersion = Nothing + , pmTreeKey = Nothing + , pmCabal = Nothing + } + case parsePackageIdentifier "package-0.1.2.3" of + Nothing -> error "should have parsed" + Just expected -> liftIO $ ident `shouldBe` expected + it "handles symlinks to parent dirs" $ do + ident <- runPantryApp $ getPackageLocationIdent $ PLIArchive + Archive + { archiveLocation = ALUrl "https://github.com/commercialhaskell/stack/archive/2b846ff4fda13a8cd095e7421ce76df0a08b10dc.tar.gz" + , archiveHash = Nothing + , archiveSize = Nothing + , archiveSubdir = "subs/pantry/" + } + PackageMetadata + { pmName = Nothing + , pmVersion = Nothing + , pmTreeKey = Nothing + , pmCabal = Nothing + } + case parsePackageIdentifier "pantry-0.1.0.0" of + Nothing -> error "should have parsed" + Just expected -> ident `shouldBe` expected diff --git a/subs/pantry/test/Pantry/BuildPlanSpec.hs b/subs/pantry/test/Pantry/BuildPlanSpec.hs new file mode 100644 index 0000000000..d6523ea18c --- /dev/null +++ b/subs/pantry/test/Pantry/BuildPlanSpec.hs @@ -0,0 +1,121 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} + +module Pantry.BuildPlanSpec where + +import Data.Aeson.Extended (WithJSONWarnings(..)) +import RIO +import qualified Data.ByteString.Char8 as S8 +import Data.Yaml (decodeThrow) +import Pantry +import Test.Hspec +import Control.Monad.Catch (MonadThrow) +import Data.List.NonEmpty (NonEmpty) + +spec :: Spec +spec = + describe "PackageLocation" $ do + describe "Archive" $ do + describe "github" $ do + let decode' :: (HasCallStack, MonadThrow m) => ByteString -> m (WithJSONWarnings (Unresolved (NonEmpty PackageLocationImmutable))) + decode' = decodeThrow + + decode'' :: HasCallStack => 
ByteString -> IO (NonEmpty PackageLocationImmutable) + decode'' bs = do + WithJSONWarnings unresolved warnings <- decode' bs + unless (null warnings) $ error $ show warnings + resolvePaths Nothing unresolved + + it "'github' and 'commit' keys" $ do + let contents :: ByteString + contents = + S8.pack + (unlines + [ "github: oink/town" + , "commit: abc123" + ]) + let expected :: PackageLocationImmutable + expected = + PLIArchive + Archive + { archiveLocation = ALUrl "https://github.com/oink/town/archive/abc123.tar.gz" + , archiveHash = Nothing + , archiveSize = Nothing + , archiveSubdir = "" + } + PackageMetadata + { pmName = Nothing + , pmVersion = Nothing + , pmTreeKey = Nothing + , pmCabal = Nothing + } + actual <- decode'' contents + actual `shouldBe` pure expected + + it "'github', 'commit', and 'subdirs' keys" $ do + let contents :: ByteString + contents = + S8.pack + (unlines + [ "github: oink/town" + , "commit: abc123" + , "subdirs:" + , " - foo" + ]) + let expected :: PackageLocationImmutable + expected = + PLIArchive + Archive + { archiveLocation = ALUrl "https://github.com/oink/town/archive/abc123.tar.gz" + , archiveHash = Nothing + , archiveSize = Nothing + , archiveSubdir = "foo" + } + PackageMetadata + { pmName = Nothing + , pmVersion = Nothing + , pmTreeKey = Nothing + , pmCabal = Nothing + } + actual <- decode'' contents + actual `shouldBe` pure expected + + it "does not parse GitHub repo with no slash" $ do + let contents :: ByteString + contents = + S8.pack + (unlines + [ "github: oink" + , "commit: abc123" + ]) + void (decode' contents) `shouldBe` Nothing + + it "does not parse GitHub repo with leading slash" $ do + let contents :: ByteString + contents = + S8.pack + (unlines + [ "github: /oink" + , "commit: abc123" + ]) + void (decode' contents) `shouldBe` Nothing + + it "does not parse GitHub repo with trailing slash" $ do + let contents :: ByteString + contents = + S8.pack + (unlines + [ "github: oink/" + , "commit: abc123" + ]) + void (decode' 
contents) `shouldBe` Nothing + + it "does not parse GitHub repo with more than one slash" $ do + let contents :: ByteString + contents = + S8.pack + (unlines + [ "github: oink/town/here" + , "commit: abc123" + ]) + void (decode' contents) `shouldBe` Nothing diff --git a/subs/pantry/test/Pantry/CabalSpec.hs b/subs/pantry/test/Pantry/CabalSpec.hs new file mode 100644 index 0000000000..2a3991b391 --- /dev/null +++ b/subs/pantry/test/Pantry/CabalSpec.hs @@ -0,0 +1,112 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +module Pantry.CabalSpec (spec) where + +import Test.Hspec +import Pantry +import qualified Pantry.SHA256 as SHA256 +import RIO +import Distribution.Types.PackageName (mkPackageName) +import Distribution.Types.Version (mkVersion) + +spec :: Spec +spec = describe "wrong cabal file" $ do + let test name action = it name (runPantryApp action :: IO ()) + shouldThrow' x y = withRunInIO $ \run -> run x `shouldThrow` y + test "Hackage" $ do + sha <- either throwIO pure + $ SHA256.fromHexBytes "71c2c685a932cd3a70ec52d7bd0ec96ecbfa5e31e22130099cd50fa073ad1a69" + let pli = + PLIHackage + (PackageIdentifierRevision + name + version3 + (CFIHash sha (Just size))) + Nothing + go = loadCabalFileImmutable pli + name = mkPackageName "acme-missiles" + version2 = mkVersion [0, 2] + version3 = mkVersion [0, 3] + size = FileSize 597 + go `shouldThrow'` \e -> + case e of + MismatchedPackageMetadata pli' pm _tree cabal ident -> + pli == pli' && + pm == PackageMetadata + { pmName = Just name + , pmVersion = Just version3 + , pmTreeKey = Nothing + , pmCabal = Just $ BlobKey sha size + } && + cabal == BlobKey sha size && + ident == PackageIdentifier name version2 + _ -> False + + test "tarball with wrong ident" $ do + archiveHash' <- either throwIO pure + $ SHA256.fromHexBytes "b5a582209c50e4a61e4b6c0fb91a6a7d65177a881225438b0144719bc3682c3a" + sha <- either throwIO pure + $ SHA256.fromHexBytes 
"71c2c685a932cd3a70ec52d7bd0ec96ecbfa5e31e22130099cd50fa073ad1a69" + let pli = PLIArchive archive pm + archive = + Archive + { archiveLocation = ALUrl "https://github.com/yesodweb/yesod/archive/yesod-auth-1.6.4.1.tar.gz" + , archiveHash = Just archiveHash' + , archiveSize = Just $ FileSize 309199 + , archiveSubdir = "yesod-auth" + } + pm = + PackageMetadata + { pmName = Just acmeMissiles + , pmVersion = Just version2 + , pmCabal = Just $ BlobKey sha (FileSize 597) + , pmTreeKey = Nothing + } + go = loadCabalFileImmutable pli + acmeMissiles = mkPackageName "acme-missiles" + version2 = mkVersion [0, 2] + go `shouldThrow'` \e -> + case e of + MismatchedPackageMetadata pli' pm' _treeKey cabal ident -> + pli == pli' && + pm == pm' && + cabal == BlobKey + (either impureThrow id $ SHA256.fromHexBytes "940d82426ad1db0fcc978c0f386ac5d06df019546071993cb7c6633f1ad17d50") + (FileSize 3038) && + ident == PackageIdentifier + (mkPackageName "yesod-auth") + (mkVersion [1, 6, 4, 1]) + _ -> False + + test "tarball with wrong cabal file" $ do + sha <- either throwIO pure + $ SHA256.fromHexBytes "71c2c685a932cd3a70ec52d7bd0ec96ecbfa5e31e22130099cd50fa073ad1a69" + let pli = PLIArchive archive pm + archive = + Archive + { archiveLocation = ALUrl "https://github.com/yesodweb/yesod/archive/yesod-auth-1.6.4.1.tar.gz" + , archiveHash = either impureThrow Just + $ SHA256.fromHexBytes "b5a582209c50e4a61e4b6c0fb91a6a7d65177a881225438b0144719bc3682c3a" + , archiveSize = Just $ FileSize 309199 + , archiveSubdir = "yesod-auth" + } + pm = + PackageMetadata + { pmName = Just yesodAuth + , pmVersion = Just version + , pmCabal = Just $ BlobKey sha (FileSize 597) + , pmTreeKey = Nothing + } + go = loadCabalFileImmutable pli + yesodAuth = mkPackageName "yesod-auth" + version = mkVersion [1, 6, 4, 1] + go `shouldThrow'` \e -> + case e of + MismatchedPackageMetadata pli' pm' _treeKey cabal ident -> + pli == pli' && + pm == pm' && + cabal == BlobKey + (either impureThrow id $ SHA256.fromHexBytes 
"940d82426ad1db0fcc978c0f386ac5d06df019546071993cb7c6633f1ad17d50") + (FileSize 3038) && + ident == PackageIdentifier yesodAuth version + _ -> False diff --git a/subs/pantry/test/Pantry/HackageSpec.hs b/subs/pantry/test/Pantry/HackageSpec.hs new file mode 100644 index 0000000000..06f7de9969 --- /dev/null +++ b/subs/pantry/test/Pantry/HackageSpec.hs @@ -0,0 +1,23 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +module Pantry.HackageSpec (spec) where + +import Test.Hspec +import Pantry +import RIO +import Distribution.Types.Version (mkVersion) + +spec :: Spec +spec = do + it "update works" $ asIO $ void $ runPantryApp $ updateHackageIndex Nothing + it "fuzzy lookup kicks in" $ do + let pir = PackageIdentifierRevision "thisisnot-tobe-foundon-hackage-please" (mkVersion [1..3]) CFILatest + runPantryApp (loadPackage (PLIHackage pir Nothing)) + `shouldThrow` \e -> + case e of + UnknownHackagePackage pir' _ -> pir == pir' + _ -> False + -- Flaky test, can be broken by new packages on Hackage. 
+ it "finds acme-missiles" $ do + x <- runPantryApp (getHackageTypoCorrections "acme-missile") + x `shouldSatisfy` ("acme-missiles" `elem`) diff --git a/src/test/Stack/StaticBytesSpec.hs b/subs/pantry/test/Pantry/Internal/StaticBytesSpec.hs similarity index 67% rename from src/test/Stack/StaticBytesSpec.hs rename to subs/pantry/test/Pantry/Internal/StaticBytesSpec.hs index 13921819a7..09e3c01b5b 100644 --- a/src/test/Stack/StaticBytesSpec.hs +++ b/subs/pantry/test/Pantry/Internal/StaticBytesSpec.hs @@ -1,16 +1,18 @@ {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE NoImplicitPrelude #-} -module Stack.StaticBytesSpec (spec) where +module Pantry.Internal.StaticBytesSpec (spec) where -import Stack.StaticBytes -import Stack.Prelude +import RIO +import Pantry.Internal.StaticBytes +import Control.Monad (replicateM) import qualified Data.ByteString as B import qualified Data.Vector.Unboxed as VU import qualified Data.Vector.Primitive as VP import qualified Data.Vector.Storable as VS import Test.Hspec import Test.Hspec.QuickCheck +import Test.QuickCheck import qualified Data.Text as T import qualified Data.Text.Encoding as TE @@ -23,30 +25,31 @@ spec = do tests :: (Eq dbytes, Show dbytes, DynamicBytes dbytes) => ([Word8] -> dbytes) -> Spec tests pack = do - it "disallows 4 bytes" $ do - toStaticExact (pack [1..4]) `shouldBe` (Left NotEnoughBytes :: Either StaticBytesException Bytes8) - it "toStaticExact matches ByteString" $ do - let octets = [1..8] + it "disallows 4 bytes" $ property $ \(w1,w2,w3,w4) -> + toStaticExact (pack [w1,w2,w3,w4]) `shouldBe` (Left NotEnoughBytes :: Either StaticBytesException Bytes8) + it "toStaticExact matches ByteString" $ property $ \(w1,w2,w3,w4) -> property $ \(w5,w6,w7,w8) -> do + let octets = [w1,w2,w3,w4,w5,w6,w7,w8] (expected :: Bytes8) = either impureThrow id $ toStaticExact (B.pack octets) actual = either impureThrow id $ toStaticExact (pack octets) actual `shouldBe` expected - it "fromStatic round 
trips" $ do - let octets = [1..8] + it "fromStatic round trips" $ property $ \(w1,w2,w3,w4) -> property $ \(w5,w6,w7,w8) -> do + let octets = [w1,w2,w3,w4,w5,w6,w7,w8] v1 = pack octets (b8 :: Bytes8) = either impureThrow id $ toStaticExact v1 v2 = fromStatic b8 v2 `shouldBe` v1 - it "allows 8 bytes" $ do - let bs = pack [1..8] + it "allows 8 bytes" $ property $ \(w1,w2,w3,w4) -> property $ \(w5,w6,w7,w8) -> do + let bs = pack [w1,w2,w3,w4,w5,w6,w7,w8] case toStaticExact bs of Left e -> throwIO e Right b8 -> fromStatic (b8 :: Bytes8) `shouldBe` bs toStaticExact bs `shouldBe` (Left NotEnoughBytes :: Either StaticBytesException Bytes16) - it "padding is the same as trailing nulls" $ do - let bs1 = pack $ [1..4] ++ replicate 4 0 - bs2 = pack [1..4] + it "padding is the same as trailing nulls" $ property $ \(w1,w2,w3,w4) -> do + let ws = [w1,w2,w3,w4] + bs1 = pack $ ws ++ replicate 4 0 + bs2 = pack ws Right (toStaticPadTruncate bs2 :: Bytes8) `shouldBe` toStaticExact bs1 prop "handles bytes16" $ \octets -> do @@ -54,10 +57,10 @@ tests pack = do (b16 :: Bytes16) = either impureThrow id $ toStaticPad bs fromStatic b16 `shouldBe` pack (take 16 (octets ++ replicate 16 0)) - it "spot check bytes16" $ do - let bs = pack $ replicate 16 0 + it "spot check bytes16" $ forAll (replicateM 16 arbitrary) $ \ws -> do + let bs = pack ws (b16 :: Bytes16) = either impureThrow id $ toStaticPad bs - fromStatic b16 `shouldBe` pack (replicate 16 0) + fromStatic b16 `shouldBe` pack ws prop "handles bytes32" $ \octets -> do let bs = pack $ take 32 octets diff --git a/subs/pantry/test/Pantry/InternalSpec.hs b/subs/pantry/test/Pantry/InternalSpec.hs new file mode 100644 index 0000000000..9b7dcaee46 --- /dev/null +++ b/subs/pantry/test/Pantry/InternalSpec.hs @@ -0,0 +1,55 @@ +module Pantry.InternalSpec (spec) where + +import Test.Hspec +import Pantry.Internal (normalizeParents, makeTarRelative) + +spec :: Spec +spec = do + describe "normalizeParents" $ do + let (!) 
:: HasCallStack => String -> Maybe String -> Spec + input ! output = + it input $ + let x = normalizeParents input + y = either (const Nothing) Just x + in y `shouldBe` output + + "/file/\\test" ! Nothing + "file/\\test" ! Just "file/\\test" + "/file/////\\test" ! Nothing + "file/////\\test" ! Just "file/\\test" + "/file/\\test////" ! Nothing + "/file/./test" ! Nothing + "file/./test" ! Just "file/test" + "/test/file/../bob/fred/" ! Nothing + "/test/file/../bob/fred" ! Nothing + "test/file/../bob/fred/" ! Nothing + "test/file/../bob/fred" ! Just "test/bob/fred" + "../bob/fred/" ! Nothing + "./bob/fred/" ! Nothing + "./bob/fred" ! Just "bob/fred" + "./" ! Nothing + "./." ! Nothing + "/./" ! Nothing + "/" ! Nothing + "bob/fred/." ! Nothing + "//home" ! Nothing + "foobarbaz\\bin" ! Just "foobarbaz\\bin" + + describe "makeTarRelative" $ do + let test :: HasCallStack => FilePath -> FilePath -> Maybe FilePath -> Spec + test base rel expected = + it (show (base, rel)) $ + either (const Nothing) Just (makeTarRelative base rel) + `shouldBe` expected + + test "foo/bar" "baz" $ Just "foo/baz" + test "foo" "bar" $ Just "bar" + test "foo" "/bar" Nothing + test "foo/" "bar" Nothing + + -- MSS 2018-08-23: Arguable whether this should be Nothing + -- instead, since we don't want any absolute paths. However, + -- that's really a concern for normalizeParents. Point being: if + -- you refactor in the future, and this turns into Nothing, that's + -- fine. 
+ test "/foo" "bar" $ Just "/bar" diff --git a/subs/pantry/test/Pantry/TreeSpec.hs b/subs/pantry/test/Pantry/TreeSpec.hs new file mode 100644 index 0000000000..ea360ff348 --- /dev/null +++ b/subs/pantry/test/Pantry/TreeSpec.hs @@ -0,0 +1,60 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +module Pantry.TreeSpec (spec) where + +import Test.Hspec +import RIO +import Pantry + +spec :: Spec +spec = do + let tarURL = "https://github.com/snoyberg/file-embed/archive/47b499c3c58ca465c56ee0295d0a76782a66751d.tar.gz" + zipURL = "https://github.com/snoyberg/file-embed/archive/47b499c3c58ca465c56ee0295d0a76782a66751d.zip" + pm = PackageMetadata + { pmName = Nothing + , pmVersion = Nothing + , pmTreeKey = Nothing + , pmCabal = Nothing + } + mkArchive url = + PLIArchive + Archive + { archiveLocation = ALUrl url + , archiveHash = Nothing + , archiveSize = Nothing + , archiveSubdir = "" + } + pm + tarPL = mkArchive tarURL + zipPL = mkArchive zipURL + gitPL = + PLIRepo + Repo + { repoUrl = "https://github.com/snoyberg/file-embed.git" + , repoCommit = "47b499c3c58ca465c56ee0295d0a76782a66751d" + , repoType = RepoGit + , repoSubdir = "" + } + pm + hgPL = + PLIRepo + Repo + { repoUrl = "https://bitbucket.org/snoyberg/file-embed" + , repoCommit = "6d8126e7a4821788a0275fa7c2c4a0083e14d690" + , repoType = RepoHg + , repoSubdir = "" + } + pm + + it "zip and tar.gz archives match" $ asIO $ runPantryAppClean $ do + pair1 <- loadPackage tarPL + pair2 <- loadPackage zipPL + liftIO $ pair2 `shouldBe` pair1 + it "archive and Git repo match" $ asIO $ runPantryAppClean $ do + pair1 <- loadPackage tarPL + pair2 <- loadPackage gitPL + liftIO $ pair2 `shouldBe` pair1 + it "archive and Hg repo match" $ asIO $ runPantryAppClean $ do + pair1 <- loadPackage tarPL + pair2 <- loadPackage hgPL + liftIO $ pair2 `shouldBe` pair1 diff --git a/subs/pantry/test/Pantry/TypesSpec.hs b/subs/pantry/test/Pantry/TypesSpec.hs new file mode 100644 index 0000000000..c746846062 --- /dev/null 
+++ b/subs/pantry/test/Pantry/TypesSpec.hs @@ -0,0 +1,55 @@ +{-# LANGUAGE NoImplicitPrelude #-} +{-# LANGUAGE OverloadedStrings #-} +module Pantry.TypesSpec (spec) where + +import Test.Hspec +import Hedgehog +import qualified Hedgehog.Gen as Gen +import qualified Hedgehog.Range as Range +import Pantry +import qualified Pantry.SHA256 as SHA256 +import Pantry.Internal (parseTree, renderTree, Tree (..), TreeEntry (..), mkSafeFilePath) +import RIO +import Distribution.Types.Version (mkVersion) +import qualified RIO.Text as T + +hh :: HasCallStack => String -> Property -> Spec +hh name p = it name $ do + result <- check p + unless result $ throwString "Hedgehog property failed" :: IO () + +genBlobKey :: Gen BlobKey +genBlobKey = BlobKey <$> genSha256 <*> (FileSize <$> (Gen.word (Range.linear 1 10000))) + +genSha256 :: Gen SHA256 +genSha256 = SHA256.hashBytes <$> Gen.bytes (Range.linear 1 500) + +spec :: Spec +spec = do + describe "WantedCompiler" $ do + hh "parse/render works" $ property $ do + wc <- forAll $ + let ghc = WCGhc <$> genVersion + ghcjs = WCGhcjs <$> genVersion <*> genVersion + genVersion = mkVersion <$> Gen.list (Range.linear 1 5) (Gen.int (Range.linear 0 100)) + in Gen.choice [ghc, ghcjs] + let text = utf8BuilderToText $ display wc + case parseWantedCompiler text of + Left e -> throwIO e + Right actual -> liftIO $ actual `shouldBe` wc + describe "Tree" $ do + hh "parse/render works" $ property $ do + tree <- forAll $ + let sfp = do + pieces <- Gen.list (Range.linear 1 10) sfpComponent + let combined = T.intercalate "/" pieces + case mkSafeFilePath combined of + Nothing -> error $ "Incorrect SafeFilePath in test suite: " ++ show pieces + Just sfp' -> pure sfp' + sfpComponent = Gen.text (Range.linear 1 15) Gen.alphaNum + entry = TreeEntry + <$> genBlobKey + <*> Gen.choice (map pure [minBound..maxBound]) + in TreeMap <$> Gen.map (Range.linear 1 20) ((,) <$> sfp <*> entry) + let bs = renderTree tree + liftIO $ parseTree bs `shouldBe` Just tree diff --git 
a/subs/pantry/test/Spec.hs b/subs/pantry/test/Spec.hs new file mode 100644 index 0000000000..a824f8c30c --- /dev/null +++ b/subs/pantry/test/Spec.hs @@ -0,0 +1 @@ +{-# OPTIONS_GHC -F -pgmF hspec-discover #-} diff --git a/test/integration/.gitignore b/test/integration/.gitignore new file mode 100644 index 0000000000..185be12bb3 --- /dev/null +++ b/test/integration/.gitignore @@ -0,0 +1,3 @@ +logs/ +tests-fail/ +tests-success/ diff --git a/test/integration/README.md b/test/integration/README.md index 3108199ff5..6db87f1488 100644 --- a/test/integration/README.md +++ b/test/integration/README.md @@ -29,3 +29,16 @@ $ stack test --flag stack:integration-tests stack:test:stack-integration-test Note that this command can take a _long_ time. It's also more thorough than the quick command given above, as it will run each test with a clean `STACK_ROOT`. + +## Helper scripts + +There are two helper scripts in this directory. Note that these may +not always work as anticipated, since some of the tests expect a clean +`STACK_ROOT`, and these scripts do not set that up. + +* `run-sort-tests.sh` will run all of the tests in the `tests` + directory, and move the successful ones into `tests-success`, and + the failing ones into `tests-fail`. It will keep the logs of failing + tests in `logs`. +* `run-single-test.sh` takes a single argument (the name of a test), + and runs just that test. diff --git a/test/integration/run-single-test.sh b/test/integration/run-single-test.sh new file mode 100755 index 0000000000..f9ba6e5cb9 --- /dev/null +++ b/test/integration/run-single-test.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +set -uo pipefail + +cd "$( dirname "${BASH_SOURCE[0]}" )" + +export STACK_ROOT=$HOME/.stack +unset GHC_PACKAGE_PATH + +DIR=$(pwd) +STACK=$(stack exec which stack) + +if [[ ! 
-d "tests/$1" ]] +then + echo Test does not exist: $1 + exit 1 +fi + +mkdir -p tests/$1/files +cd tests/$1/files +echo Running test $1 +exec $STACK --stack-yaml $DIR/../../stack.yaml runghc --no-ghc-package-path -- -i../../../lib ../Main.hs diff --git a/test/integration/run-sort-tests.sh b/test/integration/run-sort-tests.sh new file mode 100755 index 0000000000..3546fbf60a --- /dev/null +++ b/test/integration/run-sort-tests.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +set -uo pipefail + +cd "$( dirname "${BASH_SOURCE[0]}" )" + +export STACK_ROOT=$HOME/.stack + +DIR=$(pwd) +STACK=$(stack exec which stack) + +mkdir -p tests-success +mkdir -p tests-fail +mkdir -p logs + +cd "$DIR/tests" +for f in * +do + cd "$DIR/tests" + if [[ -d "$f" ]] + then + mkdir -p "$f/files" + cd "$f/files" + echo Running test $f + $STACK --stack-yaml $DIR/../../stack.yaml runghc --no-ghc-package-path -- -i../../../lib ../Main.hs > $DIR/logs/$f 2>&1 + RES=$? + cd "$DIR/tests" + echo Result code for $f: $RES + if [[ $RES -eq 0 ]] + then + mv "$f" ../tests-success + rm $DIR/logs/$f + else + mv "$f" ../tests-fail + fi + fi +done diff --git a/test/integration/tests/1198-multiple-exes-with-same-name/files/.gitignore b/test/integration/tests/1198-multiple-exes-with-same-name/files/.gitignore new file mode 100644 index 0000000000..684dbffa96 --- /dev/null +++ b/test/integration/tests/1198-multiple-exes-with-same-name/files/.gitignore @@ -0,0 +1 @@ +stack.yaml diff --git a/test/integration/tests/1336-1337-new-package-names/.gitignore b/test/integration/tests/1336-1337-new-package-names/.gitignore new file mode 100644 index 0000000000..027271b9b2 --- /dev/null +++ b/test/integration/tests/1336-1337-new-package-names/.gitignore @@ -0,0 +1 @@ +files diff --git a/test/integration/tests/1336-1337-new-package-names/Main.hs b/test/integration/tests/1336-1337-new-package-names/Main.hs index ef7d16ccc7..603fdc4341 100644 --- a/test/integration/tests/1336-1337-new-package-names/Main.hs +++ 
b/test/integration/tests/1336-1337-new-package-names/Main.hs @@ -9,7 +9,7 @@ main = if isWindows then logInfo "Disabled on Windows (see https://github.com/commercialhaskell/stack/issues/1337#issuecomment-166118678)" else do - stack ["new", "1234a-4b-b4-abc-12b34"] + safeNew "1234a-4b-b4-abc-12b34" doesExist "./1234a-4b-b4-abc-12b34/stack.yaml" stackErr ["new", "1234-abc"] doesNotExist "./1234-abc/stack.yaml" @@ -18,8 +18,14 @@ main = stackErr ["new", "44444444444444"] stackErr ["new", "abc-1"] stackErr ["new", "444-ば日本-4本"] - unless isMacOSX $ stack ["new", "ば日本-4本"] - stack ["new", "אבהץש"] - stack ["new", "ΔΘΩϬ"] + unless isMacOSX $ safeNew "ば日本-4本" + safeNew "אבהץש" + safeNew "ΔΘΩϬ" doesExist "./ΔΘΩϬ/stack.yaml" doesExist "./ΔΘΩϬ/ΔΘΩϬ.cabal" + +safeNew :: String -> IO () +safeNew name = do + exists <- doesDirectoryExist name + when exists $ removeDirectoryRecursive name + stack ["new", name] diff --git a/test/integration/tests/1884-url-to-tarball/files/.gitignore b/test/integration/tests/1884-url-to-tarball/files/.gitignore new file mode 100644 index 0000000000..d43d807c0d --- /dev/null +++ b/test/integration/tests/1884-url-to-tarball/files/.gitignore @@ -0,0 +1 @@ +*.cabal diff --git a/test/integration/tests/1884-url-to-tarball/files/package.yaml b/test/integration/tests/1884-url-to-tarball/files/package.yaml new file mode 100644 index 0000000000..2ab76b4002 --- /dev/null +++ b/test/integration/tests/1884-url-to-tarball/files/package.yaml @@ -0,0 +1,5 @@ +name: foo +dependencies: +- base +- half +library: {} diff --git a/test/integration/tests/1884-url-to-tarball/files/stack.yaml b/test/integration/tests/1884-url-to-tarball/files/stack.yaml index b4c19707dc..6a61c49383 100644 --- a/test/integration/tests/1884-url-to-tarball/files/stack.yaml +++ b/test/integration/tests/1884-url-to-tarball/files/stack.yaml @@ -1,5 +1,5 @@ -packages: +extra-deps: - location: https://hackage.haskell.org/package/half-0.2.2.3/half-0.2.2.3.tar.gz - extra-dep: false -extra-deps: [] + 
sha256: 85c244c80d1c889a3d79073a6f5a99d9e769dbe3c574ca11d992b2b4f7599a5c + size: 6050 resolver: lts-11.19 diff --git a/test/integration/tests/2643-copy-compiler-tool/Main.hs b/test/integration/tests/2643-copy-compiler-tool/Main.hs index 25ef7bc52c..7ca7b869f2 100644 --- a/test/integration/tests/2643-copy-compiler-tool/Main.hs +++ b/test/integration/tests/2643-copy-compiler-tool/Main.hs @@ -5,6 +5,7 @@ main :: IO () main = do -- init removeFileIgnore "stack.yaml" + removeDirIgnore ".stack-work" stack ["init", defaultResolverArg] -- place to throw some exes diff --git a/test/integration/tests/2643-copy-compiler-tool/files/.gitignore b/test/integration/tests/2643-copy-compiler-tool/files/.gitignore new file mode 100644 index 0000000000..56f4748f50 --- /dev/null +++ b/test/integration/tests/2643-copy-compiler-tool/files/.gitignore @@ -0,0 +1,2 @@ +binny/ +stack.yaml diff --git a/test/integration/tests/2781-shadow-bug/files/.gitignore b/test/integration/tests/2781-shadow-bug/files/.gitignore new file mode 100644 index 0000000000..da86f0dbe2 --- /dev/null +++ b/test/integration/tests/2781-shadow-bug/files/.gitignore @@ -0,0 +1 @@ +foo/src/ diff --git a/test/integration/tests/32-unlisted-module/files/.gitignore b/test/integration/tests/32-unlisted-module/files/.gitignore new file mode 100644 index 0000000000..6c87a5ea83 --- /dev/null +++ b/test/integration/tests/32-unlisted-module/files/.gitignore @@ -0,0 +1,2 @@ +embed.txt +src/Unlisted.hs diff --git a/test/integration/tests/3229-exe-targets/Main.hs b/test/integration/tests/3229-exe-targets/Main.hs index 86e0e1df33..502030659d 100644 --- a/test/integration/tests/3229-exe-targets/Main.hs +++ b/test/integration/tests/3229-exe-targets/Main.hs @@ -14,8 +14,9 @@ import StackTest main :: IO () main = do - stack [defaultResolverArg, "clean", "--full"] - stack [defaultResolverArg, "init", "--force"] + removeDirIgnore ".stack-work" + removeFileIgnore "stack.yaml" + stack [defaultResolverArg, "init"] stack ["build", ":alpha"] 
bracket (S.readFile alphaFile) diff --git a/test/integration/tests/3229-exe-targets/files/.gitignore b/test/integration/tests/3229-exe-targets/files/.gitignore new file mode 100644 index 0000000000..684dbffa96 --- /dev/null +++ b/test/integration/tests/3229-exe-targets/files/.gitignore @@ -0,0 +1 @@ +stack.yaml diff --git a/test/integration/tests/3396-package-indices/Main.hs b/test/integration/tests/3396-package-indices/Main.hs index 91115a6e82..1c6cc3ba8c 100644 --- a/test/integration/tests/3396-package-indices/Main.hs +++ b/test/integration/tests/3396-package-indices/Main.hs @@ -5,8 +5,11 @@ import System.FilePath (()) main :: IO () main = do + putStrLn "With pantry, non-Hackage Security indices are no longer supported, skipping test" + {- home <- getEnv "HOME" setEnv "STACK_ROOT" (home ".stack") -- Needed for Windows createDirectoryIfMissing True (home ".stack" "indices" "CustomIndex") copy "CustomIndex/01-index.tar" (home ".stack" "indices" "CustomIndex" "01-index.tar") stack ["build"] + -} diff --git a/test/integration/tests/3397-ghc-solver/files/.gitignore b/test/integration/tests/3397-ghc-solver/files/.gitignore new file mode 100644 index 0000000000..85a0a53562 --- /dev/null +++ b/test/integration/tests/3397-ghc-solver/files/.gitignore @@ -0,0 +1,2 @@ +*.cabal +stack.yaml diff --git a/test/integration/tests/3520-revision-matching/Main.hs b/test/integration/tests/3520-revision-matching/Main.hs index 28894c1bc8..5d01c83bf3 100644 --- a/test/integration/tests/3520-revision-matching/Main.hs +++ b/test/integration/tests/3520-revision-matching/Main.hs @@ -5,9 +5,12 @@ import System.Directory main :: IO () main = do + putStrLn "Test disabled due to switch to pantry" + {- copyFile "bad-stack.yaml" "stack.yaml" stackErrStderr ["build", "--dry-run"] $ \msg -> unless ("legacy 00-index.tar.gz" `isInfixOf` msg) $ error "Expected a warning about 00-index usage" copyFile "good-stack.yaml" "stack.yaml" stack ["build", "--dry-run"] + -} diff --git 
a/test/integration/tests/3574-extra-dep-local/files/stack.yaml b/test/integration/tests/3574-extra-dep-local/files/stack.yaml index 406c411d12..641ebe4356 100644 --- a/test/integration/tests/3574-extra-dep-local/files/stack.yaml +++ b/test/integration/tests/3574-extra-dep-local/files/stack.yaml @@ -1,8 +1,9 @@ resolver: ghc-8.2.2 -packages: -- location: foo - extra-dep: true +packages: [] + +extra-deps: +- foo ghc-options: $locals: -bob diff --git a/test/integration/tests/365-invalid-success/files/.gitignore b/test/integration/tests/365-invalid-success/files/.gitignore new file mode 100644 index 0000000000..8fec4903f5 --- /dev/null +++ b/test/integration/tests/365-invalid-success/files/.gitignore @@ -0,0 +1 @@ +app/Foo.hs diff --git a/test/integration/tests/366-non-root-dir/Main.hs b/test/integration/tests/366-non-root-dir/Main.hs index 1855418810..f1dd62cefb 100644 --- a/test/integration/tests/366-non-root-dir/Main.hs +++ b/test/integration/tests/366-non-root-dir/Main.hs @@ -3,6 +3,7 @@ import System.Directory main :: IO () main = do + removeDirIgnore ".stack-work" stackErr ["exec", "hello-world"] setCurrentDirectory "app" stack ["build"] diff --git a/test/integration/tests/3685-config-yaml-for-allow-newer/Main.hs b/test/integration/tests/3685-config-yaml-for-allow-newer/Main.hs index bd1289b23c..6b56e9f48b 100644 --- a/test/integration/tests/3685-config-yaml-for-allow-newer/Main.hs +++ b/test/integration/tests/3685-config-yaml-for-allow-newer/Main.hs @@ -5,6 +5,7 @@ import System.Directory main :: IO () main = do + removeFileIgnore "stack.yaml" stack ["init", defaultResolverArg] (_, stdErr) <- stackStderr ["install", "intero-0.1.23"] -- here we check stderr for 'allow-newer: true' and diff --git a/test/integration/tests/3685-config-yaml-for-allow-newer/files/.gitignore b/test/integration/tests/3685-config-yaml-for-allow-newer/files/.gitignore new file mode 100644 index 0000000000..684dbffa96 --- /dev/null +++ 
b/test/integration/tests/3685-config-yaml-for-allow-newer/files/.gitignore @@ -0,0 +1 @@ +stack.yaml diff --git a/test/integration/tests/384-local-deps/Main.hs b/test/integration/tests/384-local-deps/Main.hs index 776ac31386..fe0186368f 100644 --- a/test/integration/tests/384-local-deps/Main.hs +++ b/test/integration/tests/384-local-deps/Main.hs @@ -2,5 +2,6 @@ import StackTest main :: IO () main = do + removeFileIgnore "stack.yaml" stack ["init", defaultResolverArg] stack ["test"] diff --git a/test/integration/tests/384-local-deps/files/.gitignore b/test/integration/tests/384-local-deps/files/.gitignore new file mode 100644 index 0000000000..684dbffa96 --- /dev/null +++ b/test/integration/tests/384-local-deps/files/.gitignore @@ -0,0 +1 @@ +stack.yaml diff --git a/test/integration/tests/3926-ghci-with-sublibraries/files/.gitignore b/test/integration/tests/3926-ghci-with-sublibraries/files/.gitignore new file mode 100644 index 0000000000..54bdefd6ee --- /dev/null +++ b/test/integration/tests/3926-ghci-with-sublibraries/files/.gitignore @@ -0,0 +1,2 @@ +src/Lib.hs +src-internal/Internal.hs diff --git a/test/integration/tests/3942-solver-error-output/files/test-stack.yml b/test/integration/tests/3942-solver-error-output/files/test-stack.yml index b98b936ed4..8c2b419ff0 100644 --- a/test/integration/tests/3942-solver-error-output/files/test-stack.yml +++ b/test/integration/tests/3942-solver-error-output/files/test-stack.yml @@ -1,11 +1,6 @@ resolver: lts-11.19 -packages: -- location: ./one-deps - extra-dep: true +packages: [] -extra-deps: [] - -flags: {} - -extra-package-dbs: [] +extra-deps: +- ./one-deps diff --git a/test/integration/tests/3959-order-of-flags/files/.gitignore b/test/integration/tests/3959-order-of-flags/files/.gitignore new file mode 100644 index 0000000000..d43d807c0d --- /dev/null +++ b/test/integration/tests/3959-order-of-flags/files/.gitignore @@ -0,0 +1 @@ +*.cabal diff --git a/test/integration/tests/4215-missing-unregister/Main.hs 
b/test/integration/tests/4215-missing-unregister/Main.hs new file mode 100644 index 0000000000..83056eb580 --- /dev/null +++ b/test/integration/tests/4215-missing-unregister/Main.hs @@ -0,0 +1,6 @@ +import StackTest + +main :: IO () +main = do + stack ["build", "--stack-yaml", "stack1.yaml"] + stack ["build", "--stack-yaml", "stack2.yaml"] diff --git a/test/integration/tests/4215-missing-unregister/files/.gitignore b/test/integration/tests/4215-missing-unregister/files/.gitignore new file mode 100644 index 0000000000..0afa51175a --- /dev/null +++ b/test/integration/tests/4215-missing-unregister/files/.gitignore @@ -0,0 +1 @@ +foo.cabal diff --git a/test/integration/tests/4215-missing-unregister/files/stack1.yaml b/test/integration/tests/4215-missing-unregister/files/stack1.yaml new file mode 100644 index 0000000000..ec89cd2774 --- /dev/null +++ b/test/integration/tests/4215-missing-unregister/files/stack1.yaml @@ -0,0 +1,3 @@ +resolver: ghc-8.2.2 +packages: +- v1 diff --git a/test/integration/tests/4215-missing-unregister/files/stack2.yaml b/test/integration/tests/4215-missing-unregister/files/stack2.yaml new file mode 100644 index 0000000000..6a7f4b6532 --- /dev/null +++ b/test/integration/tests/4215-missing-unregister/files/stack2.yaml @@ -0,0 +1,3 @@ +resolver: ghc-8.2.2 +packages: +- v2 diff --git a/test/integration/tests/4215-missing-unregister/files/v1/package.yaml b/test/integration/tests/4215-missing-unregister/files/v1/package.yaml new file mode 100644 index 0000000000..7bcacfcb87 --- /dev/null +++ b/test/integration/tests/4215-missing-unregister/files/v1/package.yaml @@ -0,0 +1,7 @@ +name: foo +version: 1 + +dependencies: +- base + +library: {} diff --git a/test/integration/tests/4215-missing-unregister/files/v2/package.yaml b/test/integration/tests/4215-missing-unregister/files/v2/package.yaml new file mode 100644 index 0000000000..e49b4fdc65 --- /dev/null +++ b/test/integration/tests/4215-missing-unregister/files/v2/package.yaml @@ -0,0 +1,7 @@ +name: 
foo +version: 2 + +dependencies: +- base + +library: {} diff --git a/test/integration/tests/4220-freeze-command/Main.hs b/test/integration/tests/4220-freeze-command/Main.hs new file mode 100644 index 0000000000..3b055cd630 --- /dev/null +++ b/test/integration/tests/4220-freeze-command/Main.hs @@ -0,0 +1,23 @@ +import Control.Monad (unless) +import StackTest + +main :: IO () +main = do + stackCheckStdout ["freeze"] $ \stdOut -> do + let expected = unlines + [ "resolver:" + , " size: 527165" + , " url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/11/19.yaml" + , " sha256: 0116ad1779b20ad2c9d6620f172531f13b12bb69867e78f4277157e28865dfd4" + , "extra-deps:" + , "- hackage: a50-0.5@sha256:b8dfcc13dcbb12e444128bb0e17527a2a7a9bd74ca9450d6f6862c4b394ac054,1491" + , " pantry-tree:" + , " size: 409" + , " sha256: a7c6151a18b04afe1f13637627cad4deff91af51d336c4f33e95fc98c64c40d3" + ] + unless (stdOut == expected) $ + error $ concat [ "Expected: " + , show expected + , "\nActual: " + , show stdOut + ] diff --git a/test/integration/tests/4220-freeze-command/files/freeze-command.cabal b/test/integration/tests/4220-freeze-command/files/freeze-command.cabal new file mode 100644 index 0000000000..0875aa6927 --- /dev/null +++ b/test/integration/tests/4220-freeze-command/files/freeze-command.cabal @@ -0,0 +1,12 @@ +name: freeze-command +version: 0.1.0.0 +build-type: Simple +cabal-version: >= 2.0 + +library + exposed-modules: Src + hs-source-dirs: src + build-depends: base + , rio + , vector + default-language: Haskell2010 diff --git a/test/integration/tests/4220-freeze-command/files/src/Src.hs b/test/integration/tests/4220-freeze-command/files/src/Src.hs new file mode 100644 index 0000000000..0f8db7fb77 --- /dev/null +++ b/test/integration/tests/4220-freeze-command/files/src/Src.hs @@ -0,0 +1,5 @@ +module Src where + +-- | A function of the main library +funMainLib :: Int -> Int +funMainLib = succ diff --git 
a/test/integration/tests/4220-freeze-command/files/stack.yaml b/test/integration/tests/4220-freeze-command/files/stack.yaml new file mode 100644 index 0000000000..d67d97edb4 --- /dev/null +++ b/test/integration/tests/4220-freeze-command/files/stack.yaml @@ -0,0 +1,5 @@ +resolver: lts-11.19 +packages: +- . +extra-deps: +- a50-0.5@rev:0 diff --git a/test/integration/tests/443-specify-path/.gitignore b/test/integration/tests/443-specify-path/.gitignore new file mode 100644 index 0000000000..027271b9b2 --- /dev/null +++ b/test/integration/tests/443-specify-path/.gitignore @@ -0,0 +1 @@ +files diff --git a/test/integration/tests/443-specify-path/Main.hs b/test/integration/tests/443-specify-path/Main.hs index 47fef94c08..12a8c5e3e8 100644 --- a/test/integration/tests/443-specify-path/Main.hs +++ b/test/integration/tests/443-specify-path/Main.hs @@ -6,6 +6,7 @@ import System.Info (os) main :: IO () main = do -- install in relative path + removeDirIgnore "bin" createDirectory "bin" stack [defaultResolverArg, "--local-bin-path", "./bin", "install" , "happy"] doesExist ("./bin/happy" ++ exeExt) @@ -23,6 +24,7 @@ main = do -- install in absolute path tmpDirectory <- fmap ( "absolute-bin") getCurrentDirectory + removeDirIgnore tmpDirectory createDirectory tmpDirectory stack [defaultResolverArg, "--local-bin-path", tmpDirectory, "install", "happy" ] doesExist (tmpDirectory ("happy" ++ exeExt)) diff --git a/test/integration/tests/717-sdist-test/files/stack.yaml b/test/integration/tests/717-sdist-test/files/stack.yaml index 24b0da903c..5450e6d10b 100644 --- a/test/integration/tests/717-sdist-test/files/stack.yaml +++ b/test/integration/tests/717-sdist-test/files/stack.yaml @@ -3,10 +3,8 @@ packages: - package-with-th - package-with-working-th - package-with-failing-test -- location: subdirs - subdirs: - - dependent-on-failing-packages - - failing-in-subdir +- subdirs/dependent-on-failing-packages +- subdirs/failing-in-subdir extra-deps: [] flags: {} extra-package-dbs: [] diff 
--git a/test/integration/tests/cabal-solver/Main.hs b/test/integration/tests/cabal-solver/Main.hs index b96feca447..60827a41ab 100644 --- a/test/integration/tests/cabal-solver/Main.hs +++ b/test/integration/tests/cabal-solver/Main.hs @@ -8,8 +8,10 @@ main = do then logInfo "Disabled on Alpine Linux and ARM since it cannot yet install its own GHC." else do run "cabal" ["sandbox", "init"] + removeDirIgnore "acme-dont-1.1" stack ["unpack", "acme-dont-1.1"] run "cabal" ["install", "./acme-dont-1.1"] removeDirectoryRecursive "acme-dont-1.1" + removeFileIgnore "stack.yaml" stack ["--install-ghc", "init", "--solver"] stack ["build"] diff --git a/test/integration/tests/cabal-solver/files/.gitignore b/test/integration/tests/cabal-solver/files/.gitignore new file mode 100644 index 0000000000..b1be9ce6ad --- /dev/null +++ b/test/integration/tests/cabal-solver/files/.gitignore @@ -0,0 +1,2 @@ +stack.yaml +.cabal-sandbox diff --git a/test/integration/tests/cyclic-test-deps/.gitignore b/test/integration/tests/cyclic-test-deps/.gitignore new file mode 100644 index 0000000000..027271b9b2 --- /dev/null +++ b/test/integration/tests/cyclic-test-deps/.gitignore @@ -0,0 +1 @@ +files diff --git a/test/integration/tests/cyclic-test-deps/Main.hs b/test/integration/tests/cyclic-test-deps/Main.hs index 5508f741bd..36014895fa 100644 --- a/test/integration/tests/cyclic-test-deps/Main.hs +++ b/test/integration/tests/cyclic-test-deps/Main.hs @@ -2,6 +2,8 @@ import StackTest main :: IO () main = do + removeDirIgnore "text-1.2.2.1" stack ["unpack", "text-1.2.2.1"] + removeFileIgnore "stack.yaml" stack ["init", defaultResolverArg] stack ["test", "--dry-run"] diff --git a/test/integration/tests/duplicate-package-ids/Main.hs b/test/integration/tests/duplicate-package-ids/Main.hs index beb9e6d515..88fd081bc6 100644 --- a/test/integration/tests/duplicate-package-ids/Main.hs +++ b/test/integration/tests/duplicate-package-ids/Main.hs @@ -6,5 +6,6 @@ main = do stack ["setup"] stack ["build", 
"auto-update"] readFile "stack2.yaml" >>= writeFile "stack.yaml" + removeDirIgnore "auto-update-0.1.2.1" stack ["unpack", "auto-update-0.1.2.1"] stack ["build"] diff --git a/test/integration/tests/duplicate-package-ids/files/.gitignore b/test/integration/tests/duplicate-package-ids/files/.gitignore new file mode 100644 index 0000000000..f39970f250 --- /dev/null +++ b/test/integration/tests/duplicate-package-ids/files/.gitignore @@ -0,0 +1,2 @@ +stack.yaml +auto-update-0.1.2.1 diff --git a/test/integration/tests/haddock-options/Main.hs b/test/integration/tests/haddock-options/Main.hs index b0fe6fe300..4cc0a88578 100644 --- a/test/integration/tests/haddock-options/Main.hs +++ b/test/integration/tests/haddock-options/Main.hs @@ -2,6 +2,8 @@ import StackTest main :: IO () main = do + removeDirIgnore ".stack-work" + -- Fails to work because BAR is defined here and FOO in stack file stackErr ["haddock", "--haddock-arguments", "--optghc=-DBAR"] stack ["clean"] diff --git a/test/integration/tests/sanity/Main.hs b/test/integration/tests/sanity/Main.hs index 442fd018dc..4315b3d938 100644 --- a/test/integration/tests/sanity/Main.hs +++ b/test/integration/tests/sanity/Main.hs @@ -1,14 +1,23 @@ import StackTest +import Control.Monad (unless) +import System.Directory (doesFileExist) main :: IO () main = do stack ["--version"] stack ["--help"] + removeDirIgnore "acme-missiles-0.2" + removeDirIgnore "acme-missiles-0.3" stack ["unpack", "acme-missiles-0.2"] stack ["unpack", "acme-missiles"] stackErr ["command-does-not-exist"] stackErr ["unpack", "invalid-package-name-"] - stackErr ["build"] + + -- When running outside of IntegrationSpec.hs, this will use the + -- stack.yaml from Stack itself + exists <- doesFileExist "../../../../../stack.yaml" + unless exists $ stackErr ["build"] + doesNotExist "stack.yaml" if isWindows diff --git a/test/integration/tests/sanity/files/.gitignore b/test/integration/tests/sanity/files/.gitignore new file mode 100644 index 0000000000..6c7ac47db7 --- 
/dev/null +++ b/test/integration/tests/sanity/files/.gitignore @@ -0,0 +1,2 @@ +acme-missiles-0.2 +acme-missiles-0.3 diff --git a/test/integration/tests/skip-unreachable-dirs/Main.hs b/test/integration/tests/skip-unreachable-dirs/Main.hs index e4cfd48ad3..686071bc2d 100644 --- a/test/integration/tests/skip-unreachable-dirs/Main.hs +++ b/test/integration/tests/skip-unreachable-dirs/Main.hs @@ -1,8 +1,11 @@ +{-# LANGUAGE ScopedTypeVariables #-} import StackTest -import System.Directory (setPermissions, emptyPermissions, createDirectory) +import System.Directory +import Control.Exception (catch, IOException) main :: IO () main = do - createDirectory "unreachabledir" + removeFileIgnore "stack.yaml" + createDirectory "unreachabledir" `catch` \(e :: IOException) -> pure () setPermissions "unreachabledir" emptyPermissions stack ["init"] diff --git a/test/integration/tests/skip-unreachable-dirs/files/.gitignore b/test/integration/tests/skip-unreachable-dirs/files/.gitignore new file mode 100644 index 0000000000..684dbffa96 --- /dev/null +++ b/test/integration/tests/skip-unreachable-dirs/files/.gitignore @@ -0,0 +1 @@ +stack.yaml