-
Notifications
You must be signed in to change notification settings - Fork 32
/
Prefix.jl
730 lines (641 loc) · 27.1 KB
/
Prefix.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
## This file contains functionality related to the actual layout of the files
# on disk. Things like the name of where downloads are stored, and what
# environment variables must be updated to, etc...
import Base: convert, joinpath, show
using SHA, CodecZlib, TOML, LibGit2_jll
export Prefix, bindir, libdirs, includedir, logdir, temp_prefix, package
"""
    temp_prefix(func::Function)

Create a temporary prefix, passing the prefix into the user-defined function so
that build/packaging operations can occur within the temporary prefix, which is
then cleaned up after all operations are finished. If the path provided exists
already, it will be deleted.

Usage example:

    out_path = abspath("./libfoo")
    temp_prefix() do p
        # <insert build steps here>

        # tarball up the built package
        tarball_path, tarball_hash = package(p, out_path)
    end
"""
function temp_prefix(func::Function)
    # Docker (on macOS in particular) can only mount from certain host
    # locations by default, so keep temporary directories within those
    # locations so that they remain accessible to Docker.
    base = @static if Sys.isapple()
        realpath("/tmp")
    else
        realpath(tempdir())
    end
    return mktempdir(base) do dir
        # Hand the caller a Prefix rooted at the temporary directory; the
        # directory (and everything in it) is removed when this block exits.
        func(Prefix(dir))
    end
end
struct Prefix
    # Absolute, canonicalized root path of this installation prefix.
    path::String

    """
        Prefix(path::AbstractString)

    A `Prefix` represents a binary installation location.
    """
    function Prefix(path::AbstractString)
        # Canonicalize to an absolute path and ensure the directory tree
        # exists before wrapping it.
        canonical = abspath(path)
        mkpath(canonical)
        return new(canonical)
    end
end
# Make it easy to bandy about prefixes as paths. There has got to be a better
# way to do this, but it's hackin' time, so just go with the flow.
# Join extra components onto the prefix's root path.
joinpath(prefix::Prefix, args...) = joinpath(prefix.path, args...)
# Allow a `Prefix` to appear in the middle of a `joinpath` chain.
joinpath(s::AbstractString, prefix::Prefix, args...) = joinpath(s, prefix.path, args...)
# Converting a `Prefix` to a string yields its root path.
convert(::Type{AbstractString}, prefix::Prefix) = prefix.path
# Display as `Prefix(<path>)` rather than the default struct dump.
show(io::IO, prefix::Prefix) = show(io, "Prefix($(prefix.path))")
"""
    bindir(prefix::Prefix)

Returns the binary directory for the given `prefix`.
"""
bindir(prefix::Prefix) = joinpath(prefix, "bin")
"""
    libdirs(prefix::Prefix, platform = HostPlatform())

Returns the library directories for the given `prefix` (note that this differs
between unix systems and windows systems, and between 32- and 64-bit systems).
"""
function libdirs(prefix::Prefix, platform = HostPlatform())
    # On Windows, shared libraries live alongside executables in `bin`.
    Sys.iswindows(platform) && return [joinpath(prefix, "bin")]
    # 64-bit unix systems may use `lib64` in addition to `lib`.
    if wordsize(platform) == 64
        return [joinpath(prefix, "lib64"), joinpath(prefix, "lib")]
    end
    return [joinpath(prefix, "lib")]
end
"""
    includedir(prefix::Prefix)

Returns the include directory for the given `prefix`
"""
includedir(prefix::Prefix) = joinpath(prefix, "include")
"""
    logdir(prefix::Prefix; subdir::AbstractString="")

Returns the logs directory for the given `prefix`. If `subdir` is a non-empty string, that
directory it is appended to the logdir of the given `prefix`.
"""
function logdir(prefix::Prefix; subdir::AbstractString="")
    # Joining with an empty `subdir` leaves a trailing separator; strip it so
    # callers can safely take `basename` of the result.
    path = joinpath(prefix, "logs", subdir)
    return strip_path_separator(path)
end
"""
    package(prefix::Prefix, output_base::AbstractString,
            version::VersionNumber;
            platform::AbstractPlatform = HostPlatform(),
            verbose::Bool = false, force::Bool = false)

Build a tarball of the `prefix`, storing the tarball at `output_base`,
appending a version number, a platform-dependent suffix and a file extension.
If no platform is given, defaults to current platform. Returns the full path to,
the SHA256 hash and the git tree SHA1 of the generated tarball.
"""
function package(prefix::Prefix,
                 output_base::AbstractString,
                 version::VersionNumber;
                 platform::AbstractPlatform = HostPlatform(),
                 verbose::Bool = false,
                 force::Bool = false)
    # The output filename encodes both the version and the platform triplet.
    out_path = "$(output_base).v$(version).$(triplet(platform)).tar.gz"

    if isfile(out_path)
        if !force
            # Refuse to clobber an existing tarball unless `force` is set.
            msg = replace(strip("""
            $(out_path) already exists, refusing to package into it without
            `force` being set to `true`.
            """), "\n" => " ")
            error(msg)
        end
        if verbose
            @info("$(out_path) already exists, force-overwriting...")
        end
        rm(out_path; force=true)
    end

    # Copy the build prefix's contents into a fresh artifact, capturing its
    # git tree hash.
    tree_hash = create_artifact() do art_path
        for entry in readdir(prefix.path)
            cp(joinpath(prefix.path, entry), joinpath(art_path, entry))
        end
        # Best-effort: mirror the permissions of the original owning dir.
        try
            chmod(art_path, stat(prefix.path).mode)
        catch e
            if verbose
                @warn("Could not chmod $(art_path):", e)
            end
        end
    end

    if verbose
        @info("Tree hash of contents of $(basename(out_path)): $(tree_hash)")
    end

    # Archive the artifact into the tarball and record its SHA256.
    tarball_hash = archive_artifact(tree_hash, out_path; honor_overrides=false)
    if verbose
        @info("SHA256 of $(basename(out_path)): $(tarball_hash)")
    end

    return out_path, tarball_hash, tree_hash
end
# Mirror the tree rooted at `src` into `dest` using symlinks: directories are
# created for real (or their symlinks recreated), while files are symlinked
# back into `src`. A pre-existing destination file is tolerated only when its
# size and SHA256 match the source; otherwise a warning names the artifact the
# conflicting file came from.
function symlink_tree(src::AbstractString, dest::AbstractString)
    for (root, dirs, files) in walkdir(src)
        # Create all directories
        for d in dirs
            # If `d` is itself a symlink, recreate that symlink
            d_path = joinpath(root, d)
            dest_dir = joinpath(dest, relpath(root, src), d)
            if islink(d_path)
                if ispath(dest_dir)
                    # We can't overwrite an existing file on disk with a symlink
                    error("Symlink $(d) from artifact $(basename(src)) already exists on disk")
                end
                symlink(readlink(d_path), dest_dir)
            else
                if ispath(dest_dir) && !isdir(realpath(dest_dir))
                    # We can't create a directory if the destination exists and
                    # is not a directory or a symlink to a directory.
                    error("Directory $(d) from artifact $(basename(src)) already exists on disk and is not a directory")
                end
                mkpath(dest_dir)
            end
        end

        # Symlink all files
        for f in files
            src_file = joinpath(root, f)
            dest_file = joinpath(dest, relpath(root, src), f)
            if isfile(dest_file)
                # Ugh, destination file already exists. If source and destination files
                # have the same size and SHA256 hash, just move on, otherwise issue a
                # warning.
                if filesize(src_file) == filesize(dest_file)
                    src_file_hash = open(io -> bytes2hex(sha256(io)), src_file, "r")
                    dest_file_hash = open(io -> bytes2hex(sha256(io)), dest_file, "r")
                    if src_file_hash == dest_file_hash
                        continue
                    end
                end

                # Find source artifact that this pre-existent destination file belongs to,
                # by walking up the resolved path until just below an "artifacts" directory.
                dest_artifact_source = realpath(dest_file)
                while occursin("artifacts", dest_artifact_source) && basename(dirname(dest_artifact_source)) != "artifacts"
                    dest_artifact_source = dirname(dest_artifact_source)
                end
                @warn("Symlink $(f) from artifact $(basename(src)) already exists in artifact $(basename(dest_artifact_source))")
            else
                # If it's already a symlink, copy over the exact symlink target
                if islink(src_file)
                    symlink(readlink(src_file), dest_file)
                else
                    # Otherwise, point it at the proper location
                    symlink(relpath(src_file, dirname(dest_file)), dest_file)
                end
            end
        end
    end
end
# Remove from `dest` every symlink that mirrors an entry of `src` (the inverse
# of `symlink_tree`). Real (non-symlink) directories are deliberately left in
# place; they are culled later during auditing.
function unsymlink_tree(src::AbstractString, dest::AbstractString)
    for (root, dirs, files) in walkdir(src)
        rel = relpath(root, src)
        # Drop symlinked directories; plain directories stay behind.
        for d in dirs
            candidate = joinpath(dest, rel, d)
            islink(candidate) && rm(candidate)
        end
        # Drop symlinked files.
        for f in files
            candidate = joinpath(dest, rel, f)
            islink(candidate) && rm(candidate)
        end
    end
end
# Check a cached git source out into `targetdir`.
# `source.path` is the local cached clone; `source.hash` is the revision to
# check out. The repository directory name is recovered from the cache name.
function setup(source::SetupSource{GitSource}, targetdir, verbose)
    mkpath(targetdir)
    # Chop off the `.git-$(sha256(url))` at the end of the source.path (`.git` is optional).
    name = replace(basename(source.path), r"(\.git)?-[0-9a-fA-F]{64}$" => "")
    repo_dir = joinpath(targetdir, name)
    if verbose
        # NOTE: the original computed `strip_path_separator(targetdir)` here but
        # never used it; that dead local has been removed.
        @info "Checking $(basename(source.path)) out to $(basename(repo_dir))..."
    end
    # Clone from the cache and pin the working tree at the requested revision.
    LibGit2.with(LibGit2.clone(source.path, repo_dir)) do repo
        LibGit2.checkout!(repo, source.hash)
    end
end
# Unpack an archive source (tarball or zipball) into `targetdir`.
function setup(source::SetupSource{ArchiveSource}, targetdir, verbose; tar_flags = verbose ? "xvof" : "xof")
    mkpath(targetdir)
    # Extract with host tools because it is _much_ faster on e.g. OSX.
    # If this becomes a compatibility problem, we'll just have to install
    # our own `tar` and `unzip` through BP as dependencies for BB.
    cd(targetdir) do
        archive = source.path
        if any(endswith(archive, ext) for ext in tar_extensions)
            verbose && @info "Extracting tarball $(basename(archive))..."
            run(`tar -$(tar_flags) $(archive)`)
        elseif endswith(archive, ".zip")
            verbose && @info "Extracting zipball $(basename(archive))..."
            run(`unzip -q $(archive)`)
        else
            error("Unknown archive format")
        end
    end
end
# Copy a single-file source to `target` verbatim.
function setup(source::SetupSource{FileSource}, target, verbose)
    verbose && @info "Copying $(basename(source.path)) in $(basename(target))..."
    cp(source.path, target)
end
# Copy the contents of a directory source into `targetdir`, entry by entry.
function setup(source::SetupSource{DirectorySource}, targetdir, verbose)
    mkpath(targetdir)
    # A trailing separator would confuse `basename`, so strip it here too.
    srcpath = strip_path_separator(source.path)
    verbose && @info "Copying content of $(basename(srcpath)) in $(basename(targetdir))..."
    for entry in readdir(srcpath)
        # Copy each entry of the source directory to the destination,
        # honoring the source's symlink-following policy.
        cp(joinpath(srcpath, entry), joinpath(targetdir, basename(entry));
           follow_symlinks=source.follow_symlinks)
    end
end
# Write a patch source's contents into `targetdir`/patches/<name>.
function setup(source::PatchSource, targetdir, verbose)
    if verbose
        @info "Adding patch $(source.name)..."
    end
    patches_dir = joinpath(targetdir, "patches")
    # Use `mkpath` instead of `mkdir`: when several patch sources are set up
    # into the same target directory, `mkdir` would throw on the second call
    # because the directory already exists; `mkpath` is idempotent.
    mkpath(patches_dir)
    open(f->write(f, source.patch), joinpath(patches_dir, source.name), "w")
end
# Install destination inside `prefix` for a given platform's files:
# `<prefix>/<triplet>/destdir`.
function destdir(prefix, platform::AbstractPlatform)
    return joinpath(prefix, triplet(platform), "destdir")
end
"""
    setup_workspace(build_path::String, sources::Vector{SetupSource};
                    verbose::Bool = false)

Sets up a workspace within `build_path`, creating the directory structure
needed by further steps, unpacking the source within `build_path`, and defining
the environment variables that will be defined within the sandbox environment.

This method returns the `Prefix` to install things into, and the runner
that can be used to launch commands within this workspace.
"""
function setup_workspace(build_path::AbstractString, sources::Vector,
                         target_platform::AbstractPlatform,
                         host_platform::AbstractPlatform=default_host_platform;
                         verbose::Bool = false)
    # A random nonce in the path makes embedded workspace paths easy to spot
    # in built binaries.
    nonce = randstring()
    workspace = joinpath(build_path, nonce)
    mkdir(workspace)

    # Lay out the source dir, the (target and host) destination dirs, and the
    # metadata dir in one go.
    srcdir = joinpath(workspace, "srcdir")
    target_destdir = destdir(workspace, target_platform)
    host_destdir = destdir(workspace, host_platform)
    metadir = joinpath(workspace, "metadir")
    mkpath.((srcdir, target_destdir, host_destdir, metadir))

    # Create the symlink /workspace/destdir -> /workspace/TARGET_TRIPLET/destdir
    # Necessary for compatibility with recipes that hardcode `/workspace/destdir` in them,
    # as well as `.pc` files that contain absolute paths to `/workspace/destdir/...`
    symlink("$(triplet(target_platform))/destdir", joinpath(workspace, "destdir"))

    # Unpack each source into the source dir.
    for source in sources
        if source isa SetupSource
            # Trailing directory separator matters for `basename`, so let's
            # strip it to avoid confusion
            target = strip_path_separator(joinpath(srcdir, source.target))
            setup(source, target, verbose)
        else
            setup(source, srcdir, verbose)
        end
    end

    # Return the build prefix
    return Prefix(realpath(workspace))
end
"""
    collect_jll_uuids(manifest::Pkg.Types.Manifest, dependencies::Set{Base.UUID})

Return a `Set` of all JLL packages in the `manifest` with `dependencies` being
the list of direct dependencies of the environment.
"""
function collect_jll_uuids(manifest::Pkg.Types.Manifest, dependencies::Set{Base.UUID})
    jlls = copy(dependencies)
    # Iterate to a fixed point: sweep the manifest, pulling in the `_jll`
    # dependencies of everything already collected, until a sweep adds
    # nothing new. (The original expressed this as tail recursion.)
    while true
        count_before = length(jlls)
        for (uuid, pkg_entry) in manifest
            uuid in jlls || continue
            for (dep_name, dep_uuid) in pkg_entry.deps
                if endswith(dep_name, "_jll")
                    push!(jlls, dep_uuid)
                end
            end
        end
        length(jlls) == count_before && return jlls
    end
end
"""
    get_tree_hash(tree::LibGit2.GitTree)

Given a `GitTree`, get the `GitHash` that identifies it.
"""
function get_tree_hash(tree::LibGit2.GitTree)
    # Ask libgit2 for the tree's object id. (The original first assigned
    # `oid_ptr = Ref(LibGit2.GitHash())` and immediately overwrote it with the
    # ccall result — that dead store has been removed.)
    oid_ptr = ccall((:git_tree_id, libgit2), Ptr{LibGit2.GitHash}, (Ptr{Cvoid},), tree.ptr)
    # A NULL pointer means libgit2 could not produce an id for this tree.
    oid_ptr == C_NULL && throw("bad tree ID: $tree")
    return unsafe_load(oid_ptr)
end
"""
    get_commit_sha(url::String, tree_hash::Base.SHA1; verbose::Bool=false)

Find the latest git commit corresponding to the given git tree SHA1 for the remote
repository with the given `url`. The repository is cached locally for quicker future
access. If `verbose` is `true`, print to screen some debugging information.

The return value is the commit SHA as a `String`, if the corresponding revision is found,
`nothing` otherwise.
"""
function get_commit_sha(url::String, tree_hash::Base.SHA1; verbose::Bool=false)
    git_commit_sha = nothing
    # Fetch (or reuse) a local cached clone of the remote repository.
    dir = cached_git_clone(url; verbose)

    LibGit2.with(LibGit2.GitRepo(dir)) do repo
        LibGit2.with(LibGit2.GitRevWalker(repo)) do walker
            # The repo is cached, so locally it may be checking out an outdated commit.
            # Start the search from HEAD of the tracking upstream repo.
            try
                LibGit2.push!(walker, LibGit2.GitHash(LibGit2.peel(LibGit2.GitCommit, LibGit2.upstream(LibGit2.head(repo)))))
            catch
                # No usable upstream (e.g. detached HEAD or untracked branch):
                # fall back to walking from the local HEAD instead.
                @warn("Could not walk from origin branch!")
                LibGit2.push_head!(walker)
            end
            # For each commit in the git repo, check to see if its treehash
            # matches the one we're looking for.
            for oid in walker
                tree = LibGit2.peel(LibGit2.GitTree, LibGit2.GitCommit(repo, oid))
                if all(get_tree_hash(tree).val .== tree_hash.bytes)
                    git_commit_sha = LibGit2.string(oid)
                    break
                end
            end
        end
    end

    return git_commit_sha
end
"""
    get_addable_spec(name::AbstractString, version::VersionNumber)

Given a JLL name and registered version, return a `PackageSpec` that, when passed as a
`Dependency`, ensures that exactly that version will be installed. Example usage:

    dependencies = [
        BuildDependency(get_addable_spec("LLVM_jll", v"9.0.1+0")),
    ]
"""
function get_addable_spec(name::AbstractString, version::VersionNumber;
                          ctx = Pkg.Types.Context(), verbose::Bool = false)
    # First, resolve the UUID
    uuid = first(Pkg.Types.registry_resolve!(ctx.registries, Pkg.Types.PackageSpec(;name))).uuid

    # Next, determine the tree hash from the registry: collect every repo URL
    # and tree hash that any registry knows for this (name, version) pair.
    repo_urls = Set{String}()
    tree_hashes = Set{Base.SHA1}()
    for reg in ctx.registries
        if !haskey(reg, uuid)
            continue
        end
        pkg_info = registry_info(reg[uuid])
        if pkg_info.repo !== nothing
            push!(repo_urls, pkg_info.repo)
        end
        if pkg_info.version_info !== nothing
            if haskey(pkg_info.version_info, version)
                version_info = pkg_info.version_info[version]
                push!(tree_hashes, version_info.git_tree_sha1)
            end
        end
    end

    # No registry knows about this (name, version) pair at all.
    if isempty(tree_hashes)
        @error("Unable to find dependency!",
            name,
            version,
            registries=ctx.registries,
        )
        error("Unable to find dependency!")
    end
    # Registries disagree about the content of this version; refuse to guess.
    if length(tree_hashes) != 1
        @error("Multiple treehashes found!",
            name,
            version,
            tree_hashes,
            registries=ctx.registries,
        )
        error("Multiple treehashes found!")
    end
    tree_hash_sha1 = first(tree_hashes)

    # Once we have a tree hash, turn that into a git commit sha
    git_commit_sha = nothing
    valid_url = nothing
    for url in repo_urls
        git_commit_sha = get_commit_sha(url, tree_hash_sha1; verbose)
        # Stop searching urls as soon as we find one
        if git_commit_sha !== nothing
            valid_url = url
            break
        end
    end
    if git_commit_sha === nothing
        @error("Unable to find revision for specified dependency!",
            name,
            version,
            tree_hash = bytes2hex(tree_hash_sha1.bytes),
            repo_urls,
        )
        error("Unable to find revision for specified dependency!")
    end

    # The version is intentionally left out: the tree hash plus the pinned git
    # revision already identify the exact content to install.
    return Pkg.Types.PackageSpec(
        name=name,
        uuid=uuid,
        #version=version,
        tree_hash=tree_hash_sha1,
        repo=Pkg.Types.GitRepo(rev=git_commit_sha, source=valid_url),
    )
end
"""
    setup_dependencies(prefix::Prefix, dependencies::Vector{PackageSpec}, platform::AbstractPlatform; verbose::Bool = false)

Given a list of JLL package specifiers, install their artifacts into the build prefix.
The artifacts are installed into the global artifact store, then copied into a temporary location,
then finally symlinked into the build prefix. This allows us to (a) save download bandwidth by not
downloading the same artifacts over and over again, (b) maintain separation in the event of
catastrophic containment failure, avoiding hosing the main system if a build script decides to try
to modify the dependent artifact files, and (c) keeping a record of what files are a part of
dependencies as opposed to the package being built, in the form of symlinks to a specific artifacts
directory.
"""
function setup_dependencies(prefix::Prefix,
                            dependencies::Vector{PkgSpec},
                            platform::AbstractPlatform;
                            verbose::Bool = false)
    artifact_paths = String[]
    if isempty(dependencies)
        return artifact_paths
    end

    # We occasionally generate "illegal" package specs, where we provide both version and tree hash.
    # We trust the treehash over the version, so drop the version for any that exists here:
    function filter_redundant_version(p::PkgSpec)
        if p.version !== nothing && p.tree_hash !== nothing
            return Pkg.Types.PackageSpec(;name=p.name, tree_hash=p.tree_hash, repo=p.repo)
        end
        return p
    end
    dependencies = filter_redundant_version.(dependencies)
    dependencies_names = getname.(dependencies)

    # Get julia version specificity, if it exists, from the `Platform` object
    julia_version = nothing
    if haskey(platform, "julia_version")
        julia_version = VersionNumber(platform["julia_version"])
    end

    # We're going to create a project and install all dependent packages within
    # it, then create symlinks from those installed products to our build prefix
    mkpath(joinpath(prefix, triplet(platform), "artifacts"))
    deps_project = joinpath(prefix, triplet(platform), ".project")
    Pkg.activate(deps_project) do
        # Update registry first, in case the jll packages we're looking for have just been registered/updated
        ctx = Pkg.Types.Context(;julia_version)
        outs = verbose ? stdout : devnull
        update_registry(outs)

        # Add all dependencies
        Pkg.add(ctx, dependencies; platform=platform, io=outs)

        # On Julia v1.6, `Pkg.add()` doesn't mutate `dependencies`, so we can't use the `UUID`
        # that was found during resolution there. Instead, we'll make use of `ctx.env` to figure
        # out the UUIDs of all our packages.
        dependency_uuids = Set([uuid for (uuid, pkg) in ctx.env.manifest if pkg.name ∈ dependencies_names])

        # Some JLLs are also standard libraries that may be present in the manifest because
        # they were pulled by other stdlibs (e.g. through dependence on `Pkg`), not because
        # they were actually required for this package. Filter them out if they're present
        # in the manifest but aren't direct dependencies or dependencies of other JLLS.
        installed_jll_uuids = collect_jll_uuids(ctx.env.manifest, dependency_uuids)
        installed_jlls = [
            Pkg.Types.PackageSpec(;
                name=pkg.name,
                uuid,
                tree_hash=pkg.tree_hash,
                path=pkg.path,
            ) for (uuid, pkg) in ctx.env.manifest if uuid ∈ installed_jll_uuids
        ]

        # Check for stdlibs lurking in the installed JLLs
        stdlib_pkgspecs = PackageSpec[]
        for dep in installed_jlls
            # If the dependency doesn't have a path yet and the `tree_hash` is
            # `nothing`, then this JLL is probably an stdlib.
            if dep.path === nothing && dep.tree_hash === nothing
                # Figure out what version this stdlib _should_ be at for this version
                dep.version = stdlib_version(dep.uuid, julia_version)

                # Interrogate the registry to determine the correct treehash
                Pkg.Operations.load_tree_hash!(ctx.registries, dep, nothing)

                # We'll still use `Pkg.add()` to install the version we want, even though
                # we've used the above two lines to figure out the treehash, so construct
                # an addable spec that will get the correct bits down on disk.
                push!(stdlib_pkgspecs, get_addable_spec(dep.name, dep.version; verbose))
            end
        end

        # Re-install stdlib dependencies, but this time with `julia_version = nothing`
        if !isempty(stdlib_pkgspecs)
            Pkg.add(ctx, stdlib_pkgspecs; io=outs, julia_version=nothing)
        end

        # Load their Artifacts.toml files
        for dep in installed_jlls
            name = getname(dep)

            # If the package has a path, use it, otherwise ask Pkg where it
            # should have been installed.
            dep_path = dep.path !== nothing ? dep.path : Pkg.Operations.find_installed(name, dep.uuid, dep.tree_hash)

            # Skip dependencies that didn't get installed?
            if dep_path === nothing
                @warn("Dependency $(name) not installed, despite our best efforts!")
                continue
            end

            # Load the Artifacts.toml file
            artifacts_toml = joinpath(dep_path, "Artifacts.toml")
            if !isfile(artifacts_toml)
                # Try `StdlibArtifacts.toml` instead
                artifacts_toml = joinpath(dep_path, "StdlibArtifacts.toml")
                if !isfile(artifacts_toml)
                    @warn("Dependency $(name) does not have an (Stdlib)Artifacts.toml in $(dep_path)!")
                    continue
                end
            end

            # If the artifact is available for the given platform, make sure it
            # is also installed. It may not be the case for lazy artifacts or stdlibs.
            # NOTE(review): `name[1:end-4]` assumes the package name ends in `_jll`.
            meta = artifact_meta(name[1:end-4], artifacts_toml; platform=platform)
            if meta === nothing
                @warn("Dependency $(name) does not have a mapping for artifact $(name[1:end-4]) for platform $(triplet(platform))")
                continue
            end
            ensure_artifact_installed(name[1:end-4], meta, artifacts_toml; platform=platform)

            # Copy the artifact from the global installation location into this build-specific artifacts collection
            src_path = Pkg.Artifacts.artifact_path(Base.SHA1(meta["git-tree-sha1"]))
            dest_path = joinpath(prefix, triplet(platform), "artifacts", basename(src_path))
            rm(dest_path; force=true, recursive=true)
            cp(src_path, dest_path)

            # Keep track of our dep paths for later symlinking
            push!(artifact_paths, dest_path)
        end
    end

    # Symlink all the deps into the prefix
    for art_path in artifact_paths
        symlink_tree(art_path, destdir(prefix, platform))
    end

    # Return the artifact_paths so that we can clean them up later
    return artifact_paths
end
"""
    cleanup_dependencies(prefix::Prefix, artifact_paths, platform)

Remove all symlinks previously created by `setup_dependencies` from the
`destdir` of `prefix` for the given `platform`, one artifact at a time.
"""
function cleanup_dependencies(prefix::Prefix, artifact_paths, platform)
    # The destdir is independent of any loop variable; compute it once.
    # (The original looped over `readdir(prefix.path)` without ever using the
    # entry, redundantly unsymlinking each artifact once per directory entry.)
    ddir = destdir(prefix, platform)
    isdir(ddir) || return nothing
    for art_path in artifact_paths
        # Unsymlink everything belonging to this artifact from the destdir.
        unsymlink_tree(art_path, ddir)
    end
    return nothing
end
"""
    compress_dir(dir::AbstractString;
                 compressor_stream = GzipCompressorStream,
                 level::Int = 9,
                 extension::AbstractString = ".gz",
                 verbose::Bool = false)

Compress all files in `dir` using the specified `compressor_stream` with
compression level equal to `level`, appending `extension` to the filenames.
Remove the original uncompressed files at the end.
"""
function compress_dir(dir::AbstractString;
                      compressor_stream = GzipCompressorStream,
                      level::Int = 9,
                      extension::AbstractString = ".gz",
                      verbose::Bool = false)
    isdir(dir) || return nothing
    verbose && @info "Compressing files in $(dir)"
    for name in readdir(dir)
        path = joinpath(dir, name)
        # Skip subdirectories and other non-regular entries.
        isfile(path) || continue
        # Read the file's contents, write them through the compressor into a
        # sibling file carrying `extension`, then delete the original.
        contents = read(path, String)
        stream = compressor_stream(open(path * extension, "w"); level=level)
        write(stream, contents)
        close(stream)
        rm(path; force=true)
    end
    return nothing
end