diff --git a/pkg/distro/BUILD b/pkg/distro/BUILD index 7b3d861e..90c2d84f 100644 --- a/pkg/distro/BUILD +++ b/pkg/distro/BUILD @@ -98,13 +98,14 @@ genrule( bzl_library( name = "rules_pkg_lib", srcs = [ - "//private:util.bzl", "//:package_variables.bzl", "//:path.bzl", "//:pkg.bzl", "//:providers.bzl", "//:rpm.bzl", "//:version.bzl", + "//private:pkg_files.bzl", + "//private:util.bzl", ], visibility = ["//visibility:private"], ) diff --git a/pkg/pkg.bzl b/pkg/pkg.bzl index 24d85769..8e962c1d 100644 --- a/pkg/pkg.bzl +++ b/pkg/pkg.bzl @@ -14,8 +14,22 @@ """Rules for manipulation of various packaging.""" load(":path.bzl", "compute_data_path", "dest_path") -load(":providers.bzl", "PackageArtifactInfo", "PackageVariablesInfo") +load( + ":providers.bzl", + "PackageArtifactInfo", + "PackageFilegroupInfo", + "PackageFilesInfo", + "PackageVariablesInfo", +) load("//private:util.bzl", "setup_output_files", "substitute_package_variables") +load( + "//private:pkg_files.bzl", + "add_directory", + "add_label_list", + "add_single_file", + "add_tree_artifact", + "process_src", + "write_manifest") # TODO(aiuto): Figure out how to get this from the python toolchain. # See check for lzma in archive.py for a hint at a method. @@ -103,33 +117,69 @@ def _pkg_tar_impl(ctx): if ctx.attr.portable_mtime: args.append("--mtime=portable") + # Now we begin processing the files. + file_deps = [] # inputs we depend on + content_map = {} # content handled in the manifest + + # Start with all the pkg_* inputs + for src in ctx.attr.srcs: + # Gather the files for every srcs entry here, even if it is not from + # a pkg_* rule. + if DefaultInfo in src: + file_deps.append(src[DefaultInfo].files) + if not process_src( + content_map, + src, + src.label, + default_mode = None, + default_user = None, + default_group = None, + ): + # Add in the files of srcs which are not pkg_* types + for f in src.files.to_list(): + d_path = dest_path(f, data_path, data_path_without_prefix) + if f.is_directory: + # Tree artifacts need a name, but the name is never really + # the important part. The likely behavior people want is + # just the content, so we strip the directory name. + dest = '/'.join(d_path.split('/')[0:-1]) + add_tree_artifact(content_map, dest, f, src.label) + else: + # Note: This extra remap is the bottleneck preventing this + # large block from being a utility method as shown below. + # Should we disallow mixing pkg_files in srcs with remap? + # I am fine with that if it makes the code more readable. + dest = _remap(remap_paths, d_path) + add_single_file(content_map, dest, f, src.label) + + # TODO(aiuto): I want the code to look like this, but we don't have lambdas. + # transform_path = lambda f: _remap( + # remap_paths, dest_path(f, data_path, data_path_without_prefix)) + # add_label_list(ctx, content_map, file_deps, ctx.attr.srcs, transform_path) + # Add runfiles if requested - file_inputs = [] + runfiles_depsets = [] if ctx.attr.include_runfiles: - runfiles_depsets = [] + # TODO(#339): Rethink this w.r.t. binaries in pkg_files() rules. 
for f in ctx.attr.srcs: default_runfiles = f[DefaultInfo].default_runfiles if default_runfiles != None: runfiles_depsets.append(default_runfiles.files) - # deduplicates files in srcs attribute and their runfiles - file_inputs = depset(ctx.files.srcs, transitive = runfiles_depsets).to_list() - else: - file_inputs = ctx.files.srcs[:] - - args += [ - "--file=%s=%s" % (_quote(f.path), _remap( - remap_paths, - dest_path(f, data_path, data_path_without_prefix), - )) - for f in file_inputs - ] + # The files attribute is a map of labels to destinations. We can add them + # directly to the content map. for target, f_dest_path in ctx.attr.files.items(): target_files = target.files.to_list() if len(target_files) != 1: fail("Each input must describe exactly one file.", attr = "files") - file_inputs += target_files - args += ["--file=%s=%s" % (_quote(target_files[0].path), f_dest_path)] + file_deps.append(depset([target_files[0]])) + add_single_file( + content_map, + f_dest_path, + target_files[0], + target.label, + ) + if ctx.attr.modes: args += [ "--modes=%s=%s" % (_quote(key), ctx.attr.modes[key]) @@ -147,8 +197,8 @@ def _pkg_tar_impl(ctx): ] if ctx.attr.empty_files: args += ["--empty_file=%s" % empty_file for empty_file in ctx.attr.empty_files] - if ctx.attr.empty_dirs: - args += ["--empty_dir=%s" % empty_dir for empty_dir in ctx.attr.empty_dirs] + for empty_dir in ctx.attr.empty_dirs or []: + add_directory(content_map, empty_dir, ctx.label) args += ["--tar=" + f.path for f in ctx.files.deps] args += [ "--link=%s:%s" % (_quote(k, protect = ":"), ctx.attr.symlinks[k]) @@ -158,6 +208,13 @@ def _pkg_tar_impl(ctx): ctx.attr.private_stamp_detect): args.append("--stamp_from=%s" % ctx.version_file.path) files.append(ctx.version_file) + + file_inputs = depset(transitive = file_deps + runfiles_depsets) + manifest_file = ctx.actions.declare_file(ctx.label.name + ".manifest") + files.append(manifest_file) + write_manifest(ctx, manifest_file, content_map) + args.append("--manifest=%s" % manifest_file.path) + arg_file = ctx.actions.declare_file(ctx.label.name + ".args") files.append(arg_file) ctx.actions.write(arg_file, "\n".join(args)) @@ -165,7 +222,7 @@ def _pkg_tar_impl(ctx): ctx.actions.run( mnemonic = "PackageTar", progress_message = "Writing: %s" % output_file.path, - inputs = file_inputs + ctx.files.deps + files, + inputs = file_inputs.to_list() + ctx.files.deps + files, tools = [ctx.executable.compressor] if ctx.executable.compressor else [], executable = ctx.executable.build_tar, arguments = ["@" + arg_file.path], diff --git a/pkg/private/build_tar.py b/pkg/private/build_tar.py index 7f2099c1..dbf1906e 100644 --- a/pkg/private/build_tar.py +++ b/pkg/private/build_tar.py @@ -23,6 +23,12 @@ import helpers import build_info +# These must be kept in sync with the values from private/pkg_files.bzl +ENTRY_IS_FILE = 0 # Entry is a file: take content from +ENTRY_IS_LINK = 1 # Entry is a symlink: dest -> +ENTRY_IS_DIR = 2 # Entry is an empty dir +ENTRY_IS_TREE = 3 # Entry is a tree artifact: take tree from + class TarFile(object): """A class to generates a TAR file.""" @@ -57,7 +63,7 @@ def add_file(self, f, destfile, mode=None, ids=None, names=None): Args: f: the file to add to the layer destfile: the name of the file in the layer - mode: force to set the specified mode, by default the value from the + mode: (int) force to set the specified mode, by default the value from the source is taken. ids: (uid, gid) for the file to set ownership names: (username, groupname) for the file to set ownership. 
`f` will be @@ -197,6 +203,92 @@ def add_deb(self, deb): self.add_tar(tmpfile[1]) os.remove(tmpfile[1]) + def add_tree(self, tree_top, destpath, mode=None, ids=None, names=None): + """Add a tree artifact to the tar file. + + Args: + tree_top: the top of the tree to add + destpath: the path under which to place the files + mode: (int) force to set the specified posix mode (e.g. 0o755). The + default is derived from the source + ids: (uid, gid) for the file to set ownership + names: (username, groupname) for the file to set ownership. `f` will be + copied to `self.directory/destfile` in the layer. + """ + dest = destpath.strip('/') # redundant, dests should never have / here + if self.directory and self.directory != '/': + dest = self.directory.lstrip('/') + '/' + dest + + dest = os.path.normpath(dest).replace(os.path.sep, '/') + if ids is None: + ids = (0, 0) + if names is None: + names = ('', '') + + to_write = {} + for root, dirs, files in os.walk(tree_top): + dirs = sorted(dirs) + rel_path_from_top = root[len(tree_top):].lstrip('/') + if rel_path_from_top: + dest_dir = dest + '/' + rel_path_from_top + '/' + else: + dest_dir = dest + '/' + for dir in dirs: + to_write[dest_dir + dir] = None + for file in sorted(files): + to_write[dest_dir + file] = os.path.join(root, file) + + for path in sorted(to_write.keys()): + content_path = to_write[path] + # If mode is unspecified, derive the mode from the file's mode. + if mode is None: + f_mode = 0o755 if os.access(content_path, os.X_OK) else 0o644 + else: + f_mode = mode + if not content_path: + self.add_empty_file( + path, + mode=f_mode, + ids=ids, + names=names, + kind=tarfile.DIRTYPE) + else: + self.tarfile.add_file( + path, + file_content=content_path, + mode=f_mode, + uid=ids[0], + gid=ids[1], + uname=names[0], + gname=names[1]) + + def add_manifest_entry(self, entry, file_attributes): + entry_type, dest, src, mode, user, group = entry + + # Use the pkg_tar mode/owner remaping as a fallback + non_abs_path = dest.strip('/') + if file_attributes: + attrs = file_attributes(non_abs_path) + else: + attrs = {} + # But any attributes from the manifest have higher precedence + if mode is not None and mode != '': + attrs['mode'] = int(mode, 8) + if user: + if group: + attrs['names'] = (user, group) + else: + # Use group that legacy tar process would assign + attrs['names'] = (user, attrs.get('names')[1]) + if entry_type == ENTRY_IS_LINK: + self.add_link(dest, src) + elif entry_type == ENTRY_IS_DIR: + self.add_empty_dir(dest, **attrs) + elif entry_type == ENTRY_IS_TREE: + self.add_tree(src, dest, **attrs) + else: + self.add_file(src, dest, **attrs) + def main(): parser = argparse.ArgumentParser( @@ -207,7 +299,9 @@ def main(): parser.add_argument('--file', action='append', help='A file to add to the layer.') parser.add_argument('--manifest', - help='JSON manifest of contents to add to the layer.') + help='manifest of contents to add to the layer.') + parser.add_argument('--legacy_manifest', + help='DEPRECATED: JSON manifest of contents to add to the layer.') parser.add_argument('--mode', help='Force the mode on the added files (in octal).') parser.add_argument( @@ -325,8 +419,9 @@ def file_attributes(filename): 'names': names_map.get(filename, default_ownername), } - if options.manifest: - with open(options.manifest, 'r') as manifest_fp: + # TODO(aiuto): Make sure this is unused and remove the code. 
+ if options.legacy_manifest: + with open(options.legacy_manifest, 'r') as manifest_fp: manifest = json.load(manifest_fp) for f in manifest.get('files', []): output.add_file(f['src'], f['dst'], **file_attributes(f['dst'])) @@ -343,6 +438,12 @@ def file_attributes(filename): for deb in manifest.get('debs', []): output.add_deb(deb) + if options.manifest: + with open(options.manifest, 'r') as manifest_fp: + manifest = json.load(manifest_fp) + for entry in manifest: + output.add_manifest_entry(entry, file_attributes) + for f in options.file or []: (inf, tof) = helpers.SplitNameValuePairAtSeparator(f, '=') output.add_file(inf, tof, **file_attributes(tof)) diff --git a/pkg/private/pkg_files.bzl b/pkg/private/pkg_files.bzl new file mode 100644 index 00000000..8d7cc516 --- /dev/null +++ b/pkg/private/pkg_files.bzl @@ -0,0 +1,322 @@ +# Copyright 2021 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Internal functions for processing pkg_file* instances. + +Concepts and terms: + + DestFile: A provider holding the source path, attributes and other + information about a file that should appear in the package. + When attributes are empty in DestFile, we let the package + tool decide their values. + + content map: The map of destination paths to DestFile instances. Note that + several distinct destinations make share the same source path. + Attempting to insert a duplicate entry in the content map is + an error, because it means you are collapsing files together. + We may want to relax this in the future if their DestFiles + are equal. + + manifest: The file which represents the content map. This is generated + by rule implementations and passed to the build_*.py helpers. +""" + +load("//:path.bzl", "compute_data_path", "dest_path") +load( + "//:providers.bzl", + "PackageArtifactInfo", + "PackageDirsInfo", + "PackageFilegroupInfo", + "PackageFilesInfo", + "PackageSymlinkInfo", +) + +# Possible values for entry_type +# These must be kept in sync with the declarations in private/build_*.py +ENTRY_IS_FILE = 0 # Entry is a file: take content from +ENTRY_IS_LINK = 1 # Entry is a symlink: dest -> +ENTRY_IS_DIR = 2 # Entry is an empty dir +ENTRY_IS_TREE = 3 # Entry is a tree artifact: take tree from + +_DestFile = provider( + doc = """Information about each destination in the final package.""", + fields = { + "src": "source file", + "mode": "mode, or empty", + "user": "user, or empty", + "group": "group, or empty", + "link_to": "path to link to. src must not be set", + "entry_type": "int. 
See ENTRY_IS_* values above.", + "origin": "target which added this", + }, +) + +def _check_dest(content_map, dest, origin): + if dest in content_map: + fail("Duplicate output path: <%s>, declared in %s and %s" % ( + dest, + origin, + content_map[dest].origin, + )) + +def _merge_attributes(info, mode, user, group): + if hasattr(info, "attributes"): + attrs = info.attributes + mode = attrs.get("mode") or mode + user = attrs.get("user") or user + group = attrs.get("group") or group + return (mode, user, group) + +def _process_pkg_dirs(content_map, pkg_dirs_info, origin, default_mode, default_user, default_group): + attrs = _merge_attributes(pkg_dirs_info, default_mode, default_user, default_group) + for dir in pkg_dirs_info.dirs: + dest = dir.strip('/') + _check_dest(content_map, dest, origin) + content_map[dest] = _DestFile( + src = None, + entry_type = ENTRY_IS_DIR, + mode = attrs[0], + user = attrs[1], + group = attrs[2], + origin = origin, + ) + +def _process_pkg_files(content_map, pkg_files_info, origin, default_mode, default_user, default_group): + attrs = _merge_attributes(pkg_files_info, default_mode, default_user, default_group) + for filename, src in pkg_files_info.dest_src_map.items(): + dest = filename.strip('/') + _check_dest(content_map, dest, origin) + content_map[dest] = _DestFile( + src = src, + mode = attrs[0], + user = attrs[1], + group = attrs[2], + origin = origin, + ) + +def _process_pkg_symlink(content_map, pkg_symlink_info, origin, default_mode, default_user, default_group): + dest = pkg_symlink_info.destination.strip('/') + attrs = _merge_attributes(pkg_symlink_info, default_mode, default_user, default_group) + _check_dest(content_map, dest, origin) + content_map[dest] = _DestFile( + src = None, + mode = attrs[0], + user = attrs[1], + group = attrs[2], + origin = origin, + link_to = pkg_symlink_info.source, + ) + +def _process_pkg_filegroup(content_map, pkg_filegroup_info, origin, default_mode, default_user, default_group): + for d in pkg_filegroup_info.pkg_dirs: + _process_pkg_dirs(content_map, d[0], d[1], default_mode, default_user, default_group) + for pf in pkg_filegroup_info.pkg_files: + _process_pkg_files(content_map, pf[0], pf[1], default_mode, default_user, default_group) + for psl in pkg_filegroup_info.pkg_symlinks: + _process_pkg_symlink(content_map, psl[0], psl[1], default_mode, default_user, default_group) + +def process_src(content_map, src, origin, default_mode, default_user, default_group): + """Add an entry to the content map. + + Args: + content_map: The content map + src: Source Package*Info object + origin: The rule instance adding this entry + default_mode: fallback mode to use for Package*Info elements without mode + default_user: fallback user to use for Package*Info elements without user + default_group: fallback mode to use for Package*Info elements without group + + Returns: + True if src was a Package*Info and added to content_map. 
+ """ + found_info = False + if PackageFilesInfo in src: + _process_pkg_files( + content_map, + src[PackageFilesInfo], + origin, + default_mode, + default_user, + default_group, + ) + found_info = True + if PackageFilegroupInfo in src: + _process_pkg_filegroup( + content_map, + src[PackageFilegroupInfo], + origin, + default_mode, + default_user, + default_group, + ) + found_info = True + if PackageSymlinkInfo in src: + _process_pkg_symlink( + content_map, + src[PackageSymlinkInfo], + origin, + default_mode, + default_user, + default_group, + ) + found_info = True + if PackageDirsInfo in src: + _process_pkg_dirs( + content_map, + src[PackageDirsInfo], + origin, + "0555", + default_user, + default_group, + ) + found_info = True + return found_info + +def add_directory(content_map, dir_path, origin, mode=None, user=None, group=None): + """Add an single file to the content map. + + Args: + content_map: The content map + dir_path: Where to place the file in the package. + origin: The rule instance adding this entry + mode: fallback mode to use for Package*Info elements without mode + user: fallback user to use for Package*Info elements without user + group: fallback mode to use for Package*Info elements without group + """ + content_map[dir_path.strip('/')] = _DestFile( + src = None, + entry_type = ENTRY_IS_DIR, + origin = origin, + mode = mode, + user = user, + group = group, + ) + +def add_label_list(ctx, content_map, file_deps, srcs): + """Helper method to add a list of labels (typically 'srcs') to a content_map. + + Args: + ctx: rule context. + content_map: (r/w) The content map to update. + file_deps: (r/w) The list of File objects srcs depend on. + srcs: List of source objects. + """ + # Compute the relative path + data_path = compute_data_path( + ctx, + ctx.attr.strip_prefix if hasattr(ctx.attr, "strip_prefix") else "") + data_path_without_prefix = compute_data_path(ctx, ".") + + for src in srcs: + # Gather the files for every srcs entry here, even if it is not from + # a pkg_* rule. + if DefaultInfo in src: + file_deps.append(src[DefaultInfo].files) + if not process_src( + content_map, + src, + src.label, + default_mode = None, + default_user = None, + default_group = None, + ): + # Add in the files of srcs which are not pkg_* types + for f in src.files.to_list(): + d_path = dest_path(f, data_path, data_path_without_prefix) + if f.is_directory: + # Tree artifacts need a name, but the name is never really + # the important part. The likely behavior people want is + # just the content, so we strip the directory name. + dest = '/'.join(d_path.split('/')[0:-1]) + add_tree_artifact(content_map, dest, f, src.label) + else: + add_single_file(content_map, d_path, f, src.label) + +def add_single_file(content_map, dest_path, src, origin, mode=None, user=None, group=None): + """Add an single file to the content map. + + Args: + content_map: The content map + dest_path: Where to place the file in the package. + src: Source object. 
Must have len(src[DefaultInfo].files) == 1 + origin: The rule instance adding this entry + mode: fallback mode to use for Package*Info elements without mode + user: fallback user to use for Package*Info elements without user + group: fallback mode to use for Package*Info elements without group + """ + dest = dest_path.strip('/') + _check_dest(content_map, dest, origin) + content_map[dest] = _DestFile( + src = src, + origin = origin, + mode = mode, + user = user, + group = group, + ) + +def add_tree_artifact(content_map, dest_path, src, origin, mode=None, user=None, group=None): + """Add an tree artifact (directory output) to the content map. + + Args: + content_map: The content map + dest_path: Where to place the file in the package. + src: Source object. Must have len(src[DefaultInfo].files) == 1 + origin: The rule instance adding this entry + mode: fallback mode to use for Package*Info elements without mode + user: fallback user to use for Package*Info elements without user + group: fallback mode to use for Package*Info elements without group + """ + content_map[dest_path] = _DestFile( + src = src, + origin = origin, + entry_type = ENTRY_IS_TREE, + mode = mode, + user = user, + group = group, + ) + +def write_manifest(ctx, manifest_file, content_map): + """Write a content map to a manifest file. + + The format of this file is currently undocumented, as it is a private + contract between the rule implementation and the package writers. It will + become a published interface in a future release. + + For reproducibility, the manifest file must be ordered consistently. + """ + ctx.actions.write( + manifest_file, + "[\n" + ",\n".join( + [_encode_manifest_entry(dst, content_map[dst]) + for dst in sorted(content_map.keys())] + ) + "\n]\n" + ) + +def _encode_manifest_entry(dest, df): + entry_type = df.entry_type if hasattr(df, "entry_type") else ENTRY_IS_FILE + if df.src: + src = df.src.path + entry_type = ENTRY_IS_FILE + elif hasattr(df, "link_to"): + src = df.link_to + entry_type = ENTRY_IS_LINK + else: + src = None + return json.encode([ + entry_type, + dest.strip('/'), + src, + df.mode or "", + df.user or None, + df.group or None, + ]) diff --git a/pkg/tests/BUILD b/pkg/tests/BUILD index 06d055bf..8af63218 100644 --- a/pkg/tests/BUILD +++ b/pkg/tests/BUILD @@ -14,6 +14,7 @@ # -*- coding: utf-8 -*- load("//:pkg.bzl", "SUPPORTED_TAR_COMPRESSIONS", "pkg_deb", "pkg_tar", "pkg_zip") +load("//tests/util:defs.bzl", "directory") load("@rules_python//python:defs.bzl", "py_test") load("@bazel_skylib//rules:copy_file.bzl", "copy_file") load(":my_package_name.bzl", "my_package_naming") @@ -88,6 +89,19 @@ genrule( cmd = "for i in $(OUTS); do echo 1 >$$i; done", ) +directory( + name = "generate_tree", + filenames = [ + # buildifier: don't sort + "b/e", + "a/a", + "b/c/d", + "b/d", + "a/b/c", + ], + contents = "hello there", +) + copy_file( name = "zipcontent_loremipsum", src = "testdata/loremipsum.txt", @@ -466,6 +480,14 @@ pkg_tar( ], ) +pkg_tar( + name = "test-tar-tree-artifact", + package_dir = "a_tree", + srcs = [ + ":generate_tree" + ], +) + py_test( name = "pkg_tar_test", size = "medium", @@ -485,6 +507,7 @@ py_test( ":test_tar_package_dir_substitution.tar", ":test-tar-long-filename", ":test-tar-repackaging-long-filename.tar", + ":test-tar-tree-artifact", ] + [ ":test-tar-basic-%s" % compression for compression in SUPPORTED_TAR_COMPRESSIONS diff --git a/pkg/tests/mappings/BUILD b/pkg/tests/mappings/BUILD index c3c72ce7..357ff422 100644 --- a/pkg/tests/mappings/BUILD +++ b/pkg/tests/mappings/BUILD @@ 
-13,7 +13,74 @@ # limitations under the License. load(":mappings_test.bzl", "mappings_analysis_tests", "mappings_unit_tests") +load( + "//:mappings.bzl", + "pkg_attributes", + "pkg_filegroup", + "pkg_files", + "pkg_mkdirs", + "pkg_mklink", + "strip_prefix", +) +load("//tests/util:defs.bzl", "write_content_manifest") +load("@rules_python//python:defs.bzl", "py_test") mappings_analysis_tests() mappings_unit_tests() + +pkg_mkdirs( + name = "dirs", + attributes = pkg_attributes( + group = "bar", + mode = "711", + user = "foo", + ), + dirs = [ + "foodir", + ], +) + +pkg_files( + name = "files", + srcs = [ + "mappings_test.bzl", + ], +) + +write_content_manifest( + name = "all", + srcs = [ + "BUILD", + ":dirs", + ":files", + ], +) + +# TODO(aiuto): This rule and the small srcs should be generated from just the +# pair of write_content_manifest name and the golden file name. +# We could even inline the golden data into a string which gets parsed as +# json and written out. +py_test( + name = "all_test", + srcs = [ + "all_test.py", + ], + data = [ + "all.manifest.golden", + ":all.manifest", + ], + python_version = "PY3", + deps = [ + ":manifest_test_lib", + ], +) + +py_library( + name = "manifest_test_lib", + srcs = ["manifest_test_lib.py"], + srcs_version = "PY3", + deps = [ + "@bazel_tools//tools/python/runfiles", + ], +) diff --git a/pkg/tests/mappings/all.manifest.golden b/pkg/tests/mappings/all.manifest.golden new file mode 100644 index 00000000..35082be1 --- /dev/null +++ b/pkg/tests/mappings/all.manifest.golden @@ -0,0 +1,5 @@ +[ +[0, "BUILD","tests/mappings/BUILD","",null,null], +[2, "foodir",null,"711","foo","bar"], +[0, "mappings_test.bzl","tests/mappings/mappings_test.bzl","0644",null,null] +] diff --git a/pkg/tests/mappings/all_test.py b/pkg/tests/mappings/all_test.py new file mode 100644 index 00000000..68e38ae4 --- /dev/null +++ b/pkg/tests/mappings/all_test.py @@ -0,0 +1,29 @@ +# Copyright 2021 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests for generated content manifest.""" + +# TODO(aiuto): Generate this file. + +import unittest + +import manifest_test_lib + +class ManifestAllTest(manifest_test_lib.ContentManifestTest): + + def test_match(self): + self.assertManifestsMatch('all.manifest.golden', 'all.manifest') + + +if __name__ == '__main__': + unittest.main() diff --git a/pkg/tests/mappings/manifest_test_lib.py b/pkg/tests/mappings/manifest_test_lib.py new file mode 100644 index 00000000..a909c31e --- /dev/null +++ b/pkg/tests/mappings/manifest_test_lib.py @@ -0,0 +1,41 @@ +# Copyright 2021 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Compare two content manifest files."""
+
+import json
+import unittest
+
+from bazel_tools.tools.python.runfiles import runfiles
+
+class ContentManifestTest(unittest.TestCase):
+  """Test harness to see if we wrote the content manifest correctly."""
+
+  run_files = runfiles.Create()
+
+  def assertManifestsMatch(self, expected, got):
+    """Check two manifest files for equality.
+
+    Args:
+      expected: The path to the content we expect.
+      got: The path to the content we got.
+    """
+    e_file = ContentManifestTest.run_files.Rlocation(
+        'rules_pkg/tests/mappings/' + expected)
+    with open(e_file, 'r') as e_fp:
+      expected = json.load(e_fp)
+    g_file = ContentManifestTest.run_files.Rlocation(
+        'rules_pkg/tests/mappings/' + got)
+    with open(g_file, 'r') as g_fp:
+      got = json.load(g_fp)
+    self.assertEqual(expected, got)
diff --git a/pkg/tests/pkg_tar_test.py b/pkg/tests/pkg_tar_test.py
index 8498ebb2..5abe8f37 100644
--- a/pkg/tests/pkg_tar_test.py
+++ b/pkg/tests/pkg_tar_test.py
@@ -127,10 +127,10 @@ def test_empty_files(self):
   def test_empty_dirs(self):
     content = [
         {'name': '.'},
-        {'name': './tmp', 'isdir': True, 'size': 0, 'uid': 0,
-         'mtime': PORTABLE_MTIME},
         {'name': './pmt', 'isdir': True, 'size': 0, 'uid': 0,
          'mtime': PORTABLE_MTIME},
+        {'name': './tmp', 'isdir': True, 'size': 0, 'uid': 0,
+         'mtime': PORTABLE_MTIME},
     ]
     self.assertTarFileContent('test-tar-empty_dirs.tar', content)
@@ -206,5 +206,28 @@ def test_repackage_file_with_long_name(self):
     ]
     self.assertTarFileContent('test-tar-repackaging-long-filename.tar', content)
 
+  def test_tar_with_tree_artifact(self):
+    # (sorted) list of files:
+    #  "a/a"
+    #  "a/b/c"
+    #  "b/c/d"
+    #  "b/d"
+    #  "b/e"
+
+    content = [
+        {'name': '.'},
+        {'name': './a_tree', 'isdir': True},
+        {'name': './a_tree/a', 'isdir': True},
+        {'name': './a_tree/a/a'},
+        {'name': './a_tree/a/b', 'isdir': True},
+        {'name': './a_tree/a/b/c'},
+        {'name': './a_tree/b', 'isdir': True},
+        {'name': './a_tree/b/c', 'isdir': True},
+        {'name': './a_tree/b/c/d'},
+        {'name': './a_tree/b/d'},
+        {'name': './a_tree/b/e'},
+    ]
+    self.assertTarFileContent('test-tar-tree-artifact.tar', content)
+
 if __name__ == '__main__':
   unittest.main()
diff --git a/pkg/tests/util/defs.bzl b/pkg/tests/util/defs.bzl
index 53f6c46e..69024d38 100644
--- a/pkg/tests/util/defs.bzl
+++ b/pkg/tests/util/defs.bzl
@@ -16,6 +16,7 @@
 load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
 load("@rules_python//python:defs.bzl", "py_binary")
+load("//private:pkg_files.bzl", "add_label_list", "write_manifest")
 
 def _directory_impl(ctx):
     out_dir_file = ctx.actions.declare_directory(ctx.attr.outdir or ctx.attr.name)
@@ -100,6 +101,29 @@ cc_binary in complexity, but does not depend on a large toolchain.""",
     },
 )
 
+def _write_content_manifest_impl(ctx):
+    content_map = {}  # content handled in the manifest
+    file_deps = []  # inputs we depend on
+    add_label_list(ctx, content_map, file_deps, ctx.attr.srcs)
+    write_manifest(ctx, ctx.outputs.out, content_map)
+
+_write_content_manifest = rule(
+    doc = """Helper rule to write the content manifest for a pkg_*.
+ +This is intended only for testing the manifest creation features.""", + implementation = _write_content_manifest_impl, + attrs = { + "srcs": attr.label_list( + doc = """List of source inputs.""", + allow_files = True, + ), + "out": attr.output(), + }, +) + +def write_content_manifest(name, srcs): + _write_content_manifest(name = name, srcs = srcs, out = name + ".manifest") + ############################################################ # Test boilerplate ############################################################ @@ -133,4 +157,3 @@ generic_negative_test = analysistest.make( }, expect_failure = True, ) -
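Illustrative usage (not part of the patch above): with this change, pkg_tar srcs may mix plain files with pkg_files / pkg_mkdirs targets and tree artifacts, and pkg_tar routes all of them through the generated --manifest file instead of per-file --file flags. A minimal BUILD sketch follows, using the in-repo load paths that appear in the diff; the target and file names (docs, log_dir, example, README.md, app.sh) are hypothetical.

load("//:pkg.bzl", "pkg_tar")
load("//:mappings.bzl", "pkg_attributes", "pkg_files", "pkg_mkdirs")

# Files with explicit mode/ownership; carried into the content map via the
# PackageFilesInfo provider handled by process_src().
pkg_files(
    name = "docs",
    srcs = ["README.md"],
    attributes = pkg_attributes(
        group = "root",
        mode = "0644",
        user = "root",
    ),
)

# An empty directory, written to the manifest as an ENTRY_IS_DIR entry.
pkg_mkdirs(
    name = "log_dir",
    dirs = ["var/log/myapp"],
)

# Plain files in srcs still go through dest_path()/_remap(); pkg_* targets are
# picked up by process_src() and land in the same manifest.
pkg_tar(
    name = "example",
    srcs = [
        "app.sh",
        ":docs",
        ":log_dir",
    ],
)

The real exercised usages in this change are the test-tar-tree-artifact and write_content_manifest targets in the test BUILD files above.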