diff --git a/.github/workflows/_buildpacks-release.yml b/.github/workflows/_buildpacks-release.yml index faa44d1b..9be4447c 100644 --- a/.github/workflows/_buildpacks-release.yml +++ b/.github/workflows/_buildpacks-release.yml @@ -71,14 +71,31 @@ jobs: with: submodules: true - - name: Install musl-tools - run: sudo apt-get install musl-tools --no-install-recommends + - name: Install Languages CLI + uses: heroku/languages-github-actions/.github/actions/install-languages-cli@main + with: + branch: ${{ inputs.languages_cli_branch }} + update_rust_toolchain: false + + - name: Generate buildpack matrix + id: generate-buildpack-matrix + run: actions generate-buildpack-matrix --temporary-id "${{ github.run_id }}" --package-dir "${{ env.PACKAGE_DIR }}" - name: Update Rust toolchain run: rustup update - - name: Install Rust linux-musl target - run: rustup target add x86_64-unknown-linux-musl + - name: Install cross-compile tooling + env: + RUST_TRIPLES: ${{ steps.generate-buildpack-matrix.outputs.rust_triples }} + run: | + for triple in $(jq --exit-status -r '.[]' <<< "${RUST_TRIPLES}"); do + if [[ "$triple" == "aarch64-unknown-linux-musl" ]]; then + sudo apt-get install musl-tools gcc-aarch64-linux-gnu g++-aarch64-linux-gnu libc6-dev-arm64-cross --no-install-recommends + elif [[ "$triple" == "x86_64-unknown-linux-musl" ]]; then + sudo apt-get install musl-tools --no-install-recommends + fi + rustup target add "$triple" + done - name: Rust cache uses: Swatinem/rust-cache@v2.7.3 @@ -94,62 +111,34 @@ jobs: ) cargo install --locked "libcnb-cargo@${LIBCNB_PACKAGE_VERSION}" - - name: Install Languages CLI - uses: heroku/languages-github-actions/.github/actions/install-languages-cli@main - with: - branch: ${{ inputs.languages_cli_branch }} - update_rust_toolchain: false - - name: Package buildpacks id: libcnb-package - run: cargo libcnb package --release --package-dir ${{ env.PACKAGE_DIR }} - - - name: Generate buildpack matrix - id: generate-buildpack-matrix - run: actions generate-buildpack-matrix --package-dir ${{ env.PACKAGE_DIR }} - - - name: Generate changelog - id: generate-changelog - run: actions generate-changelog --version ${{ steps.generate-buildpack-matrix.outputs.version }} - - - name: Temporary fix for bash-based buildpacks + env: + BUILDPACKS: ${{ steps.generate-buildpack-matrix.outputs.buildpacks }} run: | - buildpacks='${{ steps.generate-buildpack-matrix.outputs.buildpacks }}' - - bash_buildpack_source_dirs=() - bash_buildpack_output_dirs=() - - # copy any bash-based buildpack to target buildpack dir because `cargo libcnb package` will ignore them - for buildpack in $(jq --exit-status -c '.[]' <<< "${buildpacks}"); do + package_dir=$(realpath "${{ env.PACKAGE_DIR }}") + for buildpack in $(jq --exit-status -c '.[]' <<< "${BUILDPACKS}"); do buildpack_dir=$(jq --exit-status -r '.buildpack_dir' <<< "${buildpack}") - output_dir=$(jq --exit-status -r '.buildpack_output_dir' <<< "${buildpack}") - if [ ! 
-d "${output_dir}" ]; then - echo "bash-based buildpack detected at ${buildpack_dir}" - cp -R "${buildpack_dir}" "${output_dir}" - bash_buildpack_source_dirs+=("${buildpack_dir}") - bash_buildpack_output_dirs+=("${output_dir}") - fi - done - - # replace dependencies that reference a bash-buildpack - for buildpack in $(jq --exit-status -c '.[]' <<< "${buildpacks}"); do - output_dir=$(jq --exit-status -r '.buildpack_output_dir' <<< "${buildpack}") - echo "checking dependencies in ${output_dir}/package.toml" - for dep in $(yq -oy '.dependencies[].uri' "${output_dir}/package.toml"); do - if realpath "${dep}" &> /dev/null; then - dep_path=$(realpath "${dep}") - for i in "${!bash_buildpack_source_dirs[@]}"; do - bash_buildpack_source_dir="${bash_buildpack_source_dirs[$i]}" - bash_buildpack_output_dir="${bash_buildpack_output_dirs[$i]}" - if [ "${bash_buildpack_source_dir}" = "${dep_path}" ]; then - echo "replacing ${dep} with ${bash_buildpack_output_dir}" - sed -i 's|'"$dep"'|'"$bash_buildpack_output_dir"'|g' "${output_dir}/package.toml" - fi - done + buildpack_type=$(jq --exit-status -r '.buildpack_type' <<< "${buildpack}") + cd "$buildpack_dir" + for target in $(jq --exit-status -c '.targets | .[]' <<< "${buildpack}"); do + output_dir=$(jq --exit-status -r '.output_dir' <<< "${target}") + if [[ "$buildpack_type" == "bash" ]]; then + echo "Copying bash buildpack from ${buildpack_dir} to ${output_dir}." + mkdir -p $(dirname "$output_dir") + cp -R "$buildpack_dir" "$output_dir" + continue fi + echo "Packaging ${buildpack_dir}." + triple=$(jq --exit-status -r '.rust_triple' <<< "${target}") + cargo libcnb package --release --package-dir "${package_dir}" --target "${triple}" done done + - name: Generate changelog + id: generate-changelog + run: actions generate-changelog --version ${{ steps.generate-buildpack-matrix.outputs.version }} + - name: Cache buildpacks uses: actions/cache/save@v4 with: @@ -165,6 +154,16 @@ jobs: matrix: include: ${{ fromJSON(needs.compile.outputs.buildpacks) }} steps: + # Composite buildpacks that depend on bash buildpacks (like + # heroku/nodejs-function) refer to bash buildpacks by their source + # location rather than the packaged location. Other buildpacks don't + # don't need this step, so it's skipped where possible. 
+ - name: Checkout + if: matrix.buildpack_type == 'composite' + uses: actions/checkout@v4 + with: + submodules: true + - name: Restore buildpacks uses: actions/cache/restore@v4 with: @@ -177,32 +176,114 @@ jobs: - name: Install Pack CLI uses: buildpacks/github-actions/setup-pack@v5.6.0 - - name: Create Docker Image - run: pack buildpack package ${{ matrix.buildpack_id }} --config ${{ matrix.buildpack_output_dir }}/package.toml -v + - name: Install Crane + uses: buildpacks/github-actions/setup-tools@v5.5.4 - name: Login to Docker Hub - if: inputs.dry_run == false uses: docker/login-action@v3.1.0 with: registry: docker.io username: ${{ secrets.docker_hub_user }} password: ${{ secrets.docker_hub_token }} - - name: Check if version is already on Docker Hub - id: check - run: echo "published_to_docker=$(docker manifest inspect "${{ matrix.docker_repository }}:${{ matrix.buildpack_version }}" &> /dev/null && echo 'true' || echo 'false')" >> $GITHUB_OUTPUT + - name: Publish to temporary tags + env: + TARGETS: ${{ toJSON(matrix.targets) }} + run: | + echo "Published temporary tags:" >> $GITHUB_STEP_SUMMARY + target_temp_tags=($(jq --exit-status -r "map(.temporary_tag) | join(\" \")" <<< "${TARGETS}")) + # Publish each target to a temp tag + for i in "${!target_temp_tags[@]}"; do + output_dir=$(jq --exit-status -r ".[$i].output_dir" <<< "${TARGETS}") + echo "Packaging ${output_dir} into ${target_temp_tags[i]}" + pack buildpack package "${target_temp_tags[i]}" --config "${output_dir}/package.toml" -v --publish + digest=$(crane digest "${target_temp_tags[i]}") + echo -e "- \`${target_temp_tags[i]}\`\n - \`${digest}\`" >> $GITHUB_STEP_SUMMARY + done + + # If there is more than one target, publish a multi-platform + # manifest list / image index to a temp tag. + if (( ${#target_temp_tags[@]} > 1 )); then + # create a manifest list using platform-specific images created above. 
+          docker manifest create "${{ matrix.temporary_tag }}" "${target_temp_tags[@]}" + +          # annotate each of the manifest list entries with the correct os/arch +          for i in "${!target_temp_tags[@]}"; do +            os=$(jq --exit-status -r ".[$i].os" <<< "${TARGETS}") +            arch=$(jq --exit-status -r ".[$i].arch" <<< "${TARGETS}") +            echo "Annotating ${{ matrix.temporary_tag }} / ${target_temp_tags[i]} with ${os}/${arch}" +            docker manifest annotate "${{ matrix.temporary_tag }}" "${target_temp_tags[i]}" --os "${os}" --arch "${arch}" +          done  -      - name: Tag and publish buildpack -        if: inputs.dry_run == false && steps.check.outputs.published_to_docker == 'false' +          # Push the manifest list / image index to a temporary tag +          docker manifest push "${{ matrix.temporary_tag }}" +          digest=$(crane digest "${{ matrix.temporary_tag }}") +          echo -e "- \`${{ matrix.temporary_tag }}\`\n - \`${digest}\`" >> $GITHUB_STEP_SUMMARY +          fi + +      - name: Promote temporary tags to stable tags +        if: inputs.dry_run == false +        env: +          TARGETS: ${{ toJSON(matrix.targets) }} +        run: | +          # Promote target temp tags to stable tags +          echo "Published stable tags:" >> $GITHUB_STEP_SUMMARY +          target_temp_tags=($(jq --exit-status -r "map(.temporary_tag) | join(\" \")" <<< "${TARGETS}")) +          for i in "${!target_temp_tags[@]}"; do +            stable_tag=$(jq --exit-status -r ".[$i].stable_tag" <<< "${TARGETS}") +            crane copy "${target_temp_tags[i]}" "${stable_tag}" +            echo "- \`${stable_tag}\`" >> $GITHUB_STEP_SUMMARY +          done +          # promote primary image manifest or manifest list to permanent tag +          crane copy "${{ matrix.temporary_tag }}" "${{ matrix.stable_tag }}" +          echo "- \`${{ matrix.stable_tag }}\`" >> $GITHUB_STEP_SUMMARY + +      - name: Unpublish temp tags from this run +        if: always() +        env: +          TARGETS: ${{ toJSON(matrix.targets) }}         run: | -          docker tag ${{ matrix.buildpack_id }} ${{ matrix.docker_repository }}:${{ matrix.buildpack_version }} -          docker push ${{ matrix.docker_repository }}:${{ matrix.buildpack_version }} +          dockerhub_token=$(curl -sS -f --retry 3 --retry-connrefused --connect-timeout 5 --max-time 30 -H "Content-Type: application/json" -X POST -d "{\"username\": \"${{ secrets.docker_hub_user }}\", \"password\": \"${{ secrets.docker_hub_token }}\"}" https://hub.docker.com/v2/users/login/ | jq --exit-status -r .token) +          namespace=$(cut -d "/" -f2 <<< "${{ matrix.image_repository }}") +          repo=$(cut -d "/" -f3 <<< "${{ matrix.image_repository }}") +          status=0 +          temp_tags=($(jq --exit-status -r "map(.temporary_tag) | join(\" \")" <<< "${TARGETS}")) +          temp_tags+=("${{ matrix.temporary_tag }}") +          temp_tags=($(printf '%s\n' "${temp_tags[@]}" | sort -u)) +          for temp_tag in "${temp_tags[@]}"; do +            echo "Deleting ${temp_tag}" +            response=$(curl -sS --retry 3 --retry-connrefused --connect-timeout 5 --max-time 30 -X DELETE \ +              -H "Authorization: JWT ${dockerhub_token}" \ +              "https://hub.docker.com/v2/namespaces/${namespace}/repositories/${repo}/tags/${temp_tag#*:}" +            ) + +            if [[ -z $response ]]; then +              echo "Deleted." +            elif [[ $response =~ "tag not found" ]]; then +              echo "Tag does not exist." +            else +              echo "Couldn't delete. Response: ${response}" +              status=22 +            fi +          done +          exit $status    publish-github:     name: Publish → GitHub Release     needs: [compile]     runs-on: ${{ inputs.ip_allowlisted_runner }}     steps: +      # Composite buildpacks that depend on bash buildpacks (like +      # heroku/nodejs-function) refer to bash buildpacks by their source +      # location rather than the packaged location. Other buildpacks +      # don't need this step. 
Since it's challenging to determine if any of + # the buildpacks in this repo meet this criteria, and this step is + # reasonably fast, it is always run. + - name: Checkout + uses: actions/checkout@v4 + with: + submodules: true + - name: Restore buildpacks uses: actions/cache/restore@v4 with: @@ -218,9 +299,11 @@ jobs: - name: Generate CNB files run: | for buildpack in $(jq --exit-status -c '.[]' <<< '${{ needs.compile.outputs.buildpacks }}'); do - artifact_prefix=$(jq --exit-status -r '.buildpack_artifact_prefix' <<< "${buildpack}") - output_dir=$(jq --exit-status -r '.buildpack_output_dir' <<< "${buildpack}") - pack buildpack package "${artifact_prefix}.cnb" --config "${output_dir}/package.toml" --format file --verbose + for target in $(jq --exit-status -c ".targets | .[]" <<< "${buildpack}"); do + output_dir=$(jq --exit-status -r ".output_dir" <<< "${target}") + cnb_file=$(jq --exit-status -r ".cnb_file" <<< "${target}") + pack buildpack package "$cnb_file" --config "${output_dir}/package.toml" --format file --verbose + done done - name: Get token for GitHub application (Linguist) @@ -246,7 +329,7 @@ jobs: files: "*.cnb" fail_on_unmatched_files: true - publish-cnb: + publish-cnb-registry: name: Publish → CNB Registry - ${{ matrix.buildpack_id }} needs: [compile, publish-docker] runs-on: ubuntu-latest @@ -270,7 +353,7 @@ jobs: - name: Calculate the buildpack image digest id: digest - run: echo "value=$(crane digest ${{ matrix.docker_repository }}:${{ matrix.buildpack_version }})" >> "$GITHUB_OUTPUT" + run: echo "value=$(crane digest ${{ matrix.stable_tag }})" >> "$GITHUB_OUTPUT" - name: Register the new version with the CNB Buildpack Registry if: inputs.dry_run == false && steps.check.outputs.published_to_cnb_registry == 'false' @@ -279,11 +362,11 @@ jobs: token: ${{ secrets.cnb_registry_token }} id: ${{ matrix.buildpack_id }} version: ${{ matrix.buildpack_version }} - address: ${{ matrix.docker_repository }}@${{ steps.digest.outputs.value }} + address: ${{ matrix.image_repository }}@${{ steps.digest.outputs.value }} update-builder: name: Update Builder - needs: [compile, publish-docker, publish-cnb, publish-github] + needs: [compile, publish-docker, publish-cnb-registry, publish-github] runs-on: ${{ inputs.ip_allowlisted_runner }} steps: - name: Get token for GH application (Linguist) diff --git a/Cargo.lock b/Cargo.lock index 32f0e09e..16b0b644 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -86,6 +86,12 @@ dependencies = [ "thiserror", ] +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + [[package]] name = "chrono" version = "0.4.38" @@ -297,7 +303,9 @@ dependencies = [ "markdown", "regex", "semver", + "serde", "serde_json", + "tempfile", "thiserror", "toml", "toml_edit", @@ -551,6 +559,18 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "tempfile" +version = "3.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +dependencies = [ + "cfg-if", + "fastrand", + "rustix", + "windows-sys", +] + [[package]] name = "thiserror" version = "1.0.59" diff --git a/Cargo.toml b/Cargo.toml index de315354..a171c522 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,7 +21,7 @@ unused_crate_dependencies = "warn" [lints.clippy] panic_in_result_fn = "warn" -pedantic = "warn" +pedantic = { level = "warn", priority = -1 } unwrap_used = "warn" [dependencies] @@ 
-47,6 +47,8 @@ serde_json = "1" thiserror = "1" toml_edit = "0.22" uriparse = "0.6" +serde = { version = "1.0.198", features = ["derive"] }  [dev-dependencies] toml = "0.8" +tempfile = "3.10" diff --git a/src/commands/generate_buildpack_matrix/command.rs b/src/commands/generate_buildpack_matrix/command.rs index ed7c3a00..7c5e8a08 100644 --- a/src/commands/generate_buildpack_matrix/command.rs +++ b/src/commands/generate_buildpack_matrix/command.rs @@ -5,12 +5,13 @@ use crate::commands::generate_buildpack_matrix::errors::Error; use crate::commands::resolve_path; use crate::github::actions; use clap::Parser; -use libcnb_data::buildpack::{BuildpackDescriptor, BuildpackId}; +use libcnb_data::buildpack::{BuildpackDescriptor, BuildpackId, BuildpackTarget}; use libcnb_package::output::{     create_packaged_buildpack_dir_resolver, default_buildpack_directory_name, }; use libcnb_package::CargoProfile; -use std::collections::{BTreeMap, HashSet}; +use serde::Serialize; +use std::collections::HashSet; use std::path::{Path, PathBuf};  type Result<T> = std::result::Result<T, Error>;  @@ -19,41 +20,55 @@ type Result<T> = std::result::Result<T, Error>; #[command(author, version, about = "Generates a JSON list of buildpack information for each buildpack detected", long_about = None)] pub(crate) struct GenerateBuildpackMatrixArgs {     #[arg(long)] -    pub(crate) package_dir: PathBuf, -    #[arg(long, default_value = "x86_64-unknown-linux-musl")] -    pub(crate) target: String, +    pub(crate) source_dir: Option<PathBuf>, +    #[arg(long)] +    pub(crate) package_dir: Option<PathBuf>, +    #[arg(long)] +    pub(crate) temporary_id: String, }  pub(crate) fn execute(args: &GenerateBuildpackMatrixArgs) -> Result<()> { -    let current_dir = std::env::current_dir().map_err(Error::GetCurrentDir)?; -    let package_dir = resolve_path(&args.package_dir, &current_dir); - -    let packaged_buildpack_dir_resolver = -        create_packaged_buildpack_dir_resolver(&package_dir, CargoProfile::Release, &args.target); +    let source_dir = match &args.source_dir { +        Some(path) => path.clone(), +        None => std::env::current_dir().map_err(Error::GetCurrentDir)?, +    }; +    let package_dir = resolve_path( +        match &args.package_dir { +            Some(path) => path, +            None => Path::new("./packaged"), +        }, +        &source_dir, +    );      let buildpack_dirs = -        find_releasable_buildpacks(&current_dir).map_err(Error::FindReleasableBuildpacks)?; +        find_releasable_buildpacks(&source_dir).map_err(Error::FindReleasableBuildpacks)?;      let buildpacks = buildpack_dirs         .iter()         .map(|dir| read_buildpack_descriptor(dir).map_err(Error::ReadBuildpackDescriptor))         .collect::<Result<Vec<_>>>()?;  -    let includes = buildpack_dirs +    let buildpacks_info = buildpack_dirs         .iter()         .zip(buildpacks.iter())         .map(|(buildpack_dir, buildpack_descriptor)| { -            extract_buildpack_info( +            read_buildpack_info(                 buildpack_descriptor,                 buildpack_dir, -                &packaged_buildpack_dir_resolver, +                &package_dir, +                &args.temporary_id,             )         })         .collect::<Result<Vec<_>>>()?;  -    let includes_json = serde_json::to_string(&includes).map_err(Error::SerializingJson)?; +    let buildpacks_json = +        serde_json::to_string_pretty(&buildpacks_info).map_err(Error::SerializingJson)?;  -    actions::set_output("buildpacks", includes_json).map_err(Error::SetActionOutput)?; +    actions::set_output("buildpacks", &buildpacks_json).map_err(Error::WriteActionData)?; +    actions::set_summary(format!( +        "
Buildpack Matrix\n\n```json\n{buildpacks_json}\n```\n
" + )) + .map_err(Error::WriteActionData)?; let versions = buildpacks .iter() @@ -69,44 +84,326 @@ pub(crate) fn execute(args: &GenerateBuildpackMatrixArgs) -> Result<()> { .next() .ok_or(Error::FixedVersion(versions.clone()))?; - actions::set_output("version", version).map_err(Error::SetActionOutput)?; + actions::set_output("version", version).map_err(Error::WriteActionData)?; + + let rust_triples = buildpacks + .iter() + .flat_map(read_buildpack_targets) + .filter_map(|t| rust_triple(&t).ok()) + .collect::>(); + + actions::set_output( + "rust_triples", + serde_json::to_string(&rust_triples).map_err(Error::SerializingJson)?, + ) + .map_err(Error::WriteActionData)?; Ok(()) } -pub(crate) fn extract_buildpack_info( +#[derive(Serialize)] +pub(crate) struct BuildpackInfo { + buildpack_id: String, + buildpack_version: String, + buildpack_type: BuildpackType, + buildpack_dir: PathBuf, + targets: Vec, + image_repository: String, + stable_tag: String, + temporary_tag: String, +} + +#[derive(Serialize)] +pub(crate) struct TargetInfo { + os: Option, + arch: Option, + rust_triple: Option, + cnb_file: String, + stable_tag: String, + temporary_tag: String, + output_dir: PathBuf, +} + +#[derive(Clone, Debug, PartialEq, Serialize)] +#[serde(rename_all = "snake_case")] +enum BuildpackType { + Bash, + Composite, + Libcnb, +} + +pub(crate) fn read_buildpack_info( + buildpack_descriptor: &BuildpackDescriptor, + buildpack_dir: &Path, + package_dir: &Path, + temporary_id: &str, +) -> Result { + let version = buildpack_descriptor.buildpack().version.to_string(); + let image_repository = read_image_repository_metadata(buildpack_descriptor).ok_or( + Error::MissingImageRepositoryMetadata(buildpack_dir.join("buildpack.toml")), + )?; + let targets = read_buildpack_targets(buildpack_descriptor); + let buildpack_type = buildpack_type(buildpack_descriptor, buildpack_dir)?; + Ok(BuildpackInfo { + buildpack_id: buildpack_descriptor.buildpack().id.to_string(), + buildpack_version: version.clone(), + buildpack_dir: buildpack_dir.into(), + buildpack_type: buildpack_type.clone(), + targets: read_buildpack_targets(buildpack_descriptor) + .iter() + .map(|target| { + let suffix = if targets.len() > 1 { + Some(target_name(target)) + } else { + None + }; + Ok(TargetInfo { + cnb_file: cnb_file(&buildpack_descriptor.buildpack().id, suffix.as_deref()), + os: target.os.clone(), + arch: target.arch.clone(), + output_dir: target_output_dir( + &buildpack_descriptor.buildpack().id, + &buildpack_type, + package_dir, + target, + )?, + rust_triple: rust_triple(target).ok(), + stable_tag: generate_tag(&image_repository, &version, suffix.as_deref()), + temporary_tag: generate_tag( + &image_repository, + &format!("_{temporary_id}"), + suffix.as_deref(), + ), + }) + }) + .collect::>>()?, + stable_tag: generate_tag(&image_repository, &version, None), + temporary_tag: generate_tag(&image_repository, &format!("_{temporary_id}"), None), + image_repository, + }) +} + +// Reads targets from buildpacks while ensuring each buildpack returns at least +// one target (libcnb assumes a linux/amd64 target by default, even if no +// targets are defined). 
+fn read_buildpack_targets(buildpack_descriptor: &BuildpackDescriptor) -> Vec<BuildpackTarget> { +    let mut targets = match buildpack_descriptor { +        BuildpackDescriptor::Component(descriptor) => descriptor.targets.clone(), +        BuildpackDescriptor::Composite(_) => vec![], +    }; +    if targets.is_empty() { +        targets.push(BuildpackTarget { +            os: Some("linux".into()), +            arch: Some("amd64".into()), +            variant: None, +            distros: vec![], +        }); +    }; +    targets +} + +fn generate_tag(repo: &str, tag: &str, suffix: Option<&str>) -> String { +    suffix.map_or_else( +        || format!("{repo}:{tag}"), +        |suffix| format!("{repo}:{tag}_{suffix}"), +    ) +} + +fn cnb_file(buildpack_id: &BuildpackId, suffix: Option<&str>) -> String { +    let name = default_buildpack_directory_name(buildpack_id); +    suffix.map_or_else( +        || format!("{name}.cnb"), +        |suffix| format!("{name}_{suffix}.cnb"), +    ) +} + +// Returns the target naming suffix for image tags and .cnb files. +fn target_name(target: &BuildpackTarget) -> String { +    match (target.os.as_deref(), target.arch.as_deref()) { +        (Some(os), Some(arch)) => format!("{os}-{arch}"), +        (Some(os), None) => os.to_string(), +        (None, Some(arch)) => format!("universal-{arch}"), +        (_, _) => "universal".to_string(), +    } +} + +fn rust_triple(target: &BuildpackTarget) -> Result<String> { +    match (target.os.as_deref(), target.arch.as_deref()) { +        (Some("linux"), Some("amd64")) => Ok(String::from("x86_64-unknown-linux-musl")), +        (Some("linux"), Some("arm64")) => Ok(String::from("aarch64-unknown-linux-musl")), +        (_, _) => Err(Error::UnknownRustTarget(target.clone())), +    } +} + +// Returns the expected output directory for a target. libcnb.rs and composite +// buildpacks should return the libcnb.rs packaged directory. +// (e.g.: packaged/x86_64-unknown-linux-musl/release/heroku_procfile), +// while bash buildpacks should return a similar path, without relying on a +// rust triple. 
+fn target_output_dir( +    buildpack_id: &BuildpackId, +    buildpack_type: &BuildpackType, +    package_dir: &Path, +    target: &BuildpackTarget, +) -> Result<PathBuf> { +    let target_dirname = match buildpack_type { +        BuildpackType::Bash => target_name(target), +        _ => rust_triple(target)?, +    }; +    Ok(create_packaged_buildpack_dir_resolver( +        package_dir, +        CargoProfile::Release, +        &target_dirname, +    )(buildpack_id)) +} + +fn buildpack_type(     buildpack_descriptor: &BuildpackDescriptor,     buildpack_dir: &Path, -    packaged_buildpack_dir_resolver: &impl Fn(&BuildpackId) -> PathBuf, -) -> Result<BTreeMap<String, String>> { -    Ok(BTreeMap::from([ -        ( -            "buildpack_id".to_string(), -            buildpack_descriptor.buildpack().id.to_string(), -        ), -        ( -            "buildpack_version".to_string(), -            buildpack_descriptor.buildpack().version.to_string(), -        ), -        ( -            "buildpack_dir".to_string(), -            buildpack_dir.to_string_lossy().to_string(), -        ), -        ( -            "buildpack_artifact_prefix".to_string(), -            default_buildpack_directory_name(&buildpack_descriptor.buildpack().id), -        ), -        ( -            "buildpack_output_dir".to_string(), -            packaged_buildpack_dir_resolver(&buildpack_descriptor.buildpack().id) -                .to_string_lossy() -                .to_string(), -        ), -        ( -            "docker_repository".to_string(), -            read_image_repository_metadata(buildpack_descriptor).ok_or( -                Error::MissingImageRepositoryMetadata(buildpack_dir.join("buildpack.toml")), -            )?, -        ), -    ])) +) -> Result<BuildpackType> { +    match ( +        buildpack_descriptor, +        has_cargo_toml(buildpack_dir), +        has_bin_files(buildpack_dir), +    ) { +        (BuildpackDescriptor::Composite(_), false, false) => Ok(BuildpackType::Composite), +        (BuildpackDescriptor::Composite(_), _, _) => { +            Err(Error::MultipleTypes(buildpack_dir.into())) +        } +        (BuildpackDescriptor::Component(_), true, false) => Ok(BuildpackType::Libcnb), +        (BuildpackDescriptor::Component(_), false, true) => Ok(BuildpackType::Bash), +        (BuildpackDescriptor::Component(_), false, false) => { +            Err(Error::UnknownType(buildpack_dir.into())) +        } +        (_, true, true) => Err(Error::MultipleTypes(buildpack_dir.into())), +    } +} + +fn has_cargo_toml(buildpack_dir: &Path) -> bool { +    buildpack_dir.join("Cargo.toml").exists() +} + +fn has_bin_files(buildpack_dir: &Path) -> bool { +    ["detect", "build"] +        .iter() +        .all(|file| buildpack_dir.join("bin").join(file).exists()) +} + +#[cfg(test)] +mod tests { +    use super::read_buildpack_info; +    use crate::commands::generate_buildpack_matrix::command::BuildpackType; +    use libcnb_data::buildpack::BuildpackDescriptor; +    use std::{ +        fs::{create_dir_all, OpenOptions}, +        path::PathBuf, +    }; +    use tempfile::tempdir; + +    #[test] +    fn read_multitarget_libcnb_buildpack() { +        let bp_descriptor: BuildpackDescriptor = toml::from_str( +            r#" +            api = "0.10" +            [buildpack] +            id = "heroku/fakeymcfakeface" +            version = "1.2.3" +            [[targets]] +            os="linux" +            arch="amd64" +            [[targets]] +            os="linux" +            arch="arm64" +            [metadata.release] +            image = { repository = "docker.io/heroku/buildpack-fakey" } +            "#, +        ) +        .expect("expected buildpack descriptor to parse"); +        let package_dir = PathBuf::from("./packaged-fake"); +        let bp_dir = tempdir().expect("Error creating tempdir"); +        OpenOptions::new() +            .write(true) +            .create(true) +            .truncate(true) +            .open(bp_dir.path().join("Cargo.toml")) +            .expect("Couldn't write dummy Cargo.toml"); + +        let bp_info = read_buildpack_info(&bp_descriptor, bp_dir.path(), &package_dir, "918273") +            .expect("Expected to read buildpack info"); +        assert_eq!(bp_info.buildpack_id, "heroku/fakeymcfakeface"); +        assert_eq!(bp_info.buildpack_type, BuildpackType::Libcnb); +        assert_eq!( +            bp_info.temporary_tag, 
"docker.io/heroku/buildpack-fakey:_918273" + ); + assert_eq!(bp_info.targets[0].os, Some("linux".to_string())); + assert_eq!(bp_info.targets[1].arch, Some("arm64".to_string())); + assert_eq!( + bp_info.targets[0].rust_triple, + Some("x86_64-unknown-linux-musl".to_string()) + ); + assert_eq!( + bp_info.targets[1].rust_triple, + Some("aarch64-unknown-linux-musl".to_string()) + ); + assert_eq!( + bp_info.targets[0].temporary_tag, + "docker.io/heroku/buildpack-fakey:_918273_linux-amd64" + ); + assert_eq!( + bp_info.targets[1].stable_tag, + "docker.io/heroku/buildpack-fakey:1.2.3_linux-arm64" + ); + assert_eq!( + bp_info.targets[0].output_dir, + PathBuf::from( + "./packaged-fake/x86_64-unknown-linux-musl/release/heroku_fakeymcfakeface" + ) + ); + } + + #[test] + fn read_targetless_bash_buildpack() { + let bp_descriptor: BuildpackDescriptor = toml::from_str( + r#" + api = "0.10" + [buildpack] + id = "heroku/fakeymcfakeface" + version = "3.2.1" + [[stacks]] + id = "*" + [metadata.release] + image = { repository = "docker.io/heroku/buildpack-fakey" } + "#, + ) + .expect("expected buildpack descriptor to parse"); + let package_dir = PathBuf::from("./packaged-fake"); + let bp_dir = tempdir().expect("Error creating tempdir"); + create_dir_all(bp_dir.path().join("bin")).expect("Couldn't create bash bin directory"); + for filename in ["detect", "build"] { + OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .open(bp_dir.path().join("bin").join(filename)) + .expect("Couldn't write dummy bash file"); + } + + let bp_info = read_buildpack_info(&bp_descriptor, bp_dir.path(), &package_dir, "1928273") + .expect("Expected to read buildpack info"); + + assert_eq!(bp_info.buildpack_id, "heroku/fakeymcfakeface"); + assert_eq!(bp_info.buildpack_type, BuildpackType::Bash); + assert_eq!(bp_info.stable_tag, "docker.io/heroku/buildpack-fakey:3.2.1"); + assert_eq!( + bp_info.targets[0].temporary_tag, + "docker.io/heroku/buildpack-fakey:_1928273" + ); + assert_eq!(bp_info.targets[0].os, Some("linux".to_string())); + assert_eq!(bp_info.targets[0].arch, Some("amd64".to_string())); + assert_eq!( + bp_info.targets[0].output_dir, + PathBuf::from("./packaged-fake/linux-amd64/release/heroku_fakeymcfakeface") + ); + } } diff --git a/src/commands/generate_buildpack_matrix/errors.rs b/src/commands/generate_buildpack_matrix/errors.rs index 06dc4b16..25caa64c 100644 --- a/src/commands/generate_buildpack_matrix/errors.rs +++ b/src/commands/generate_buildpack_matrix/errors.rs @@ -1,5 +1,6 @@ use crate::buildpacks::{FindReleasableBuildpacksError, ReadBuildpackDescriptorError}; -use crate::github::actions::SetActionOutputError; +use crate::github::actions::WriteActionDataError; +use libcnb_data::buildpack::BuildpackTarget; use std::collections::HashSet; use std::path::PathBuf; @@ -18,7 +19,15 @@ pub(crate) enum Error { #[error("Expected all buildpacks to have the same version but multiple versions were found:\n{}", list_versions(.0))] FixedVersion(HashSet), #[error(transparent)] - SetActionOutput(SetActionOutputError), + WriteActionData(WriteActionDataError), + #[error("Unknown target configuration. Couldn't determine a rust triple for {0:?}.")] + UnknownRustTarget(BuildpackTarget), + #[error("Couldn't determine buildpack type. Found evidence for two or more buildpack types (bash, composite, libcnb.rs) in {0}.")] + MultipleTypes(PathBuf), + #[error( + "Couldn't determine buildpack type. Found no evidence of a bash, composite, or libcnb.rs buildpack in {0}." 
+ )] + UnknownType(PathBuf), } fn list_versions(versions: &HashSet) -> String { diff --git a/src/commands/generate_changelog/errors.rs b/src/commands/generate_changelog/errors.rs index 877dd010..294d4490 100644 --- a/src/commands/generate_changelog/errors.rs +++ b/src/commands/generate_changelog/errors.rs @@ -1,6 +1,6 @@ use crate::buildpacks::{FindReleasableBuildpacksError, ReadBuildpackDescriptorError}; use crate::changelog::ChangelogError; -use crate::github::actions::SetActionOutputError; +use crate::github::actions::WriteActionDataError; use std::path::PathBuf; #[derive(Debug, thiserror::Error)] @@ -16,5 +16,5 @@ pub(crate) enum Error { #[error("Could not parse changelog\nPath: {}\nError: {1}", .0.display())] ParsingChangelog(PathBuf, #[source] ChangelogError), #[error(transparent)] - SetActionOutput(SetActionOutputError), + SetActionOutput(WriteActionDataError), } diff --git a/src/commands/prepare_release/errors.rs b/src/commands/prepare_release/errors.rs index 0df3af6e..19be8829 100644 --- a/src/commands/prepare_release/errors.rs +++ b/src/commands/prepare_release/errors.rs @@ -1,6 +1,6 @@ use crate::buildpacks::FindReleasableBuildpacksError; use crate::changelog::ChangelogError; -use crate::github::actions::SetActionOutputError; +use crate::github::actions::WriteActionDataError; use libcnb_data::buildpack::BuildpackVersion; use std::collections::HashMap; use std::io; @@ -13,7 +13,7 @@ pub(crate) enum Error { #[error(transparent)] FindReleasableBuildpacks(FindReleasableBuildpacksError), #[error(transparent)] - SetActionOutput(SetActionOutputError), + SetActionOutput(WriteActionDataError), #[error("Invalid URL `{0}` for argument --repository-url\nError: {1}")] InvalidRepositoryUrl(String, #[source] uriparse::URIError), #[error("Invalid Version `{0}` for argument --declarations-starting-version\nError: {1}")] diff --git a/src/github/actions.rs b/src/github/actions.rs index dacf83f3..3ba933e4 100644 --- a/src/github/actions.rs +++ b/src/github/actions.rs @@ -2,13 +2,17 @@ use std::fs::OpenOptions; use std::io::{stdout, Write}; use std::{io, iter}; +pub(crate) fn set_summary>(markdown: M) -> Result<(), WriteActionDataError> { + let markdown = markdown.into(); + write_data("GITHUB_STEP_SUMMARY", format!("{markdown}\n").as_bytes()) +} + pub(crate) fn set_output, V: Into>( name: N, value: V, -) -> Result<(), SetActionOutputError> { +) -> Result<(), WriteActionDataError> { let name = name.into(); let value = value.into(); - let line = if value.contains('\n') { let delimiter: String = iter::repeat_with(fastrand::alphanumeric).take(20).collect(); format!("{name}<<{delimiter}\n{value}\n{delimiter}") @@ -16,26 +20,28 @@ pub(crate) fn set_output, V: Into>( format!("{name}={value}") }; let line = format!("{line}\n"); + write_data("GITHUB_OUTPUT", line.as_bytes()) +} - let mut file: Box = match std::env::var("GITHUB_OUTPUT") { +fn write_data(env_name: &str, data: &[u8]) -> Result<(), WriteActionDataError> { + let mut file: Box = match std::env::var(env_name) { Ok(github_output) => { let append_file = OpenOptions::new() .append(true) .open(github_output) - .map_err(SetActionOutputError::Opening)?; + .map_err(WriteActionDataError::Opening)?; Box::new(append_file) } Err(_) => Box::new(stdout()), }; - file.write_all(line.as_bytes()) - .map_err(SetActionOutputError::Writing) + file.write_all(data).map_err(WriteActionDataError::Writing) } #[derive(Debug, thiserror::Error)] -pub(crate) enum SetActionOutputError { - #[error("Could not open action output\nError: {0}")] +pub(crate) enum WriteActionDataError 
{ + #[error("Could not open action data file\nError: {0}")] Opening(#[source] io::Error), - #[error("Could not write action output\nError: {0}")] + #[error("Could not write action data file\nError: {0}")] Writing(#[source] io::Error), }