Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

[2.0] Fixing tooling issues during package candidates resolution. (CP #2091) #2092

Merged
merged 1 commit into from
Feb 4, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 23 additions & 20 deletions toolkit/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@
# Licensed under the MIT License.

# Contains:
# - Definitions
# - High Level Targets
# - Submake Includes
# - Definitions
# - High Level Targets
# - Submake Includes

######## DEFINITIONS ########

Expand Down Expand Up @@ -50,6 +50,7 @@ REBUILD_DEP_CHAINS ?= y
toolkit_root := $(abspath $(dir $(lastword $(MAKEFILE_LIST))))
TOOLS_DIR ?= $(toolkit_root)/tools
TOOL_BINS_DIR ?= $(toolkit_root)/out/tools
REPOS_DIR ?= $(toolkit_root)/repos
RESOURCES_DIR ?= $(toolkit_root)/resources
SCRIPTS_DIR ?= $(toolkit_root)/scripts

Expand Down Expand Up @@ -86,25 +87,26 @@ IMAGES_DIR ?= $(OUT_DIR)/images

# If toolchain RPMs are being rebuilt locally, they belong with the other RPMs
ifeq ($(REBUILD_TOOLCHAIN),y)
toolchain_rpms_dir := $(RPMS_DIR)
toolchain_rpms_dir := $(RPMS_DIR)
else
toolchain_rpms_dir := $(CACHED_RPMS_DIR)/cache
toolchain_rpms_dir := $(CACHED_RPMS_DIR)/cache
endif

# External source server
SOURCE_URL ?=

PACKAGE_URL_LIST ?= https://packages.microsoft.com/cbl-mariner/$(RELEASE_MAJOR_ID)/prod/base/$(build_arch)
PACKAGE_URL_LIST += https://packages.microsoft.com/cbl-mariner/$(RELEASE_MAJOR_ID)/prod/base/debuginfo/$(build_arch)
REPO_LIST ?=
SRPM_URL_LIST ?= https://packages.microsoft.com/cbl-mariner/$(RELEASE_MAJOR_ID)/prod/base/srpms

ifeq ($(USE_PREVIEW_REPO),y)
PACKAGE_URL_LIST += https://packages.microsoft.com/cbl-mariner/$(RELEASE_MAJOR_ID)/preview/base/$(build_arch)
PACKAGE_URL_LIST += https://packages.microsoft.com/cbl-mariner/$(RELEASE_MAJOR_ID)/preview/base/debuginfo/$(build_arch)
SRPM_URL_LIST += https://packages.microsoft.com/cbl-mariner/$(RELEASE_MAJOR_ID)/preview/base/srpms
PACKAGE_URL_LIST += https://packages.microsoft.com/cbl-mariner/$(RELEASE_MAJOR_ID)/preview/base/$(build_arch)
PACKAGE_URL_LIST += https://packages.microsoft.com/cbl-mariner/$(RELEASE_MAJOR_ID)/preview/base/debuginfo/$(build_arch)
override REPO_LIST += $(REPOS_DIR)/mariner-official-preview.repo
SRPM_URL_LIST += https://packages.microsoft.com/cbl-mariner/$(RELEASE_MAJOR_ID)/preview/base/srpms
endif

REPO_LIST ?=
CA_CERT ?=
TLS_CERT ?=
TLS_KEY ?=
Expand All @@ -114,15 +116,15 @@ DIST_TAG ?= .cm2
BUILD_NUMBER ?= $(shell git rev-parse --short HEAD)
# an empty BUILD_NUMBER breaks the build later on
ifeq ($(BUILD_NUMBER),)
BUILD_NUMBER = non-git
BUILD_NUMBER = non-git
endif
RELEASE_MAJOR_ID ?= 2.0
# use minor ID defined in file (if exist) otherwise define it
# note this file must be single line
ifneq ($(wildcard $(OUT_DIR)/version-minor-id.config),)
RELEASE_MINOR_ID ?= .$(shell cat $(OUT_DIR)/version-minor-id.config)
RELEASE_MINOR_ID ?= .$(shell cat $(OUT_DIR)/version-minor-id.config)
else
RELEASE_MINOR_ID ?= .$(shell date +'%Y%m%d.%H%M')
RELEASE_MINOR_ID ?= .$(shell date +'%Y%m%d.%H%M')
endif
RELEASE_VERSION ?= $(RELEASE_MAJOR_ID)$(RELEASE_MINOR_ID)

Expand All @@ -133,6 +135,7 @@ IMAGE_TAG ?= Preview-C
LOG_LEVEL ?= info
STOP_ON_WARNING ?= n
STOP_ON_PKG_FAIL ?= n
STOP_ON_FETCH_FAIL ?= n

######## HIGH LEVEL TARGETS ########

Expand All @@ -146,29 +149,29 @@ all: toolchain go-tools chroot-tools
include $(SCRIPTS_DIR)/utils.mk

# Bootstrap the toolchain's compilers and other tools with:
# toolchain, raw-toolchain, clean-toolchain, check-manifests, check-x86_64-manifests, check-aarch64-manifests
# toolchain, raw-toolchain, clean-toolchain, check-manifests, check-x86_64-manifests, check-aarch64-manifests
include $(SCRIPTS_DIR)/toolchain.mk

# go utilities with:
# go-tools, clean-go-tools, go-tidy-all (tidy go utilities before committing) go-test-coverage
# go-tools, clean-go-tools, go-tidy-all (tidy go utilities before committing) go-test-coverage
# chroot worker with:
# chroot-tools clean-chroot-tools
# chroot-tools clean-chroot-tools
# macro definitions with:
# macro-tools clean-macro-tools
# macro-tools clean-macro-tools
include $(SCRIPTS_DIR)/tools.mk

# Create SRPMS from local SPECS with:
# input-srpms, clean-input-srpms
# input-srpms, clean-input-srpms
include $(SCRIPTS_DIR)/srpm_pack.mk

# Expand local SRPMS into sources and SPECS with:
# expand-specs clean-expand-specs
# expand-specs clean-expand-specs
include $(SCRIPTS_DIR)/srpm_expand.mk

# Create a package build workplan with:
# workplan, clean-workplan clean-cache
# workplan, clean-workplan clean-cache
# Build a package with:
# build-packages clean-build-packages
# build-packages clean-build-packages
# Either create or consume compressed folders of rpms with:
# hydrate-rpms, compress-rpms, clean-compress-rpms, compress-srpms, clean-compress-srpms
include $(SCRIPTS_DIR)/pkggen.mk
Expand Down
4 changes: 4 additions & 0 deletions toolkit/scripts/pkggen.mk
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,10 @@ ifeq ($(USE_PREVIEW_REPO),y)
graphpkgfetcher_extra_flags += --use-preview-repo
endif

ifeq ($(STOP_ON_FETCH_FAIL),y)
graphpkgfetcher_extra_flags += --stop-on-failure
endif

$(cached_file): $(graph_file) $(go-graphpkgfetcher) $(chroot_worker) $(pkggen_local_repo) $(depend_REPO_LIST) $(REPO_LIST) $(shell find $(CACHED_RPMS_DIR)/) $(pkggen_rpms)
mkdir -p $(CACHED_RPMS_DIR)/cache && \
$(go-graphpkgfetcher) \
Expand Down
19 changes: 11 additions & 8 deletions toolkit/tools/graphpkgfetcher/graphpkgfetcher.go
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,8 @@ var (
tlsClientCert = app.Flag("tls-cert", "TLS client certificate to use when downloading files.").String()
tlsClientKey = app.Flag("tls-key", "TLS client key to use when downloading files.").String()

stopOnFailure = app.Flag("stop-on-failure", "Stop if failed to cache all unresolved nodes.").Bool()

inputSummaryFile = app.Flag("input-summary-file", "Path to a file with the summary of packages cloned to be restored").String()
outputSummaryFile = app.Flag("output-summary-file", "Path to save the summary of packages cloned").String()

Expand All @@ -58,7 +60,7 @@ func main() {
}

if hasUnresolvedNodes(dependencyGraph) {
err = resolveGraphNodes(dependencyGraph, *inputSummaryFile, *outputSummaryFile, *disableUpstreamRepos)
err = resolveGraphNodes(dependencyGraph, *inputSummaryFile, *outputSummaryFile, *disableUpstreamRepos, *stopOnFailure)
if err != nil {
logger.Log.Panicf("Failed to resolve graph. Error: %s", err)
}
Expand All @@ -84,7 +86,7 @@ func hasUnresolvedNodes(graph *pkggraph.PkgGraph) bool {

// resolveGraphNodes scans a graph and for each unresolved node in the graph clones the RPMs needed
// to satisfy it.
func resolveGraphNodes(dependencyGraph *pkggraph.PkgGraph, inputSummaryFile, outputSummaryFile string, disableUpstreamRepos bool) (err error) {
func resolveGraphNodes(dependencyGraph *pkggraph.PkgGraph, inputSummaryFile, outputSummaryFile string, disableUpstreamRepos, stopOnFailure bool) (err error) {
// Create the worker environment
cloner := rpmrepocloner.New()
err = cloner.Initialize(*outDir, *tmpDir, *workertar, *existingRpmDir, *usePreviewRepo, *repoFiles)
Expand All @@ -102,6 +104,7 @@ func resolveGraphNodes(dependencyGraph *pkggraph.PkgGraph, inputSummaryFile, out
}
}

cachingSucceeded := true
if strings.TrimSpace(inputSummaryFile) == "" {
// Cache an RPM for each unresolved node in the graph.
fetchedPackages := make(map[string]bool)
Expand All @@ -111,6 +114,7 @@ func resolveGraphNodes(dependencyGraph *pkggraph.PkgGraph, inputSummaryFile, out
// Failing to clone a dependency should not halt a build.
// The build should continue and attempt best effort to build as many packages as possible.
if resolveErr != nil {
cachingSucceeded = false
errorMessage := strings.Builder{}
errorMessage.WriteString(fmt.Sprintf("Failed to resolve all nodes in the graph while resolving '%s'\n", n))
errorMessage.WriteString("Nodes which have this as a dependency:\n")
Expand All @@ -124,9 +128,10 @@ func resolveGraphNodes(dependencyGraph *pkggraph.PkgGraph, inputSummaryFile, out
} else {
// If an input summary file was provided, simply restore the cache using the file.
err = repoutils.RestoreClonedRepoContents(cloner, inputSummaryFile)
if err != nil {
return
}
cachingSucceeded = err == nil
}
if stopOnFailure && !cachingSucceeded {
return fmt.Errorf("failed to cache unresolved nodes")
}

logger.Log.Info("Configuring downloaded RPMs as a local repository")
Expand Down Expand Up @@ -238,7 +243,5 @@ func assignRPMPath(node *pkggraph.PkgNode, outDir string, resolvedPackages []str
func rpmPackageToRPMPath(rpmPackage, outDir string) string {
// Construct the rpm path of the cloned package.
rpmName := fmt.Sprintf("%s.rpm", rpmPackage)
// To calculate the architecture grab the last segment of the resolved name since it will be in the NVRA format.
rpmArch := rpmPackage[strings.LastIndex(rpmPackage, ".")+1:]
return filepath.Join(outDir, rpmArch, rpmName)
return filepath.Join(outDir, rpmName)
}
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,13 @@ const (
)

var (
// Every valid line will be of the form: <package>-<version>.<arch> : <Description>
packageLookupNameMatchRegex = regexp.MustCompile(`^\s*([^:]+(x86_64|aarch64|noarch))\s*:`)
// Every valid line pair will be of the form:
// <package>-<version>.<arch> : <Description>
// Repo : [repo_name]
//
// NOTE: we ignore packages installed in the build environment denoted by "Repo : @System".
packageLookupNameMatchRegex = regexp.MustCompile(`([^:\s]+(x86_64|aarch64|noarch))\s*:[^\n]*\nRepo\s+:\s+[^@]`)
packageNameIndex = 1

// Every valid line will be of the form: <package_name>.<architecture> <version>.<dist> <repo_id>
// For:
Expand Down Expand Up @@ -145,10 +150,17 @@ func (r *RpmRepoCloner) Initialize(destinationDir, tmpDir, workerTar, existingRp
return
}

logger.Log.Info("Initializing local RPM repository")
err = r.initializeMountedChrootRepo(chrootLocalRpmsDir)
if err != nil {
return
// The 'cacheRepoDir' repo is only used during Docker based builds, which don't
// use overlay so cache repo must be explicitly initialized.
// We make sure it's present during all builds to avoid noisy TDNF error messages in the logs.
reposToInitialize := []string{chrootLocalRpmsDir, chrootDownloadDir, cacheRepoDir}
for _, repoToInitialize := range reposToInitialize {
logger.Log.Debugf("Initializing the '%s' repository.", repoToInitialize)
err = r.initializeMountedChrootRepo(repoToInitialize)
if err != nil {
logger.Log.Errorf("Failed while trying to initialize the '%s' repository.", repoToInitialize)
return
}
}

logger.Log.Info("Initializing repository configurations")
Expand Down Expand Up @@ -239,6 +251,11 @@ func appendRepoFile(repoFilePath string, dstFile *os.File) (err error) {
// initializeMountedChrootRepo initializes a local RPM repository inside the
// chroot, creating the repository directory first if it does not yet exist.
func (r *RpmRepoCloner) initializeMountedChrootRepo(repoDir string) (err error) {
	// Directory creation and repo creation both happen inside the chroot,
	// since repoDir is a chroot-relative path.
	createRepo := func() (err error) {
		if mkdirErr := os.MkdirAll(repoDir, os.ModePerm); mkdirErr != nil {
			logger.Log.Errorf("Failed to create repo directory '%s'.", repoDir)
			return mkdirErr
		}
		return rpmrepomanager.CreateRepo(repoDir)
	}
	return r.chroot.Run(createRepo)
}
Expand All @@ -247,12 +264,6 @@ func (r *RpmRepoCloner) initializeMountedChrootRepo(repoDir string) (err error)
// If cloneDeps is set, package dependencies will also be cloned.
// It will automatically resolve packages that describe a provide or file from a package.
func (r *RpmRepoCloner) Clone(cloneDeps bool, packagesToClone ...*pkgjson.PackageVer) (err error) {
const (
strictComparisonOperator = "="
lessThanOrEqualComparisonOperator = "<="
versionSuffixFormat = "-%s"
)

for _, pkg := range packagesToClone {
pkgName := convertPackageVersionToTdnfArg(pkg)

Expand Down Expand Up @@ -317,19 +328,14 @@ func (r *RpmRepoCloner) WhatProvides(pkgVer *pkgjson.PackageVer) (packageNames [
return
}

splitStdout := strings.Split(stdout, "\n")
for _, line := range splitStdout {
matches := packageLookupNameMatchRegex.FindStringSubmatch(line)
if len(matches) == 0 {
continue
}

packageName := matches[1]
if !foundPackages[packageName] {
for _, matches := range packageLookupNameMatchRegex.FindAllStringSubmatch(stdout, -1) {
packageName := matches[packageNameIndex]
if _, found := foundPackages[packageName]; !found {
foundPackages[packageName] = true
logger.Log.Debugf("'%s' is available from package '%s'", pkgVer.Name, packageName)
}
}

return
})
if err != nil {
Expand Down Expand Up @@ -378,7 +384,7 @@ func (r *RpmRepoCloner) ConvertDownloadedPackagesIntoRepo() (err error) {

if !buildpipeline.IsRegularBuild() {
// Docker based build doesn't use overlay so cache repo
// must be explicitely initialized
// must be explicitly initialized
err = r.initializeMountedChrootRepo(cacheRepoDir)
}

Expand Down Expand Up @@ -420,7 +426,7 @@ func (r *RpmRepoCloner) ClonedRepoContents() (repoContents *repocloner.RepoConte
tdnfArgs := []string{
"list",
"ALL",
"--disablerepo=*",
fmt.Sprintf("--disablerepo=%s", allRepoIDs),
fmt.Sprintf("--enablerepo=%s", checkedRepoID),
}
return shell.ExecuteLiveWithCallback(onStdout, logger.Log.Warn, true, "tdnf", tdnfArgs...)
Expand Down Expand Up @@ -466,11 +472,6 @@ func (r *RpmRepoCloner) clonePackage(baseArgs []string, enabledRepoOrder ...stri
enabledRepoArgs = append(enabledRepoArgs, fmt.Sprintf("--enablerepo=%s", repoID))
args := append(baseArgs, enabledRepoArgs...)

// Do not enable the fetcher's own repo as it is only used for listing cloned files
// and will not been initialized until ConvertDownloadedPackagesIntoRepo is called on it
// when all cloning is complete.
args = append(args, fmt.Sprintf("--disablerepo=%s", fetcherRepoID))

if !r.usePreviewRepo {
args = append(args, fmt.Sprintf("--disablerepo=%s", previewRepoID))
}
Expand Down