Skip to content

Commit

Permalink
Updates to logging for artifact uploads (#949)
Browse files Browse the repository at this point in the history
* More detailed logs during artifact upload

* extra logging

* Updates to artifact logging + clarifications around upload size

* Fix linting errors

* Update packages/artifact/src/internal/artifact-client.ts

Co-authored-by: campersau <[email protected]>

Co-authored-by: campersau <[email protected]>
  • Loading branch information
konradpabjan and campersau authored Nov 30, 2021
1 parent e19e426 commit 4df5abb
Show file tree
Hide file tree
Showing 5 changed files with 73 additions and 8 deletions.
27 changes: 26 additions & 1 deletion packages/artifact/src/internal/artifact-client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,10 @@ export class DefaultArtifactClient implements ArtifactClient {
rootDirectory: string,
options?: UploadOptions | undefined
): Promise<UploadResponse> {
core.info(
`Starting artifact upload
For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`
)
checkArtifactName(name)

// Get specification for the files being uploaded
Expand Down Expand Up @@ -103,7 +107,11 @@ export class DefaultArtifactClient implements ArtifactClient {
'No URL provided by the Artifact Service to upload an artifact to'
)
}

core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`)
core.info(
`Container for artifact "${name}" successfully created. Starting upload of file(s)`
)

// Upload each of the files that were found concurrently
const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
Expand All @@ -114,10 +122,27 @@ export class DefaultArtifactClient implements ArtifactClient {

// Update the size of the artifact to indicate we are done uploading
// The uncompressed size is used for display when downloading a zip of the artifact from the UI
core.info(
`File upload process has finished. Finalizing the artifact upload`
)
await uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name)

if (uploadResult.failedItems.length > 0) {
core.info(
`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`
)
} else {
core.info(
`Artifact has been finalized. All files have been successfully uploaded!`
)
}

core.info(
`Finished uploading artifact ${name}. Reported size is ${uploadResult.uploadSize} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`
`
The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes
The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage
Note: The size of downloaded zips can differ significantly from the reported size. For more information see: https://github.com/actions/upload-artifact#zipped-artifact-downloads \r\n`
)

uploadResponse.artifactItems = uploadSpecification.map(
Expand Down
12 changes: 12 additions & 0 deletions packages/artifact/src/internal/contracts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,20 @@ export interface PatchArtifactSizeSuccessResponse {
}

export interface UploadResults {
  /**
   * Bytes actually sent over the wire to the Actions backend service.
   * May be smaller than {@link UploadResults.totalSize} because individual
   * files can be gzip-compressed before transfer to save bandwidth and storage.
   */
  uploadSize: number

  /**
   * Combined, uncompressed size in bytes of every file that was specified
   * for upload (the on-disk size before any gzip compression is applied).
   */
  totalSize: number

  /**
   * Paths of the files that could not be uploaded after all retries.
   * Empty when the upload completed without failures.
   */
  failedItems: string[]
}

Expand Down
22 changes: 21 additions & 1 deletion packages/artifact/src/internal/upload-http-client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -235,19 +235,28 @@ export class UploadHttpClient {
// for creating a new GZip file, an in-memory buffer is used for compression
// with named pipes the file size is reported as zero in that case don't read the file in memory
if (!isFIFO && totalFileSize < 65536) {
core.debug(
`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`
)
const buffer = await createGZipFileInBuffer(parameters.file)

//An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
// An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
// it will not properly get reset to the start of the stream if a chunk upload needs to be retried
let openUploadStream: () => NodeJS.ReadableStream

if (totalFileSize < buffer.byteLength) {
// compression did not help with reducing the size, use a readable stream from the original file for upload
core.debug(
`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`
)
openUploadStream = () => fs.createReadStream(parameters.file)
isGzip = false
uploadFileSize = totalFileSize
} else {
// create a readable stream using a PassThrough stream that is both readable and writable
core.debug(
`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`
)
openUploadStream = () => {
const passThrough = new stream.PassThrough()
passThrough.end(buffer)
Expand Down Expand Up @@ -283,6 +292,9 @@ export class UploadHttpClient {
// the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the
// npm tmp-promise package and this file gets used to create a GZipped file
const tempFile = await tmp.file()
core.debug(
`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`
)

// create a GZip file of the original file being uploaded, the original file should not be modified in any way
uploadFileSize = await createGZipFileOnDisk(
Expand All @@ -295,9 +307,16 @@ export class UploadHttpClient {
// compression did not help with size reduction, use the original file for upload and delete the temp GZip file
// for named pipes totalFileSize is zero, this assumes compression did help
if (!isFIFO && totalFileSize < uploadFileSize) {
core.debug(
`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`
)
uploadFileSize = totalFileSize
uploadFilePath = parameters.file
isGzip = false
} else {
core.debug(
`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`
)
}

let abortFileUpload = false
Expand Down Expand Up @@ -355,6 +374,7 @@ export class UploadHttpClient {

// Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by
// calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about
core.debug(`deleting temporary gzip file ${tempFile.path}`)
await tempFile.cleanup()

return {
Expand Down
5 changes: 2 additions & 3 deletions packages/artifact/src/internal/upload-specification.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import * as fs from 'fs'
import {debug} from '@actions/core'
import {join, normalize, resolve} from 'path'
import {checkArtifactName, checkArtifactFilePath} from './utils'
import {checkArtifactFilePath} from './utils'

export interface UploadSpecification {
absoluteFilePath: string
Expand All @@ -19,8 +19,7 @@ export function getUploadSpecification(
rootDirectory: string,
artifactFiles: string[]
): UploadSpecification[] {
checkArtifactName(artifactName)

// artifact name was checked earlier on, no need to check again
const specifications: UploadSpecification[] = []

if (!fs.existsSync(rootDirectory)) {
Expand Down
15 changes: 12 additions & 3 deletions packages/artifact/src/internal/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ export function getExponentialRetryTimeInMilliseconds(
const maxTime = minTime * getRetryMultiplier()

// returns a random number between the minTime (inclusive) and the maxTime (exclusive)
return Math.random() * (maxTime - minTime) + minTime
return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
}

/**
Expand Down Expand Up @@ -263,10 +263,16 @@ export function checkArtifactName(name: string): void {
for (const invalidChar of invalidArtifactNameCharacters) {
if (name.includes(invalidChar)) {
throw new Error(
`Artifact name is not valid: ${name}. Contains character: "${invalidChar}". Invalid artifact name characters include: ${invalidArtifactNameCharacters.toString()}.`
`Artifact name is not valid: ${name}. Contains the following character: "${invalidChar}".
Invalid characters include: ${invalidArtifactNameCharacters.toString()}.
These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`
)
}
}

info(`Artifact name is valid!`)
}

/**
Expand All @@ -280,7 +286,10 @@ export function checkArtifactFilePath(path: string): void {
for (const invalidChar of invalidArtifactFilePathCharacters) {
if (path.includes(invalidChar)) {
throw new Error(
`Artifact path is not valid: ${path}. Contains character: "${invalidChar}". Invalid characters include: ${invalidArtifactFilePathCharacters.toString()}.`
`Artifact path is not valid: ${path}. Contains the following character: "${invalidChar}". Invalid characters include: ${invalidArtifactFilePathCharacters.toString()}.
The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.
`
)
}
}
Expand Down

0 comments on commit 4df5abb

Please sign in to comment.