diff --git a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/ByteStreamBuildEventArtifactUploader.java b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/ByteStreamBuildEventArtifactUploader.java index 3f3308c5e99..71e12f02761 100644 --- a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/ByteStreamBuildEventArtifactUploader.java +++ b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/ByteStreamBuildEventArtifactUploader.java @@ -13,46 +13,199 @@ // limitations under the License. package com.google.devtools.build.lib.remote; +import build.bazel.remote.execution.v2.Digest; +import build.bazel.remote.execution.v2.RequestMetadata; import com.google.common.base.Preconditions; -import com.google.common.base.Strings; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; +import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.devtools.build.lib.buildeventstream.BuildEvent.LocalFile; import com.google.devtools.build.lib.buildeventstream.BuildEventArtifactUploader; import com.google.devtools.build.lib.buildeventstream.PathConverter; +import com.google.devtools.build.lib.collect.ImmutableIterable; +import com.google.devtools.build.lib.remote.common.MissingDigestsFinder; +import com.google.devtools.build.lib.remote.common.RemoteActionExecutionContext; +import com.google.devtools.build.lib.remote.util.DigestUtil; +import com.google.devtools.build.lib.remote.util.TracingMetadataUtils; import com.google.devtools.build.lib.vfs.Path; -import com.google.devtools.remoteexecution.v1test.Digest; -import io.grpc.Context; +import io.netty.util.AbstractReferenceCounted; +import io.netty.util.ReferenceCounted; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicBoolean; -import javax.annotation.Nullable; -/** - * A {@link BuildEventArtifactUploader} backed by {@link ByteStreamUploader}. - */ -class ByteStreamBuildEventArtifactUploader implements BuildEventArtifactUploader { +/** A {@link BuildEventArtifactUploader} backed by {@link ByteStreamUploader}. 
 */
+class ByteStreamBuildEventArtifactUploader extends AbstractReferenceCounted
+    implements BuildEventArtifactUploader {
 
-  private final Context ctx;
+  private final ListeningExecutorService uploadExecutor;
+  private final String buildRequestId;
+  private final String commandId;
   private final ByteStreamUploader uploader;
   private final String remoteServerInstanceName;
+  private final MissingDigestsFinder missingDigestsFinder;
   private final AtomicBoolean shutdown = new AtomicBoolean();
 
   ByteStreamBuildEventArtifactUploader(
-      ByteStreamUploader uploader, String remoteServerName, Context ctx,
-      @Nullable String remoteInstanceName) {
+      ByteStreamUploader uploader,
+      MissingDigestsFinder missingDigestsFinder,
+      String remoteServerInstanceName,
+      String buildRequestId,
+      String commandId,
+      int maxUploadThreads) {
     this.uploader = Preconditions.checkNotNull(uploader);
-    String remoteServerInstanceName = Preconditions.checkNotNull(remoteServerName);
-    if (!Strings.isNullOrEmpty(remoteInstanceName)) {
-      remoteServerInstanceName += "/" + remoteInstanceName;
-    }
-    this.ctx = ctx;
+    this.buildRequestId = buildRequestId;
+    this.commandId = commandId;
     this.remoteServerInstanceName = remoteServerInstanceName;
+    // Limit the maximum number of threads to 1000 (chosen arbitrarily).
+    this.uploadExecutor =
+        MoreExecutors.listeningDecorator(
+            Executors.newFixedThreadPool(
+                Math.min(maxUploadThreads, 1000),
+                new ThreadFactoryBuilder().setNameFormat("bes-artifact-uploader-%d").build()));
+    this.missingDigestsFinder = missingDigestsFinder;
+  }
+
+  /** Returns {@code true} if Bazel knows that the file is stored on a remote system. */
+  private static boolean isRemoteFile(Path file) {
+    return file.getFileSystem() instanceof RemoteActionFileSystem
+        && ((RemoteActionFileSystem) file.getFileSystem()).isRemote(file);
+  }
+
+  private static final class PathMetadata {
+
+    private final Path path;
+    private final Digest digest;
+    private final boolean directory;
+    private final boolean remote;
+
+    PathMetadata(Path path, Digest digest, boolean directory, boolean remote) {
+      this.path = path;
+      this.digest = digest;
+      this.directory = directory;
+      this.remote = remote;
+    }
+
+    public Path getPath() {
+      return path;
+    }
+
+    public Digest getDigest() {
+      return digest;
+    }
+
+    public boolean isDirectory() {
+      return directory;
+    }
+
+    public boolean isRemote() {
+      return remote;
+    }
+  }
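For context, this is roughly how a caller would wire the uploader up. A minimal sketch; the variable names and the thread count are illustrative assumptions, not part of this change:

  ByteStreamBuildEventArtifactUploader besUploader =
      new ByteStreamBuildEventArtifactUploader(
          byteStreamUploader,         // ByteStreamUploader that performs the blob uploads
          cacheClient,                // any MissingDigestsFinder, e.g. a gRPC remote cache client
          "example.com/instance",     // remoteServerInstanceName, used when rendering blob URIs
          buildRequestId,
          commandId,
          /* maxUploadThreads= */ 100); // capped internally at 1000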
+  /**
+   * Collects metadata for {@code file}. Depending on the underlying filesystem used, this method
+   * might do I/O.
+   */
+  private static PathMetadata readPathMetadata(Path file) throws IOException {
+    if (file.isDirectory()) {
+      return new PathMetadata(file, /* digest= */ null, /* directory= */ true, /* remote= */ false);
+    }
+    DigestUtil digestUtil = new DigestUtil(file.getFileSystem().getDigestFunction());
+    Digest digest = digestUtil.compute(file);
+    return new PathMetadata(file, digest, /* directory= */ false, isRemoteFile(file));
+  }
+
+  private static List<PathMetadata> processQueryResult(
+      ImmutableSet<Digest> missingDigests, List<PathMetadata> filesToQuery) {
+    List<PathMetadata> allPaths = new ArrayList<>(filesToQuery.size());
+    for (PathMetadata file : filesToQuery) {
+      if (missingDigests.contains(file.getDigest())) {
+        allPaths.add(file);
+      } else {
+        PathMetadata remotePathMetadata =
+            new PathMetadata(
+                file.getPath(), file.getDigest(), file.isDirectory(), /* remote= */ true);
+        allPaths.add(remotePathMetadata);
+      }
+    }
+    return allPaths;
+  }
+
+  /**
+   * For files where {@link PathMetadata#isRemote()} returns {@code false}, this method checks
+   * whether the remote cache already contains the file. If so, {@link PathMetadata#isRemote()} is
+   * set to {@code true}.
+   */
+  private ListenableFuture<ImmutableIterable<PathMetadata>> queryRemoteCache(
+      ImmutableList<ListenableFuture<PathMetadata>> allPaths) throws Exception {
+    RequestMetadata metadata =
+        TracingMetadataUtils.buildMetadata(buildRequestId, commandId, "bes-upload", null);
+    RemoteActionExecutionContext context = RemoteActionExecutionContext.create(metadata);
+
+    List<PathMetadata> knownRemotePaths = new ArrayList<>(allPaths.size());
+    List<PathMetadata> filesToQuery = new ArrayList<>();
+    Set<Digest> digestsToQuery = new HashSet<>();
+    for (ListenableFuture<PathMetadata> pathMetadataFuture : allPaths) {
+      // This line is guaranteed to not block, as this code is only called after all futures in
+      // allPaths have completed.
+      PathMetadata pathMetadata = pathMetadataFuture.get();
+      if (pathMetadata.isRemote() || pathMetadata.isDirectory()) {
+        knownRemotePaths.add(pathMetadata);
+      } else {
+        filesToQuery.add(pathMetadata);
+        digestsToQuery.add(pathMetadata.getDigest());
+      }
+    }
+    if (digestsToQuery.isEmpty()) {
+      return Futures.immediateFuture(ImmutableIterable.from(knownRemotePaths));
+    }
+    return Futures.transform(
+        missingDigestsFinder.findMissingDigests(context, digestsToQuery),
+        (missingDigests) -> {
+          List<PathMetadata> filesToQueryUpdated = processQueryResult(missingDigests, filesToQuery);
+          return ImmutableIterable.from(Iterables.concat(knownRemotePaths, filesToQueryUpdated));
+        },
+        MoreExecutors.directExecutor());
+  }
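The query step is a partition-and-merge over digests: paths already known to be remote or that are directories skip the findMissingDigests round trip, and only digests reported missing stay marked local. A toy illustration of processQueryResult under that assumption (digestA, digestB, and the two-element list are hypothetical):

  // Suppose files = [a, b], where a has digestA and b has digestB.
  ImmutableSet<Digest> missing = ImmutableSet.of(digestA); // the server lacks only digestA
  List<PathMetadata> updated = processQueryResult(missing, files);
  // a keeps isRemote() == false and will be uploaded by uploadLocalFiles;
  // b is rewritten with isRemote() == true and is skipped during upload.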
+
+  /**
+   * Uploads any files from {@code allPaths} where {@link PathMetadata#isRemote()} returns {@code
+   * false}.
+   */
+  private ListenableFuture<List<PathMetadata>> uploadLocalFiles(
+      ImmutableIterable<PathMetadata> allPaths) {
+    RequestMetadata metadata =
+        TracingMetadataUtils.buildMetadata(buildRequestId, commandId, "bes-upload", null);
+    RemoteActionExecutionContext context = RemoteActionExecutionContext.create(metadata);
+
+    ImmutableList.Builder<ListenableFuture<PathMetadata>> allPathsUploaded =
+        ImmutableList.builder();
+    for (PathMetadata path : allPaths) {
+      if (!path.isRemote() && !path.isDirectory()) {
+        Chunker chunker =
+            Chunker.builder().setInput(path.getDigest().getSizeBytes(), path.getPath()).build();
+        final ListenableFuture<Void> upload;
+        upload =
+            uploader.uploadBlobAsync(context, path.getDigest(), chunker, /* forceUpload= */ false);
+        allPathsUploaded.add(Futures.transform(upload, unused -> path, uploadExecutor));
+      } else {
+        allPathsUploaded.add(Futures.immediateFuture(path));
+      }
+    }
+    return Futures.allAsList(allPathsUploaded.build());
+  }
 
   @Override
@@ -60,52 +213,73 @@ public ListenableFuture<PathConverter> upload(Map<Path, LocalFile> files) {
     if (files.isEmpty()) {
       return Futures.immediateFuture(PathConverter.NO_CONVERSION);
     }
-    List<ListenableFuture<PathDigestPair>> uploads = new ArrayList<>(files.size());
-
-    Context prevCtx = ctx.attach();
-    try {
-      for (Path file : files.keySet()) {
-        Chunker chunker = new Chunker(file);
-        Digest digest = chunker.digest();
-        ListenableFuture<PathDigestPair> upload =
-            Futures.transform(
-                uploader.uploadBlobAsync(chunker, /*forceUpload=*/false),
-                unused -> new PathDigestPair(file, digest),
-                MoreExecutors.directExecutor());
-        uploads.add(upload);
-      }
-
-      return Futures.transform(Futures.allAsList(uploads),
-          (uploadsDone) -> new PathConverterImpl(remoteServerInstanceName, uploadsDone),
-          MoreExecutors.directExecutor());
-    } catch (IOException e) {
-      return Futures.immediateFailedFuture(e);
-    } finally {
-      ctx.detach(prevCtx);
+    // Collect metadata about each path
+    ImmutableList.Builder<ListenableFuture<PathMetadata>> allPathMetadata = ImmutableList.builder();
+    for (Path file : files.keySet()) {
+      ListenableFuture<PathMetadata> pathMetadata =
+          uploadExecutor.submit(() -> readPathMetadata(file));
+      allPathMetadata.add(pathMetadata);
     }
+
+    // Query the remote cache to check which files need to be uploaded
+    ImmutableList<ListenableFuture<PathMetadata>> allPaths = allPathMetadata.build();
+    ListenableFuture<ImmutableIterable<PathMetadata>> allPathsUpdatedMetadata =
+        Futures.whenAllSucceed(allPaths)
+            .callAsync(() -> queryRemoteCache(allPaths), MoreExecutors.directExecutor());
+
+    // Upload local files (if any)
+    ListenableFuture<List<PathMetadata>> allPathsMetadata =
+        Futures.transformAsync(
+            allPathsUpdatedMetadata,
+            (paths) -> uploadLocalFiles(paths),
+            MoreExecutors.directExecutor());
+
+    return Futures.transform(
+        allPathsMetadata,
+        (metadata) -> new PathConverterImpl(remoteServerInstanceName, metadata),
+        MoreExecutors.directExecutor());
   }
 
   @Override
-  public void shutdown() {
+  public boolean mayBeSlow() {
+    return true;
+  }
+
+  @Override
+  protected void deallocate() {
     if (shutdown.getAndSet(true)) {
       return;
     }
     uploader.release();
+    uploadExecutor.shutdown();
+  }
+
+  @Override
+  public ReferenceCounted touch(Object o) {
+    return this;
+  }
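A minimal sketch of consuming the upload pipeline from the build event service side; besUploader, localFiles, and someUploadedPath are placeholders, and the URI text is produced by PathConverterImpl.apply below from the server instance name plus the blob's digest hash and size:

  ListenableFuture<PathConverter> converterFuture = besUploader.upload(localFiles);
  PathConverter converter = converterFuture.get();
  String uri = converter.apply(someUploadedPath); // URI derived from instance name, hash, size
  // Directories are never uploaded (digest == null), so apply() returns null for them.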
 
   private static class PathConverterImpl implements PathConverter {
 
     private final String remoteServerInstanceName;
     private final Map<Path, Digest> pathToDigest;
+    private final Set<Path> skippedPaths;
 
-    PathConverterImpl(String remoteServerInstanceName,
-        List<PathDigestPair> uploads) {
+    PathConverterImpl(String remoteServerInstanceName, List<PathMetadata> uploads) {
       Preconditions.checkNotNull(uploads);
       this.remoteServerInstanceName = remoteServerInstanceName;
       pathToDigest = new HashMap<>(uploads.size());
-      for (PathDigestPair pair : uploads) {
-        pathToDigest.put(pair.getPath(), pair.getDigest());
+      ImmutableSet.Builder<Path> skippedPaths = ImmutableSet.builder();
+      for (PathMetadata pair : uploads) {
+        Path path = pair.getPath();
+        Digest digest = pair.getDigest();
+        if (digest != null) {
+          pathToDigest.put(path, digest);
+        } else {
+          skippedPaths.add(path);
+        }
       }
+      this.skippedPaths = skippedPaths.build();
     }
 
     @Override
@@ -113,6 +287,9 @@ public String apply(Path path) {
       Preconditions.checkNotNull(path);
       Digest digest = pathToDigest.get(path);
       if (digest == null) {
+        if (skippedPaths.contains(path)) {
+          return null;
+        }
         // It's a programming error to reference a file that has not been uploaded.
         throw new IllegalStateException(
             String.format("Illegal file reference: '%s'", path.getPathString()));
@@ -122,23 +299,4 @@ public String apply(Path path) {
           remoteServerInstanceName, digest.getHash(), digest.getSizeBytes());
     }
   }
-
-  private static class PathDigestPair {
-
-    private final Path path;
-    private final Digest digest;
-
-    PathDigestPair(Path path, Digest digest) {
-      this.path = path;
-      this.digest = digest;
-    }
-
-    public Path getPath() {
-      return path;
-    }
-
-    public Digest getDigest() {
-      return digest;
-    }
-  }
 }
diff --git a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteActionInputFetcher.java b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteActionInputFetcher.java
index 7f3960a2ae2..6817e924432 100644
--- a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteActionInputFetcher.java
+++ b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteActionInputFetcher.java
@@ -14,11 +14,11 @@ package com.google.devtools.build.lib.remote;
 
 import build.bazel.remote.execution.v2.Digest;
+import build.bazel.remote.execution.v2.RequestMetadata;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableSet;
-import com.google.common.util.concurrent.FutureCallback;
-import com.google.common.util.concurrent.Futures;
+import com.google.common.flogger.GoogleLogger;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.MoreExecutors;
 import com.google.devtools.build.lib.actions.ActionInput;
@@ -26,22 +26,23 @@
 import com.google.devtools.build.lib.actions.FileArtifactValue;
 import com.google.devtools.build.lib.actions.MetadataProvider;
 import com.google.devtools.build.lib.actions.cache.VirtualActionInput;
+import com.google.devtools.build.lib.actions.cache.VirtualActionInput.EmptyActionInput;
 import com.google.devtools.build.lib.profiler.Profiler;
+import com.google.devtools.build.lib.profiler.ProfilerTask;
 import com.google.devtools.build.lib.profiler.SilentCloseable;
+import com.google.devtools.build.lib.remote.common.CacheNotFoundException;
+import com.google.devtools.build.lib.remote.common.RemoteActionExecutionContext;
+import com.google.devtools.build.lib.remote.util.AsyncTaskCache;
 import com.google.devtools.build.lib.remote.util.DigestUtil;
+import com.google.devtools.build.lib.remote.util.RxFutures;
+import com.google.devtools.build.lib.remote.util.TracingMetadataUtils;
 import com.google.devtools.build.lib.remote.util.Utils;
+import com.google.devtools.build.lib.sandbox.SandboxHelpers;
 import com.google.devtools.build.lib.vfs.Path;
-import io.grpc.Context;
+import io.reactivex.rxjava3.core.Completable;
 import java.io.IOException;
-import java.io.OutputStream;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ExecutionException;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-import javax.annotation.concurrent.GuardedBy;
 
 /**
  * Stages output files that are stored remotely to the local filesystem.
@@ -51,26 +52,22 @@
  */
 class RemoteActionInputFetcher implements ActionInputPrefetcher {
 
-  private static final Logger logger = Logger.getLogger(RemoteActionInputFetcher.class.getName());
+  private static final GoogleLogger logger = GoogleLogger.forEnclosingClass();
 
+  private final AsyncTaskCache.NoResult<Path> downloadCache = AsyncTaskCache.NoResult.create();
   private final Object lock = new Object();
 
-  /** Set of successfully downloaded output files. */
-  @GuardedBy("lock")
-  private final Set<Path> downloadedPaths = new HashSet<>();
-
-  @VisibleForTesting
-  @GuardedBy("lock")
-  final Map<Path, ListenableFuture<Void>> downloadsInProgress = new HashMap<>();
-
-  private final AbstractRemoteActionCache remoteCache;
+  private final String buildRequestId;
+  private final String commandId;
+  private final RemoteCache remoteCache;
   private final Path execRoot;
-  private final Context ctx;
 
-  RemoteActionInputFetcher(AbstractRemoteActionCache remoteCache, Path execRoot, Context ctx) {
+  RemoteActionInputFetcher(
+      String buildRequestId, String commandId, RemoteCache remoteCache, Path execRoot) {
+    this.buildRequestId = Preconditions.checkNotNull(buildRequestId);
+    this.commandId = Preconditions.checkNotNull(commandId);
     this.remoteCache = Preconditions.checkNotNull(remoteCache);
     this.execRoot = Preconditions.checkNotNull(execRoot);
-    this.ctx = Preconditions.checkNotNull(ctx);
   }
 
   /**
@@ -86,15 +83,15 @@ class RemoteActionInputFetcher implements ActionInputPrefetcher {
   public void prefetchFiles(
       Iterable<? extends ActionInput> inputs, MetadataProvider metadataProvider)
       throws IOException, InterruptedException {
-    try (SilentCloseable c = Profiler.instance().profile("Remote.fetchInputs")) {
+    try (SilentCloseable c =
+        Profiler.instance().profile(ProfilerTask.REMOTE_DOWNLOAD, "stage remote inputs")) {
       Map<Path, ListenableFuture<Void>> downloadsToWaitFor = new HashMap<>();
       for (ActionInput input : inputs) {
         if (input instanceof VirtualActionInput) {
-          VirtualActionInput paramFileActionInput = (VirtualActionInput) input;
-          Path outputPath = execRoot.getRelative(paramFileActionInput.getExecPath());
-          outputPath.getParentDirectory().createDirectoryAndParents();
-          try (OutputStream out = outputPath.getOutputStream()) {
-            paramFileActionInput.writeTo(out);
+          if (!(input instanceof EmptyActionInput)) {
+            VirtualActionInput virtualActionInput = (VirtualActionInput) input;
+            Path outputPath = execRoot.getRelative(virtualActionInput.getExecPath());
+            SandboxHelpers.atomicallyWriteVirtualInput(virtualActionInput, outputPath, ".remote");
           }
         } else {
           FileArtifactValue metadata = metadataProvider.getMetadata(input);
@@ -104,116 +101,89 @@ public void prefetchFiles(
           Path path = execRoot.getRelative(input.getExecPath());
           synchronized (lock) {
-            if (downloadedPaths.contains(path)) {
-              continue;
-            }
-            ListenableFuture<Void> download = downloadFileAsync(path, metadata);
-            downloadsToWaitFor.putIfAbsent(path, download);
+            downloadsToWaitFor.computeIfAbsent(
+                path, key -> RxFutures.toListenableFuture(downloadFileAsync(path, metadata)));
           }
         }
       }
 
-      IOException ioException = null;
-      InterruptedException interruptedException = null;
-      for (Map.Entry<Path, ListenableFuture<Void>> entry : downloadsToWaitFor.entrySet()) {
-        try {
-          Utils.getFromFuture(entry.getValue());
-        } catch (IOException e) {
-          if (e instanceof CacheNotFoundException) {
-            e =
+      try {
+        RemoteCache.waitForBulkTransfer(
+            downloadsToWaitFor.values(), /* cancelRemainingOnInterrupt= */ true);
+      } catch (BulkTransferException e) {
+        if (e.onlyCausedByCacheNotFoundException()) {
+          BulkTransferException bulkAnnotatedException = new BulkTransferException();
+          for (Throwable t : e.getSuppressed()) {
+            IOException annotatedException =
                 new IOException(
                     String.format(
                         "Failed to fetch file with hash '%s' because it does not exist remotely."
-                            + " --experimental_remote_outputs=minimal does not work if"
+                            + " --remote_download_outputs=minimal does not work if"
                            + " your remote cache evicts files during builds.",
-                        ((CacheNotFoundException) e).getMissingDigest().getHash()));
+                        ((CacheNotFoundException) t).getMissingDigest().getHash()));
+            bulkAnnotatedException.add(annotatedException);
           }
-          ioException = ioException == null ? e : ioException;
-        } catch (InterruptedException e) {
-          interruptedException = interruptedException == null ? e : interruptedException;
+          e = bulkAnnotatedException;
         }
-      }
-
-      if (interruptedException != null) {
-        throw interruptedException;
-      }
-      if (ioException != null) {
-        throw ioException;
+        throw e;
       }
     }
   }
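The hand-rolled downloadedPaths set and downloadsInProgress map are replaced by AsyncTaskCache.NoResult, which de-duplicates concurrent downloads per path. A rough sketch of the contract this code relies on (subscription details elided; treat the exact semantics as an assumption):

  // Two prefetches racing on the same path share one underlying transfer:
  Completable first = downloadCache.executeIfNot(path, download);
  Completable second = downloadCache.executeIfNot(path, download);
  // The download runs at most once; after it completes, `path` appears in
  // downloadCache.getFinishedTasks() and further executeIfNot calls complete immediately.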
 
   ImmutableSet<Path> downloadedFiles() {
-    synchronized (lock) {
-      return ImmutableSet.copyOf(downloadedPaths);
-    }
+    return downloadCache.getFinishedTasks();
+  }
+
+  ImmutableSet<Path> downloadsInProgress() {
+    return downloadCache.getInProgressTasks();
+  }
+
+  @VisibleForTesting
+  AsyncTaskCache.NoResult<Path> getDownloadCache() {
+    return downloadCache;
+  }
 
   void downloadFile(Path path, FileArtifactValue metadata)
       throws IOException, InterruptedException {
+    Utils.getFromFuture(RxFutures.toListenableFuture(downloadFileAsync(path, metadata)));
+  }
+
+  private Completable downloadFileAsync(Path path, FileArtifactValue metadata) {
+    Completable download =
+        RxFutures.toCompletable(
+                () -> {
+                  RequestMetadata requestMetadata =
+                      TracingMetadataUtils.buildMetadata(
+                          buildRequestId, commandId, metadata.getActionId(), null);
+                  RemoteActionExecutionContext context =
+                      RemoteActionExecutionContext.create(requestMetadata);
+
+                  Digest digest = DigestUtil.buildDigest(metadata.getDigest(), metadata.getSize());
+
+                  return remoteCache.downloadFile(context, path, digest);
+                },
+                MoreExecutors.directExecutor())
+            .doOnComplete(() -> finalizeDownload(path))
+            .doOnError(error -> deletePartialDownload(path))
+            .doOnDispose(() -> deletePartialDownload(path));
+
+    return downloadCache.executeIfNot(path, download);
+  }
+
+  private void finalizeDownload(Path path) {
     try {
-      downloadFileAsync(path, metadata).get();
-    } catch (ExecutionException e) {
-      if (e.getCause() instanceof IOException) {
-        throw (IOException) e.getCause();
-      }
-      throw new IOException(e.getCause());
+      path.chmod(0755);
+    } catch (IOException e) {
+      logger.atWarning().withCause(e).log("Failed to chmod 755 on %s", path);
     }
   }
 
-  private ListenableFuture<Void> downloadFileAsync(Path path, FileArtifactValue metadata)
-      throws IOException {
-    synchronized (lock) {
-      if (downloadedPaths.contains(path)) {
-        return Futures.immediateFuture(null);
-      }
-
-      ListenableFuture<Void> download = downloadsInProgress.get(path);
-      if (download == null) {
-        Context prevCtx = ctx.attach();
-        try {
-          Digest digest = DigestUtil.buildDigest(metadata.getDigest(), metadata.getSize());
-          download = remoteCache.downloadFile(path, digest);
-          downloadsInProgress.put(path, download);
-          Futures.addCallback(
-              download,
-              new FutureCallback<Void>() {
-                @Override
-                public void onSuccess(Void v) {
-                  synchronized (lock) {
-                    downloadsInProgress.remove(path);
-                    downloadedPaths.add(path);
- } - - try { - path.setReadable(true); - path.setExecutable(true); - } catch (IOException e) { - logger.log(Level.WARNING, "Failed to chmod +xr on " + path, e); - } - } - - @Override - public void onFailure(Throwable t) { - synchronized (lock) { - downloadsInProgress.remove(path); - } - try { - path.delete(); - } catch (IOException e) { - logger.log( - Level.WARNING, - "Failed to delete output file after incomplete download: " + path, - e); - } - } - }, - MoreExecutors.directExecutor()); - } finally { - ctx.detach(prevCtx); - } - } - return download; + private void deletePartialDownload(Path path) { + try { + path.delete(); + } catch (IOException e) { + logger.atWarning().withCause(e).log( + "Failed to delete output file after incomplete download: %s", path); } } } diff --git a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteRepositoryRemoteExecutor.java b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteRepositoryRemoteExecutor.java index ba889b9c135..54e200b6de7 100644 --- a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteRepositoryRemoteExecutor.java +++ b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteRepositoryRemoteExecutor.java @@ -13,8 +13,6 @@ // limitations under the License. package com.google.devtools.build.lib.remote; -import static com.google.devtools.build.lib.remote.util.Utils.buildAction; - import build.bazel.remote.execution.v2.Action; import build.bazel.remote.execution.v2.ActionResult; import build.bazel.remote.execution.v2.Command; @@ -46,7 +44,6 @@ import java.io.IOException; import java.time.Duration; import java.util.Map; -import java.util.TreeSet; /** The remote package's implementation of {@link RepositoryRemoteExecutor}. */ public class RemoteRepositoryRemoteExecutor implements RepositoryRemoteExecutor { @@ -113,25 +110,14 @@ public ExecutionResult execute( RemoteActionExecutionContext context = RemoteActionExecutionContext.create(metadata); Platform platform = PlatformUtils.buildPlatformProto(executionProperties); - - Command.Builder commandBuilder = Command.newBuilder().addAllArguments(arguments); - // Sorting the environment pairs by variable name. 
- TreeSet variables = new TreeSet<>(environment.keySet()); - for (String var : variables) { - commandBuilder.addEnvironmentVariablesBuilder().setName(var).setValue(environment.get(var)); - } - if (platform != null) { - commandBuilder.setPlatform(platform); - } - if (workingDirectory != null) { - commandBuilder.setWorkingDirectory(workingDirectory); - } - - Command command = commandBuilder.build(); + Command command = + RemoteSpawnRunner.buildCommand( + /* outputs= */ ImmutableList.of(), arguments, environment, platform, workingDirectory); Digest commandHash = digestUtil.compute(command); MerkleTree merkleTree = MerkleTree.build(inputFiles, digestUtil); Action action = - buildAction(commandHash, merkleTree.getRootDigest(), platform, timeout, acceptCached); + RemoteSpawnRunner.buildAction( + commandHash, merkleTree.getRootDigest(), platform, timeout, acceptCached); Digest actionDigest = digestUtil.compute(action); ActionKey actionKey = new ActionKey(actionDigest); ActionResult actionResult; diff --git a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteServerCapabilities.java b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteServerCapabilities.java index 332fc20d47a..8e99ec7dc91 100644 --- a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteServerCapabilities.java +++ b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteServerCapabilities.java @@ -20,12 +20,17 @@ import build.bazel.remote.execution.v2.DigestFunction; import build.bazel.remote.execution.v2.ExecutionCapabilities; import build.bazel.remote.execution.v2.GetCapabilitiesRequest; +import build.bazel.remote.execution.v2.PriorityCapabilities; +import build.bazel.remote.execution.v2.PriorityCapabilities.PriorityRange; +import build.bazel.remote.execution.v2.RequestMetadata; import build.bazel.remote.execution.v2.ServerCapabilities; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; +import com.google.devtools.build.lib.remote.common.RemoteActionExecutionContext; +import com.google.devtools.build.lib.remote.options.RemoteOptions; import com.google.devtools.build.lib.remote.util.TracingMetadataUtils; import io.grpc.CallCredentials; -import io.grpc.Context; +import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; @@ -53,26 +58,30 @@ public RemoteServerCapabilities( this.retrier = retrier; } - private CapabilitiesBlockingStub capabilitiesBlockingStub() { + private CapabilitiesBlockingStub capabilitiesBlockingStub(RemoteActionExecutionContext context) { return CapabilitiesGrpc.newBlockingStub(channel) - .withInterceptors(TracingMetadataUtils.attachMetadataFromContextInterceptor()) + .withInterceptors( + TracingMetadataUtils.attachMetadataInterceptor(context.getRequestMetadata())) .withCallCredentials(callCredentials) .withDeadlineAfter(callTimeoutSecs, TimeUnit.SECONDS); } public ServerCapabilities get(String buildRequestId, String commandId) throws IOException, InterruptedException { - Context withMetadata = - TracingMetadataUtils.contextWithMetadata(buildRequestId, commandId, "capabilities"); - Context previous = withMetadata.attach(); + RequestMetadata metadata = + TracingMetadataUtils.buildMetadata(buildRequestId, commandId, "capabilities", null); + RemoteActionExecutionContext context = RemoteActionExecutionContext.create(metadata); try { 
GetCapabilitiesRequest request = instanceName == null ? GetCapabilitiesRequest.getDefaultInstance() : GetCapabilitiesRequest.newBuilder().setInstanceName(instanceName).build(); - return retrier.execute(() -> capabilitiesBlockingStub().getCapabilities(request)); - } finally { - withMetadata.detach(previous); + return retrier.execute(() -> capabilitiesBlockingStub(context).getCapabilities(request)); + } catch (StatusRuntimeException e) { + if (e.getCause() instanceof IOException) { + throw (IOException) e.getCause(); + } + throw new IOException(e); } } @@ -116,12 +125,54 @@ public List getErrors() { } } + private static void checkPriorityInRange( + int priority, + String optionName, + PriorityCapabilities prCap, + ClientServerCompatibilityStatus.Builder result) { + if (priority != 0) { + boolean found = false; + StringBuilder rangeBuilder = new StringBuilder(); + for (PriorityRange pr : prCap.getPrioritiesList()) { + rangeBuilder.append(String.format("%d-%d,", pr.getMinPriority(), pr.getMaxPriority())); + if (pr.getMinPriority() <= priority && priority <= pr.getMaxPriority()) { + found = true; + break; + } + } + if (!found) { + String range = rangeBuilder.toString(); + if (!range.isEmpty()) { + range = range.substring(0, range.length() - 1); + } + result.addError( + String.format( + "--%s %d is outside of server supported range %s.", optionName, priority, range)); + } + } + } + + public enum ServerCapabilitiesRequirement { + NONE, + CACHE, + EXECUTION, + EXECUTION_AND_CACHE, + } + /** Compare the remote server capabilities with those requested by current execution. */ public static ClientServerCompatibilityStatus checkClientServerCompatibility( - ServerCapabilities capabilities, RemoteOptions remoteOptions, DigestFunction digestFunction) { + ServerCapabilities capabilities, + RemoteOptions remoteOptions, + DigestFunction.Value digestFunction, + ServerCapabilitiesRequirement requirement) { ClientServerCompatibilityStatus.Builder result = new ClientServerCompatibilityStatus.Builder(); - boolean remoteExecution = !Strings.isNullOrEmpty(remoteOptions.remoteExecutor); - if (!remoteExecution && Strings.isNullOrEmpty(remoteOptions.remoteCache)) { + boolean shouldCheckExecutionCapabilities = + (requirement == ServerCapabilitiesRequirement.EXECUTION + || requirement == ServerCapabilitiesRequirement.EXECUTION_AND_CACHE); + boolean shouldCheckCacheCapabilities = + (requirement == ServerCapabilitiesRequirement.CACHE + || requirement == ServerCapabilitiesRequirement.EXECUTION_AND_CACHE); + if (!(shouldCheckCacheCapabilities || shouldCheckExecutionCapabilities)) { return result.build(); } @@ -135,26 +186,18 @@ public static ClientServerCompatibilityStatus checkClientServerCompatibility( result.addWarning(st.getMessage()); } - // Check cache digest function. - CacheCapabilities cacheCap = capabilities.getCacheCapabilities(); - if (!cacheCap.getDigestFunctionList().contains(digestFunction)) { - result.addError( - String.format( - "Cannot use hash function %s with remote cache. " - + "Server supported functions are: %s", - digestFunction, cacheCap.getDigestFunctionList())); - } - - if (remoteExecution) { + if (shouldCheckExecutionCapabilities) { // Check remote execution is enabled. 
ExecutionCapabilities execCap = capabilities.getExecutionCapabilities(); if (!execCap.getExecEnabled()) { - result.addError("Remote execution is not supported by the remote server."); + result.addError( + "Remote execution is not supported by the remote server, or the current " + + "account is not authorized to use remote execution."); return result.build(); // No point checking other execution fields. } // Check execution digest function. - if (execCap.getDigestFunction() == DigestFunction.UNKNOWN) { + if (execCap.getDigestFunction() == DigestFunction.Value.UNKNOWN) { // Server side error -- this is not supposed to happen. result.addError("Remote server error: UNKNOWN execution digest function."); } @@ -166,23 +209,52 @@ public static ClientServerCompatibilityStatus checkClientServerCompatibility( digestFunction, execCap.getDigestFunction())); } - // Check updating remote cache is allowed, if we ever need to do that. - if (remoteOptions.remoteLocalFallback - && remoteOptions.remoteUploadLocalResults - && !cacheCap.getActionCacheUpdateCapabilities().getUpdateEnabled()) { + // Check execution priority is in the supported range. + checkPriorityInRange( + remoteOptions.remoteExecutionPriority, + "remote_execution_priority", + execCap.getExecutionPriorityCapabilities(), + result); + } + + if (shouldCheckCacheCapabilities) { + // Check cache digest function. + CacheCapabilities cacheCap = capabilities.getCacheCapabilities(); + if (!cacheCap.getDigestFunctionList().contains(digestFunction)) { result.addError( - "--remote_local_fallback and --remote_upload_local_results are set, " - + "but the remote server prohibits writing local results to the " - + "remote cache."); + String.format( + "Cannot use hash function %s with remote cache. " + + "Server supported functions are: %s", + digestFunction, cacheCap.getDigestFunctionList())); } - } else { - // Local execution: check updating remote cache is allowed. - if (remoteOptions.remoteUploadLocalResults - && !cacheCap.getActionCacheUpdateCapabilities().getUpdateEnabled()) { - result.addError( - "--remote_upload_local_results is set, but the remote server prohibits " - + "writing local results to remote cache."); + + // Check updating remote cache is allowed, if we ever need to do that. + boolean remoteExecution = !Strings.isNullOrEmpty(remoteOptions.remoteExecutor); + if (remoteExecution) { + if (remoteOptions.remoteLocalFallback + && remoteOptions.remoteUploadLocalResults + && !cacheCap.getActionCacheUpdateCapabilities().getUpdateEnabled()) { + result.addError( + "--remote_local_fallback and --remote_upload_local_results are set, " + + "but the current account is not authorized to write local results " + + "to the remote cache."); + } + } else { + // Local execution: check updating remote cache is allowed. + if (remoteOptions.remoteUploadLocalResults + && !cacheCap.getActionCacheUpdateCapabilities().getUpdateEnabled()) { + result.addError( + "--remote_upload_local_results is set, but the current account is not authorized " + + "to write local results to the remote cache."); + } } + + // Check result cache priority is in the supported range. 
+ checkPriorityInRange( + remoteOptions.remoteResultCachePriority, + "remote_result_cache_priority", + cacheCap.getCachePriorityCapabilities(), + result); } return result.build(); diff --git a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteSpawnCache.java b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteSpawnCache.java index d6b3e1c9236..488105b91e1 100644 --- a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteSpawnCache.java +++ b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteSpawnCache.java @@ -14,11 +14,21 @@ package com.google.devtools.build.lib.remote; import static com.google.common.base.Strings.isNullOrEmpty; -import static com.google.devtools.build.lib.profiler.ProfilerTask.REMOTE_DOWNLOAD; import static com.google.devtools.build.lib.remote.util.Utils.createSpawnResult; +import static com.google.devtools.build.lib.remote.util.Utils.getInMemoryOutputPath; +import static com.google.devtools.build.lib.remote.util.Utils.hasFilesToDownload; +import static com.google.devtools.build.lib.remote.util.Utils.shouldDownloadAllSpawnOutputs; +import build.bazel.remote.execution.v2.Action; +import build.bazel.remote.execution.v2.ActionResult; +import build.bazel.remote.execution.v2.Command; +import build.bazel.remote.execution.v2.Digest; +import build.bazel.remote.execution.v2.Platform; +import build.bazel.remote.execution.v2.RequestMetadata; +import com.google.common.base.Preconditions; import com.google.common.base.Stopwatch; import com.google.common.base.Throwables; +import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.actions.ActionInput; import com.google.devtools.build.lib.actions.ExecException; import com.google.devtools.build.lib.actions.FileArtifactValue; @@ -28,6 +38,7 @@ import com.google.devtools.build.lib.actions.SpawnResult.Status; import com.google.devtools.build.lib.actions.Spawns; import com.google.devtools.build.lib.actions.cache.VirtualActionInput; +import com.google.devtools.build.lib.analysis.platform.PlatformUtils; import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.Reporter; @@ -37,17 +48,24 @@ import com.google.devtools.build.lib.profiler.Profiler; import com.google.devtools.build.lib.profiler.ProfilerTask; import com.google.devtools.build.lib.profiler.SilentCloseable; -import com.google.devtools.build.lib.remote.RemoteExecutionService.RemoteAction; -import com.google.devtools.build.lib.remote.RemoteExecutionService.RemoteActionResult; import com.google.devtools.build.lib.remote.common.CacheNotFoundException; +import com.google.devtools.build.lib.remote.common.RemoteActionExecutionContext; +import com.google.devtools.build.lib.remote.common.RemoteCacheClient.ActionKey; +import com.google.devtools.build.lib.remote.merkletree.MerkleTree; import com.google.devtools.build.lib.remote.options.RemoteOptions; +import com.google.devtools.build.lib.remote.options.RemoteOutputsMode; +import com.google.devtools.build.lib.remote.util.DigestUtil; +import com.google.devtools.build.lib.remote.util.TracingMetadataUtils; import com.google.devtools.build.lib.remote.util.Utils; import com.google.devtools.build.lib.remote.util.Utils.InMemoryOutput; import com.google.devtools.build.lib.vfs.Path; +import com.google.devtools.build.lib.vfs.PathFragment; import 
java.io.IOException; +import java.util.Collection; import java.util.HashSet; import java.util.NoSuchElementException; import java.util.Set; +import java.util.SortedMap; import javax.annotation.Nullable; /** A remote {@link SpawnCache} implementation. */ @@ -57,21 +75,42 @@ final class RemoteSpawnCache implements SpawnCache { private final Path execRoot; private final RemoteOptions options; private final boolean verboseFailures; + + private final RemoteCache remoteCache; + private final String buildRequestId; + private final String commandId; + @Nullable private final Reporter cmdlineReporter; + private final Set reportedErrors = new HashSet<>(); - private final RemoteExecutionService remoteExecutionService; + + private final DigestUtil digestUtil; + + /** + * If {@link RemoteOutputsMode#TOPLEVEL} is specified it contains the artifacts that should be + * downloaded. + */ + private final ImmutableSet filesToDownload; RemoteSpawnCache( Path execRoot, RemoteOptions options, boolean verboseFailures, + RemoteCache remoteCache, + String buildRequestId, + String commandId, @Nullable Reporter cmdlineReporter, - RemoteExecutionService remoteExecutionService) { + DigestUtil digestUtil, + ImmutableSet filesToDownload) { this.execRoot = execRoot; this.options = options; this.verboseFailures = verboseFailures; + this.remoteCache = remoteCache; this.cmdlineReporter = cmdlineReporter; - this.remoteExecutionService = remoteExecutionService; + this.buildRequestId = buildRequestId; + this.commandId = commandId; + this.digestUtil = digestUtil; + this.filesToDownload = Preconditions.checkNotNull(filesToDownload, "filesToDownload"); } @Override @@ -86,11 +125,38 @@ public CacheHandle lookup(Spawn spawn, SpawnExecutionContext context) Stopwatch totalTime = Stopwatch.createStarted(); - RemoteAction action = remoteExecutionService.buildRemoteAction(spawn, context); + SortedMap inputMap = + context.getInputMapping(PathFragment.create(execRoot.getBaseName())); + MerkleTree merkleTree = + MerkleTree.build(inputMap, context.getMetadataProvider(), execRoot, digestUtil); SpawnMetrics.Builder spawnMetrics = SpawnMetrics.Builder.forRemoteExec() - .setInputBytes(action.getInputBytes()) - .setInputFiles(action.getInputFiles()); + .setInputBytes(merkleTree.getInputBytes()) + .setInputFiles(merkleTree.getInputFiles()); + Digest merkleTreeRoot = merkleTree.getRootDigest(); + + // Get the remote platform properties. + Platform platform = PlatformUtils.getPlatformProto(spawn, options); + + Command command = + RemoteSpawnRunner.buildCommand( + spawn.getOutputFiles(), + spawn.getArguments(), + spawn.getEnvironment(), + platform, + execRoot.getBaseName()); + RemoteOutputsMode remoteOutputsMode = options.remoteOutputsMode; + Action action = + RemoteSpawnRunner.buildAction( + digestUtil.compute(command), merkleTreeRoot, platform, context.getTimeout(), true); + // Look up action cache, and reuse the action output if it is found. + ActionKey actionKey = digestUtil.computeActionKey(action); + + RequestMetadata metadata = + TracingMetadataUtils.buildMetadata( + buildRequestId, commandId, actionKey.getDigest().getHash(), spawn.getResourceOwner()); + RemoteActionExecutionContext remoteActionExecutionContext = + RemoteActionExecutionContext.create(metadata); Profiler prof = Profiler.instance(); if (options.remoteAcceptCached @@ -99,24 +165,56 @@ public CacheHandle lookup(Spawn spawn, SpawnExecutionContext context) // Metadata will be available in context.current() until we detach. // This is done via a thread-local variable. 
try { - RemoteActionResult result; + ActionResult result; try (SilentCloseable c = prof.profile(ProfilerTask.REMOTE_CACHE_CHECK, "check cache hit")) { - result = remoteExecutionService.lookupCache(action); + result = + remoteCache.downloadActionResult( + remoteActionExecutionContext, actionKey, /* inlineOutErr= */ false); } // In case the remote cache returned a failed action (exit code != 0) we treat it as a // cache miss if (result != null && result.getExitCode() == 0) { + InMemoryOutput inMemoryOutput = null; + boolean downloadOutputs = + shouldDownloadAllSpawnOutputs( + remoteOutputsMode, + /* exitCode = */ 0, + hasFilesToDownload(spawn.getOutputFiles(), filesToDownload)); Stopwatch fetchTime = Stopwatch.createStarted(); - InMemoryOutput inMemoryOutput; - try (SilentCloseable c = prof.profile(REMOTE_DOWNLOAD, "download outputs")) { - inMemoryOutput = remoteExecutionService.downloadOutputs(action, result); + if (downloadOutputs) { + try (SilentCloseable c = + prof.profile(ProfilerTask.REMOTE_DOWNLOAD, "download outputs")) { + remoteCache.download( + remoteActionExecutionContext, + result, + execRoot, + context.getFileOutErr(), + context::lockOutputFiles); + } + } else { + PathFragment inMemoryOutputPath = getInMemoryOutputPath(spawn); + // inject output metadata + try (SilentCloseable c = + prof.profile(ProfilerTask.REMOTE_DOWNLOAD, "download outputs minimal")) { + inMemoryOutput = + remoteCache.downloadMinimal( + remoteActionExecutionContext, + actionKey.getDigest().getHash(), + result, + spawn.getOutputFiles(), + inMemoryOutputPath, + context.getFileOutErr(), + execRoot, + context.getMetadataInjector(), + context::lockOutputFiles); + } } fetchTime.stop(); totalTime.stop(); spawnMetrics .setFetchTime(fetchTime.elapsed()) .setTotalTime(totalTime.elapsed()) - .setNetworkTime(action.getNetworkTime().getDuration()); + .setNetworkTime(remoteActionExecutionContext.getNetworkTime().getDuration()); SpawnResult spawnResult = createSpawnResult( result.getExitCode(), @@ -185,8 +283,17 @@ public void store(SpawnResult result) throws ExecException, InterruptedException } } + Collection files = + RemoteSpawnRunner.resolveActionInputs(execRoot, spawn.getOutputFiles()); try (SilentCloseable c = prof.profile(ProfilerTask.UPLOAD_TIME, "upload outputs")) { - remoteExecutionService.uploadOutputs(action); + remoteCache.upload( + remoteActionExecutionContext, + actionKey, + action, + command, + execRoot.getParentDirectory(), + files, + context.getFileOutErr()); } catch (IOException e) { String errorMessage; if (!verboseFailures) { @@ -207,7 +314,7 @@ public void store(SpawnResult result) throws ExecException, InterruptedException public void close() {} private void checkForConcurrentModifications() throws IOException { - for (ActionInput input : action.getInputMap().values()) { + for (ActionInput input : inputMap.values()) { if (input instanceof VirtualActionInput) { continue; } diff --git a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteSpawnRunner.java b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteSpawnRunner.java index 0dc4c8ab57d..92027b8410e 100644 --- a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteSpawnRunner.java +++ b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/RemoteSpawnRunner.java @@ -19,17 +19,35 @@ import static com.google.devtools.build.lib.profiler.ProfilerTask.REMOTE_EXECUTION; import static 
com.google.devtools.build.lib.profiler.ProfilerTask.UPLOAD_TIME; import static com.google.devtools.build.lib.remote.util.Utils.createSpawnResult; - +import static com.google.devtools.build.lib.remote.util.Utils.getFromFuture; +import static com.google.devtools.build.lib.remote.util.Utils.getInMemoryOutputPath; +import static com.google.devtools.build.lib.remote.util.Utils.hasFilesToDownload; +import static com.google.devtools.build.lib.remote.util.Utils.shouldDownloadAllSpawnOutputs; + +import build.bazel.remote.execution.v2.Action; +import build.bazel.remote.execution.v2.ActionResult; +import build.bazel.remote.execution.v2.Command; +import build.bazel.remote.execution.v2.Digest; import build.bazel.remote.execution.v2.ExecuteOperationMetadata; +import build.bazel.remote.execution.v2.ExecuteRequest; import build.bazel.remote.execution.v2.ExecuteResponse; import build.bazel.remote.execution.v2.ExecutedActionMetadata; import build.bazel.remote.execution.v2.ExecutionStage.Value; +import build.bazel.remote.execution.v2.LogFile; +import build.bazel.remote.execution.v2.Platform; +import build.bazel.remote.execution.v2.RequestMetadata; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Stopwatch; +import com.google.common.base.Strings; import com.google.common.base.Throwables; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Maps; import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.devtools.build.lib.actions.ActionInput; +import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.CommandLines.ParamFileActionInput; import com.google.devtools.build.lib.actions.ExecException; import com.google.devtools.build.lib.actions.Spawn; @@ -38,6 +56,7 @@ import com.google.devtools.build.lib.actions.SpawnResult.Status; import com.google.devtools.build.lib.actions.Spawns; import com.google.devtools.build.lib.actions.cache.VirtualActionInput; +import com.google.devtools.build.lib.analysis.platform.PlatformUtils; import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.Reporter; @@ -48,11 +67,15 @@ import com.google.devtools.build.lib.profiler.Profiler; import com.google.devtools.build.lib.profiler.ProfilerTask; import com.google.devtools.build.lib.profiler.SilentCloseable; -import com.google.devtools.build.lib.remote.RemoteExecutionService.RemoteAction; -import com.google.devtools.build.lib.remote.RemoteExecutionService.RemoteActionResult; -import com.google.devtools.build.lib.remote.RemoteExecutionService.ServerLogs; import com.google.devtools.build.lib.remote.common.OperationObserver; +import com.google.devtools.build.lib.remote.common.RemoteActionExecutionContext; +import com.google.devtools.build.lib.remote.common.RemoteCacheClient.ActionKey; +import com.google.devtools.build.lib.remote.common.RemoteExecutionClient; +import com.google.devtools.build.lib.remote.merkletree.MerkleTree; import com.google.devtools.build.lib.remote.options.RemoteOptions; +import com.google.devtools.build.lib.remote.options.RemoteOutputsMode; +import com.google.devtools.build.lib.remote.util.DigestUtil; +import com.google.devtools.build.lib.remote.util.TracingMetadataUtils; import com.google.devtools.build.lib.remote.util.Utils; import 
com.google.devtools.build.lib.remote.util.Utils.InMemoryOutput; import com.google.devtools.build.lib.sandbox.SandboxHelpers; @@ -63,15 +86,21 @@ import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.longrunning.Operation; +import com.google.protobuf.Message; import com.google.protobuf.Timestamp; import com.google.protobuf.util.Durations; import com.google.protobuf.util.Timestamps; import io.grpc.Status.Code; import java.io.IOException; import java.time.Duration; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.SortedMap; +import java.util.TreeSet; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; import javax.annotation.Nullable; @@ -84,10 +113,21 @@ public class RemoteSpawnRunner implements SpawnRunner { private final RemoteOptions remoteOptions; private final ExecutionOptions executionOptions; private final boolean verboseFailures; + @Nullable private final Reporter cmdlineReporter; + private final RemoteExecutionCache remoteCache; + private final RemoteExecutionClient remoteExecutor; private final RemoteRetrier retrier; + private final String buildRequestId; + private final String commandId; + private final DigestUtil digestUtil; private final Path logDir; - private final RemoteExecutionService remoteExecutionService; + + /** + * If {@link RemoteOutputsMode#TOPLEVEL} is specified it contains the artifacts that should be + * downloaded. + */ + private final ImmutableSet filesToDownload; // Used to ensure that a warning is reported only once. private final AtomicBoolean warningReported = new AtomicBoolean(); @@ -98,17 +138,27 @@ public class RemoteSpawnRunner implements SpawnRunner { ExecutionOptions executionOptions, boolean verboseFailures, @Nullable Reporter cmdlineReporter, + String buildRequestId, + String commandId, + RemoteExecutionCache remoteCache, + RemoteExecutionClient remoteExecutor, ListeningScheduledExecutorService retryService, + DigestUtil digestUtil, Path logDir, - RemoteExecutionService remoteExecutionService) { + ImmutableSet filesToDownload) { this.execRoot = execRoot; this.remoteOptions = remoteOptions; this.executionOptions = executionOptions; + this.remoteCache = Preconditions.checkNotNull(remoteCache, "remoteCache"); + this.remoteExecutor = Preconditions.checkNotNull(remoteExecutor, "remoteExecutor"); this.verboseFailures = verboseFailures; this.cmdlineReporter = cmdlineReporter; + this.buildRequestId = buildRequestId; + this.commandId = commandId; this.retrier = createExecuteRetrier(remoteOptions, retryService); + this.digestUtil = digestUtil; this.logDir = logDir; - this.remoteExecutionService = remoteExecutionService; + this.filesToDownload = Preconditions.checkNotNull(filesToDownload, "filesToDownload"); } @Override @@ -156,32 +206,70 @@ public void reportExecutingIfNot() { @Override public SpawnResult exec(Spawn spawn, SpawnExecutionContext context) throws ExecException, InterruptedException, IOException { - Preconditions.checkArgument( - Spawns.mayBeExecutedRemotely(spawn), "Spawn can't be executed remotely. 
This is a bug."); - Stopwatch totalTime = Stopwatch.createStarted(); boolean spawnCacheableRemotely = Spawns.mayBeCachedRemotely(spawn); boolean uploadLocalResults = remoteOptions.remoteUploadLocalResults && spawnCacheableRemotely; boolean acceptCachedResult = remoteOptions.remoteAcceptCached && spawnCacheableRemotely; context.report(ProgressStatus.SCHEDULING, getName()); - - RemoteAction action = remoteExecutionService.buildRemoteAction(spawn, context); + RemoteOutputsMode remoteOutputsMode = remoteOptions.remoteOutputsMode; + // The "root directory" of the action from the point of view of RBE is the parent directory of + // the execroot locally. This is so that paths of artifacts in external repositories don't + // start with an uplevel reference... + SortedMap inputMap = + context.getInputMapping(PathFragment.create(execRoot.getBaseName())); + + // ...however, MerkleTree.build() uses its execRoot parameter to resolve artifacts based on + // ActionInput.getExecPath(), so it needs the execroot and not its parent directory. + final MerkleTree merkleTree = + MerkleTree.build(inputMap, context.getMetadataProvider(), execRoot, digestUtil); SpawnMetrics.Builder spawnMetrics = SpawnMetrics.Builder.forRemoteExec() - .setInputBytes(action.getInputBytes()) - .setInputFiles(action.getInputFiles()); - + .setInputBytes(merkleTree.getInputBytes()) + .setInputFiles(merkleTree.getInputFiles()); maybeWriteParamFilesLocally(spawn); + // Get the remote platform properties. + Platform platform = PlatformUtils.getPlatformProto(spawn, remoteOptions); + + Command command = + buildCommand( + spawn.getOutputFiles(), + spawn.getArguments(), + spawn.getEnvironment(), + platform, + execRoot.getBaseName()); + Digest commandHash = digestUtil.compute(command); + Action action = + buildAction( + commandHash, + merkleTree.getRootDigest(), + platform, + context.getTimeout(), + spawnCacheableRemotely); + spawnMetrics.setParseTime(totalTime.elapsed()); + Preconditions.checkArgument( + Spawns.mayBeExecutedRemotely(spawn), "Spawn can't be executed remotely. This is a bug."); + // Look up action cache, and reuse the action output if it is found. + ActionKey actionKey = digestUtil.computeActionKey(action); + + RequestMetadata metadata = + TracingMetadataUtils.buildMetadata( + buildRequestId, commandId, actionKey.getDigest().getHash(), spawn.getResourceOwner()); + RemoteActionExecutionContext remoteActionExecutionContext = + RemoteActionExecutionContext.create(metadata); Profiler prof = Profiler.instance(); try { // Try to lookup the action in the action cache. - RemoteActionResult cachedResult; + ActionResult cachedResult; try (SilentCloseable c = prof.profile(ProfilerTask.REMOTE_CACHE_CHECK, "check cache hit")) { - cachedResult = acceptCachedResult ? remoteExecutionService.lookupCache(action) : null; + cachedResult = + acceptCachedResult + ? 
remoteCache.downloadActionResult( + remoteActionExecutionContext, actionKey, /* inlineOutErr= */ false) + : null; } if (cachedResult != null) { if (cachedResult.getExitCode() != 0) { @@ -192,12 +280,15 @@ public SpawnResult exec(Spawn spawn, SpawnExecutionContext context) } else { try { return downloadAndFinalizeSpawnResult( - action, + remoteActionExecutionContext, + actionKey.getDigest().getHash(), cachedResult, /* cacheHit= */ true, spawn, + context, + remoteOutputsMode, totalTime, - () -> action.getNetworkTime().getDuration(), + () -> remoteActionExecutionContext.getNetworkTime().getDuration(), spawnMetrics); } catch (BulkTransferException e) { if (!e.onlyCausedByCacheNotFoundException()) { @@ -210,31 +301,64 @@ public SpawnResult exec(Spawn spawn, SpawnExecutionContext context) } } } catch (IOException e) { - return execLocallyAndUploadOrFail(action, spawn, context, uploadLocalResults, e); + return execLocallyAndUploadOrFail( + remoteActionExecutionContext, + spawn, + context, + inputMap, + actionKey, + action, + command, + uploadLocalResults, + e); } - AtomicBoolean useCachedResult = new AtomicBoolean(acceptCachedResult); + ExecuteRequest.Builder requestBuilder = + ExecuteRequest.newBuilder() + .setInstanceName(remoteOptions.remoteInstanceName) + .setActionDigest(actionKey.getDigest()) + .setSkipCacheLookup(!acceptCachedResult); + if (remoteOptions.remoteResultCachePriority != 0) { + requestBuilder + .getResultsCachePolicyBuilder() + .setPriority(remoteOptions.remoteResultCachePriority); + } + if (remoteOptions.remoteExecutionPriority != 0) { + requestBuilder.getExecutionPolicyBuilder().setPriority(remoteOptions.remoteExecutionPriority); + } try { return retrier.execute( () -> { + ExecuteRequest request = requestBuilder.build(); + // Upload the command and all the inputs into the remote cache. try (SilentCloseable c = prof.profile(UPLOAD_TIME, "upload missing inputs")) { - Duration networkTimeStart = action.getNetworkTime().getDuration(); + Map additionalInputs = Maps.newHashMapWithExpectedSize(2); + additionalInputs.put(actionKey.getDigest(), action); + additionalInputs.put(commandHash, command); + Duration networkTimeStart = + remoteActionExecutionContext.getNetworkTime().getDuration(); Stopwatch uploadTime = Stopwatch.createStarted(); - remoteExecutionService.uploadInputsIfNotPresent(action); + remoteCache.ensureInputsPresent( + remoteActionExecutionContext, merkleTree, additionalInputs); // subtract network time consumed here to ensure wall clock during upload is not // double // counted, and metrics time computation does not exceed total time spawnMetrics.setUploadTime( uploadTime .elapsed() - .minus(action.getNetworkTime().getDuration().minus(networkTimeStart))); + .minus( + remoteActionExecutionContext + .getNetworkTime() + .getDuration() + .minus(networkTimeStart))); } ExecutingStatusReporter reporter = new ExecutingStatusReporter(context); - RemoteActionResult result; + ExecuteResponse reply; try (SilentCloseable c = prof.profile(REMOTE_EXECUTION, "execute remotely")) { - result = remoteExecutionService.execute(action, useCachedResult.get(), reporter); + reply = + remoteExecutor.executeRemotely(remoteActionExecutionContext, request, reporter); } // In case of replies from server contains metadata, but none of them has EXECUTING // status. 
@@ -242,37 +366,51 @@ public SpawnResult exec(Spawn spawn, SpawnExecutionContext context) reporter.reportExecutingIfNot(); FileOutErr outErr = context.getFileOutErr(); - String message = result.getMessage(); - if (!result.success() && !message.isEmpty()) { + String message = reply.getMessage(); + ActionResult actionResult = reply.getResult(); + if ((actionResult.getExitCode() != 0 || reply.getStatus().getCode() != Code.OK.value()) + && !message.isEmpty()) { outErr.printErr(message + "\n"); } - spawnMetricsAccounting(spawnMetrics, result.getExecutionMetadata()); + spawnMetricsAccounting(spawnMetrics, actionResult.getExecutionMetadata()); try (SilentCloseable c = prof.profile(REMOTE_DOWNLOAD, "download server logs")) { - maybeDownloadServerLogs(action, result.getResponse()); + maybeDownloadServerLogs(remoteActionExecutionContext, reply, actionKey); } try { return downloadAndFinalizeSpawnResult( - action, - result, - result.cacheHit(), + remoteActionExecutionContext, + actionKey.getDigest().getHash(), + actionResult, + reply.getCachedResult(), spawn, + context, + remoteOutputsMode, totalTime, - () -> action.getNetworkTime().getDuration(), + () -> remoteActionExecutionContext.getNetworkTime().getDuration(), spawnMetrics); } catch (BulkTransferException e) { if (e.onlyCausedByCacheNotFoundException()) { // No cache hit, so if we retry this execution, we must no longer accept // cached results, it must be reexecuted - useCachedResult.set(false); + requestBuilder.setSkipCacheLookup(true); } throw e; } }); } catch (IOException e) { - return execLocallyAndUploadOrFail(action, spawn, context, uploadLocalResults, e); + return execLocallyAndUploadOrFail( + remoteActionExecutionContext, + spawn, + context, + inputMap, + actionKey, + action, + command, + uploadLocalResults, + e); } } @@ -322,29 +460,58 @@ static void spawnMetricsAccounting( } private SpawnResult downloadAndFinalizeSpawnResult( - RemoteAction action, - RemoteActionResult result, + RemoteActionExecutionContext remoteActionExecutionContext, + String actionId, + ActionResult actionResult, boolean cacheHit, Spawn spawn, + SpawnExecutionContext context, + RemoteOutputsMode remoteOutputsMode, Stopwatch totalTime, Supplier networkTime, SpawnMetrics.Builder spawnMetrics) throws ExecException, IOException, InterruptedException { + boolean downloadOutputs = + shouldDownloadAllSpawnOutputs( + remoteOutputsMode, + /* exitCode = */ actionResult.getExitCode(), + hasFilesToDownload(spawn.getOutputFiles(), filesToDownload)); + InMemoryOutput inMemoryOutput = null; Duration networkTimeStart = networkTime.get(); Stopwatch fetchTime = Stopwatch.createStarted(); - - InMemoryOutput inMemoryOutput; - try (SilentCloseable c = Profiler.instance().profile(REMOTE_DOWNLOAD, "download outputs")) { - inMemoryOutput = remoteExecutionService.downloadOutputs(action, result); + if (downloadOutputs) { + try (SilentCloseable c = Profiler.instance().profile(REMOTE_DOWNLOAD, "download outputs")) { + remoteCache.download( + remoteActionExecutionContext, + actionResult, + execRoot, + context.getFileOutErr(), + context::lockOutputFiles); + } + } else { + PathFragment inMemoryOutputPath = getInMemoryOutputPath(spawn); + try (SilentCloseable c = + Profiler.instance().profile(REMOTE_DOWNLOAD, "download outputs minimal")) { + inMemoryOutput = + remoteCache.downloadMinimal( + remoteActionExecutionContext, + actionId, + actionResult, + spawn.getOutputFiles(), + inMemoryOutputPath, + context.getFileOutErr(), + execRoot, + context.getMetadataInjector(), + context::lockOutputFiles); + 
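// Sketch of the decision this branch encodes: under --remote_download_minimal only
// output metadata is injected and at most one declared in-memory output is
// materialized, while --remote_download_all falls back to a full fetch. The enum and
// predicate below are simplified stand-ins for shouldDownloadAllSpawnOutputs(), not
// the exact Bazel rule.
enum OutputsMode { ALL, TOPLEVEL, MINIMAL }

final class OutputsPolicySketch {
  static boolean downloadAllOutputs(OutputsMode mode, int exitCode, boolean hasToplevelOutputs) {
    if (mode == OutputsMode.ALL) return true;
    if (exitCode != 0) return true; // keep failed actions debuggable locally (one plausible policy)
    return mode == OutputsMode.TOPLEVEL && hasToplevelOutputs;
  }

  public static void main(String[] args) {
    System.out.println(downloadAllOutputs(OutputsMode.MINIMAL, 0, false)); // false -> downloadMinimal
    System.out.println(downloadAllOutputs(OutputsMode.MINIMAL, 1, false)); // true  -> full download
  }
}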
} } - fetchTime.stop(); totalTime.stop(); Duration networkTimeEnd = networkTime.get(); // subtract network time consumed here to ensure wall clock during fetch is not double // counted, and metrics time computation does not exceed total time return createSpawnResult( - result.getExitCode(), + actionResult.getExitCode(), cacheHit, getName(), inMemoryOutput, @@ -379,18 +546,30 @@ private void maybeWriteParamFilesLocally(Spawn spawn) throws IOException { } } - private void maybeDownloadServerLogs(RemoteAction action, ExecuteResponse resp) + private void maybeDownloadServerLogs( + RemoteActionExecutionContext context, ExecuteResponse resp, ActionKey actionKey) throws InterruptedException { - try { - ServerLogs serverLogs = remoteExecutionService.maybeDownloadServerLogs(action, resp, logDir); - if (serverLogs.logCount > 0 && verboseFailures) { + ActionResult result = resp.getResult(); + if (resp.getServerLogsCount() > 0 + && (result.getExitCode() != 0 || resp.getStatus().getCode() != Code.OK.value())) { + Path parent = logDir.getRelative(actionKey.getDigest().getHash()); + Path logPath = null; + int logCount = 0; + for (Map.Entry e : resp.getServerLogsMap().entrySet()) { + if (e.getValue().getHumanReadable()) { + logPath = parent.getRelative(e.getKey()); + logCount++; + try { + getFromFuture(remoteCache.downloadFile(context, logPath, e.getValue().getDigest())); + } catch (IOException ex) { + reportOnce(Event.warn("Failed downloading server logs from the remote cache.")); + } + } + } + if (logCount > 0 && verboseFailures) { report( - Event.info( - "Server logs of failing action:\n " - + (serverLogs.logCount > 1 ? serverLogs.directory : serverLogs.lastLogPath))); + Event.info("Server logs of failing action:\n " + (logCount > 1 ? parent : logPath))); } - } catch (IOException e) { - reportOnce(Event.warn("Failed downloading server logs from the remote cache.")); } } @@ -408,9 +587,13 @@ private SpawnResult execLocally(Spawn spawn, SpawnExecutionContext context) } private SpawnResult execLocallyAndUploadOrFail( - RemoteAction action, + RemoteActionExecutionContext remoteActionExecutionContext, Spawn spawn, SpawnExecutionContext context, + SortedMap inputMap, + ActionKey actionKey, + Action action, + Command command, boolean uploadLocalResults, IOException cause) throws ExecException, InterruptedException, IOException { @@ -420,12 +603,26 @@ private SpawnResult execLocallyAndUploadOrFail( throw new InterruptedException(); } if (remoteOptions.remoteLocalFallback && !RemoteRetrierUtils.causedByExecTimeout(cause)) { - return execLocallyAndUpload(action, spawn, context, uploadLocalResults); + return execLocallyAndUpload( + remoteActionExecutionContext, + spawn, + context, + inputMap, + actionKey, + action, + command, + uploadLocalResults); } - return handleError(action, cause); + return handleError( + remoteActionExecutionContext, cause, context.getFileOutErr(), actionKey, context); } - private SpawnResult handleError(RemoteAction action, IOException exception) + private SpawnResult handleError( + RemoteActionExecutionContext remoteActionExecutionContext, + IOException exception, + FileOutErr outErr, + ActionKey actionKey, + SpawnExecutionContext context) throws ExecException, InterruptedException, IOException { boolean remoteCacheFailed = BulkTransferException.isOnlyCausedByCacheNotFoundException(exception); @@ -433,11 +630,16 @@ private SpawnResult handleError(RemoteAction action, IOException exception) ExecutionStatusException e = (ExecutionStatusException) exception.getCause(); if (e.getResponse() != 
null) { ExecuteResponse resp = e.getResponse(); - maybeDownloadServerLogs(action, resp); + maybeDownloadServerLogs(remoteActionExecutionContext, resp, actionKey); if (resp.hasResult()) { try { - remoteExecutionService.downloadOutputs( - action, RemoteActionResult.createFromResponse(resp)); + // We try to download all (partial) results even on server error, for debuggability. + remoteCache.download( + remoteActionExecutionContext, + resp.getResult(), + execRoot, + outErr, + context::lockOutputFiles); } catch (BulkTransferException bulkTransferEx) { exception.addSuppressed(bulkTransferEx); } @@ -497,6 +699,71 @@ private SpawnResult handleError(RemoteAction action, IOException exception) .build(); } + static Action buildAction( + Digest command, + Digest inputRoot, + @Nullable Platform platform, + Duration timeout, + boolean cacheable) { + + Action.Builder action = Action.newBuilder(); + action.setCommandDigest(command); + action.setInputRootDigest(inputRoot); + if (!timeout.isZero()) { + action.setTimeout(com.google.protobuf.Duration.newBuilder().setSeconds(timeout.getSeconds())); + } + if (!cacheable) { + action.setDoNotCache(true); + } + if (platform != null) { + action.setPlatform(platform); + } + return action.build(); + } + + static Command buildCommand( + Collection outputs, + List arguments, + ImmutableMap env, + @Nullable Platform platform, + @Nullable String workingDirectoryString) { + Command.Builder command = Command.newBuilder(); + ArrayList outputFiles = new ArrayList<>(); + ArrayList outputDirectories = new ArrayList<>(); + PathFragment workingDirectoryPathFragment = + workingDirectoryString == null + ? PathFragment.EMPTY_FRAGMENT + : PathFragment.create(workingDirectoryString); + for (ActionInput output : outputs) { + String pathString = + workingDirectoryPathFragment.getRelative(output.getExecPath()).getPathString(); + if (output instanceof Artifact && ((Artifact) output).isTreeArtifact()) { + outputDirectories.add(pathString); + } else { + outputFiles.add(pathString); + } + } + Collections.sort(outputFiles); + Collections.sort(outputDirectories); + command.addAllOutputFiles(outputFiles); + command.addAllOutputDirectories(outputDirectories); + + if (platform != null) { + command.setPlatform(platform); + } + command.addAllArguments(arguments); + // Sorting the environment pairs by variable name. 
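// The sort below is not cosmetic: the Command proto is serialized and digested, and the
// digest must not depend on map iteration order, or identical actions would miss the
// cache. A small illustration (hypothetical helper, SHA-256 over the sorted name=value
// list):
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Map;
import java.util.TreeMap;

final class CanonicalEnvDigest {
  static byte[] digest(Map<String, String> env) throws Exception {
    MessageDigest md = MessageDigest.getInstance("SHA-256");
    // TreeMap imposes the same name-sorted order that buildCommand() uses.
    for (Map.Entry<String, String> e : new TreeMap<>(env).entrySet()) {
      md.update((e.getKey() + "=" + e.getValue() + "\n").getBytes(StandardCharsets.UTF_8));
    }
    return md.digest();
  }

  public static void main(String[] args) throws Exception {
    byte[] a = digest(Map.of("PATH", "/bin", "LANG", "C"));
    byte[] b = digest(Map.of("LANG", "C", "PATH", "/bin"));
    System.out.println(MessageDigest.isEqual(a, b)); // true: order-insensitive digest
  }
}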
+ TreeSet<String> variables = new TreeSet<>(env.keySet()); + for (String var : variables) { + command.addEnvironmentVariablesBuilder().setName(var).setValue(env.get(var)); + } + + if (!Strings.isNullOrEmpty(workingDirectoryString)) { + command.setWorkingDirectory(workingDirectoryString); + } + return command.build(); + } + private Map<Path, Long> getInputCtimes(SortedMap<PathFragment, ActionInput> inputMap) { HashMap<Path, Long> ctimes = new HashMap<>(); for (Map.Entry<PathFragment, ActionInput> e : inputMap.entrySet()) { @@ -519,11 +786,18 @@ private Map getInputCtimes(SortedMap inpu @VisibleForTesting SpawnResult execLocallyAndUpload( - RemoteAction action, Spawn spawn, SpawnExecutionContext context, boolean uploadLocalResults) + RemoteActionExecutionContext remoteActionExecutionContext, + Spawn spawn, + SpawnExecutionContext context, + SortedMap<PathFragment, ActionInput> inputMap, + ActionKey actionKey, + Action action, + Command command, + boolean uploadLocalResults) throws ExecException, IOException, InterruptedException { - Map<Path, Long> ctimesBefore = getInputCtimes(action.getInputMap()); + Map<Path, Long> ctimesBefore = getInputCtimes(inputMap); SpawnResult result = execLocally(spawn, context); - Map<Path, Long> ctimesAfter = getInputCtimes(action.getInputMap()); + Map<Path, Long> ctimesAfter = getInputCtimes(inputMap); uploadLocalResults = uploadLocalResults && Status.SUCCESS.equals(result.status()) && result.exitCode() == 0; if (!uploadLocalResults) { @@ -537,8 +811,16 @@ SpawnResult execLocallyAndUpload( } } + Collection<Path> outputFiles = resolveActionInputs(execRoot, spawn.getOutputFiles()); try (SilentCloseable c = Profiler.instance().profile(UPLOAD_TIME, "upload outputs")) { - remoteExecutionService.uploadOutputs(action); + remoteCache.upload( + remoteActionExecutionContext, + actionKey, + action, + command, + execRoot, + outputFiles, + context.getFileOutErr()); } catch (IOException e) { if (verboseFailures) { report(Event.debug("Upload to remote cache failed: " + e.getMessage())); @@ -561,6 +843,17 @@ private void report(Event evt) { } } + /** + * Resolve a collection of {@link com.google.devtools.build.lib.actions.ActionInput}s to {@link + * Path}s.
+ */ + static Collection resolveActionInputs( + Path execRoot, Collection actionInputs) { + return actionInputs.stream() + .map((inp) -> execRoot.getRelative(inp.getExecPath())) + .collect(ImmutableList.toImmutableList()); + } + private static RemoteRetrier createExecuteRetrier( RemoteOptions options, ListeningScheduledExecutorService retryService) { return new ExecuteRetrier( diff --git a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/downloader/GrpcRemoteDownloader.java b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/downloader/GrpcRemoteDownloader.java index 81ccf51ca80..a0bc56b0b12 100644 --- a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/downloader/GrpcRemoteDownloader.java +++ b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/downloader/GrpcRemoteDownloader.java @@ -20,6 +20,7 @@ import build.bazel.remote.asset.v1.FetchGrpc.FetchBlockingStub; import build.bazel.remote.asset.v1.Qualifier; import build.bazel.remote.execution.v2.Digest; +import build.bazel.remote.execution.v2.RequestMetadata; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Strings; import com.google.devtools.build.lib.bazel.repository.downloader.Checksum; @@ -28,6 +29,7 @@ import com.google.devtools.build.lib.events.ExtendedEventHandler; import com.google.devtools.build.lib.remote.ReferenceCountedChannel; import com.google.devtools.build.lib.remote.RemoteRetrier; +import com.google.devtools.build.lib.remote.common.RemoteActionExecutionContext; import com.google.devtools.build.lib.remote.common.RemoteCacheClient; import com.google.devtools.build.lib.remote.options.RemoteOptions; import com.google.devtools.build.lib.remote.util.TracingMetadataUtils; @@ -36,7 +38,6 @@ import com.google.gson.Gson; import com.google.gson.JsonObject; import io.grpc.CallCredentials; -import io.grpc.Context; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.io.OutputStream; @@ -58,10 +59,11 @@ */ public class GrpcRemoteDownloader implements AutoCloseable, Downloader { + private final String buildRequestId; + private final String commandId; private final ReferenceCountedChannel channel; private final Optional credentials; private final RemoteRetrier retrier; - private final Context requestCtx; private final RemoteCacheClient cacheClient; private final RemoteOptions options; @@ -75,17 +77,19 @@ public class GrpcRemoteDownloader implements AutoCloseable, Downloader { private static final String QUALIFIER_AUTH_HEADERS = "bazel.auth_headers"; public GrpcRemoteDownloader( + String buildRequestId, + String commandId, ReferenceCountedChannel channel, Optional credentials, RemoteRetrier retrier, - Context requestCtx, RemoteCacheClient cacheClient, RemoteOptions options) { + this.buildRequestId = buildRequestId; + this.commandId = commandId; this.channel = channel; this.credentials = credentials; this.retrier = retrier; this.cacheClient = cacheClient; - this.requestCtx = requestCtx; this.options = options; } @@ -106,24 +110,29 @@ public void download( String canonicalId, Path destination, ExtendedEventHandler eventHandler, - Map clientEnv) + Map clientEnv, + com.google.common.base.Optional type) throws IOException, InterruptedException { + RequestMetadata metadata = + TracingMetadataUtils.buildMetadata(buildRequestId, commandId, "remote_downloader", null); + RemoteActionExecutionContext remoteActionExecutionContext = + 
RemoteActionExecutionContext.create(metadata); + final FetchBlobRequest request = newFetchBlobRequest(options.remoteInstanceName, urls, authHeaders, checksum, canonicalId); try { FetchBlobResponse response = - retrier.execute(() -> requestCtx.call(() -> fetchBlockingStub().fetchBlob(request))); + retrier.execute(() -> fetchBlockingStub(remoteActionExecutionContext).fetchBlob(request)); final Digest blobDigest = response.getBlobDigest(); retrier.execute( - () -> - requestCtx.call( - () -> { - try (OutputStream out = newOutputStream(destination, checksum)) { - Utils.getFromFuture(cacheClient.downloadBlob(blobDigest, out)); - } - return null; - })); + () -> { + try (OutputStream out = newOutputStream(destination, checksum)) { + Utils.getFromFuture( + cacheClient.downloadBlob(remoteActionExecutionContext, blobDigest, out)); + } + return null; + }); } catch (StatusRuntimeException e) { throw new IOException(e); } @@ -163,12 +172,13 @@ static FetchBlobRequest newFetchBlobRequest( return requestBuilder.build(); } - private FetchBlockingStub fetchBlockingStub() { + private FetchBlockingStub fetchBlockingStub(RemoteActionExecutionContext context) { return FetchGrpc.newBlockingStub(channel) - .withInterceptors(TracingMetadataUtils.attachMetadataFromContextInterceptor()) + .withInterceptors( + TracingMetadataUtils.attachMetadataInterceptor(context.getRequestMetadata())) .withInterceptors(TracingMetadataUtils.newDownloaderHeadersInterceptor(options)) .withCallCredentials(credentials.orElse(null)) - .withDeadlineAfter(options.remoteTimeout, TimeUnit.SECONDS); + .withDeadlineAfter(options.remoteTimeout.getSeconds(), TimeUnit.SECONDS); } private OutputStream newOutputStream( @@ -196,6 +206,6 @@ private static String authHeadersJson(Map> authHeaders) authHeadersJson.add(entry.getKey(), entry.getValue()); } - return (new Gson()).toJson(authHeadersJson); + return new Gson().toJson(authHeadersJson); } } diff --git a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/util/TracingMetadataUtils.java b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/util/TracingMetadataUtils.java index e70726060d4..55797383263 100644 --- a/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/util/TracingMetadataUtils.java +++ b/dataset/GitHub_Java/bazelbuild.bazel/src/main/java/com/google/devtools/build/lib/remote/util/TracingMetadataUtils.java @@ -17,8 +17,9 @@ import build.bazel.remote.execution.v2.ToolDetails; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; +import com.google.devtools.build.lib.actions.ActionExecutionMetadata; import com.google.devtools.build.lib.analysis.BlazeVersionInfo; -import com.google.devtools.build.lib.remote.common.RemoteCacheClient.ActionKey; +import com.google.devtools.build.lib.remote.options.RemoteOptions; import io.grpc.ClientInterceptor; import io.grpc.Context; import io.grpc.Contexts; @@ -29,6 +30,8 @@ import io.grpc.ServerInterceptor; import io.grpc.protobuf.ProtoUtils; import io.grpc.stub.MetadataUtils; +import java.util.List; +import java.util.Map.Entry; import javax.annotation.Nullable; /** Utility functions to handle Metadata for remote Grpc calls. 
*/ @@ -43,48 +46,35 @@ private TracingMetadataUtils() {} public static final Metadata.Key METADATA_KEY = ProtoUtils.keyForProto(RequestMetadata.getDefaultInstance()); - /** - * Returns a new gRPC context derived from the current context, with {@link RequestMetadata} - * accessible by the {@link fromCurrentContext()} method. - * - *
<p>
The {@link RequestMetadata} is constructed using the provided arguments and the current tool - * version. - */ - public static Context contextWithMetadata( - String buildRequestId, String commandId, ActionKey actionKey) { - return contextWithMetadata(buildRequestId, commandId, actionKey.getDigest().getHash()); - } - - /** - * Returns a new gRPC context derived from the current context, with {@link RequestMetadata} - * accessible by the {@link fromCurrentContext()} method. - * - *
<p>
The {@link RequestMetadata} is constructed using the provided arguments and the current tool - * version. - */ - public static Context contextWithMetadata( - String buildRequestId, String commandId, String actionId) { + public static RequestMetadata buildMetadata( + String buildRequestId, + String commandId, + String actionId, + @Nullable ActionExecutionMetadata actionMetadata) { Preconditions.checkNotNull(buildRequestId); Preconditions.checkNotNull(commandId); Preconditions.checkNotNull(actionId); - RequestMetadata.Builder metadata = + RequestMetadata.Builder builder = RequestMetadata.newBuilder() .setCorrelatedInvocationsId(buildRequestId) - .setToolInvocationId(commandId); - metadata.setActionId(actionId); - metadata - .setToolDetails( - ToolDetails.newBuilder() - .setToolName("bazel") - .setToolVersion(BlazeVersionInfo.instance().getVersion())) - .build(); - return Context.current().withValue(CONTEXT_KEY, metadata.build()); + .setToolInvocationId(commandId) + .setActionId(actionId) + .setToolDetails( + ToolDetails.newBuilder() + .setToolName("bazel") + .setToolVersion(BlazeVersionInfo.instance().getVersion())); + if (actionMetadata != null) { + builder.setActionMnemonic(actionMetadata.getMnemonic()); + builder.setTargetId(actionMetadata.getOwner().getLabel().getCanonicalForm()); + builder.setConfigurationId(actionMetadata.getOwner().getConfigurationChecksum()); + } + return builder.build(); } /** * Fetches a {@link RequestMetadata} defined on the current context. * - * @throws {@link IllegalStateException} when the metadata is not defined in the current context. + * @throws IllegalStateException when the metadata is not defined in the current context. */ public static RequestMetadata fromCurrentContext() { RequestMetadata metadata = CONTEXT_KEY.get(); @@ -94,15 +84,10 @@ public static RequestMetadata fromCurrentContext() { return metadata; } - /** - * Creates a {@link Metadata} containing the {@link RequestMetadata} defined on the current - * context. - * - * @throws {@link IllegalStateException} when the metadata is not defined in the current context. - */ - public static Metadata headersFromCurrentContext() { + /** Creates a {@link Metadata} containing the {@link RequestMetadata}. 
*/ + public static Metadata headersFromRequestMetadata(RequestMetadata requestMetadata) { Metadata headers = new Metadata(); - headers.put(METADATA_KEY, fromCurrentContext()); + headers.put(METADATA_KEY, requestMetadata); return headers; } @@ -114,8 +99,36 @@ public static Metadata headersFromCurrentContext() { return headers.get(METADATA_KEY); } - public static ClientInterceptor attachMetadataFromContextInterceptor() { - return MetadataUtils.newAttachHeadersInterceptor(headersFromCurrentContext()); + public static ClientInterceptor attachMetadataInterceptor(RequestMetadata requestMetadata) { + return MetadataUtils.newAttachHeadersInterceptor(headersFromRequestMetadata(requestMetadata)); + } + + private static Metadata newMetadataForHeaders(List> headers) { + Metadata metadata = new Metadata(); + headers.forEach( + header -> + metadata.put( + Metadata.Key.of(header.getKey(), Metadata.ASCII_STRING_MARSHALLER), + header.getValue())); + return metadata; + } + + public static ClientInterceptor newCacheHeadersInterceptor(RemoteOptions options) { + Metadata metadata = newMetadataForHeaders(options.remoteHeaders); + metadata.merge(newMetadataForHeaders(options.remoteCacheHeaders)); + return MetadataUtils.newAttachHeadersInterceptor(metadata); + } + + public static ClientInterceptor newDownloaderHeadersInterceptor(RemoteOptions options) { + Metadata metadata = newMetadataForHeaders(options.remoteHeaders); + metadata.merge(newMetadataForHeaders(options.remoteDownloaderHeaders)); + return MetadataUtils.newAttachHeadersInterceptor(metadata); + } + + public static ClientInterceptor newExecHeadersInterceptor(RemoteOptions options) { + Metadata metadata = newMetadataForHeaders(options.remoteHeaders); + metadata.merge(newMetadataForHeaders(options.remoteExecHeaders)); + return MetadataUtils.newAttachHeadersInterceptor(metadata); } /** GRPC interceptor to add logging metadata to the GRPC context. 
*/ @@ -135,5 +148,4 @@ public Listener interceptCall( return Contexts.interceptCall(ctx, call, headers, next); } } - } diff --git a/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/ByteStreamUploaderTest.java b/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/ByteStreamUploaderTest.java index d9b5d3e3dde..604e87c42aa 100644 --- a/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/ByteStreamUploaderTest.java +++ b/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/ByteStreamUploaderTest.java @@ -14,35 +14,59 @@ package com.google.devtools.build.lib.remote; import static com.google.common.truth.Truth.assertThat; +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import build.bazel.remote.execution.v2.Digest; +import build.bazel.remote.execution.v2.RequestMetadata; import com.google.bytestream.ByteStreamGrpc; import com.google.bytestream.ByteStreamGrpc.ByteStreamImplBase; +import com.google.bytestream.ByteStreamProto.QueryWriteStatusRequest; +import com.google.bytestream.ByteStreamProto.QueryWriteStatusResponse; import com.google.bytestream.ByteStreamProto.WriteRequest; import com.google.bytestream.ByteStreamProto.WriteResponse; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; +import com.google.common.hash.HashCode; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import com.google.devtools.build.lib.remote.Chunker.SingleSourceBuilder; +import com.google.devtools.build.lib.analysis.BlazeVersionInfo; +import com.google.devtools.build.lib.authandtls.CallCredentialsProvider; +import com.google.devtools.build.lib.remote.common.RemoteActionExecutionContext; +import com.google.devtools.build.lib.remote.grpc.ChannelConnectionFactory; +import com.google.devtools.build.lib.remote.util.DigestUtil; +import com.google.devtools.build.lib.remote.util.TestUtils; +import com.google.devtools.build.lib.remote.util.TracingMetadataUtils; +import com.google.devtools.build.lib.vfs.DigestHashFunction; import com.google.protobuf.ByteString; -import io.grpc.Channel; +import io.grpc.BindableService; +import io.grpc.CallCredentials; import io.grpc.Metadata; import io.grpc.Server; import io.grpc.ServerCall; import io.grpc.ServerCall.Listener; import io.grpc.ServerCallHandler; +import io.grpc.ServerInterceptor; +import io.grpc.ServerInterceptors; import io.grpc.ServerServiceDefinition; import io.grpc.Status; import io.grpc.Status.Code; +import io.grpc.StatusRuntimeException; import io.grpc.inprocess.InProcessChannelBuilder; import io.grpc.inprocess.InProcessServerBuilder; +import io.grpc.stub.MetadataUtils; import io.grpc.stub.StreamObserver; import io.grpc.util.MutableHandlerRegistry; +import io.reactivex.rxjava3.core.Single; +import java.io.ByteArrayInputStream; import java.io.IOException; +import java.io.InputStream; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Random; @@ -53,6 +77,7 @@ import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeUnit; import 
java.util.concurrent.atomic.AtomicInteger; +import javax.annotation.Nullable; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -62,54 +87,88 @@ import org.mockito.Mockito; import org.mockito.MockitoAnnotations; -/** - * Tests for {@link ByteStreamUploader}. - */ +/** Tests for {@link ByteStreamUploader}. */ @RunWith(JUnit4.class) public class ByteStreamUploaderTest { + private static final DigestUtil DIGEST_UTIL = new DigestUtil(DigestHashFunction.SHA256); + private static final int CHUNK_SIZE = 10; private static final String INSTANCE_NAME = "foo"; private final MutableHandlerRegistry serviceRegistry = new MutableHandlerRegistry(); - private final ListeningScheduledExecutorService retryService = - MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(1)); + private ListeningScheduledExecutorService retryService; + private final String serverName = "Server for " + this.getClass(); private Server server; - private Channel channel; + private ChannelConnectionFactory channelConnectionFactory; + private RemoteActionExecutionContext context; - @Mock - private Retrier.Backoff mockBackoff; + @Mock private Retrier.Backoff mockBackoff; @Before - public void init() throws Exception { + public final void setUp() throws Exception { MockitoAnnotations.initMocks(this); - String serverName = "Server for " + this.getClass(); - server = InProcessServerBuilder.forName(serverName).fallbackHandlerRegistry(serviceRegistry) - .build().start(); - channel = InProcessChannelBuilder.forName(serverName).build(); + server = + InProcessServerBuilder.forName(serverName) + .fallbackHandlerRegistry(serviceRegistry) + .build() + .start(); + channelConnectionFactory = + new ChannelConnectionFactory() { + @Override + public Single create() { + return Single.just( + new ChannelConnection(InProcessChannelBuilder.forName(serverName).build())); + } + + @Override + public int maxConcurrency() { + return 100; + } + }; + RequestMetadata metadata = + TracingMetadataUtils.buildMetadata( + "none", + "none", + DIGEST_UTIL.asActionKey(Digest.getDefaultInstance()).getDigest().getHash(), + null); + context = RemoteActionExecutionContext.create(metadata); + + retryService = MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(1)); } @After - public void shutdown() { - server.shutdownNow(); + public void tearDown() throws Exception { retryService.shutdownNow(); + retryService.awaitTermination( + com.google.devtools.build.lib.testutil.TestUtils.WAIT_TIMEOUT_SECONDS, TimeUnit.SECONDS); + + server.shutdownNow(); + server.awaitTermination(); } - @Test(timeout = 10000) + @Test public void singleBlobUploadShouldWork() throws Exception { - Retrier retrier = new Retrier(() -> mockBackoff, (Status s) -> true); + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> mockBackoff, (e) -> true, retryService); ByteStreamUploader uploader = - new ByteStreamUploader(INSTANCE_NAME, channel, null, 3, retrier, retryService); + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + /* callTimeoutSecs= */ 60, + retrier); byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; new Random().nextBytes(blob); - Chunker.SingleSourceBuilder builder = - new SingleSourceBuilder().chunkSize(CHUNK_SIZE).input(blob); + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); - serviceRegistry.addService(new ByteStreamImplBase() { + 
serviceRegistry.addService( + new ByteStreamImplBase() { @Override public StreamObserver write(StreamObserver streamObserver) { return new StreamObserver() { @@ -131,8 +190,8 @@ public void onNext(WriteRequest writeRequest) { ByteString data = writeRequest.getData(); - System.arraycopy(data.toByteArray(), 0, receivedData, (int) nextOffset, - data.size()); + System.arraycopy( + data.toByteArray(), 0, receivedData, (int) nextOffset, data.size()); nextOffset += data.size(); boolean lastWrite = blob.length == nextOffset; @@ -158,221 +217,717 @@ public void onCompleted() { } }); - uploader.uploadBlob(builder); + uploader.uploadBlob(context, hash, chunker, true); // This test should not have triggered any retries. Mockito.verifyZeroInteractions(mockBackoff); - assertThat(uploader.uploadsInProgress()).isFalse(); + blockUntilInternalStateConsistent(uploader); } - @Test(timeout = 20000) - public void multipleBlobsUploadShouldWork() throws Exception { - Retrier retrier = new Retrier(() -> new FixedBackoff(1, 0), (Status s) -> true); + @Test + public void progressiveUploadShouldWork() throws Exception { + Mockito.when(mockBackoff.getRetryAttempts()).thenReturn(0); + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> mockBackoff, (e) -> true, retryService); ByteStreamUploader uploader = - new ByteStreamUploader(INSTANCE_NAME, channel, null, 3, retrier, retryService); + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + 3, + retrier); - int numUploads = 100; - Map blobsByHash = new HashMap<>(); - List builders = new ArrayList<>(numUploads); - Random rand = new Random(); - for (int i = 0; i < numUploads; i++) { - int blobSize = rand.nextInt(CHUNK_SIZE * 10) + CHUNK_SIZE; - byte[] blob = new byte[blobSize]; - rand.nextBytes(blob); - Chunker.SingleSourceBuilder builder = - new Chunker.SingleSourceBuilder().chunkSize(CHUNK_SIZE).input(blob); - builders.add(builder); - blobsByHash.put(builder.getDigest().getHash(), blob); - } + byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; + new Random().nextBytes(blob); - Set uploadsFailedOnce = Collections.synchronizedSet(new HashSet<>()); + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); - serviceRegistry.addService(new ByteStreamImplBase() { - @Override - public StreamObserver write(StreamObserver response) { - return new StreamObserver() { + serviceRegistry.addService( + new ByteStreamImplBase() { - private String digestHash; - private byte[] receivedData; - private long nextOffset; + byte[] receivedData = new byte[blob.length]; + String receivedResourceName = null; + boolean receivedComplete = false; + long nextOffset = 0; + long initialOffset = 0; + boolean mustQueryWriteStatus = false; @Override - public void onNext(WriteRequest writeRequest) { - if (nextOffset == 0) { - String resourceName = writeRequest.getResourceName(); - assertThat(resourceName).isNotEmpty(); - - String[] components = resourceName.split("/"); - assertThat(components).hasLength(6); - digestHash = components[4]; - assertThat(blobsByHash).containsKey(digestHash); - receivedData = new byte[Integer.parseInt(components[5])]; - } - assertThat(digestHash).isNotNull(); - // An upload for a given blob has a 10% chance to fail once during its lifetime. - // This is to exercise the retry mechanism a bit. 
- boolean shouldFail = - rand.nextInt(10) == 0 && !uploadsFailedOnce.contains(digestHash); - if (shouldFail) { - uploadsFailedOnce.add(digestHash); - response.onError(Status.INTERNAL.asException()); - return; - } + public StreamObserver write(StreamObserver streamObserver) { + return new StreamObserver() { + @Override + public void onNext(WriteRequest writeRequest) { + assertThat(mustQueryWriteStatus).isFalse(); - ByteString data = writeRequest.getData(); - System.arraycopy( - data.toByteArray(), 0, receivedData, (int) nextOffset, data.size()); - nextOffset += data.size(); + String resourceName = writeRequest.getResourceName(); + if (nextOffset == initialOffset) { + if (initialOffset == 0) { + receivedResourceName = resourceName; + } + assertThat(resourceName).startsWith(INSTANCE_NAME + "/uploads"); + assertThat(resourceName).endsWith(String.valueOf(blob.length)); + } else { + assertThat(resourceName).isEmpty(); + } + + assertThat(writeRequest.getWriteOffset()).isEqualTo(nextOffset); - boolean lastWrite = nextOffset == receivedData.length; - assertThat(writeRequest.getFinishWrite()).isEqualTo(lastWrite); + ByteString data = writeRequest.getData(); + + System.arraycopy( + data.toByteArray(), 0, receivedData, (int) nextOffset, data.size()); + + nextOffset += data.size(); + receivedComplete = blob.length == nextOffset; + assertThat(writeRequest.getFinishWrite()).isEqualTo(receivedComplete); + + if (initialOffset == 0) { + streamObserver.onError(Status.DEADLINE_EXCEEDED.asException()); + mustQueryWriteStatus = true; + initialOffset = nextOffset; + } + } + + @Override + public void onError(Throwable throwable) { + fail("onError should never be called."); + } + + @Override + public void onCompleted() { + assertThat(nextOffset).isEqualTo(blob.length); + assertThat(receivedData).isEqualTo(blob); + + WriteResponse response = + WriteResponse.newBuilder().setCommittedSize(nextOffset).build(); + streamObserver.onNext(response); + streamObserver.onCompleted(); + } + }; } @Override - public void onError(Throwable throwable) { - fail("onError should never be called."); + public void queryWriteStatus( + QueryWriteStatusRequest request, StreamObserver response) { + String resourceName = request.getResourceName(); + final long committedSize; + final boolean complete; + if (receivedResourceName != null && receivedResourceName.equals(resourceName)) { + assertThat(mustQueryWriteStatus).isTrue(); + mustQueryWriteStatus = false; + committedSize = nextOffset; + complete = receivedComplete; + } else { + committedSize = 0; + complete = false; + } + response.onNext( + QueryWriteStatusResponse.newBuilder() + .setCommittedSize(committedSize) + .setComplete(complete) + .build()); + response.onCompleted(); } + }); + + uploader.uploadBlob(context, hash, chunker, true); + + // This test should not have triggered any retries. + Mockito.verify(mockBackoff, Mockito.never()).nextDelayMillis(any(Exception.class)); + Mockito.verify(mockBackoff, Mockito.times(1)).getRetryAttempts(); + + blockUntilInternalStateConsistent(uploader); + } + + @Test + public void concurrentlyCompletedUploadIsNotRetried() throws Exception { + // Test that after an upload has failed and the QueryWriteStatus call returns + // that the upload has completed that we'll not retry the upload. 
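// Sketch of the resume protocol this test exercises, under simplified assumptions:
// after a Write fails, the client asks QueryWriteStatus; a blob reported complete is
// never re-uploaded, otherwise the upload resumes from committed_size. Stub and
// WriteStatus are illustrative, not the gRPC-generated API.
import java.io.IOException;

final class ResumableUploadSketch {
  record WriteStatus(long committedSize, boolean complete) {}

  interface Stub {
    void write(byte[] blob, long fromOffset) throws IOException;
    WriteStatus queryWriteStatus();
  }

  static void upload(Stub stub, byte[] blob, int maxAttempts) throws IOException {
    long offset = 0;
    for (int attempt = 0; attempt < maxAttempts; attempt++) {
      try {
        stub.write(blob, offset);
        return; // server acknowledged the full blob
      } catch (IOException e) {
        WriteStatus status = stub.queryWriteStatus();
        if (status.complete()) {
          return; // concurrently (or already) completed: do not retry
        }
        offset = status.committedSize(); // resume from the server's committed prefix
      }
    }
    throw new IOException("upload did not complete after " + maxAttempts + " attempts");
  }
}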
+ RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> new FixedBackoff(1, 0), (e) -> true, retryService); + ByteStreamUploader uploader = + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + 1, + retrier); + + byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; + new Random().nextBytes(blob); + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); + + AtomicInteger numWriteCalls = new AtomicInteger(0); + + serviceRegistry.addService( + new ByteStreamImplBase() { @Override - public void onCompleted() { - byte[] expectedBlob = blobsByHash.get(digestHash); - assertThat(receivedData).isEqualTo(expectedBlob); + public StreamObserver write(StreamObserver streamObserver) { + numWriteCalls.getAndIncrement(); + streamObserver.onError(Status.DEADLINE_EXCEEDED.asException()); + return new StreamObserver() { + @Override + public void onNext(WriteRequest writeRequest) {} + + @Override + public void onError(Throwable throwable) {} - WriteResponse writeResponse = - WriteResponse.newBuilder().setCommittedSize(receivedData.length).build(); + @Override + public void onCompleted() {} + }; + } - response.onNext(writeResponse); + @Override + public void queryWriteStatus( + QueryWriteStatusRequest request, StreamObserver response) { + response.onNext( + QueryWriteStatusResponse.newBuilder() + .setCommittedSize(blob.length) + .setComplete(true) + .build()); response.onCompleted(); } - }; - } - }); + }); - uploader.uploadBlobs(builders); + uploader.uploadBlob(context, hash, chunker, true); - assertThat(uploader.uploadsInProgress()).isFalse(); + // This test should not have triggered any retries. + assertThat(numWriteCalls.get()).isEqualTo(1); + + blockUntilInternalStateConsistent(uploader); } - @Test(timeout = 10000) - public void sameBlobShouldNotBeUploadedTwice() throws Exception { - // Test that uploading the same file concurrently triggers only one file upload. 
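// The deleted test above depends on upload deduplication: concurrent requests for the
// same digest must share one transfer. A minimal single-flight sketch with hypothetical
// names; the real uploader likewise keys in-flight uploads by digest:
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;

final class SingleFlightUploads {
  private final ConcurrentMap<String, CompletableFuture<Void>> inProgress =
      new ConcurrentHashMap<>();

  CompletableFuture<Void> upload(String digestHash, Runnable transfer) {
    return inProgress.computeIfAbsent(
        digestHash,
        hash ->
            CompletableFuture.runAsync(transfer)
                // Deregister once settled so a later (e.g. forced) upload can run again.
                .whenComplete((v, t) -> inProgress.remove(hash)));
  }

  public static void main(String[] args) throws Exception {
    SingleFlightUploads uploads = new SingleFlightUploads();
    CountDownLatch blocker = new CountDownLatch(1); // holds the first transfer open
    Runnable transfer =
        () -> {
          try {
            blocker.await();
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
          }
        };
    CompletableFuture<Void> f1 = uploads.upload("abc", transfer);
    CompletableFuture<Void> f2 = uploads.upload("abc", transfer);
    System.out.println(f1 == f2); // true: the second caller joined the in-flight upload
    blocker.countDown();
    f1.get();
  }
}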
+ @Test + public void unimplementedQueryShouldRestartUpload() throws Exception { + Mockito.when(mockBackoff.getRetryAttempts()).thenReturn(0); + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> mockBackoff, (e) -> true, retryService); + ByteStreamUploader uploader = + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + 3, + retrier); - Retrier retrier = new Retrier(() -> mockBackoff, (Status s) -> true); + byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; + new Random().nextBytes(blob); + + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); + + serviceRegistry.addService( + new ByteStreamImplBase() { + boolean expireCall = true; + boolean sawReset = false; + + @Override + public StreamObserver write(StreamObserver streamObserver) { + return new StreamObserver() { + @Override + public void onNext(WriteRequest writeRequest) { + if (expireCall) { + streamObserver.onError(Status.DEADLINE_EXCEEDED.asException()); + expireCall = false; + } else if (!sawReset && writeRequest.getWriteOffset() != 0) { + streamObserver.onError(Status.INVALID_ARGUMENT.asException()); + } else { + sawReset = true; + if (writeRequest.getFinishWrite()) { + long committedSize = + writeRequest.getWriteOffset() + writeRequest.getData().size(); + streamObserver.onNext( + WriteResponse.newBuilder().setCommittedSize(committedSize).build()); + streamObserver.onCompleted(); + } + } + } + + @Override + public void onError(Throwable throwable) { + fail("onError should never be called."); + } + + @Override + public void onCompleted() {} + }; + } + + @Override + public void queryWriteStatus( + QueryWriteStatusRequest request, StreamObserver response) { + response.onError(Status.UNIMPLEMENTED.asException()); + } + }); + + uploader.uploadBlob(context, hash, chunker, true); + + // This test should have triggered a single retry, because it made + // no progress. 
+ Mockito.verify(mockBackoff, Mockito.times(1)).nextDelayMillis(any(Exception.class)); + + blockUntilInternalStateConsistent(uploader); + } + + @Test + public void earlyWriteResponseShouldCompleteUpload() throws Exception { + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> mockBackoff, (e) -> true, retryService); ByteStreamUploader uploader = - new ByteStreamUploader(INSTANCE_NAME, channel, null, 3, retrier, retryService); + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + 3, + retrier); - byte[] blob = new byte[CHUNK_SIZE * 10]; - Chunker.SingleSourceBuilder builder = - new Chunker.SingleSourceBuilder().chunkSize(CHUNK_SIZE).input(blob); + byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; + new Random().nextBytes(blob); + // provide only enough data to write a single chunk + InputStream in = new ByteArrayInputStream(blob, 0, CHUNK_SIZE); - AtomicInteger numWriteCalls = new AtomicInteger(); + Chunker chunker = Chunker.builder().setInput(blob.length, in).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); + + serviceRegistry.addService( + new ByteStreamImplBase() { + @Override + public StreamObserver write(StreamObserver streamObserver) { + streamObserver.onNext(WriteResponse.newBuilder().setCommittedSize(blob.length).build()); + streamObserver.onCompleted(); + return new NoopStreamObserver(); + } + }); + + uploader.uploadBlob(context, hash, chunker, true); + + // This test should not have triggered any retries. + Mockito.verifyZeroInteractions(mockBackoff); + + blockUntilInternalStateConsistent(uploader); + } - serviceRegistry.addService(new ByteStreamImplBase() { - @Override - public StreamObserver write(StreamObserver response) { - numWriteCalls.incrementAndGet(); + @Test + public void incorrectCommittedSizeFailsUpload() throws Exception { + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> mockBackoff, (e) -> true, retryService); + ByteStreamUploader uploader = + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + 3, + retrier); - return new StreamObserver() { + byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; + new Random().nextBytes(blob); - private long bytesReceived; + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); + serviceRegistry.addService( + new ByteStreamImplBase() { @Override - public void onNext(WriteRequest writeRequest) { - bytesReceived += writeRequest.getData().size(); + public StreamObserver write(StreamObserver streamObserver) { + streamObserver.onNext( + WriteResponse.newBuilder().setCommittedSize(blob.length + 1).build()); + streamObserver.onCompleted(); + return new NoopStreamObserver(); } + }); + + try { + uploader.uploadBlob(context, hash, chunker, true); + fail("Should have thrown an exception."); + } catch (IOException e) { + // expected + } + + // This test should not have triggered any retries. 
+ Mockito.verifyZeroInteractions(mockBackoff); + + blockUntilInternalStateConsistent(uploader); + } + @Test + public void multipleBlobsUploadShouldWork() throws Exception { + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> new FixedBackoff(1, 0), (e) -> true, retryService); + ByteStreamUploader uploader = + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + /* callTimeoutSecs= */ 60, + retrier); + + int numUploads = 10; + Map blobsByHash = Maps.newHashMap(); + Map chunkers = Maps.newHashMapWithExpectedSize(numUploads); + Random rand = new Random(); + for (int i = 0; i < numUploads; i++) { + int blobSize = rand.nextInt(CHUNK_SIZE * 10) + CHUNK_SIZE; + byte[] blob = new byte[blobSize]; + rand.nextBytes(blob); + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); + chunkers.put(hash, chunker); + blobsByHash.put(hash, blob); + } + + serviceRegistry.addService(new MaybeFailOnceUploadService(blobsByHash)); + + uploader.uploadBlobs(context, chunkers, true); + + blockUntilInternalStateConsistent(uploader); + } + + @Test + public void contextShouldBePreservedUponRetries() throws Exception { + // We upload blobs with different context, and retry 3 times for each upload. + // We verify that the correct metadata is passed to the server with every blob. + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> new FixedBackoff(5, 0), (e) -> true, retryService); + ByteStreamUploader uploader = + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + /* callTimeoutSecs= */ 60, + retrier); + + List toUpload = ImmutableList.of("aaaaaaaaaa", "bbbbbbbbbb", "cccccccccc"); + Map chunkers = Maps.newHashMapWithExpectedSize(toUpload.size()); + Map uploadsFailed = Maps.newHashMap(); + for (String s : toUpload) { + Chunker chunker = Chunker.builder().setInput(s.getBytes(UTF_8)).setChunkSize(3).build(); + Digest digest = DIGEST_UTIL.computeAsUtf8(s); + chunkers.put(digest, chunker); + uploadsFailed.put(digest.getHash(), 0); + } + + BindableService bsService = + new ByteStreamImplBase() { @Override - public void onError(Throwable throwable) { - fail("onError should never be called."); + public StreamObserver write(StreamObserver response) { + return new StreamObserver() { + + private String digestHash; + + @Override + public void onNext(WriteRequest writeRequest) { + String resourceName = writeRequest.getResourceName(); + if (!resourceName.isEmpty()) { + String[] components = resourceName.split("/"); + assertThat(components).hasLength(6); + digestHash = components[4]; + } + assertThat(digestHash).isNotNull(); + RequestMetadata meta = TracingMetadataUtils.fromCurrentContext(); + assertThat(meta.getCorrelatedInvocationsId()).isEqualTo("build-req-id"); + assertThat(meta.getToolInvocationId()).isEqualTo("command-id"); + assertThat(meta.getActionId()).isEqualTo(digestHash); + assertThat(meta.getToolDetails().getToolName()).isEqualTo("bazel"); + assertThat(meta.getToolDetails().getToolVersion()) + .isEqualTo(BlazeVersionInfo.instance().getVersion()); + synchronized (this) { + Integer numFailures = uploadsFailed.get(digestHash); + if (numFailures < 3) { + uploadsFailed.put(digestHash, numFailures + 1); + response.onError(Status.INTERNAL.asException()); + return; + } + } + } + + @Override + public void onError(Throwable 
throwable) { + fail("onError should never be called."); + } + + @Override + public void onCompleted() { + response.onNext(WriteResponse.newBuilder().setCommittedSize(10).build()); + response.onCompleted(); + } + }; } @Override - public void onCompleted() { - response.onNext(WriteResponse.newBuilder().setCommittedSize(bytesReceived).build()); + public void queryWriteStatus( + QueryWriteStatusRequest request, StreamObserver response) { + response.onNext( + QueryWriteStatusResponse.newBuilder() + .setCommittedSize(0) + .setComplete(false) + .build()); response.onCompleted(); } }; - } - }); + serviceRegistry.addService( + ServerInterceptors.intercept( + bsService, new TracingMetadataUtils.ServerHeadersInterceptor())); + + List> uploads = new ArrayList<>(); + + for (Map.Entry chunkerEntry : chunkers.entrySet()) { + Digest actionDigest = chunkerEntry.getKey(); + RequestMetadata metadata = + TracingMetadataUtils.buildMetadata( + "build-req-id", + "command-id", + DIGEST_UTIL.asActionKey(actionDigest).getDigest().getHash(), + null); + RemoteActionExecutionContext remoteActionExecutionContext = + RemoteActionExecutionContext.create(metadata); + uploads.add( + uploader.uploadBlobAsync( + remoteActionExecutionContext, + actionDigest, + chunkerEntry.getValue(), + /* forceUpload= */ true)); + } + + for (ListenableFuture upload : uploads) { + upload.get(); + } + + blockUntilInternalStateConsistent(uploader); + } + + @Test + public void customHeadersAreAttachedToRequest() throws Exception { + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> new FixedBackoff(1, 0), (e) -> true, retryService); + + Metadata metadata = new Metadata(); + metadata.put(Metadata.Key.of("Key1", Metadata.ASCII_STRING_MARSHALLER), "Value1"); + metadata.put(Metadata.Key.of("Key2", Metadata.ASCII_STRING_MARSHALLER), "Value2"); + + ByteStreamUploader uploader = + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel( + new ChannelConnectionFactory() { + @Override + public Single create() { + return Single.just( + new ChannelConnection( + InProcessChannelBuilder.forName(serverName) + .intercept(MetadataUtils.newAttachHeadersInterceptor(metadata)) + .build())); + } + + @Override + public int maxConcurrency() { + return 100; + } + }), + CallCredentialsProvider.NO_CREDENTIALS, + /* callTimeoutSecs= */ 60, + retrier); + + byte[] blob = new byte[CHUNK_SIZE]; + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); + + serviceRegistry.addService( + ServerInterceptors.intercept( + new ByteStreamImplBase() { + @Override + public StreamObserver write( + StreamObserver streamObserver) { + return new StreamObserver() { + @Override + public void onNext(WriteRequest writeRequest) {} + + @Override + public void onError(Throwable throwable) { + fail("onError should never be called."); + } - Future upload1 = uploader.uploadBlobAsync(builder); - Future upload2 = uploader.uploadBlobAsync(builder); + @Override + public void onCompleted() { + WriteResponse response = + WriteResponse.newBuilder().setCommittedSize(blob.length).build(); + streamObserver.onNext(response); + streamObserver.onCompleted(); + } + }; + } + }, + new ServerInterceptor() { + @Override + public ServerCall.Listener interceptCall( + ServerCall call, + Metadata metadata, + ServerCallHandler next) { + assertThat(metadata.get(Metadata.Key.of("Key1", Metadata.ASCII_STRING_MARSHALLER))) + .isEqualTo("Value1"); + assertThat(metadata.get(Metadata.Key.of("Key2", 
Metadata.ASCII_STRING_MARSHALLER))) + .isEqualTo("Value2"); + assertThat(metadata.get(Metadata.Key.of("Key3", Metadata.ASCII_STRING_MARSHALLER))) + .isEqualTo(null); + return next.startCall(call, metadata); + } + })); + + uploader.uploadBlob(context, hash, chunker, true); + } - assertThat(upload1).isSameAs(upload2); + @Test + public void sameBlobShouldNotBeUploadedTwice() throws Exception { + // Test that uploading the same file concurrently triggers only one file upload. + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> mockBackoff, (e) -> true, retryService); + ByteStreamUploader uploader = + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + /* callTimeoutSecs= */ 60, + retrier); + + byte[] blob = new byte[CHUNK_SIZE * 10]; + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + Digest digest = DIGEST_UTIL.compute(blob); + + AtomicInteger numWriteCalls = new AtomicInteger(); + CountDownLatch blocker = new CountDownLatch(1); + + serviceRegistry.addService( + new ByteStreamImplBase() { + @Override + public StreamObserver write(StreamObserver response) { + numWriteCalls.incrementAndGet(); + try { + // Ensures that the first upload does not finish, before the second upload is started. + blocker.await(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + + return new StreamObserver() { + + private long bytesReceived; + + @Override + public void onNext(WriteRequest writeRequest) { + bytesReceived += writeRequest.getData().size(); + } + + @Override + public void onError(Throwable throwable) { + fail("onError should never be called."); + } + + @Override + public void onCompleted() { + response.onNext(WriteResponse.newBuilder().setCommittedSize(bytesReceived).build()); + response.onCompleted(); + } + }; + } + }); + + Future upload1 = uploader.uploadBlobAsync(context, digest, chunker, true); + Future upload2 = uploader.uploadBlobAsync(context, digest, chunker, true); + + blocker.countDown(); + + assertThat(upload1).isSameInstanceAs(upload2); upload1.get(); assertThat(numWriteCalls.get()).isEqualTo(1); } - @Test(timeout = 10000) + @Test public void errorsShouldBeReported() throws IOException, InterruptedException { - Retrier retrier = new Retrier(() -> new FixedBackoff(1, 10), (Status s) -> true); + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> new FixedBackoff(1, 10), (e) -> true, retryService); ByteStreamUploader uploader = - new ByteStreamUploader(INSTANCE_NAME, channel, null, 3, retrier, retryService); + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + /* callTimeoutSecs= */ 60, + retrier); byte[] blob = new byte[CHUNK_SIZE]; - Chunker.SingleSourceBuilder builder = - new Chunker.SingleSourceBuilder().chunkSize(CHUNK_SIZE).input(blob); - - serviceRegistry.addService(new ByteStreamImplBase() { - @Override - public StreamObserver write(StreamObserver response) { - response.onError(Status.INTERNAL.asException()); - return new NoopStreamObserver(); - } - }); + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); + + serviceRegistry.addService( + new ByteStreamImplBase() { + @Override + public StreamObserver write(StreamObserver response) { + response.onError(Status.INTERNAL.asException()); + return new NoopStreamObserver(); + 
} + }); try { - uploader.uploadBlob(builder); + uploader.uploadBlob(context, hash, chunker, true); fail("Should have thrown an exception."); - } catch (RetryException e) { - assertThat(e.getAttempts()).isEqualTo(2); - assertThat(e.causedByStatusCode(Code.INTERNAL)).isTrue(); + } catch (IOException e) { + assertThat(RemoteRetrierUtils.causedByStatus(e, Code.INTERNAL)).isTrue(); } } - @Test(timeout = 10000) + @Test public void shutdownShouldCancelOngoingUploads() throws Exception { - Retrier retrier = new Retrier(() -> new FixedBackoff(1, 10), (Status s) -> true); + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> new FixedBackoff(1, 10), (e) -> true, retryService); ByteStreamUploader uploader = - new ByteStreamUploader(INSTANCE_NAME, channel, null, 3, retrier, retryService); + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + /* callTimeoutSecs= */ 60, + retrier); CountDownLatch cancellations = new CountDownLatch(2); ServerServiceDefinition service = ServerServiceDefinition.builder(ByteStreamGrpc.SERVICE_NAME) - .addMethod(ByteStreamGrpc.METHOD_WRITE, - new ServerCallHandler() { - @Override - public Listener startCall(ServerCall call, - Metadata headers) { - // Don't request() any messages from the client, so that the client will be blocked - // on flow control and thus the call will sit there idle long enough to receive the - // cancellation. - return new Listener() { + .addMethod( + ByteStreamGrpc.getWriteMethod(), + new ServerCallHandler() { @Override - public void onCancel() { - cancellations.countDown(); + public Listener startCall( + ServerCall call, Metadata headers) { + // Don't request() any messages from the client, so that the client will be + // blocked + // on flow control and thus the call will sit there idle long enough to receive + // the + // cancellation. 
+ return new Listener() { + @Override + public void onCancel() { + cancellations.countDown(); + } + }; } - }; - } - }) - .build(); + }) + .build(); serviceRegistry.addService(service); byte[] blob1 = new byte[CHUNK_SIZE]; - Chunker.SingleSourceBuilder builder1 = - new Chunker.SingleSourceBuilder().chunkSize(CHUNK_SIZE).input(blob1); + Chunker chunker1 = Chunker.builder().setInput(blob1).setChunkSize(CHUNK_SIZE).build(); + Digest digest1 = DIGEST_UTIL.compute(blob1); byte[] blob2 = new byte[CHUNK_SIZE + 1]; - Chunker.SingleSourceBuilder builder2 = - new Chunker.SingleSourceBuilder().chunkSize(CHUNK_SIZE).input(blob2); + Chunker chunker2 = Chunker.builder().setInput(blob2).setChunkSize(CHUNK_SIZE).build(); + Digest digest2 = DIGEST_UTIL.compute(blob2); - ListenableFuture f1 = uploader.uploadBlobAsync(builder1); - ListenableFuture f2 = uploader.uploadBlobAsync(builder2); + ListenableFuture f1 = uploader.uploadBlobAsync(context, digest1, chunker1, true); + ListenableFuture f2 = uploader.uploadBlobAsync(context, digest2, chunker2, true); assertThat(uploader.uploadsInProgress()).isTrue(); @@ -383,23 +938,32 @@ public void onCancel() { assertThat(f1.isCancelled()).isTrue(); assertThat(f2.isCancelled()).isTrue(); - assertThat(uploader.uploadsInProgress()).isFalse(); + blockUntilInternalStateConsistent(uploader); } - @Test(timeout = 10000) + @Test public void failureInRetryExecutorShouldBeHandled() throws Exception { - Retrier retrier = new Retrier(() -> new FixedBackoff(1, 10), (Status s) -> true); + ListeningScheduledExecutorService retryService = + MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(1)); + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> new FixedBackoff(1, 10), (e) -> true, retryService); ByteStreamUploader uploader = - new ByteStreamUploader(INSTANCE_NAME, channel, null, 3, retrier, retryService); - - serviceRegistry.addService(new ByteStreamImplBase() { - @Override - public StreamObserver write(StreamObserver response) { - // Immediately fail the call, so that it is retried. - response.onError(Status.ABORTED.asException()); - return new NoopStreamObserver(); - } - }); + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + /* callTimeoutSecs= */ 60, + retrier); + + serviceRegistry.addService( + new ByteStreamImplBase() { + @Override + public StreamObserver write(StreamObserver response) { + // Immediately fail the call, so that it is retried. + response.onError(Status.ABORTED.asException()); + return new NoopStreamObserver(); + } + }); retryService.shutdownNow(); // Random very high timeout, as the test will timeout by itself. 
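The behaviour asserted by sameBlobShouldNotBeUploadedTwice, and by the deduplication tests further below, is commonly implemented as a "single-flight" map from digest to in-flight future. The following sketch illustrates that pattern only; the names SingleFlightUploads and startOrJoin are hypothetical, and this is not ByteStreamUploader's actual implementation, which additionally has to honor the forceUpload flag these tests pass.

import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Supplier;

// Hypothetical sketch, not Bazel's code: single-flight deduplication of uploads.
final class SingleFlightUploads {

  // Digest hash -> future of the upload currently in flight for that digest.
  private final ConcurrentMap<String, ListenableFuture<Void>> inFlight =
      new ConcurrentHashMap<>();

  ListenableFuture<Void> startOrJoin(String digestHash, Supplier<ListenableFuture<Void>> start) {
    SettableFuture<Void> placeholder = SettableFuture.create();
    ListenableFuture<Void> existing = inFlight.putIfAbsent(digestHash, placeholder);
    if (existing != null) {
      // A concurrent caller with the same digest joins the first transfer, which is
      // why the test can assert that two uploadBlobAsync() calls return the very
      // same future instance and that write() is entered exactly once.
      return existing;
    }
    ListenableFuture<Void> upload = start.get();
    // Remove the entry once the transfer settles, so that a failed attempt does not
    // deduplicate a later retry (compare failedUploadsShouldNotDeduplicate below).
    upload.addListener(
        () -> inFlight.remove(digestHash, placeholder), MoreExecutors.directExecutor());
    placeholder.setFuture(upload);
    return placeholder;
  }
}

On success the real uploader also records the digest so that a later uploadBlob(..., /* forceUpload= */ false) call skips the transfer entirely, which is what deduplicationOfUploadsShouldWork checks; the sketch omits that bookkeeping.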
@@ -407,95 +971,392 @@ public StreamObserver write(StreamObserver response assertThat(retryService.isShutdown()).isTrue(); byte[] blob = new byte[1]; - Chunker.SingleSourceBuilder builder = new Chunker.SingleSourceBuilder().input(blob); + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); try { - uploader.uploadBlob(builder); + uploader.uploadBlob(context, hash, chunker, true); fail("Should have thrown an exception."); - } catch (RetryException e) { + } catch (IOException e) { assertThat(e).hasCauseThat().isInstanceOf(RejectedExecutionException.class); } } - @Test(timeout = 10000) + @Test public void resourceNameWithoutInstanceName() throws Exception { - Retrier retrier = new Retrier(() -> mockBackoff, (Status s) -> true); + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> mockBackoff, (e) -> true, retryService); ByteStreamUploader uploader = - new ByteStreamUploader(/* instanceName */ null, channel, null, 3, retrier, retryService); - - serviceRegistry.addService(new ByteStreamImplBase() { - @Override - public StreamObserver write(StreamObserver response) { - return new StreamObserver() { + new ByteStreamUploader( + /* instanceName= */ null, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + /* callTimeoutSecs= */ 60, + retrier); + + serviceRegistry.addService( + new ByteStreamImplBase() { @Override - public void onNext(WriteRequest writeRequest) { - // Test that the resource name doesn't start with an instance name. - assertThat(writeRequest.getResourceName()).startsWith("uploads/"); - } - - @Override - public void onError(Throwable throwable) { + public StreamObserver write(StreamObserver response) { + return new StreamObserver() { + @Override + public void onNext(WriteRequest writeRequest) { + // Test that the resource name doesn't start with an instance name. + assertThat(writeRequest.getResourceName()).startsWith("uploads/"); + } - } + @Override + public void onError(Throwable throwable) {} - @Override - public void onCompleted() { - response.onNext(WriteResponse.newBuilder().setCommittedSize(1).build()); - response.onCompleted(); + @Override + public void onCompleted() { + response.onNext(WriteResponse.newBuilder().setCommittedSize(1).build()); + response.onCompleted(); + } + }; } - }; - } - }); + }); byte[] blob = new byte[1]; - Chunker.SingleSourceBuilder builder = new Chunker.SingleSourceBuilder().input(blob); + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); - uploader.uploadBlob(builder); + uploader.uploadBlob(context, hash, chunker, true); } - @Test(timeout = 10000) + @Test public void nonRetryableStatusShouldNotBeRetried() throws Exception { - Retrier retrier = new Retrier(() -> new FixedBackoff(1, 0), - /* No Status is retriable. */ (Status s) -> false); + RemoteRetrier retrier = + TestUtils.newRemoteRetrier( + () -> new FixedBackoff(1, 0), /* No Status is retriable. 
*/ (e) -> false, retryService); ByteStreamUploader uploader = - new ByteStreamUploader(/* instanceName */ null, channel, null, 3, retrier, retryService); + new ByteStreamUploader( + /* instanceName= */ null, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + /* callTimeoutSecs= */ 60, + retrier); AtomicInteger numCalls = new AtomicInteger(); - serviceRegistry.addService(new ByteStreamImplBase() { - @Override - public StreamObserver write(StreamObserver response) { - numCalls.incrementAndGet(); - response.onError(Status.INTERNAL.asException()); - return new NoopStreamObserver(); - } - }); + serviceRegistry.addService( + new ByteStreamImplBase() { + @Override + public StreamObserver write(StreamObserver response) { + numCalls.incrementAndGet(); + response.onError(Status.INTERNAL.asException()); + return new NoopStreamObserver(); + } + }); byte[] blob = new byte[1]; - Chunker.SingleSourceBuilder builder = new Chunker.SingleSourceBuilder().input(blob); + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); try { - uploader.uploadBlob(builder); + uploader.uploadBlob(context, hash, chunker, true); fail("Should have thrown an exception."); - } catch (RetryException e) { + } catch (IOException e) { assertThat(numCalls.get()).isEqualTo(1); } } + @Test + public void failedUploadsShouldNotDeduplicate() throws Exception { + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> Retrier.RETRIES_DISABLED, (e) -> false, retryService); + ByteStreamUploader uploader = + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + /* callTimeoutSecs= */ 60, + retrier); + + byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; + new Random().nextBytes(blob); + + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); + + AtomicInteger numUploads = new AtomicInteger(); + serviceRegistry.addService( + new ByteStreamImplBase() { + boolean failRequest = true; + + @Override + public StreamObserver write(StreamObserver streamObserver) { + numUploads.incrementAndGet(); + return new StreamObserver() { + long nextOffset = 0; + + @Override + public void onNext(WriteRequest writeRequest) { + if (failRequest) { + streamObserver.onError(Status.UNKNOWN.asException()); + failRequest = false; + } else { + nextOffset += writeRequest.getData().size(); + boolean lastWrite = blob.length == nextOffset; + assertThat(writeRequest.getFinishWrite()).isEqualTo(lastWrite); + } + } + + @Override + public void onError(Throwable throwable) { + fail("onError should never be called."); + } + + @Override + public void onCompleted() { + assertThat(nextOffset).isEqualTo(blob.length); + + WriteResponse response = + WriteResponse.newBuilder().setCommittedSize(nextOffset).build(); + streamObserver.onNext(response); + streamObserver.onCompleted(); + } + }; + } + }); + + StatusRuntimeException expected = null; + try { + // This should fail + uploader.uploadBlob(context, hash, chunker, true); + } catch (IOException e) { + if (e.getCause() instanceof StatusRuntimeException) { + expected = (StatusRuntimeException) e.getCause(); + } + } + assertThat(expected).isNotNull(); + assertThat(Status.fromThrowable(expected).getCode()).isEqualTo(Code.UNKNOWN); + // This should trigger an upload. 
+ uploader.uploadBlob(context, hash, chunker, false); + + assertThat(numUploads.get()).isEqualTo(2); + + blockUntilInternalStateConsistent(uploader); + } + + @Test + public void deduplicationOfUploadsShouldWork() throws Exception { + RemoteRetrier retrier = + TestUtils.newRemoteRetrier(() -> mockBackoff, (e) -> true, retryService); + ByteStreamUploader uploader = + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + CallCredentialsProvider.NO_CREDENTIALS, + /* callTimeoutSecs= */ 60, + retrier); + + byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; + new Random().nextBytes(blob); + + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); + + AtomicInteger numUploads = new AtomicInteger(); + serviceRegistry.addService( + new ByteStreamImplBase() { + @Override + public StreamObserver write(StreamObserver streamObserver) { + numUploads.incrementAndGet(); + return new StreamObserver() { + + long nextOffset = 0; + + @Override + public void onNext(WriteRequest writeRequest) { + nextOffset += writeRequest.getData().size(); + boolean lastWrite = blob.length == nextOffset; + assertThat(writeRequest.getFinishWrite()).isEqualTo(lastWrite); + } + + @Override + public void onError(Throwable throwable) { + fail("onError should never be called."); + } + + @Override + public void onCompleted() { + assertThat(nextOffset).isEqualTo(blob.length); + + WriteResponse response = + WriteResponse.newBuilder().setCommittedSize(nextOffset).build(); + streamObserver.onNext(response); + streamObserver.onCompleted(); + } + }; + } + }); + + uploader.uploadBlob(context, hash, chunker, true); + // This should not trigger an upload. + uploader.uploadBlob(context, hash, chunker, false); + + assertThat(numUploads.get()).isEqualTo(1); + + // This test should not have triggered any retries. 
+ Mockito.verifyZeroInteractions(mockBackoff); + + blockUntilInternalStateConsistent(uploader); + } + + @Test + public void unauthenticatedErrorShouldNotBeRetried() throws Exception { + RemoteRetrier retrier = + TestUtils.newRemoteRetrier( + () -> mockBackoff, RemoteRetrier.RETRIABLE_GRPC_ERRORS, retryService); + + AtomicInteger refreshTimes = new AtomicInteger(); + CallCredentialsProvider callCredentialsProvider = + new CallCredentialsProvider() { + @Nullable + @Override + public CallCredentials getCallCredentials() { + return null; + } + + @Override + public void refresh() throws IOException { + refreshTimes.incrementAndGet(); + } + }; + ByteStreamUploader uploader = + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + callCredentialsProvider, + /* callTimeoutSecs= */ 60, + retrier); + + byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; + new Random().nextBytes(blob); + + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); + + AtomicInteger numUploads = new AtomicInteger(); + serviceRegistry.addService( + new ByteStreamImplBase() { + @Override + public StreamObserver write(StreamObserver streamObserver) { + numUploads.incrementAndGet(); + + streamObserver.onError(Status.UNAUTHENTICATED.asException()); + return new NoopStreamObserver(); + } + }); + + assertThrows(IOException.class, () -> uploader.uploadBlob(context, hash, chunker, true)); + + assertThat(refreshTimes.get()).isEqualTo(1); + assertThat(numUploads.get()).isEqualTo(2); + + // This test should not have triggered any retries. + Mockito.verifyZeroInteractions(mockBackoff); + + blockUntilInternalStateConsistent(uploader); + } + + @Test + public void shouldRefreshCredentialsOnAuthenticationError() throws Exception { + RemoteRetrier retrier = + TestUtils.newRemoteRetrier( + () -> mockBackoff, RemoteRetrier.RETRIABLE_GRPC_ERRORS, retryService); + + AtomicInteger refreshTimes = new AtomicInteger(); + CallCredentialsProvider callCredentialsProvider = + new CallCredentialsProvider() { + @Nullable + @Override + public CallCredentials getCallCredentials() { + return null; + } + + @Override + public void refresh() throws IOException { + refreshTimes.incrementAndGet(); + } + }; + ByteStreamUploader uploader = + new ByteStreamUploader( + INSTANCE_NAME, + new ReferenceCountedChannel(channelConnectionFactory), + callCredentialsProvider, + /* callTimeoutSecs= */ 60, + retrier); + + byte[] blob = new byte[CHUNK_SIZE * 2 + 1]; + new Random().nextBytes(blob); + + Chunker chunker = Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build(); + HashCode hash = HashCode.fromString(DIGEST_UTIL.compute(blob).getHash()); + + AtomicInteger numUploads = new AtomicInteger(); + serviceRegistry.addService( + new ByteStreamImplBase() { + @Override + public StreamObserver write(StreamObserver streamObserver) { + numUploads.incrementAndGet(); + + if (refreshTimes.get() == 0) { + streamObserver.onError(Status.UNAUTHENTICATED.asException()); + return new NoopStreamObserver(); + } + + return new StreamObserver() { + long nextOffset = 0; + + @Override + public void onNext(WriteRequest writeRequest) { + nextOffset += writeRequest.getData().size(); + boolean lastWrite = blob.length == nextOffset; + assertThat(writeRequest.getFinishWrite()).isEqualTo(lastWrite); + } + + @Override + public void onError(Throwable throwable) { + fail("onError should never be called."); + } + + @Override + public void onCompleted() { 
+                assertThat(nextOffset).isEqualTo(blob.length);
+
+                WriteResponse response =
+                    WriteResponse.newBuilder().setCommittedSize(nextOffset).build();
+                streamObserver.onNext(response);
+                streamObserver.onCompleted();
+              }
+            };
+          }
+        });
+
+    uploader.uploadBlob(context, hash, chunker, true);
+
+    assertThat(refreshTimes.get()).isEqualTo(1);
+    assertThat(numUploads.get()).isEqualTo(2);
+
+    // This test should not have triggered any retries.
+    Mockito.verifyZeroInteractions(mockBackoff);
+
+    blockUntilInternalStateConsistent(uploader);
+  }
+
   private static class NoopStreamObserver implements StreamObserver<WriteRequest> {
     @Override
-    public void onNext(WriteRequest writeRequest) {
-    }
+    public void onNext(WriteRequest writeRequest) {}

     @Override
-    public void onError(Throwable throwable) {
-    }
+    public void onError(Throwable throwable) {}

     @Override
-    public void onCompleted() {
-    }
+    public void onCompleted() {}
   }

-  private static class FixedBackoff implements Retrier.Backoff {
+  static class FixedBackoff implements Retrier.Backoff {

     private final int maxRetries;
     private final int delayMillis;
@@ -508,7 +1369,7 @@ public FixedBackoff(int maxRetries, int delayMillis) {
     }

     @Override
-    public long nextDelayMillis() {
+    public long nextDelayMillis(Exception e) {
       if (retries < maxRetries) {
         retries++;
         return delayMillis;
@@ -521,4 +1382,93 @@ public int getRetryAttempts() {
       return retries;
     }
   }
+
+  /**
+   * A byte stream service where an upload for a given blob may or may not fail on the first
+   * attempt but is guaranteed to succeed on the second try.
+   */
+  static class MaybeFailOnceUploadService extends ByteStreamImplBase {
+
+    private final Map<HashCode, byte[]> blobsByHash;
+    private final Set<HashCode> uploadsFailedOnce = Collections.synchronizedSet(Sets.newHashSet());
+    private final Random rand = new Random();
+
+    MaybeFailOnceUploadService(Map<HashCode, byte[]> blobsByHash) {
+      this.blobsByHash = blobsByHash;
+    }
+
+    @Override
+    public StreamObserver<WriteRequest> write(StreamObserver<WriteResponse> response) {
+      return new StreamObserver<WriteRequest>() {

+        private HashCode digestHash;
+        private byte[] receivedData;
+        private long nextOffset;
+
+        @Override
+        public void onNext(WriteRequest writeRequest) {
+          if (nextOffset == 0) {
+            String resourceName = writeRequest.getResourceName();
+            assertThat(resourceName).isNotEmpty();
+
+            String[] components = resourceName.split("/");
+            assertThat(components).hasLength(6);
+            digestHash = HashCode.fromString(components[4]);
+            assertThat(blobsByHash).containsKey(digestHash);
+            receivedData = new byte[Integer.parseInt(components[5])];
+          }
+          assertThat(digestHash).isNotNull();
+          // An upload for a given blob has a 10% chance to fail once during its lifetime.
+          // This is to exercise the retry mechanism a bit.
+ boolean shouldFail = rand.nextInt(10) == 0 && !uploadsFailedOnce.contains(digestHash); + if (shouldFail) { + uploadsFailedOnce.add(digestHash); + response.onError(Status.INTERNAL.asException()); + return; + } + + ByteString data = writeRequest.getData(); + System.arraycopy(data.toByteArray(), 0, receivedData, (int) nextOffset, data.size()); + nextOffset += data.size(); + + boolean lastWrite = nextOffset == receivedData.length; + assertThat(writeRequest.getFinishWrite()).isEqualTo(lastWrite); + } + + @Override + public void onError(Throwable throwable) { + fail("onError should never be called."); + } + + @Override + public void onCompleted() { + byte[] expectedBlob = blobsByHash.get(digestHash); + assertThat(receivedData).isEqualTo(expectedBlob); + + WriteResponse writeResponse = + WriteResponse.newBuilder().setCommittedSize(receivedData.length).build(); + + response.onNext(writeResponse); + response.onCompleted(); + } + }; + } + + @Override + public void queryWriteStatus( + QueryWriteStatusRequest request, StreamObserver response) { + // force the client to reset the write + response.onNext( + QueryWriteStatusResponse.newBuilder().setCommittedSize(0).setComplete(false).build()); + response.onCompleted(); + } + } + + private void blockUntilInternalStateConsistent(ByteStreamUploader uploader) throws Exception { + // Poll until all upload futures have been removed from the internal hash map. The polling is + // necessary, as listeners are executed after Future.get() calls are notified about completion. + while (uploader.uploadsInProgress()) { + Thread.sleep(1); + } + } } diff --git a/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/GrpcCacheClientTest.java b/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/GrpcCacheClientTest.java index 94975c5efbd..4a05a10775a 100644 --- a/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/GrpcCacheClientTest.java +++ b/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/GrpcCacheClientTest.java @@ -15,8 +15,8 @@ import static com.google.common.truth.Truth.assertThat; import static com.google.devtools.build.lib.remote.util.Utils.getFromFuture; -import static com.google.devtools.build.lib.testutil.MoreAsserts.assertThrows; import static java.nio.charset.StandardCharsets.UTF_8; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.fail; import static org.mockito.AdditionalAnswers.answerVoid; import static org.mockito.ArgumentMatchers.any; @@ -35,6 +35,7 @@ import build.bazel.remote.execution.v2.FindMissingBlobsRequest; import build.bazel.remote.execution.v2.FindMissingBlobsResponse; import build.bazel.remote.execution.v2.GetActionResultRequest; +import build.bazel.remote.execution.v2.RequestMetadata; import build.bazel.remote.execution.v2.Tree; import build.bazel.remote.execution.v2.UpdateActionResultRequest; import com.google.api.client.json.GenericJson; @@ -49,20 +50,24 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSortedMap; +import com.google.common.collect.Maps; import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.devtools.build.lib.actions.ActionInputHelper; import com.google.devtools.build.lib.actions.cache.VirtualActionInput; +import com.google.devtools.build.lib.actions.util.ActionsTestUtil; 
import com.google.devtools.build.lib.authandtls.AuthAndTLSOptions; +import com.google.devtools.build.lib.authandtls.CallCredentialsProvider; import com.google.devtools.build.lib.authandtls.GoogleAuthUtils; import com.google.devtools.build.lib.clock.JavaClock; import com.google.devtools.build.lib.remote.RemoteRetrier.ExponentialBackoff; import com.google.devtools.build.lib.remote.Retrier.Backoff; +import com.google.devtools.build.lib.remote.common.RemoteActionExecutionContext; import com.google.devtools.build.lib.remote.common.RemoteCacheClient.ActionKey; +import com.google.devtools.build.lib.remote.grpc.ChannelConnectionFactory; import com.google.devtools.build.lib.remote.merkletree.MerkleTree; import com.google.devtools.build.lib.remote.options.RemoteOptions; import com.google.devtools.build.lib.remote.util.DigestUtil; -import com.google.devtools.build.lib.remote.util.StringActionInput; import com.google.devtools.build.lib.remote.util.TestUtils; import com.google.devtools.build.lib.remote.util.TracingMetadataUtils; import com.google.devtools.build.lib.testutil.Scratch; @@ -75,31 +80,37 @@ import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem; import com.google.devtools.common.options.Options; import com.google.protobuf.ByteString; +import io.grpc.BindableService; import io.grpc.CallCredentials; import io.grpc.CallOptions; import io.grpc.Channel; import io.grpc.ClientCall; import io.grpc.ClientInterceptor; -import io.grpc.Context; +import io.grpc.ManagedChannel; +import io.grpc.Metadata; import io.grpc.MethodDescriptor; import io.grpc.Server; +import io.grpc.ServerCall; +import io.grpc.ServerCallHandler; +import io.grpc.ServerInterceptor; +import io.grpc.ServerInterceptors; import io.grpc.Status; import io.grpc.inprocess.InProcessChannelBuilder; import io.grpc.inprocess.InProcessServerBuilder; import io.grpc.stub.StreamObserver; import io.grpc.util.MutableHandlerRegistry; +import io.reactivex.rxjava3.core.Single; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.List; import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Supplier; import org.junit.After; -import org.junit.AfterClass; import org.junit.Before; -import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @@ -121,14 +132,8 @@ public class GrpcCacheClientTest { private final MutableHandlerRegistry serviceRegistry = new MutableHandlerRegistry(); private final String fakeServerName = "fake server for " + getClass(); private Server fakeServer; - private Context withEmptyMetadata; - private Context prevContext; - private static ListeningScheduledExecutorService retryService; - - @BeforeClass - public static void beforeEverything() { - retryService = MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(1)); - } + private RemoteActionExecutionContext context; + private ListeningScheduledExecutorService retryService; @Before public final void setUp() throws Exception { @@ -141,7 +146,7 @@ public final void setUp() throws Exception { .start(); Chunker.setDefaultChunkSizeForTesting(1000); // Enough for everything to be one chunk. 
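    // With the 1000-byte default chunk size set above, every blob used in these
    // cache tests fits into a single ByteStream WriteRequest; tests that exercise
    // multi-chunk uploads construct their own Chunker instead, via
    // Chunker.builder().setInput(blob).setChunkSize(CHUNK_SIZE).build().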
fs = new InMemoryFileSystem(new JavaClock(), DigestHashFunction.SHA256); - execRoot = fs.getPath("/exec/root"); + execRoot = fs.getPath("/execroot/main"); FileSystemUtils.createDirectoryAndParents(execRoot); fakeFileCache = new FakeActionInputFileCache(execRoot); @@ -150,24 +155,23 @@ public final void setUp() throws Exception { FileSystemUtils.createDirectoryAndParents(stdout.getParentDirectory()); FileSystemUtils.createDirectoryAndParents(stderr.getParentDirectory()); outErr = new FileOutErr(stdout, stderr); - withEmptyMetadata = - TracingMetadataUtils.contextWithMetadata( - "none", "none", DIGEST_UTIL.asActionKey(Digest.getDefaultInstance())); - prevContext = withEmptyMetadata.attach(); + RequestMetadata metadata = + TracingMetadataUtils.buildMetadata( + "none", "none", Digest.getDefaultInstance().getHash(), null); + context = RemoteActionExecutionContext.create(metadata); + retryService = MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(1)); } @After public void tearDown() throws Exception { - withEmptyMetadata.detach(prevContext); + retryService.shutdownNow(); + retryService.awaitTermination( + com.google.devtools.build.lib.testutil.TestUtils.WAIT_TIMEOUT_SECONDS, TimeUnit.SECONDS); + fakeServer.shutdownNow(); fakeServer.awaitTermination(); } - @AfterClass - public static void afterEverything() { - retryService.shutdownNow(); - } - private static class CallCredentialsInterceptor implements ClientInterceptor { private final CallCredentials credentials; @@ -196,7 +200,7 @@ private GrpcCacheClient newClient(RemoteOptions remoteOptions, Supplier throws IOException { AuthAndTLSOptions authTlsOptions = Options.getDefaults(AuthAndTLSOptions.class); authTlsOptions.useGoogleDefaultCredentials = true; - authTlsOptions.googleCredentials = "/exec/root/creds.json"; + authTlsOptions.googleCredentials = "/execroot/main/creds.json"; authTlsOptions.googleAuthScopes = ImmutableList.of("dummy.scope"); GenericJson json = new GenericJson(); @@ -207,34 +211,52 @@ private GrpcCacheClient newClient(RemoteOptions remoteOptions, Supplier Scratch scratch = new Scratch(); scratch.file(authTlsOptions.googleCredentials, new JacksonFactory().toString(json)); - CallCredentials creds; + CallCredentialsProvider callCredentialsProvider; try (InputStream in = scratch.resolve(authTlsOptions.googleCredentials).getInputStream()) { - creds = GoogleAuthUtils.newCallCredentials(in, authTlsOptions.googleAuthScopes); + callCredentialsProvider = + GoogleAuthUtils.newCallCredentialsProvider( + GoogleAuthUtils.newCredentials(in, authTlsOptions.googleAuthScopes)); } + CallCredentials creds = callCredentialsProvider.getCallCredentials(); + RemoteRetrier retrier = TestUtils.newRemoteRetrier( backoffSupplier, RemoteRetrier.RETRIABLE_GRPC_ERRORS, retryService); ReferenceCountedChannel channel = new ReferenceCountedChannel( - InProcessChannelBuilder.forName(fakeServerName) - .directExecutor() - .intercept(new CallCredentialsInterceptor(creds)) - .build()); + new ChannelConnectionFactory() { + @Override + public Single create() { + ManagedChannel ch = + InProcessChannelBuilder.forName(fakeServerName) + .directExecutor() + .intercept(new CallCredentialsInterceptor(creds)) + .intercept(TracingMetadataUtils.newCacheHeadersInterceptor(remoteOptions)) + .build(); + return Single.just(new ChannelConnection(ch)); + } + + @Override + public int maxConcurrency() { + return 100; + } + }); ByteStreamUploader uploader = new ByteStreamUploader( remoteOptions.remoteInstanceName, channel.retain(), - creds, - 
remoteOptions.remoteTimeout, + callCredentialsProvider, + remoteOptions.remoteTimeout.getSeconds(), retrier); return new GrpcCacheClient( - channel.retain(), creds, remoteOptions, retrier, DIGEST_UTIL, uploader); + channel.retain(), callCredentialsProvider, remoteOptions, retrier, DIGEST_UTIL, uploader); } - private static byte[] downloadBlob(GrpcCacheClient cacheClient, Digest digest) + private static byte[] downloadBlob( + RemoteActionExecutionContext context, GrpcCacheClient cacheClient, Digest digest) throws IOException, InterruptedException { try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { - getFromFuture(cacheClient.downloadBlob(digest, out)); + getFromFuture(cacheClient.downloadBlob(context, digest, out)); return out.toByteArray(); } } @@ -245,7 +267,8 @@ public void testVirtualActionInputSupport() throws Exception { RemoteExecutionCache client = new RemoteExecutionCache(newClient(options), options, DIGEST_UTIL); PathFragment execPath = PathFragment.create("my/exec/path"); - VirtualActionInput virtualActionInput = new StringActionInput("hello", execPath); + VirtualActionInput virtualActionInput = + ActionsTestUtil.createVirtualActionInput(execPath, "hello"); MerkleTree merkleTree = MerkleTree.build( ImmutableSortedMap.of(execPath, virtualActionInput), @@ -298,7 +321,7 @@ public void onError(Throwable t) { }); // Upload all missing inputs (that is, the virtual action input from above) - client.ensureInputsPresent(merkleTree, ImmutableMap.of()); + client.ensureInputsPresent(context, merkleTree, ImmutableMap.of()); } @Test @@ -306,7 +329,7 @@ public void testDownloadEmptyBlob() throws Exception { GrpcCacheClient client = newClient(); Digest emptyDigest = DIGEST_UTIL.compute(new byte[0]); // Will not call the mock Bytestream interface at all. 
- assertThat(downloadBlob(client, emptyDigest)).isEmpty(); + assertThat(downloadBlob(context, client, emptyDigest)).isEmpty(); } @Test @@ -323,7 +346,7 @@ public void read(ReadRequest request, StreamObserver responseObser responseObserver.onCompleted(); } }); - assertThat(new String(downloadBlob(client, digest), UTF_8)).isEqualTo("abcdefg"); + assertThat(new String(downloadBlob(context, client, digest), UTF_8)).isEqualTo("abcdefg"); } @Test @@ -344,7 +367,7 @@ public void read(ReadRequest request, StreamObserver responseObser responseObserver.onCompleted(); } }); - assertThat(new String(downloadBlob(client, digest), UTF_8)).isEqualTo("abcdefg"); + assertThat(new String(downloadBlob(context, client, digest), UTF_8)).isEqualTo("abcdefg"); } @Test @@ -360,10 +383,11 @@ public void testDownloadAllResults() throws Exception { new FakeImmutableCacheByteStreamImpl(fooDigest, "foo-contents", barDigest, "bar-contents")); ActionResult.Builder result = ActionResult.newBuilder(); - result.addOutputFilesBuilder().setPath("a/foo").setDigest(fooDigest); - result.addOutputFilesBuilder().setPath("b/empty").setDigest(emptyDigest); - result.addOutputFilesBuilder().setPath("a/bar").setDigest(barDigest).setIsExecutable(true); - remoteCache.download(result.build(), execRoot, null, /* outputFilesLocker= */ () -> {}); + result.addOutputFilesBuilder().setPath("main/a/foo").setDigest(fooDigest); + result.addOutputFilesBuilder().setPath("main/b/empty").setDigest(emptyDigest); + result.addOutputFilesBuilder().setPath("main/a/bar").setDigest(barDigest).setIsExecutable(true); + remoteCache.download( + context, result.build(), execRoot, null, /* outputFilesLocker= */ () -> {}); assertThat(DIGEST_UTIL.compute(execRoot.getRelative("a/foo"))).isEqualTo(fooDigest); assertThat(DIGEST_UTIL.compute(execRoot.getRelative("b/empty"))).isEqualTo(emptyDigest); assertThat(DIGEST_UTIL.compute(execRoot.getRelative("a/bar"))).isEqualTo(barDigest); @@ -397,9 +421,10 @@ public void testDownloadDirectory() throws Exception { quxDigest, "qux-contents"))); ActionResult.Builder result = ActionResult.newBuilder(); - result.addOutputFilesBuilder().setPath("a/foo").setDigest(fooDigest); - result.addOutputDirectoriesBuilder().setPath("a/bar").setTreeDigest(barTreeDigest); - remoteCache.download(result.build(), execRoot, null, /* outputFilesLocker= */ () -> {}); + result.addOutputFilesBuilder().setPath("main/a/foo").setDigest(fooDigest); + result.addOutputDirectoriesBuilder().setPath("main/a/bar").setTreeDigest(barTreeDigest); + remoteCache.download( + context, result.build(), execRoot, null, /* outputFilesLocker= */ () -> {}); assertThat(DIGEST_UTIL.compute(execRoot.getRelative("a/foo"))).isEqualTo(fooDigest); assertThat(DIGEST_UTIL.compute(execRoot.getRelative("a/bar/qux"))).isEqualTo(quxDigest); @@ -419,8 +444,9 @@ public void testDownloadDirectoryEmpty() throws Exception { ImmutableMap.of(barTreeDigest, barTreeMessage.toByteString()))); ActionResult.Builder result = ActionResult.newBuilder(); - result.addOutputDirectoriesBuilder().setPath("a/bar").setTreeDigest(barTreeDigest); - remoteCache.download(result.build(), execRoot, null, /* outputFilesLocker= */ () -> {}); + result.addOutputDirectoriesBuilder().setPath("main/a/bar").setTreeDigest(barTreeDigest); + remoteCache.download( + context, result.build(), execRoot, null, /* outputFilesLocker= */ () -> {}); assertThat(execRoot.getRelative("a/bar").isDirectory()).isTrue(); } @@ -460,9 +486,10 @@ public void testDownloadDirectoryNested() throws Exception { quxDigest, "qux-contents"))); 
ActionResult.Builder result = ActionResult.newBuilder(); - result.addOutputFilesBuilder().setPath("a/foo").setDigest(fooDigest); - result.addOutputDirectoriesBuilder().setPath("a/bar").setTreeDigest(barTreeDigest); - remoteCache.download(result.build(), execRoot, null, /* outputFilesLocker= */ () -> {}); + result.addOutputFilesBuilder().setPath("main/a/foo").setDigest(fooDigest); + result.addOutputDirectoriesBuilder().setPath("main/a/bar").setTreeDigest(barTreeDigest); + remoteCache.download( + context, result.build(), execRoot, null, /* outputFilesLocker= */ () -> {}); assertThat(DIGEST_UTIL.compute(execRoot.getRelative("a/foo"))).isEqualTo(fooDigest); assertThat(DIGEST_UTIL.compute(execRoot.getRelative("a/bar/wobble/qux"))).isEqualTo(quxDigest); @@ -682,7 +709,78 @@ private ActionResult uploadDirectory(RemoteCache remoteCache, List outputs Action action = Action.getDefaultInstance(); ActionKey actionKey = DIGEST_UTIL.computeActionKey(action); Command cmd = Command.getDefaultInstance(); - return remoteCache.upload(actionKey, action, cmd, execRoot, outputs, outErr); + return remoteCache.upload(context, actionKey, action, cmd, execRoot, outputs, outErr); + } + + @Test + public void extraHeaders() throws Exception { + RemoteOptions remoteOptions = Options.getDefaults(RemoteOptions.class); + remoteOptions.remoteHeaders = + ImmutableList.of( + Maps.immutableEntry("CommonKey1", "CommonValue1"), + Maps.immutableEntry("CommonKey2", "CommonValue2")); + remoteOptions.remoteExecHeaders = + ImmutableList.of( + Maps.immutableEntry("ExecKey1", "ExecValue1"), + Maps.immutableEntry("ExecKey2", "ExecValue2")); + remoteOptions.remoteCacheHeaders = + ImmutableList.of( + Maps.immutableEntry("CacheKey1", "CacheValue1"), + Maps.immutableEntry("CacheKey2", "CacheValue2")); + + ServerInterceptor interceptor = + new ServerInterceptor() { + @Override + public ServerCall.Listener interceptCall( + ServerCall call, + Metadata metadata, + ServerCallHandler next) { + assertThat( + metadata.get(Metadata.Key.of("CommonKey1", Metadata.ASCII_STRING_MARSHALLER))) + .isEqualTo("CommonValue1"); + assertThat( + metadata.get(Metadata.Key.of("CommonKey2", Metadata.ASCII_STRING_MARSHALLER))) + .isEqualTo("CommonValue2"); + assertThat(metadata.get(Metadata.Key.of("CacheKey1", Metadata.ASCII_STRING_MARSHALLER))) + .isEqualTo("CacheValue1"); + assertThat(metadata.get(Metadata.Key.of("CacheKey2", Metadata.ASCII_STRING_MARSHALLER))) + .isEqualTo("CacheValue2"); + assertThat(metadata.get(Metadata.Key.of("ExecKey1", Metadata.ASCII_STRING_MARSHALLER))) + .isEqualTo(null); + assertThat(metadata.get(Metadata.Key.of("ExecKey2", Metadata.ASCII_STRING_MARSHALLER))) + .isEqualTo(null); + return next.startCall(call, metadata); + } + }; + + BindableService cas = + new ContentAddressableStorageImplBase() { + @Override + public void findMissingBlobs( + FindMissingBlobsRequest request, + StreamObserver responseObserver) { + responseObserver.onNext(FindMissingBlobsResponse.getDefaultInstance()); + responseObserver.onCompleted(); + } + }; + serviceRegistry.addService(cas); + BindableService actionCache = + new ActionCacheImplBase() { + @Override + public void getActionResult( + GetActionResultRequest request, StreamObserver responseObserver) { + responseObserver.onNext(ActionResult.getDefaultInstance()); + responseObserver.onCompleted(); + } + }; + serviceRegistry.addService(ServerInterceptors.intercept(actionCache, interceptor)); + + GrpcCacheClient client = newClient(remoteOptions); + RemoteCache remoteCache = new RemoteCache(client, remoteOptions, 
DIGEST_UTIL); + remoteCache.downloadActionResult( + context, + DIGEST_UTIL.asActionKey(DIGEST_UTIL.computeAsUtf8("key")), + /* inlineOutErr= */ false); } @Test @@ -737,6 +835,7 @@ public void updateActionResult( ActionResult result = remoteCache.upload( + context, DIGEST_UTIL.asActionKey(actionDigest), action, command, @@ -799,6 +898,7 @@ public void updateActionResult( ActionResult result = remoteCache.upload( + context, DIGEST_UTIL.asActionKey(actionDigest), action, command, @@ -947,6 +1047,7 @@ public void onError(Throwable t) { .when(mockByteStreamImpl) .queryWriteStatus(any(), any()); remoteCache.upload( + context, actionKey, Action.getDefaultInstance(), Command.getDefaultInstance(), @@ -973,7 +1074,10 @@ public void getActionResult( (numErrors-- <= 0 ? Status.NOT_FOUND : Status.UNAVAILABLE).asRuntimeException()); } }); - assertThat(getFromFuture(client.downloadActionResult(actionKey))).isNull(); + assertThat( + getFromFuture( + client.downloadActionResult(context, actionKey, /* inlineOutErr= */ false))) + .isNull(); } @Test @@ -1002,14 +1106,14 @@ public void read(ReadRequest request, StreamObserver responseObser } } }); - assertThat(new String(downloadBlob(client, digest), UTF_8)).isEqualTo("abcdefg"); - Mockito.verify(mockBackoff, Mockito.never()).nextDelayMillis(); + assertThat(new String(downloadBlob(context, client, digest), UTF_8)).isEqualTo("abcdefg"); + Mockito.verify(mockBackoff, Mockito.never()).nextDelayMillis(any(Exception.class)); } @Test public void downloadBlobPassesThroughDeadlineExceededWithoutProgress() throws IOException { Backoff mockBackoff = Mockito.mock(Backoff.class); - Mockito.when(mockBackoff.nextDelayMillis()).thenReturn(-1L); + Mockito.when(mockBackoff.nextDelayMillis(any(Exception.class))).thenReturn(-1L); final GrpcCacheClient client = newClient(Options.getDefaults(RemoteOptions.class), () -> mockBackoff); final Digest digest = DIGEST_UTIL.computeAsUtf8("abcdefg"); @@ -1026,10 +1130,10 @@ public void read(ReadRequest request, StreamObserver responseObser responseObserver.onError(Status.DEADLINE_EXCEEDED.asException()); } }); - IOException e = assertThrows(IOException.class, () -> downloadBlob(client, digest)); + IOException e = assertThrows(IOException.class, () -> downloadBlob(context, client, digest)); Status st = Status.fromThrowable(e); assertThat(st.getCode()).isEqualTo(Status.Code.DEADLINE_EXCEEDED); - Mockito.verify(mockBackoff, Mockito.times(1)).nextDelayMillis(); + Mockito.verify(mockBackoff, Mockito.times(1)).nextDelayMillis(any(Exception.class)); } @Test @@ -1047,7 +1151,7 @@ public void read(ReadRequest request, StreamObserver responseObser responseObserver.onCompleted(); } }); - IOException e = assertThrows(IOException.class, () -> downloadBlob(client, digest)); + IOException e = assertThrows(IOException.class, () -> downloadBlob(context, client, digest)); assertThat(e).hasMessageThat().contains(digest.getHash()); assertThat(e).hasMessageThat().contains(DIGEST_UTIL.computeAsUtf8("bar").getHash()); } @@ -1071,7 +1175,7 @@ public void read(ReadRequest request, StreamObserver responseObser } }); - assertThat(downloadBlob(client, digest)).isEqualTo(downloadContents.toByteArray()); + assertThat(downloadBlob(context, client, digest)).isEqualTo(downloadContents.toByteArray()); } @Test diff --git a/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/RemoteCacheTests.java b/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/RemoteCacheTests.java index ab6fd42206f..8761b23de98 
100644 --- a/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/RemoteCacheTests.java +++ b/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/RemoteCacheTests.java @@ -56,10 +56,8 @@ import com.google.devtools.build.lib.clock.JavaClock; import com.google.devtools.build.lib.remote.RemoteCache.OutputFilesLocker; import com.google.devtools.build.lib.remote.RemoteCache.UploadManifest; -import com.google.devtools.build.lib.remote.common.ProgressStatusListener; import com.google.devtools.build.lib.remote.common.RemoteActionExecutionContext; import com.google.devtools.build.lib.remote.common.RemoteCacheClient.ActionKey; -import com.google.devtools.build.lib.remote.common.RemotePathResolver; import com.google.devtools.build.lib.remote.options.RemoteOptions; import com.google.devtools.build.lib.remote.util.DigestUtil; import com.google.devtools.build.lib.remote.util.InMemoryCacheClient; @@ -103,10 +101,8 @@ public class RemoteCacheTests { @Mock private OutputFilesLocker outputFilesLocker; - private final ProgressStatusListener progressStatusListener = progress -> {}; private RemoteActionExecutionContext context; - private RemotePathResolver remotePathResolver; private FileSystem fs; private Path execRoot; ArtifactRoot artifactRoot; @@ -119,12 +115,12 @@ public class RemoteCacheTests { public void setUp() throws Exception { MockitoAnnotations.initMocks(this); RequestMetadata metadata = - TracingMetadataUtils.buildMetadata("none", "none", "action-id", null); + TracingMetadataUtils.buildMetadata( + "none", "none", Digest.getDefaultInstance().getHash(), null); context = RemoteActionExecutionContext.create(metadata); fs = new InMemoryFileSystem(new JavaClock(), DigestHashFunction.SHA256); execRoot = fs.getPath("/execroot/main"); execRoot.createDirectoryAndParents(); - remotePathResolver = RemotePathResolver.createDefault(execRoot); fakeFileCache = new FakeActionInputFileCache(execRoot); artifactRoot = ArtifactRoot.asDerivedRoot(execRoot, RootType.Output, "outputs"); artifactRoot.getRoot().asPath().createDirectoryAndParents(); @@ -147,11 +143,7 @@ public void uploadAbsoluteFileSymlinkAsFile() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ true, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ true, /*allowSymlinks=*/ true); um.addFiles(ImmutableList.of(link)); Digest digest = digestUtil.compute(target); assertThat(um.getDigestToFile()).containsExactly(digest, link); @@ -173,11 +165,7 @@ public void uploadAbsoluteDirectorySymlinkAsDirectory() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ true, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ true, /*allowSymlinks=*/ true); um.addFiles(ImmutableList.of(link)); Digest digest = digestUtil.compute(foo); assertThat(um.getDigestToFile()).containsExactly(digest, fs.getPath("/execroot/main/link/foo")); @@ -205,11 +193,7 @@ public void uploadRelativeFileSymlinkAsFile() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ false, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ false, /*allowSymlinks=*/ true); um.addFiles(ImmutableList.of(link)); Digest digest = digestUtil.compute(target); assertThat(um.getDigestToFile()).containsExactly(digest, link); @@ -231,11 +215,7 
@@ public void uploadRelativeDirectorySymlinkAsDirectory() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ false, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ false, /*allowSymlinks=*/ true); um.addFiles(ImmutableList.of(link)); Digest digest = digestUtil.compute(foo); assertThat(um.getDigestToFile()).containsExactly(digest, fs.getPath("/execroot/main/link/foo")); @@ -263,11 +243,7 @@ public void uploadRelativeFileSymlink() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ true, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ true, /*allowSymlinks=*/ true); um.addFiles(ImmutableList.of(link)); assertThat(um.getDigestToFile()).isEmpty(); @@ -288,11 +264,7 @@ public void uploadRelativeDirectorySymlink() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ true, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ true, /*allowSymlinks=*/ true); um.addFiles(ImmutableList.of(link)); assertThat(um.getDigestToFile()).isEmpty(); @@ -310,11 +282,7 @@ public void uploadDanglingSymlinkError() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ true, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ true, /*allowSymlinks=*/ true); IOException e = assertThrows(IOException.class, () -> um.addFiles(ImmutableList.of(link))); assertThat(e).hasMessageThat().contains("dangling"); assertThat(e).hasMessageThat().contains("/execroot/main/link"); @@ -331,11 +299,7 @@ public void uploadSymlinksNoAllowError() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ true, - /*allowSymlinks=*/ false); + digestUtil, result, execRoot, /*uploadSymlinks=*/ true, /*allowSymlinks=*/ false); ExecException e = assertThrows(ExecException.class, () -> um.addFiles(ImmutableList.of(link))); assertThat(e).hasMessageThat().contains("symbolic link"); assertThat(e).hasMessageThat().contains("--remote_allow_symlink_upload"); @@ -353,11 +317,7 @@ public void uploadAbsoluteFileSymlinkInDirectoryAsFile() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ true, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ true, /*allowSymlinks=*/ true); um.addFiles(ImmutableList.of(dir)); Digest digest = digestUtil.compute(target); assertThat(um.getDigestToFile()).containsExactly(digest, link); @@ -389,11 +349,7 @@ public void uploadAbsoluteDirectorySymlinkInDirectoryAsDirectory() throws Except UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ true, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ true, /*allowSymlinks=*/ true); um.addFiles(ImmutableList.of(dir)); Digest digest = digestUtil.compute(foo); assertThat(um.getDigestToFile()) @@ -431,11 +387,7 @@ public void uploadRelativeFileSymlinkInDirectoryAsFile() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ false, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ false, /*allowSymlinks=*/ true); 
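    // A note on the flags used by these UploadManifest tests: uploadSymlinks=true
    // records a relative symlink as a symlink, so no file digest is collected for
    // it, while uploadSymlinks=false (as here) follows the link and uploads the
    // target as a regular file or directory; with allowSymlinks=false, addFiles()
    // fails with an ExecException pointing at --remote_allow_symlink_upload.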
um.addFiles(ImmutableList.of(dir)); Digest digest = digestUtil.compute(target); assertThat(um.getDigestToFile()).containsExactly(digest, link); @@ -467,11 +419,7 @@ public void uploadRelativeDirectorySymlinkInDirectoryAsDirectory() throws Except UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ false, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ false, /*allowSymlinks=*/ true); um.addFiles(ImmutableList.of(dir)); Digest digest = digestUtil.compute(foo); assertThat(um.getDigestToFile()) @@ -509,11 +457,7 @@ public void uploadRelativeFileSymlinkInDirectory() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ true, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ true, /*allowSymlinks=*/ true); um.addFiles(ImmutableList.of(dir)); assertThat(um.getDigestToFile()).isEmpty(); @@ -544,11 +488,7 @@ public void uploadRelativeDirectorySymlinkInDirectory() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ true, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ true, /*allowSymlinks=*/ true); um.addFiles(ImmutableList.of(dir)); assertThat(um.getDigestToFile()).isEmpty(); @@ -576,11 +516,7 @@ public void uploadDanglingSymlinkInDirectoryError() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ true, - /*allowSymlinks=*/ true); + digestUtil, result, execRoot, /*uploadSymlinks=*/ true, /*allowSymlinks=*/ true); IOException e = assertThrows(IOException.class, () -> um.addFiles(ImmutableList.of(dir))); assertThat(e).hasMessageThat().contains("dangling"); assertThat(e).hasMessageThat().contains("/execroot/dir/link"); @@ -599,31 +535,20 @@ public void uploadSymlinkInDirectoryNoAllowError() throws Exception { UploadManifest um = new UploadManifest( - digestUtil, - remotePathResolver, - result, - /*uploadSymlinks=*/ true, - /*allowSymlinks=*/ false); + digestUtil, result, execRoot, /*uploadSymlinks=*/ true, /*allowSymlinks=*/ false); ExecException e = assertThrows(ExecException.class, () -> um.addFiles(ImmutableList.of(dir))); assertThat(e).hasMessageThat().contains("symbolic link"); assertThat(e).hasMessageThat().contains("dir/link"); assertThat(e).hasMessageThat().contains("--remote_allow_symlink_upload"); } - // TODO(chiwang): Cleanup the tests, e.g. use java test data builder pattern. @Test public void downloadRelativeFileSymlink() throws Exception { RemoteCache cache = newRemoteCache(); ActionResult.Builder result = ActionResult.newBuilder(); - result.addOutputFileSymlinksBuilder().setPath("a/b/link").setTarget("../../foo"); + result.addOutputFileSymlinksBuilder().setPath("main/a/b/link").setTarget("../../foo"); // Doesn't check for dangling links, hence download succeeds. 
- cache.download( - context, - remotePathResolver, - result.build(), - null, - outputFilesLocker, - progressStatusListener); + cache.download(context, result.build(), execRoot, null, outputFilesLocker); Path path = execRoot.getRelative("a/b/link"); assertThat(path.isSymbolicLink()).isTrue(); assertThat(path.readSymbolicLink()).isEqualTo(PathFragment.create("../../foo")); @@ -634,15 +559,9 @@ public void downloadRelativeFileSymlink() throws Exception { public void downloadRelativeDirectorySymlink() throws Exception { RemoteCache cache = newRemoteCache(); ActionResult.Builder result = ActionResult.newBuilder(); - result.addOutputDirectorySymlinksBuilder().setPath("a/b/link").setTarget("foo"); + result.addOutputDirectorySymlinksBuilder().setPath("main/a/b/link").setTarget("foo"); // Doesn't check for dangling links, hence download succeeds. - cache.download( - context, - remotePathResolver, - result.build(), - null, - outputFilesLocker, - progressStatusListener); + cache.download(context, result.build(), execRoot, null, outputFilesLocker); Path path = execRoot.getRelative("a/b/link"); assertThat(path.isSymbolicLink()).isTrue(); assertThat(path.readSymbolicLink()).isEqualTo(PathFragment.create("foo")); @@ -660,15 +579,9 @@ public void downloadRelativeSymlinkInDirectory() throws Exception { .build(); Digest treeDigest = cache.addContents(context, tree.toByteArray()); ActionResult.Builder result = ActionResult.newBuilder(); - result.addOutputDirectoriesBuilder().setPath("dir").setTreeDigest(treeDigest); + result.addOutputDirectoriesBuilder().setPath("main/dir").setTreeDigest(treeDigest); // Doesn't check for dangling links, hence download succeeds. - cache.download( - context, - remotePathResolver, - result.build(), - null, - outputFilesLocker, - progressStatusListener); + cache.download(context, result.build(), execRoot, null, outputFilesLocker); Path path = execRoot.getRelative("dir/link"); assertThat(path.isSymbolicLink()).isTrue(); assertThat(path.readSymbolicLink()).isEqualTo(PathFragment.create("../foo")); @@ -683,14 +596,7 @@ public void downloadAbsoluteDirectorySymlinkError() throws Exception { IOException expected = assertThrows( IOException.class, - () -> - cache.download( - context, - remotePathResolver, - result.build(), - null, - outputFilesLocker, - progressStatusListener)); + () -> cache.download(context, result.build(), execRoot, null, outputFilesLocker)); assertThat(expected).hasMessageThat().contains("/abs/link"); assertThat(expected).hasMessageThat().contains("absolute path"); verify(outputFilesLocker).lock(); @@ -704,14 +610,7 @@ public void downloadAbsoluteFileSymlinkError() throws Exception { IOException expected = assertThrows( IOException.class, - () -> - cache.download( - context, - remotePathResolver, - result.build(), - null, - outputFilesLocker, - progressStatusListener)); + () -> cache.download(context, result.build(), execRoot, null, outputFilesLocker)); assertThat(expected).hasMessageThat().contains("/abs/link"); assertThat(expected).hasMessageThat().contains("absolute path"); verify(outputFilesLocker).lock(); @@ -732,14 +631,7 @@ public void downloadAbsoluteSymlinkInDirectoryError() throws Exception { IOException expected = assertThrows( IOException.class, - () -> - cache.download( - context, - remotePathResolver, - result.build(), - null, - outputFilesLocker, - progressStatusListener)); + () -> cache.download(context, result.build(), execRoot, null, outputFilesLocker)); assertThat(expected.getSuppressed()).isEmpty(); 
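    // As in the two tests above, an output that resolves to an absolute path is
    // rejected outright, presumably because absolute targets escape the execroot
    // and are not portable between machines; download() surfaces this as an
    // IOException naming both the symlink and its absolute target.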
assertThat(expected).hasMessageThat().contains("dir/link"); assertThat(expected).hasMessageThat().contains("/foo"); @@ -757,20 +649,14 @@ public void downloadFailureMaintainsDirectories() throws Exception { Digest otherFileDigest = cache.addContents(context, "otherfile"); ActionResult.Builder result = ActionResult.newBuilder(); - result.addOutputDirectoriesBuilder().setPath("outputdir").setTreeDigest(treeDigest); + result.addOutputDirectoriesBuilder().setPath("main/outputdir").setTreeDigest(treeDigest); + result.addOutputFiles( + OutputFile.newBuilder().setPath("main/outputdir/outputfile").setDigest(outputFileDigest)); result.addOutputFiles( - OutputFile.newBuilder().setPath("outputdir/outputfile").setDigest(outputFileDigest)); - result.addOutputFiles(OutputFile.newBuilder().setPath("otherfile").setDigest(otherFileDigest)); + OutputFile.newBuilder().setPath("main/otherfile").setDigest(otherFileDigest)); assertThrows( BulkTransferException.class, - () -> - cache.download( - context, - remotePathResolver, - result.build(), - null, - outputFilesLocker, - progressStatusListener)); + () -> cache.download(context, result.build(), execRoot, null, outputFilesLocker)); assertThat(cache.getNumFailedDownloads()).isEqualTo(1); assertThat(execRoot.getRelative("outputdir").exists()).isTrue(); assertThat(execRoot.getRelative("outputdir/outputfile").exists()).isFalse(); @@ -804,12 +690,7 @@ public void onErrorWaitForRemainingDownloadsToComplete() throws Exception { BulkTransferException.class, () -> cache.download( - context, - remotePathResolver, - result, - new FileOutErr(stdout, stderr), - outputFilesLocker, - progressStatusListener)); + context, result, execRoot, new FileOutErr(stdout, stderr), outputFilesLocker)); assertThat(downloadException.getSuppressed()).hasLength(1); assertThat(cache.getNumSuccessfulDownloads()).isEqualTo(2); assertThat(cache.getNumFailedDownloads()).isEqualTo(1); @@ -841,12 +722,7 @@ public void downloadWithMultipleErrorsAddsThemAsSuppressed() throws Exception { BulkTransferException.class, () -> cache.download( - context, - remotePathResolver, - result, - new FileOutErr(stdout, stderr), - outputFilesLocker, - progressStatusListener)); + context, result, execRoot, new FileOutErr(stdout, stderr), outputFilesLocker)); assertThat(e.getSuppressed()).hasLength(2); assertThat(e.getSuppressed()[0]).isInstanceOf(IOException.class); @@ -878,12 +754,7 @@ public void downloadWithDuplicateIOErrorsDoesNotSuppress() throws Exception { BulkTransferException.class, () -> cache.download( - context, - remotePathResolver, - result, - new FileOutErr(stdout, stderr), - outputFilesLocker, - progressStatusListener)); + context, result, execRoot, new FileOutErr(stdout, stderr), outputFilesLocker)); for (Throwable t : downloadException.getSuppressed()) { assertThat(t).isInstanceOf(IOException.class); @@ -915,12 +786,7 @@ public void downloadWithDuplicateInterruptionsDoesNotSuppress() throws Exception InterruptedException.class, () -> cache.download( - context, - remotePathResolver, - result, - new FileOutErr(stdout, stderr), - outputFilesLocker, - progressStatusListener)); + context, result, execRoot, new FileOutErr(stdout, stderr), outputFilesLocker)); assertThat(e.getSuppressed()).isEmpty(); assertThat(Throwables.getRootCause(e)).hasMessageThat().isEqualTo("reused interruption"); @@ -949,8 +815,7 @@ public void testDownloadWithStdoutStderrOnSuccess() throws Exception { .setStderrDigest(digestStderr) .build(); - cache.download( - context, remotePathResolver, result, spyOutErr, outputFilesLocker, 
@@ -949,8 +815,7 @@ public void testDownloadWithStdoutStderrOnSuccess() throws Exception {
             .setStderrDigest(digestStderr)
             .build();

-    cache.download(
-        context, remotePathResolver, result, spyOutErr, outputFilesLocker, progressStatusListener);
+    cache.download(context, result, execRoot, spyOutErr, outputFilesLocker);

     verify(spyOutErr, Mockito.times(2)).childOutErr();
     verify(spyChildOutErr).clearOut();
@@ -993,14 +858,7 @@ public void testDownloadWithStdoutStderrOnFailure() throws Exception {
             .build();
     assertThrows(
         BulkTransferException.class,
-        () ->
-            cache.download(
-                context,
-                remotePathResolver,
-                result,
-                spyOutErr,
-                outputFilesLocker,
-                progressStatusListener));
+        () -> cache.download(context, result, execRoot, spyOutErr, outputFilesLocker));
     verify(spyOutErr, Mockito.times(2)).childOutErr();
     verify(spyChildOutErr).clearOut();
     verify(spyChildOutErr).clearErr();
@@ -1028,8 +886,8 @@ public void testDownloadClashes() throws Exception {
     ActionResult r =
         ActionResult.newBuilder()
             .setExitCode(0)
-            .addOutputFiles(OutputFile.newBuilder().setPath("outputs/foo.tmp").setDigest(d1))
-            .addOutputFiles(OutputFile.newBuilder().setPath("outputs/foo").setDigest(d2))
+            .addOutputFiles(OutputFile.newBuilder().setPath("main/outputs/foo.tmp").setDigest(d1))
+            .addOutputFiles(OutputFile.newBuilder().setPath("main/outputs/foo").setDigest(d2))
             .build();

     Artifact a1 = ActionsTestUtil.createArtifact(artifactRoot, "foo.tmp");
@@ -1037,13 +895,7 @@ public void testDownloadClashes() throws Exception {

     // act

-    remoteCache.download(
-        context,
-        remotePathResolver,
-        r,
-        new FileOutErr(),
-        outputFilesLocker,
-        progressStatusListener);
+    remoteCache.download(context, r, execRoot, new FileOutErr(), outputFilesLocker);

     // assert

@@ -1065,8 +917,8 @@ public void testDownloadMinimalFiles() throws Exception {
     ActionResult r =
         ActionResult.newBuilder()
             .setExitCode(0)
-            .addOutputFiles(OutputFile.newBuilder().setPath("outputs/file1").setDigest(d1))
-            .addOutputFiles(OutputFile.newBuilder().setPath("outputs/file2").setDigest(d2))
+            .addOutputFiles(OutputFile.newBuilder().setPath("main/outputs/file1").setDigest(d1))
+            .addOutputFiles(OutputFile.newBuilder().setPath("main/outputs/file2").setDigest(d2))
             .build();

     Artifact a1 = ActionsTestUtil.createArtifact(artifactRoot, "file1");
@@ -1078,11 +930,12 @@ public void testDownloadMinimalFiles() throws Exception {
     InMemoryOutput inMemoryOutput =
         remoteCache.downloadMinimal(
             context,
-            remotePathResolver,
+            "action-id",
             r,
             ImmutableList.of(a1, a2),
             /* inMemoryOutputPath= */ null,
             new FileOutErr(),
+            execRoot,
             injector,
             outputFilesLocker);

@@ -1123,7 +976,7 @@ public void testDownloadMinimalDirectory() throws Exception {
         ActionResult.newBuilder()
             .setExitCode(0)
             .addOutputDirectories(
-                OutputDirectory.newBuilder().setPath("outputs/dir").setTreeDigest(dt))
+                OutputDirectory.newBuilder().setPath("main/outputs/dir").setTreeDigest(dt))
             .build();

     SpecialArtifact dir =
@@ -1140,11 +993,12 @@ public void testDownloadMinimalDirectory() throws Exception {
     InMemoryOutput inMemoryOutput =
         remoteCache.downloadMinimal(
             context,
-            remotePathResolver,
+            "action-id",
             r,
             ImmutableList.of(dir),
             /* inMemoryOutputPath= */ null,
             new FileOutErr(),
+            execRoot,
             injector,
             outputFilesLocker);

@@ -1215,11 +1069,12 @@ public void testDownloadMinimalDirectoryFails() throws Exception {
         () ->
             remoteCache.downloadMinimal(
                 context,
-                remotePathResolver,
+                "action-id",
                 r,
                 ImmutableList.of(dir),
                 /* inMemoryOutputPath= */ null,
                 new FileOutErr(),
+                execRoot,
                 injector,
                 outputFilesLocker));
     assertThat(e.getSuppressed()).hasLength(1);
@@ -1249,11 +1104,12 @@ public void testDownloadMinimalWithStdoutStderr() throws Exception {
     InMemoryOutput inMemoryOutput =
         remoteCache.downloadMinimal(
             context,
-            remotePathResolver,
+            "action-id",
             r,
             ImmutableList.of(),
             /* inMemoryOutputPath= */ null,
             outErr,
+            execRoot,
             injector,
             outputFilesLocker);

@@ -1279,8 +1135,8 @@ public void testDownloadMinimalWithInMemoryOutput() throws Exception {
     ActionResult r =
         ActionResult.newBuilder()
             .setExitCode(0)
-            .addOutputFiles(OutputFile.newBuilder().setPath("outputs/file1").setDigest(d1))
-            .addOutputFiles(OutputFile.newBuilder().setPath("outputs/file2").setDigest(d2))
+            .addOutputFiles(OutputFile.newBuilder().setPath("main/outputs/file1").setDigest(d1))
+            .addOutputFiles(OutputFile.newBuilder().setPath("main/outputs/file2").setDigest(d2))
             .build();
     Artifact a1 = ActionsTestUtil.createArtifact(artifactRoot, "file1");
     Artifact a2 = ActionsTestUtil.createArtifact(artifactRoot, "file2");
@@ -1292,11 +1148,12 @@ public void testDownloadMinimalWithInMemoryOutput() throws Exception {
     InMemoryOutput inMemoryOutput =
         remoteCache.downloadMinimal(
             context,
-            remotePathResolver,
+            "action-id",
             r,
             ImmutableList.of(a1, a2),
             inMemoryOutputPathFragment,
             new FileOutErr(),
+            execRoot,
             injector,
             outputFilesLocker);

@@ -1332,11 +1189,12 @@ public void testDownloadMinimalWithMissingInMemoryOutput() throws Exception {
     InMemoryOutput inMemoryOutput =
         remoteCache.downloadMinimal(
             context,
-            remotePathResolver,
+            "action-id",
             r,
             ImmutableList.of(a1),
             inMemoryOutputPathFragment,
             new FileOutErr(),
+            execRoot,
             injector,
             outputFilesLocker);

@@ -1365,27 +1223,6 @@ public void testDownloadEmptyBlobAndFile() throws Exception {
     assertThat(file.getFileSize()).isEqualTo(0);
   }

-  @Test
-  public void downloadOutErr_empty_doNotPerformDownload() throws Exception {
-    // Test that downloading empty stdout/stderr does not try to perform a download.
-
-    InMemoryRemoteCache remoteCache = newRemoteCache();
-    Digest emptyDigest = digestUtil.compute(new byte[0]);
-    ActionResult.Builder result = ActionResult.newBuilder();
-    result.setStdoutDigest(emptyDigest);
-    result.setStderrDigest(emptyDigest);
-
-    RemoteCache.waitForBulkTransfer(
-        remoteCache.downloadOutErr(
-            context,
-            result.build(),
-            new FileOutErr(execRoot.getRelative("stdout"), execRoot.getRelative("stderr"))),
-        true);
-
-    assertThat(remoteCache.getNumSuccessfulDownloads()).isEqualTo(0);
-    assertThat(remoteCache.getNumFailedDownloads()).isEqualTo(0);
-  }
-
   @Test
   public void testDownloadFileWithSymlinkTemplate() throws Exception {
     // Test that when a symlink template is provided, we don't actually download files to disk.
@@ -1441,18 +1278,13 @@ public void testDownloadDirectory() throws Exception {
     cas.put(barTreeDigest, barTreeMessage.toByteArray());

     ActionResult.Builder result = ActionResult.newBuilder();
-    result.addOutputFilesBuilder().setPath("a/foo").setDigest(fooDigest);
-    result.addOutputDirectoriesBuilder().setPath("a/bar").setTreeDigest(barTreeDigest);
+    result.addOutputFilesBuilder().setPath("main/a/foo").setDigest(fooDigest);
+    result.addOutputDirectoriesBuilder().setPath("main/a/bar").setTreeDigest(barTreeDigest);

     // act
     RemoteCache remoteCache = newRemoteCache(cas);
     remoteCache.download(
-        context,
-        remotePathResolver,
-        result.build(),
-        null,
-        outputFilesLocker,
-        progressStatusListener);
+        context, result.build(), execRoot, null, /* outputFilesLocker= */ () -> {});

     // assert
     assertThat(digestUtil.compute(execRoot.getRelative("a/foo"))).isEqualTo(fooDigest);
@@ -1472,17 +1304,12 @@ public void testDownloadEmptyDirectory() throws Exception {
     map.put(barTreeDigest, barTreeMessage.toByteArray());

     ActionResult.Builder result = ActionResult.newBuilder();
-    result.addOutputDirectoriesBuilder().setPath("a/bar").setTreeDigest(barTreeDigest);
+    result.addOutputDirectoriesBuilder().setPath("main/a/bar").setTreeDigest(barTreeDigest);

     // act
     RemoteCache remoteCache = newRemoteCache(map);
     remoteCache.download(
-        context,
-        remotePathResolver,
-        result.build(),
-        null,
-        outputFilesLocker,
-        progressStatusListener);
+        context, result.build(), execRoot, null, /* outputFilesLocker= */ () -> {});

     // assert
     assertThat(execRoot.getRelative("a/bar").isDirectory()).isTrue();
@@ -1521,18 +1348,13 @@ public void testDownloadNestedDirectory() throws Exception {
     map.put(quxDigest, "qux-contents".getBytes(UTF_8));

     ActionResult.Builder result = ActionResult.newBuilder();
-    result.addOutputFilesBuilder().setPath("a/foo").setDigest(fooDigest);
-    result.addOutputDirectoriesBuilder().setPath("a/bar").setTreeDigest(barTreeDigest);
+    result.addOutputFilesBuilder().setPath("main/a/foo").setDigest(fooDigest);
+    result.addOutputDirectoriesBuilder().setPath("main/a/bar").setTreeDigest(barTreeDigest);

     // act
     RemoteCache remoteCache = newRemoteCache(map);
     remoteCache.download(
-        context,
-        remotePathResolver,
-        result.build(),
-        null,
-        outputFilesLocker,
-        progressStatusListener);
+        context, result.build(), execRoot, null, /* outputFilesLocker= */ () -> {});

     // assert
     assertThat(digestUtil.compute(execRoot.getRelative("a/foo"))).isEqualTo(fooDigest);
@@ -1578,17 +1400,12 @@ public void testDownloadDirectoryWithSameHash() throws Exception {
     map.put(treeDigest, tree.toByteArray());

     ActionResult.Builder result = ActionResult.newBuilder();
-    result.addOutputDirectoriesBuilder().setPath("a/").setTreeDigest(treeDigest);
+    result.addOutputDirectoriesBuilder().setPath("main/a/").setTreeDigest(treeDigest);

     // act
     RemoteCache remoteCache = newRemoteCache(map);
     remoteCache.download(
-        context,
-        remotePathResolver,
-        result.build(),
-        null,
-        outputFilesLocker,
-        progressStatusListener);
+        context, result.build(), execRoot, null, /* outputFilesLocker= */ () -> {});

     // assert
     assertThat(digestUtil.compute(execRoot.getRelative("a/bar/foo/file"))).isEqualTo(fileDigest);
@@ -1631,10 +1448,10 @@ public void testUploadDirectory() throws Exception {
     ActionResult result =
         remoteCache.upload(
             context,
-            remotePathResolver,
             digestUtil.asActionKey(actionDigest),
             action,
             cmd,
+            execRoot,
             ImmutableList.of(fooFile, barDir),
             new FileOutErr(execRoot.getRelative("stdout"), execRoot.getRelative("stderr")));

@@ -1649,50 +1466,6 @@ public void testUploadDirectory() throws Exception {
     assertThat(remoteCache.findMissingDigests(context, toQuery)).isEmpty();
   }

-  @Test
-  public void upload_emptyBlobAndFile_doNotPerformUpload() throws Exception {
-    // Test that uploading an empty BLOB/file does not try to perform an upload.
-    InMemoryRemoteCache remoteCache = newRemoteCache();
-    Digest emptyDigest = fakeFileCache.createScratchInput(ActionInputHelper.fromPath("file"), "");
-    Path file = execRoot.getRelative("file");
-
-    Utils.getFromFuture(remoteCache.uploadBlob(context, emptyDigest, ByteString.EMPTY));
-    assertThat(remoteCache.findMissingDigests(context, ImmutableSet.of(emptyDigest)))
-        .containsExactly(emptyDigest);
-
-    Utils.getFromFuture(remoteCache.uploadFile(context, emptyDigest, file));
-    assertThat(remoteCache.findMissingDigests(context, ImmutableSet.of(emptyDigest)))
-        .containsExactly(emptyDigest);
-  }
-
-  @Test
-  public void upload_emptyOutputs_doNotPerformUpload() throws Exception {
-    // Test that uploading an empty output does not try to perform an upload.
-
-    // arrange
-    Digest emptyDigest =
-        fakeFileCache.createScratchInput(ActionInputHelper.fromPath("bar/test/wobble"), "");
-    Path file = execRoot.getRelative("bar/test/wobble");
-    InMemoryRemoteCache remoteCache = newRemoteCache();
-    Action action = Action.getDefaultInstance();
-    ActionKey actionDigest = digestUtil.computeActionKey(action);
-    Command cmd = Command.getDefaultInstance();
-
-    // act
-    remoteCache.upload(
-        context,
-        remotePathResolver,
-        actionDigest,
-        action,
-        cmd,
-        ImmutableList.of(file),
-        new FileOutErr(execRoot.getRelative("stdout"), execRoot.getRelative("stderr")));
-
-    // assert
-    assertThat(remoteCache.findMissingDigests(context, ImmutableSet.of(emptyDigest)))
-        .containsExactly(emptyDigest);
-  }
-
   @Test
   public void testUploadEmptyDirectory() throws Exception {
     // Test that uploading an empty directory works.
@@ -1712,10 +1485,10 @@ public void testUploadEmptyDirectory() throws Exception {
     ActionResult result =
         remoteCache.upload(
             context,
-            remotePathResolver,
             actionDigest,
             action,
             cmd,
+            execRoot,
             ImmutableList.of(barDir),
             new FileOutErr(execRoot.getRelative("stdout"), execRoot.getRelative("stderr")));

@@ -1769,10 +1542,10 @@ public void testUploadNestedDirectory() throws Exception {
     ActionResult result =
         remoteCache.upload(
             context,
-            remotePathResolver,
             actionDigest,
             action,
             cmd,
+            execRoot,
             ImmutableList.of(barDir),
             new FileOutErr(execRoot.getRelative("stdout"), execRoot.getRelative("stderr")));
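Before the next file, the matching upload entry point: the RemoteCacheTests hunks drop remotePathResolver and pass execRoot after the Command. As above, this is a sketch inferred from the three upload call sites, not the authoritative declaration:

  // Assumed shape, reconstructed from the upload call sites above.
  ActionResult upload(
      RemoteActionExecutionContext context,
      ActionKey actionKey,      // digestUtil.asActionKey(actionDigest) above
      Action action,
      Command command,
      Path execRoot,
      Collection<Path> outputs, // files and directories to upload
      FileOutErr outErr)
      throws IOException, InterruptedException;
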
diff --git a/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/downloader/GrpcRemoteDownloaderTest.java b/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/downloader/GrpcRemoteDownloaderTest.java
index e4e61345a95..69907640764 100644
--- a/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/downloader/GrpcRemoteDownloaderTest.java
+++ b/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/downloader/GrpcRemoteDownloaderTest.java
@@ -16,8 +16,8 @@
 import static com.google.common.truth.Truth.assertThat;
 import static com.google.devtools.build.lib.remote.util.Utils.getFromFuture;
-import static com.google.devtools.build.lib.testutil.MoreAsserts.assertThrows;
 import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.junit.Assert.assertThrows;
 import static org.mockito.Mockito.mock;

 import build.bazel.remote.asset.v1.FetchBlobRequest;
@@ -25,6 +25,7 @@
 import build.bazel.remote.asset.v1.FetchGrpc.FetchImplBase;
 import build.bazel.remote.asset.v1.Qualifier;
 import build.bazel.remote.execution.v2.Digest;
+import build.bazel.remote.execution.v2.RequestMetadata;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.io.ByteStreams;
@@ -37,7 +38,9 @@
 import com.google.devtools.build.lib.remote.ReferenceCountedChannel;
 import com.google.devtools.build.lib.remote.RemoteRetrier;
 import com.google.devtools.build.lib.remote.RemoteRetrier.ExponentialBackoff;
+import com.google.devtools.build.lib.remote.common.RemoteActionExecutionContext;
 import com.google.devtools.build.lib.remote.common.RemoteCacheClient;
+import com.google.devtools.build.lib.remote.grpc.ChannelConnectionFactory;
 import com.google.devtools.build.lib.remote.options.RemoteOptions;
 import com.google.devtools.build.lib.remote.util.DigestUtil;
 import com.google.devtools.build.lib.remote.util.InMemoryCacheClient;
@@ -49,12 +52,13 @@
 import com.google.devtools.common.options.Options;
 import com.google.protobuf.ByteString;
 import io.grpc.CallCredentials;
-import io.grpc.Context;
+import io.grpc.ManagedChannel;
 import io.grpc.Server;
 import io.grpc.inprocess.InProcessChannelBuilder;
 import io.grpc.inprocess.InProcessServerBuilder;
 import io.grpc.stub.StreamObserver;
 import io.grpc.util.MutableHandlerRegistry;
+import io.reactivex.rxjava3.core.Single;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
@@ -63,10 +67,9 @@
 import java.util.Map;
 import java.util.Optional;
 import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
 import org.junit.After;
-import org.junit.AfterClass;
 import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
@@ -80,14 +83,8 @@ public class GrpcRemoteDownloaderTest {
   private final MutableHandlerRegistry serviceRegistry = new MutableHandlerRegistry();
   private final String fakeServerName = "fake server for " + getClass();
   private Server fakeServer;
-  private Context withEmptyMetadata;
-  private Context prevContext;
-  private static ListeningScheduledExecutorService retryService;
-
-  @BeforeClass
-  public static void beforeEverything() {
-    retryService = MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(1));
-  }
+  private RemoteActionExecutionContext context;
+  private ListeningScheduledExecutorService retryService;

   @Before
   public final void setUp() throws Exception {
@@ -98,24 +95,27 @@ public final void setUp() throws Exception {
             .directExecutor()
             .build()
             .start();
-    withEmptyMetadata =
-        TracingMetadataUtils.contextWithMetadata(
-            "none", "none", DIGEST_UTIL.asActionKey(Digest.getDefaultInstance()));
-    prevContext = withEmptyMetadata.attach();
+    RequestMetadata metadata =
+        TracingMetadataUtils.buildMetadata(
+            "none",
+            "none",
+            DIGEST_UTIL.asActionKey(Digest.getDefaultInstance()).getDigest().getHash(),
+            null);
+    context = RemoteActionExecutionContext.create(metadata);
+
+    retryService = MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(1));
   }

   @After
   public void tearDown() throws Exception {
-    withEmptyMetadata.detach(prevContext);
+    retryService.shutdownNow();
+    retryService.awaitTermination(
+        com.google.devtools.build.lib.testutil.TestUtils.WAIT_TIMEOUT_SECONDS, TimeUnit.SECONDS);
+
     fakeServer.shutdownNow();
     fakeServer.awaitTermination();
   }

-  @AfterClass
-  public static void afterEverything() {
-    retryService.shutdownNow();
-  }
-
   private GrpcRemoteDownloader newDownloader(RemoteCacheClient cacheClient) throws IOException {
     final RemoteOptions remoteOptions = Options.getDefaults(RemoteOptions.class);
     final RemoteRetrier retrier =
@@ -125,12 +125,25 @@ private GrpcRemoteDownloader newDownloader(RemoteCacheClient cacheClient) throws
             retryService);
     final ReferenceCountedChannel channel =
         new ReferenceCountedChannel(
-            InProcessChannelBuilder.forName(fakeServerName).directExecutor().build());
+            new ChannelConnectionFactory() {
+              @Override
+              public Single<ChannelConnection> create() {
+                ManagedChannel ch =
+                    InProcessChannelBuilder.forName(fakeServerName).directExecutor().build();
+                return Single.just(new ChannelConnection(ch));
+              }
+
+              @Override
+              public int maxConcurrency() {
+                return 100;
+              }
+            });
     return new GrpcRemoteDownloader(
+        "none",
+        "none",
         channel.retain(),
         Optional.empty(),
         retrier,
-        withEmptyMetadata,
         cacheClient,
         remoteOptions);
   }
@@ -153,7 +166,14 @@ private static byte[] downloadBlob(
     Scratch scratch = new Scratch();
     final Path destination = scratch.resolve("output file path");
     downloader.download(
-        urls, authHeaders, guavaChecksum, canonicalId, destination, eventHandler, clientEnv);
+        urls,
+        authHeaders,
+        guavaChecksum,
+        canonicalId,
+        destination,
+        eventHandler,
+        clientEnv,
+        com.google.common.base.Optional.absent());

     try (InputStream in = destination.getInputStream()) {
       return ByteStreams.toByteArray(in);
@@ -184,7 +204,7 @@ public void fetchBlob(
     final RemoteCacheClient cacheClient = new InMemoryCacheClient();
     final GrpcRemoteDownloader downloader = newDownloader(cacheClient);

-    getFromFuture(cacheClient.uploadBlob(contentDigest, ByteString.copyFrom(content)));
+    getFromFuture(cacheClient.uploadBlob(context, contentDigest, ByteString.copyFrom(content)));
     final byte[] downloaded =
         downloadBlob(
             downloader, new URL("http://example.com/content.txt"), Optional.empty());
@@ -220,12 +240,12 @@ public void fetchBlob(
     final RemoteCacheClient cacheClient = new InMemoryCacheClient();
     final GrpcRemoteDownloader downloader = newDownloader(cacheClient);

-    getFromFuture(cacheClient.uploadBlob(contentDigest, ByteString.copyFrom(content)));
+    getFromFuture(cacheClient.uploadBlob(context, contentDigest, ByteString.copyFrom(content)));
     final byte[] downloaded =
         downloadBlob(
             downloader,
             new URL("http://example.com/content.txt"),
-            Optional.of(Checksum.fromString(KeyType.SHA256, contentDigest.getHash())));
+            Optional.of(Checksum.fromString(KeyType.SHA256, contentDigest.getHash())));

     assertThat(downloaded).isEqualTo(content);
   }
@@ -258,7 +278,8 @@ public void fetchBlob(
     final RemoteCacheClient cacheClient = new InMemoryCacheClient();
     final GrpcRemoteDownloader downloader = newDownloader(cacheClient);

-    getFromFuture(cacheClient.uploadBlob(contentDigest, ByteString.copyFromUtf8("wrong content")));
+    getFromFuture(
+        cacheClient.uploadBlob(context, contentDigest, ByteString.copyFromUtf8("wrong content")));

     IOException e =
         assertThrows(
@@ -267,8 +288,7 @@ public void fetchBlob(
                 downloadBlob(
                     downloader,
                     new URL("http://example.com/content.txt"),
-                    Optional.of(
-                        Checksum.fromString(KeyType.SHA256, contentDigest.getHash()))));
+                    Optional.of(Checksum.fromString(KeyType.SHA256, contentDigest.getHash()))));

     assertThat(e).hasMessageThat().contains(contentDigest.getHash());
     assertThat(e).hasMessageThat().contains(DIGEST_UTIL.computeAsUtf8("wrong content").getHash());
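The same migration off io.grpc.Context appears again in HttpCacheClientTest below. The test-side pattern, assembled from the hunks above, as a minimal sketch:

  // Build RequestMetadata once and carry it in an explicit context object
  // instead of attaching/detaching a thread-local gRPC Context. The last
  // buildMetadata argument is null in these tests; its role is not shown
  // in this diff.
  RequestMetadata metadata =
      TracingMetadataUtils.buildMetadata(
          "none", // build request id in the fixture
          "none", // command id in the fixture
          DIGEST_UTIL.asActionKey(Digest.getDefaultInstance()).getDigest().getHash(),
          null);
  RemoteActionExecutionContext context = RemoteActionExecutionContext.create(metadata);

  // Every cache call now takes the context as its first argument:
  getFromFuture(cacheClient.uploadBlob(context, contentDigest, ByteString.copyFrom(content)));
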
diff --git a/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/http/HttpCacheClientTest.java b/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/http/HttpCacheClientTest.java
index f01f93be87b..bcf5ce984bf 100644
--- a/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/http/HttpCacheClientTest.java
+++ b/dataset/GitHub_Java/bazelbuild.bazel/src/test/java/com/google/devtools/build/lib/remote/http/HttpCacheClientTest.java
@@ -15,10 +15,10 @@
 import static com.google.common.truth.Truth.assertThat;
 import static com.google.devtools.build.lib.remote.util.Utils.getFromFuture;
-import static com.google.devtools.build.lib.testutil.MoreAsserts.assertThrows;
 import static java.util.Collections.singletonList;
+import static org.junit.Assert.assertThrows;
 import static org.junit.Assert.fail;
-import static org.mockito.Matchers.any;
+import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.reset;
@@ -28,12 +28,13 @@
 import static org.mockito.Mockito.when;

 import build.bazel.remote.execution.v2.Digest;
-import com.google.api.client.util.Preconditions;
 import com.google.auth.Credentials;
 import com.google.common.base.Charsets;
+import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
+import com.google.devtools.build.lib.remote.common.RemoteActionExecutionContext;
 import com.google.devtools.build.lib.remote.util.DigestUtil;
-import com.google.devtools.build.lib.testutil.MoreAsserts.ThrowingRunnable;
+import com.google.devtools.build.lib.remote.util.TracingMetadataUtils;
 import com.google.devtools.build.lib.vfs.DigestHashFunction;
 import com.google.devtools.build.remote.worker.http.HttpCacheServerHandler;
 import com.google.protobuf.ByteString;
@@ -88,6 +89,7 @@
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.function.IntFunction;
 import javax.annotation.Nullable;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -102,6 +104,8 @@ public class HttpCacheClientTest {
   private static final DigestUtil DIGEST_UTIL = new DigestUtil(DigestHashFunction.SHA256);
   private static final Digest DIGEST = DIGEST_UTIL.computeAsUtf8("File Contents");

+  private RemoteActionExecutionContext remoteActionExecutionContext;
+
   private static ServerChannel createServer(
       Class<? extends ServerChannel> serverChannelClass,
       IntFunction<EventLoopGroup> newEventLoopGroup,
@@ -292,6 +296,14 @@ private HttpCacheClient createHttpBlobStore(
         serverChannel, timeoutSeconds, /* remoteVerifyDownloads= */ true, creds);
   }

+  @Before
+  public void setUp() throws Exception {
+    remoteActionExecutionContext =
+        RemoteActionExecutionContext.create(
+            TracingMetadataUtils.buildMetadata(
+                "none", "none", Digest.getDefaultInstance().getHash(), null));
+  }
+
   @Test
   public void testUploadAtMostOnce() throws Exception {
     ServerChannel server = null;
@@ -304,7 +316,7 @@ public void testUploadAtMostOnce() throws Exception {
       ByteString data = ByteString.copyFrom("foo bar", StandardCharsets.UTF_8);
       Digest digest = DIGEST_UTIL.compute(data.toByteArray());
-      blobStore.uploadBlob(digest, data).get();
+      blobStore.uploadBlob(remoteActionExecutionContext, digest, data).get();

       assertThat(cacheContents).hasSize(1);
       String cacheKey = "/cas/" + digest.getHash();
@@ -314,7 +326,7 @@ public void testUploadAtMostOnce() throws Exception {
       // Clear the remote cache contents
      cacheContents.clear();

-      blobStore.uploadBlob(digest, data).get();
+      blobStore.uploadBlob(remoteActionExecutionContext, digest, data).get();

       // Nothing should have been uploaded again.
       assertThat(cacheContents).isEmpty();
@@ -331,7 +343,8 @@ public void connectTimeout() throws Exception {
       Credentials credentials = newCredentials();
       HttpCacheClient blobStore = createHttpBlobStore(server, /* timeoutSeconds= */ 1, credentials);
-      getFromFuture(blobStore.downloadBlob(DIGEST, new ByteArrayOutputStream()));
+      getFromFuture(
+          blobStore.downloadBlob(remoteActionExecutionContext, DIGEST, new ByteArrayOutputStream()));

       fail("Exception expected");
     }
@@ -353,7 +366,9 @@ protected void channelRead0(
       Credentials credentials = newCredentials();
       HttpCacheClient blobStore = createHttpBlobStore(server, /* timeoutSeconds= */ 1, credentials);
       byte[] data = "File Contents".getBytes(Charsets.US_ASCII);
-      getFromFuture(blobStore.uploadBlob(DIGEST_UTIL.compute(data), ByteString.copyFrom(data)));
+      getFromFuture(
+          blobStore.uploadBlob(
+              remoteActionExecutionContext, DIGEST_UTIL.compute(data), ByteString.copyFrom(data)));
       fail("Exception expected");
     } finally {
       testServer.stop(server);
@@ -376,7 +391,9 @@ protected void channelRead0(
       Credentials credentials = newCredentials();
       HttpCacheClient blobStore = createHttpBlobStore(server, /* timeoutSeconds= */ 1, credentials);
-      getFromFuture(blobStore.downloadBlob(DIGEST, new ByteArrayOutputStream()));
+      getFromFuture(
+          blobStore.downloadBlob(
+              remoteActionExecutionContext, DIGEST, new ByteArrayOutputStream()));
       fail("Exception expected");
     } finally {
       testServer.stop(server);
@@ -414,7 +431,10 @@ protected void channelRead0(
               IOException.class,
               () ->
                   getFromFuture(
-                      blobStore.uploadBlob(DIGEST_UTIL.compute(data.toByteArray()), data)));
+                      blobStore.uploadBlob(
+                          remoteActionExecutionContext,
+                          DIGEST_UTIL.compute(data.toByteArray()),
+                          data)));
       assertThat(e.getCause()).isInstanceOf(TooLongFrameException.class);
     } finally {
       testServer.stop(server);
@@ -449,8 +469,12 @@ protected void channelRead0(ChannelHandlerContext ctx, FullHttpRequest request)
               server, /* timeoutSeconds= */ 1, /* remoteVerifyDownloads= */ true, credentials);
       Digest fooDigest = DIGEST_UTIL.compute("foo".getBytes(Charsets.UTF_8));
       try (OutputStream out = new ByteArrayOutputStream()) {
-        ThrowingRunnable download = () -> getFromFuture(blobStore.downloadBlob(fooDigest, out));
-        IOException e = assertThrows(IOException.class, download);
+        IOException e =
+            assertThrows(
+                IOException.class,
+                () ->
+                    getFromFuture(
+                        blobStore.downloadBlob(remoteActionExecutionContext, fooDigest, out)));
         assertThat(e).hasMessageThat().contains(fooDigest.getHash());
         assertThat(e).hasMessageThat().contains(DIGEST_UTIL.computeAsUtf8("bar").getHash());
       }
@@ -487,7 +511,7 @@ protected void channelRead0(ChannelHandlerContext ctx, FullHttpRequest request)
               server, /* timeoutSeconds= */ 1, /* remoteVerifyDownloads= */ false, credentials);
       Digest fooDigest = DIGEST_UTIL.compute("foo".getBytes(Charsets.UTF_8));
       try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
-        getFromFuture(blobStore.downloadBlob(fooDigest, out));
+        getFromFuture(blobStore.downloadBlob(remoteActionExecutionContext, fooDigest, out));
         assertThat(out.toByteArray()).isEqualTo("bar".getBytes(Charsets.UTF_8));
       }
     } finally {
@@ -512,7 +536,7 @@ private void expiredAuthTokensShouldBeRetried_get(
       Credentials credentials = newCredentials();
       HttpCacheClient blobStore = createHttpBlobStore(server, /* timeoutSeconds= */ 1, credentials);
       ByteArrayOutputStream out = Mockito.spy(new ByteArrayOutputStream());
-      getFromFuture(blobStore.downloadBlob(DIGEST, out));
+      getFromFuture(blobStore.downloadBlob(remoteActionExecutionContext, DIGEST, out));
       assertThat(out.toString(Charsets.US_ASCII.name())).isEqualTo("File Contents");
       verify(credentials, times(1)).refresh();
       verify(credentials, times(2)).getRequestMetadata(any(URI.class));
@@ -542,7 +566,10 @@ private void expiredAuthTokensShouldBeRetried_put(
       Credentials credentials = newCredentials();
       HttpCacheClient blobStore = createHttpBlobStore(server, /* timeoutSeconds= */ 1, credentials);
       byte[] data = "File Contents".getBytes(Charsets.US_ASCII);
-      blobStore.uploadBlob(DIGEST_UTIL.compute(data), ByteString.copyFrom(data)).get();
+      blobStore
+          .uploadBlob(
+              remoteActionExecutionContext, DIGEST_UTIL.compute(data), ByteString.copyFrom(data))
+          .get();
       verify(credentials, times(1)).refresh();
       verify(credentials, times(2)).getRequestMetadata(any(URI.class));
       verify(credentials, times(2)).hasRequestMetadata();
@@ -568,7 +595,9 @@ private void errorCodeThatShouldNotBeRetried_get(
       Credentials credentials = newCredentials();
       HttpCacheClient blobStore = createHttpBlobStore(server, /* timeoutSeconds= */ 1, credentials);
-      getFromFuture(blobStore.downloadBlob(DIGEST, new ByteArrayOutputStream()));
+      getFromFuture(
+          blobStore.downloadBlob(
+              remoteActionExecutionContext, DIGEST, new ByteArrayOutputStream()));
       fail("Exception expected.");
     } catch (Exception e) {
       assertThat(e).isInstanceOf(HttpException.class);
@@ -597,7 +626,10 @@ private void errorCodeThatShouldNotBeRetried_put(
       HttpCacheClient blobStore = createHttpBlobStore(server, /* timeoutSeconds= */ 1, credentials);
       byte[] oneByte = new byte[] {0};
       getFromFuture(
-          blobStore.uploadBlob(DIGEST_UTIL.compute(oneByte), ByteString.copyFrom(oneByte)));
+          blobStore.uploadBlob(
+              remoteActionExecutionContext,
+              DIGEST_UTIL.compute(oneByte),
+              ByteString.copyFrom(oneByte)));
       fail("Exception expected.");
     } catch (Exception e) {
       assertThat(e).isInstanceOf(HttpException.class);
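Taken together, the HttpCacheClientTest hunks thread the context through every transfer on the blob store. A minimal round-trip sketch using only the calls visible above (blobStore, DIGEST_UTIL, and remoteActionExecutionContext come from the test fixture):

  // Upload a blob, then read it back; both methods return futures, and both
  // now take the RemoteActionExecutionContext created in setUp().
  ByteString data = ByteString.copyFrom("File Contents", StandardCharsets.UTF_8);
  Digest digest = DIGEST_UTIL.compute(data.toByteArray());

  getFromFuture(blobStore.uploadBlob(remoteActionExecutionContext, digest, data));
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  getFromFuture(blobStore.downloadBlob(remoteActionExecutionContext, digest, out));
  // out now holds the original bytes.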