Commit: Change logging level
davidkyle committed Aug 15, 2023
1 parent a830787 commit 8b0a15f
Showing 2 changed files with 7 additions and 6 deletions.

First changed file:
@@ -90,7 +90,7 @@ protected void doRun() throws Exception {
     NlpConfig nlpConfig = (NlpConfig) config;
     NlpTask.Request request = processor.getRequestBuilder(nlpConfig)
         .buildRequest(text, requestIdStr, nlpConfig.getTokenization().getTruncate(), nlpConfig.getTokenization().getSpan());
-    logger.trace(() -> format("handling request [%s]", requestIdStr));
+    logger.debug(() -> format("handling request [%s]", requestIdStr));

     // Tokenization is non-trivial, so check for cancellation one last time before sending request to the native process
     if (isCancelled()) {
@@ -110,6 +110,7 @@ protected void doRun() throws Exception {
         logger.error(() -> "[" + getDeploymentId() + "] error writing to inference process", e);
         onFailure(ExceptionsHelper.serverError("Error writing to inference process", e));
     } catch (Exception e) {
+        logger.error(() -> "[" + getDeploymentId() + "] error running inference", e);
         onFailure(e);
     }
 }
@@ -141,7 +142,7 @@ private void processResult(
         return;
     }
     InferenceResults results = inferenceResultsProcessor.processResult(tokenization, pyTorchResult.inferenceResult());
-    logger.trace(() -> format("[%s] processed result for request [%s]", getDeploymentId(), getRequestId()));
+    logger.debug(() -> format("[%s] processed result for request [%s]", getDeploymentId(), getRequestId()));
     onSuccess(results);
 }

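The hunks above use Log4j 2's lazy, Supplier-based logging, which Elasticsearch builds on: the message lambda is evaluated only when the logger's level admits the message, so promoting these statements from TRACE to DEBUG makes them visible under a DEBUG configuration while still costing nothing when that level is disabled. A minimal standalone sketch of the pattern (the class name and request id below are illustrative, not taken from the commit; the commit's format helper is replaced with plain String.format):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class LazyLoggingDemo {

    private static final Logger logger = LogManager.getLogger(LazyLoggingDemo.class);

    public static void main(String[] args) {
        String requestIdStr = "request-1";

        // The Supplier is invoked only if DEBUG is enabled for this logger,
        // so the String.format cost is skipped entirely when it is not.
        logger.debug(() -> String.format("handling request [%s]", requestIdStr));

        // The same message at TRACE would additionally require the even more
        // verbose TRACE level to be configured before anything is emitted.
        logger.trace(() -> String.format("handling request [%s]", requestIdStr));
    }
}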

Second changed file:
@@ -149,7 +149,7 @@ void processInferenceResult(PyTorchResult result) {
         timeMs = 0L;
     }

-    logger.trace(() -> format("[%s] Parsed inference result with id [%s]", modelId, result.requestId()));
+    logger.debug(() -> format("[%s] Parsed inference result with id [%s]", modelId, result.requestId()));
     updateStats(timeMs, Boolean.TRUE.equals(result.isCacheHit()));
     PendingResult pendingResult = pendingResults.remove(result.requestId());
     if (pendingResult == null) {
@@ -163,7 +163,7 @@ void processThreadSettings(PyTorchResult result) {
     ThreadSettings threadSettings = result.threadSettings();
     assert threadSettings != null;

-    logger.trace(() -> format("[%s] Parsed thread settings result with id [%s]", modelId, result.requestId()));
+    logger.debug(() -> format("[%s] Parsed thread settings result with id [%s]", modelId, result.requestId()));
     PendingResult pendingResult = pendingResults.remove(result.requestId());
     if (pendingResult == null) {
         logger.debug(() -> format("[%s] no pending result for thread settings [%s]", modelId, result.requestId()));
@@ -176,7 +176,7 @@ void processAcknowledgement(PyTorchResult result) {
     AckResult ack = result.ackResult();
     assert ack != null;

-    logger.trace(() -> format("[%s] Parsed ack result with id [%s]", modelId, result.requestId()));
+    logger.debug(() -> format("[%s] Parsed ack result with id [%s]", modelId, result.requestId()));
     PendingResult pendingResult = pendingResults.remove(result.requestId());
     if (pendingResult == null) {
         logger.debug(() -> format("[%s] no pending result for ack [%s]", modelId, result.requestId()));
@@ -194,7 +194,7 @@ void processErrorResult(PyTorchResult result) {
         errorCount++;
     }

-    logger.trace(() -> format("[%s] Parsed error with id [%s]", modelId, result.requestId()));
+    logger.debug(() -> format("[%s] Parsed error with id [%s]", modelId, result.requestId()));
     PendingResult pendingResult = pendingResults.remove(result.requestId());
     if (pendingResult == null) {
         logger.debug(() -> format("[%s] no pending result for error [%s]", modelId, result.requestId()));
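With these messages now at DEBUG, they can be surfaced by raising a single logger one level rather than opting into the far noisier TRACE output. A minimal sketch using Log4j 2's Configurator from log4j-core; the package name is a placeholder, not the actual Elasticsearch package (in a running cluster the equivalent is the dynamic "logger.<package>" cluster setting):

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.config.Configurator;

public class EnableDebugDemo {

    public static void main(String[] args) {
        // Raise one logger to DEBUG at runtime; loggers beneath this name
        // inherit the level unless they are configured explicitly.
        // "org.example.ml.inference" is a placeholder package name.
        Configurator.setLevel("org.example.ml.inference", Level.DEBUG);
    }
}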
