Skip to content

Commit

Permalink
Java: Amazon Bedrock - Add code examples for async model invocation (#…
Browse files Browse the repository at this point in the history
…5709)

* Fix test output
  • Loading branch information
DennisTraub authored and ford-at-aws committed Dec 15, 2023
1 parent 1fae95c commit af79074
Show file tree
Hide file tree
Showing 12 changed files with 819 additions and 444 deletions.
92 changes: 80 additions & 12 deletions .doc_gen/metadata/bedrock-runtime_metadata.yaml
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
# zexi 0.4.0
bedrock-runtime_InvokeModel:
title: Invoke the specified &BR; model to run inference
title: Invoke the specified &BR; model to run an inference
title_abbrev: Invoke a model
synopsis: invoke an &BR; model to run inference.
synopsis: invoke an &BR; model to run an inference.
category:
languages:
SAP ABAP:
Expand All @@ -21,8 +21,8 @@ bedrock-runtime_InvokeModel:

bedrock-runtime_InvokeClaude:
title: Invoke the Anthropic Claude model on &BR; to run an inference
title_abbrev: Invoke Anthropic Claude 2 on &BR;
synopsis: invoke the anthropic claude model on &BR; to run inference.
title_abbrev: Invoke Anthropic Claude on &BR;
synopsis: invoke the Anthropic Claude model on &BR; to run an inference.
category:
languages:
Java:
Expand Down Expand Up @@ -52,10 +52,27 @@ bedrock-runtime_InvokeClaude:
services:
bedrock-runtime: {InvokeModel}

bedrock-runtime_InvokeClaudeAsync:
title: Asynchronously invoke the Anthropic Claude model on &BR; to run an inference
title_abbrev: Asynchronously invoke Anthropic Claude on &BR;
synopsis: asynchronously invoke the Anthropic Claude model on &BR; to run an inference.
category:
languages:
Java:
versions:
- sdk_version: 2
github: javav2/example_code/bedrock-runtime
excerpts:
- description: Invoke the Anthropic Claude foundation model (async).
snippet_tags:
- bedrock-runtime.java2.invoke_claude_async.main
services:
bedrock-runtime: {InvokeModel}

bedrock-runtime_InvokeJurassic2:
title: Invoke the AI21 Labs Jurassic-2 model on &BR; to run an inference
title_abbrev: Invoke AI21 Labs Jurassic-2 on &BR;
synopsis: invoke the ai21 labs jurassic-2 model on &BR; to run inference.
synopsis: invoke the AI21 Labs Jurassic-2 model on &BR; to run an inference.
category:
languages:
Java:
Expand Down Expand Up @@ -86,6 +103,23 @@ bedrock-runtime_InvokeJurassic2:
services:
bedrock-runtime: {InvokeModel}

bedrock-runtime_InvokeJurassic2Async:
title: Asynchronously invoke the AI21 Labs Jurassic-2 model on &BR; to run an inference
title_abbrev: Asynchronously invoke AI21 Labs Jurassic-2 on &BR;
synopsis: asynchronously invoke the AI21 Labs Jurassic-2 model on &BR; to run an inference.
category:
languages:
Java:
versions:
- sdk_version: 2
github: javav2/example_code/bedrock-runtime
excerpts:
- description: Invoke the AI21 Labs Jurassic-2 foundation model (async).
snippet_tags:
- bedrock-runtime.java2.invoke_jurassic-2_async.main
services:
bedrock-runtime: {InvokeModel}

bedrock-runtime_InvokeLlama2:
title: Invoke the Meta Llama 2 Chat model on &BR; to run an inference
title_abbrev: Invoke Meta Llama 2 on &BR;
Expand Down Expand Up @@ -120,10 +154,27 @@ bedrock-runtime_InvokeLlama2:
services:
bedrock-runtime: {InvokeModel}

bedrock-runtime_InvokeLlama2Async:
title: Asynchronously invoke the Meta Llama 2 Chat model on &BR; to run an inference
title_abbrev: Asynchronously invoke Meta Llama 2 on &BR;
synopsis: asynchronously invoke the Meta Llama 2 Chat model on &BR; to run an inference.
category:
languages:
Java:
versions:
- sdk_version: 2
github: javav2/example_code/bedrock-runtime
excerpts:
- description: Invoke the Meta Llama 2 foundation model (async).
snippet_tags:
- bedrock-runtime.java2.invoke_llama2_async.main
services:
bedrock-runtime: {InvokeModel}

bedrock-runtime_InvokeStableDiffusion:
title: Invoke the Stability.ai Stable Diffusion XL model on &BR; to run an inference
title_abbrev: Invoke Stability.ai Stable Diffusion XL on &BR;
synopsis: invoke the stability.ai stable diffusion xl on &BR; model to run inference.
synopsis: invoke the Stability.ai Stable Diffusion XL model on &BR; to run an inference.
category:
languages:
Java:
Expand All @@ -145,26 +196,43 @@ bedrock-runtime_InvokeStableDiffusion:
services:
bedrock-runtime: {InvokeModel}

bedrock-runtime_InvokeStableDiffusionAsync:
title: Asynchronously invoke the Stability.ai Stable Diffusion XL model on &BR; to run an inference
title_abbrev: Asynchronously invoke Stability.ai Stable Diffusion XL on &BR;
synopsis: asynchronously invoke the Stability.ai Stable Diffusion XL model on &BR; to run an inference.
category:
languages:
Java:
versions:
- sdk_version: 2
github: javav2/example_code/bedrock-runtime
excerpts:
- description: Invoke the Stability.ai Stable Diffusion XL foundation model (async).
snippet_tags:
- bedrock-runtime.java2.invoke_stable_diffusion_async.main
services:
bedrock-runtime: {InvokeModel}

bedrock-runtime_InvokeModelWithResponseStream:
title: Invoke the specified model on &BR; to run an inference with a response stream
title_abbrev: Invoke a model on &BR; with a response stream
synopsis: invoke an model on &BR; to run an inference with a response stream.
title: Invoke Anthropic Claude on &BR; to run an inference with a response stream
title_abbrev: Invoke Anthropic Claude on &BR; and process the response stream
synopsis: invoke Anthropic Claude on &BR; to run an inference with a response stream.
category:
languages:
Java:
versions:
- sdk_version: 2
github: javav2/example_code/bedrock-runtime
excerpts:
- description: Invoke a model on &BR; and process the response stream.
- description: Invoke Anthropic Claude on &BR; and process the response stream.
snippet_tags:
- bedrock-runtime.java2.invoke_model_with_response_stream.main
Python:
versions:
- sdk_version: 3
github: python/example_code/bedrock-runtime
excerpts:
- description: Invoke a model on &BR; and process the response stream.
- description: Invoke Anthropic Claude on &BR; and process the response stream.
snippet_tags:
- python.example_code.bedrock-runtime.InvokeModelWithResponseStream
services:
Expand All @@ -175,7 +243,7 @@ bedrock-runtime_Scenario_Invoke_models:
title_abbrev: Invoke multiple LLMs on &BR;
synopsis: invoke multiple large-language-models (LLMs) on &BR;.
synopsis_list:
- Generate text with Anthropic Claude 2.
- Generate text with Anthropic Claude.
- Generate text with AI21 Labs Jurassic-2.
- Generate text with Meta Llama 2 Chat.
category: Scenarios
Expand Down
16 changes: 10 additions & 6 deletions javav2/example_code/bedrock-runtime/Readme.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
<!--Generated by WRITEME on 2023-11-14 10:10:38.181587 (UTC)-->
<!--Generated by WRITEME on 2023-11-24 20:21:03.808605 (UTC)-->
# Amazon Bedrock Runtime code examples for the SDK for Java 2.x

## Overview
Expand Down Expand Up @@ -35,11 +35,15 @@ For prerequisites, see the [README](../../README.md#Prerequisites) in the `javav

Code excerpts that show you how to call individual service functions.

* [Invoke AI21 Labs Jurassic-2 on Amazon Bedrock](src/main/java/com/example/bedrockruntime/InvokeModel.java#L101) (`InvokeModel`)
* [Invoke Anthropic Claude 2 on Amazon Bedrock](src/main/java/com/example/bedrockruntime/InvokeModel.java#L51) (`InvokeModel`)
* [Invoke Meta Llama 2 on Amazon Bedrock](src/main/java/com/example/bedrockruntime/InvokeModel.java#L151) (`InvokeModel`)
* [Invoke Stability.ai Stable Diffusion XL on Amazon Bedrock](src/main/java/com/example/bedrockruntime/InvokeModel.java#L198) (`InvokeModel`)
* [Invoke a model on Amazon Bedrock with a response stream](src/main/java/com/example/bedrockruntime/InvokeModelWithResponseStream.java#L49) (`InvokeModelWithResponseStream`)
* [Asynchronously invoke AI21 Labs Jurassic-2 on Amazon Bedrock](src/main/java/com/example/bedrockruntime/InvokeModelAsync.java#L98) (`InvokeModel`)
* [Asynchronously invoke Anthropic Claude on Amazon Bedrock](src/main/java/com/example/bedrockruntime/InvokeModelAsync.java#L37) (`InvokeModel`)
* [Asynchronously invoke Meta Llama 2 on Amazon Bedrock](src/main/java/com/example/bedrockruntime/InvokeModelAsync.java#L160) (`InvokeModel`)
* [Asynchronously invoke Stability.ai Stable Diffusion XL on Amazon Bedrock](src/main/java/com/example/bedrockruntime/InvokeModelAsync.java#L219) (`InvokeModel`)
* [Invoke AI21 Labs Jurassic-2 on Amazon Bedrock](src/main/java/com/example/bedrockruntime/InvokeModel.java#L85) (`InvokeModel`)
* [Invoke Anthropic Claude on Amazon Bedrock](src/main/java/com/example/bedrockruntime/InvokeModel.java#L36) (`InvokeModel`)
* [Invoke Anthropic Claude on Amazon Bedrock and process the response stream](src/main/java/com/example/bedrockruntime/InvokeModelWithResponseStream.java#L34) (`InvokeModelWithResponseStream`)
* [Invoke Meta Llama 2 on Amazon Bedrock](src/main/java/com/example/bedrockruntime/InvokeModel.java#L133) (`InvokeModel`)
* [Invoke Stability.ai Stable Diffusion XL on Amazon Bedrock](src/main/java/com/example/bedrockruntime/InvokeModel.java#L178) (`InvokeModel`)

## Run the examples

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/

package com.example.bedrockruntime;

import software.amazon.awssdk.services.bedrockruntime.model.BedrockRuntimeException;

import java.io.FileOutputStream;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Base64;
import java.util.Random;

import static com.example.bedrockruntime.InvokeModel.*;

/**
 * Demonstrates the invocation of the following models:
 * Anthropic Claude 2, AI21 Labs Jurassic-2, Meta Llama 2 Chat, and Stability.ai Stable Diffusion XL.
 */
public class BedrockRuntimeUsageDemo {

    private static final Random random = new Random();

    // Amazon Bedrock model identifiers for the models demonstrated below.
    private static final String CLAUDE = "anthropic.claude-v2";
    private static final String JURASSIC2 = "ai21.j2-mid-v1";
    private static final String LLAMA2 = "meta.llama2-13b-chat-v1";
    private static final String STABLE_DIFFUSION = "stability.stable-diffusion-xl";

    /**
     * Runs the three demo scenarios in order: synchronous text generation,
     * text generation with a response stream, and image generation.
     */
    public static void main(String[] args) {
        BedrockRuntimeUsageDemo.textToText();
        BedrockRuntimeUsageDemo.textToTextWithResponseStream();
        BedrockRuntimeUsageDemo.textToImage();
    }

    /** Sends the same text prompt to each of the three text-generation models. */
    private static void textToText() {

        String prompt = "In one sentence, what is a large-language model?";
        BedrockRuntimeUsageDemo.invoke(CLAUDE, prompt, null);
        BedrockRuntimeUsageDemo.invoke(JURASSIC2, prompt, null);
        BedrockRuntimeUsageDemo.invoke(LLAMA2, prompt, null);
    }

    /**
     * Dispatches a prompt to the model identified by modelId and prints the result.
     *
     * @param modelId     one of the model ID constants defined on this class
     * @param prompt      the text prompt to send to the model
     * @param stylePreset optional style preset; only used by Stable Diffusion, may be null
     * @throws IllegalStateException   if modelId is not one of the known constants
     * @throws BedrockRuntimeException if the model invocation fails (rethrown after logging)
     */
    private static void invoke(String modelId, String prompt, String stylePreset) {
        System.out.println("\n" + new String(new char[88]).replace("\0", "-"));
        System.out.println("Invoking: " + modelId);
        System.out.println("Prompt: " + prompt);

        try {
            switch (modelId) {
                case CLAUDE:
                    printResponse(invokeClaude(prompt));
                    return;
                case JURASSIC2:
                    printResponse(invokeJurassic2(prompt));
                    return;
                case LLAMA2:
                    printResponse(invokeLlama2(prompt));
                    return;
                case STABLE_DIFFUSION:
                    // Mask to the low 32 bits so the seed stays in a non-negative
                    // 32-bit range — presumably what the model expects; TODO confirm.
                    long seed = (random.nextLong() & 0xFFFFFFFFL);
                    String base64ImageData = invokeStableDiffusion(prompt, seed, stylePreset);
                    String imagePath = saveImage(base64ImageData);
                    System.out.printf("Success: The generated image has been saved to %s%n", imagePath);
                    return;
                default:
                    throw new IllegalStateException("Unexpected value: " + modelId);
            }
        } catch (BedrockRuntimeException e) {
            System.out.println("Couldn't invoke model " + modelId + ": " + e.getMessage());
            throw e;
        }
    }

    /** Sends a text prompt to Claude and streams the response to the console. */
    private static void textToTextWithResponseStream() {
        String prompt = "What is a large-language model?";
        BedrockRuntimeUsageDemo.invoke(CLAUDE, prompt);
    }

    /**
     * Invokes the given model with a response stream and prints the chunks as they arrive.
     *
     * @param modelId the model to invoke (only Claude is wired up here)
     * @param prompt  the text prompt to send
     * @throws BedrockRuntimeException if the model invocation fails (rethrown after logging)
     */
    private static void invoke(String modelId, String prompt) {
        System.out.println(new String(new char[88]).replace("\0", "-"));
        System.out.printf("Invoking %s with response stream%n", modelId);
        System.out.println("Prompt: " + prompt);

        try {
            // silent=false: the streaming helper echoes each chunk to the console.
            var silent = false;
            InvokeModelWithResponseStream.invokeClaude(prompt, silent);
        } catch (BedrockRuntimeException e) {
            System.out.println("Couldn't invoke model " + modelId + ": " + e.getMessage());
            throw e;
        }
    }

    /** Generates an image with Stable Diffusion and saves it to the output directory. */
    private static void textToImage() {
        String imagePrompt = "A sunset over the ocean";
        String stylePreset = "photographic";
        BedrockRuntimeUsageDemo.invoke(STABLE_DIFFUSION, imagePrompt, stylePreset);
    }

    /** Prints a model's generated text to the console. */
    private static void printResponse(String response) {
        System.out.printf("Generated text: %s%n", response);
    }

    /**
     * Decodes a Base64-encoded image and saves it under an "output" directory
     * next to the compiled classes, choosing the first unused image_N.png name.
     *
     * @param base64ImageData the Base64-encoded PNG data returned by the model
     * @return the path of the saved file, or never (the process exits on failure)
     */
    private static String saveImage(String base64ImageData) {
        try {
            String directory = "output";
            // Resolve the directory relative to the location of the compiled classes.
            URI uri = InvokeModel.class.getProtectionDomain().getCodeSource().getLocation().toURI();
            Path outputPath = Paths.get(uri).getParent().getParent().resolve(directory);

            if (!Files.exists(outputPath)) {
                Files.createDirectories(outputPath);
            }

            // Find the first unused image_<i>.png file name so reruns don't overwrite.
            int i = 1;
            String fileName;
            do {
                fileName = String.format("image_%d.png", i);
                i++;
            } while (Files.exists(outputPath.resolve(fileName)));

            byte[] imageBytes = Base64.getDecoder().decode(base64ImageData);

            Path filePath = outputPath.resolve(fileName);
            // Files.write opens, writes, and closes the file in one call —
            // no manual stream handling needed.
            Files.write(filePath, imageBytes);

            return filePath.toString();
        } catch (Exception e) {
            // Demo-only handling: report with context and abort the process.
            System.err.println("Failed to save the generated image: " + e.getMessage());
            System.exit(1);
        }
        return null;
    }
}
Loading

0 comments on commit af79074

Please sign in to comment.