Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Examples for the new Amazon Bedrock Converse API (Python, C#, JavaScript) #6499

Merged
merged 30 commits into from
Jun 4, 2024
Merged
Show file tree
Hide file tree
Changes from 26 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
978 changes: 731 additions & 247 deletions .doc_gen/metadata/bedrock-runtime_metadata.yaml

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions .doc_gen/validation.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ allow_list:
- "ListTagsExample/ListTagsExample/ListTags"
- "ListTagsForVaultExample/ListTagsForVault"
- "ListVoiceConnectorTerminationCredentials"
- "Models/AmazonTitanText/Converse/Converse"
- "Models/AnthropicClaude/Converse/Converse"
- "ModifyReplicationGroupShardConfiguration"
- "ModifyTrafficMirrorFilterNetworkServices"
- "PutBucketIntelligentTieringConfiguration"
Expand Down
2 changes: 2 additions & 0 deletions dotnetv3/Bedrock-runtime/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Visual Studio IDE working directory (local state, never committed).
/.vs/
# Locally generated tooling output — presumably build/test helpers; not part of the examples.
/Tools/
211 changes: 202 additions & 9 deletions dotnetv3/Bedrock-runtime/BedrockRuntimeExamples.sln

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
<!-- Console-app project for the Converse API example; the code lives in Converse.csx. -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>

<ItemGroup>
<!-- The example uses a .csx extension, so it is outside the default *.cs glob
     and must be added to the compilation explicitly. -->
<Compile Include="Converse.csx" />
</ItemGroup>

<ItemGroup>
<!-- AWS SDK packages that provide the AmazonBedrockRuntimeClient used by the example. -->
<PackageReference Include="AWSSDK.BedrockRuntime" Version="3.7.303" />
<PackageReference Include="AWSSDK.Core" Version="3.7.304.8" />
</ItemGroup>
</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

// snippet-start:[BedrockRuntime.dotnetv3.Converse_Ai21LabsJurassic2]
// Use the Converse API to send a text message to AI21 Labs Jurassic-2.

using Amazon;
using Amazon.BedrockRuntime;
using Amazon.BedrockRuntime.Model;
using System;
using System.Collections.Generic;

// Create a Bedrock Runtime client in the AWS Region you want to use.
var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);

// Set the model ID, e.g., Jurassic-2 Mid.
var modelId = "ai21.j2-mid-v1";

// Define the user message.
var userMessage = "Describe the purpose of a 'hello world' program in one line.";

// Create a request with the model ID, the user message, and an inference configuration.
var request = new ConverseRequest
{
ModelId = modelId,
Messages = new List<Message>
{
new Message
{
Role = ConversationRole.User,
Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
}
},
InferenceConfig = new InferenceConfiguration()
{
MaxTokens = 512,
Temperature = 0.5F,
TopP = 0.9F
}
};

try
{
// Send the request to the Bedrock Runtime and wait for the result.
// Await the call instead of blocking on .Result: blocking risks deadlocks
// and wraps failures in an AggregateException, which would bypass the
// AmazonBedrockRuntimeException handler below.
var response = await client.ConverseAsync(request);

// Extract and print the response text.
string responseText = response?.Output?.Message?.Content?[0]?.Text ?? "";
Console.WriteLine(responseText);
}
catch (AmazonBedrockRuntimeException e)
{
Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
Environment.Exit(1);
}

// snippet-end:[BedrockRuntime.dotnetv3.Converse_Ai21LabsJurassic2]
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
<!-- Console-app project for the InvokeModel (native inference) example; the code lives in InvokeModel.csx. -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>

<ItemGroup>
<!-- The example uses a .csx extension, so it is outside the default *.cs glob
     and must be added to the compilation explicitly. -->
<Compile Include="InvokeModel.csx" />
</ItemGroup>

<ItemGroup>
<!-- AWS SDK packages that provide the AmazonBedrockRuntimeClient used by the example. -->
<PackageReference Include="AWSSDK.BedrockRuntime" Version="3.7.303" />
<PackageReference Include="AWSSDK.Core" Version="3.7.304.8" />
</ItemGroup>
</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

// snippet-start:[BedrockRuntime.dotnetv3.InvokeModel_Ai21LabsJurassic2]
// Use the native inference API to send a text message to AI21 Labs Jurassic-2.

using Amazon;
using Amazon.BedrockRuntime;
using Amazon.BedrockRuntime.Model;
using System;
using System.IO;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;

// Create a Bedrock Runtime client in the AWS Region you want to use.
var runtimeClient = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);

// Set the model ID, e.g., Jurassic-2 Mid.
var modelId = "ai21.j2-mid-v1";

// Define the user message.
var prompt = "Describe the purpose of a 'hello world' program in one line.";

// Format the request payload using the model's native structure.
var payload = JsonSerializer.Serialize(new
{
prompt = prompt,
maxTokens = 512,
temperature = 0.5
});

// Wrap the JSON payload in an InvokeModel request for the chosen model.
var invokeRequest = new InvokeModelRequest
{
ModelId = modelId,
Body = new MemoryStream(Encoding.UTF8.GetBytes(payload)),
ContentType = "application/json"
};

try
{
// Send the request to the Bedrock Runtime and wait for the response.
var response = await runtimeClient.InvokeModelAsync(invokeRequest);

// Decode the response body.
var responseJson = await JsonNode.ParseAsync(response.Body);

// Extract and print the generated text from the model's native response shape.
Console.WriteLine(responseJson["completions"]?[0]?["data"]?["text"] ?? "");
}
catch (AmazonBedrockRuntimeException e)
{
Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
Environment.Exit(1);
}

// snippet-end:[BedrockRuntime.dotnetv3.InvokeModel_Ai21LabsJurassic2]
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
<!-- Console-app project for the Converse API example; the code lives in Converse.csx. -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>

<ItemGroup>
<!-- The example uses a .csx extension, so it is outside the default *.cs glob
     and must be added to the compilation explicitly. -->
<Compile Include="Converse.csx" />
</ItemGroup>

<ItemGroup>
<!-- AWS SDK packages that provide the AmazonBedrockRuntimeClient used by the example. -->
<PackageReference Include="AWSSDK.BedrockRuntime" Version="3.7.303" />
<PackageReference Include="AWSSDK.Core" Version="3.7.304.8" />
</ItemGroup>
</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

// snippet-start:[BedrockRuntime.dotnetv3.Converse_AmazonTitanText]
// Use the Converse API to send a text message to Amazon Titan Text.

using Amazon;
using Amazon.BedrockRuntime;
using Amazon.BedrockRuntime.Model;
using System;
using System.Collections.Generic;

// Create a Bedrock Runtime client in the AWS Region you want to use.
var bedrockClient = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);

// Set the model ID, e.g., Titan Text Premier.
var modelId = "amazon.titan-text-premier-v1:0";

// Define the user message.
var userMessage = "Describe the purpose of a 'hello world' program in one line.";

// Wrap the user message in a single conversation turn.
var userTurn = new Message
{
Role = ConversationRole.User,
Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
};

// Create a request with the model ID, the user message, and an inference configuration.
var request = new ConverseRequest
{
ModelId = modelId,
Messages = new List<Message> { userTurn },
InferenceConfig = new InferenceConfiguration
{
MaxTokens = 512,
Temperature = 0.5F,
TopP = 0.9F
}
};

try
{
// Send the request to the Bedrock Runtime and wait for the result.
var response = await bedrockClient.ConverseAsync(request);

// Extract and print the response text (empty string if the response has no text).
Console.WriteLine(response?.Output?.Message?.Content?[0]?.Text ?? "");
}
catch (AmazonBedrockRuntimeException e)
{
Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
Environment.Exit(1);
}

// snippet-end:[BedrockRuntime.dotnetv3.Converse_AmazonTitanText]
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
<!-- Console-app project for the streaming Converse API example; the code lives in ConverseStream.csx. -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>

<ItemGroup>
<!-- The example uses a .csx extension, so it is outside the default *.cs glob
     and must be added to the compilation explicitly. -->
<Compile Include="ConverseStream.csx" />
</ItemGroup>

<ItemGroup>
<!-- AWS SDK packages that provide the AmazonBedrockRuntimeClient used by the example. -->
<PackageReference Include="AWSSDK.BedrockRuntime" Version="3.7.303" />
<PackageReference Include="AWSSDK.Core" Version="3.7.304.8" />
</ItemGroup>
</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

// snippet-start:[BedrockRuntime.dotnetv3.ConverseStream_AmazonTitanText]
// Use the Converse API to send a text message to Amazon Titan Text
// and print the response stream.

using Amazon;
using Amazon.BedrockRuntime;
using Amazon.BedrockRuntime.Model;
using System;
using System.Collections.Generic;
using System.Linq;

// Create a Bedrock Runtime client in the AWS Region you want to use.
var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);

// Set the model ID, e.g., Titan Text Premier.
var modelId = "amazon.titan-text-premier-v1:0";

// Define the user message.
var userMessage = "Describe the purpose of a 'hello world' program in one line.";

// Create a request with the model ID, the user message, and an inference configuration.
var request = new ConverseStreamRequest
{
ModelId = modelId,
Messages = new List<Message>
{
new Message
{
Role = ConversationRole.User,
Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } }
}
},
InferenceConfig = new InferenceConfiguration()
{
MaxTokens = 512,
Temperature = 0.5F,
TopP = 0.9F
}
};

try
{
// Send the request to the Bedrock Runtime and wait for the result.
var response = await client.ConverseStreamAsync(request);

// Extract and print the streamed response text in real-time.
// A pattern match tests and casts in one step, replacing the redundant
// "is" check followed by a second "as" cast of the same value.
foreach (var chunk in response.Stream.AsEnumerable())
{
if (chunk is ContentBlockDeltaEvent deltaEvent)
{
Console.Write(deltaEvent.Delta.Text);
}
}
}
catch (AmazonBedrockRuntimeException e)
{
Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
Environment.Exit(1);
}

// snippet-end:[BedrockRuntime.dotnetv3.ConverseStream_AmazonTitanText]
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
<!-- Console-app project for the InvokeModel (native inference) example; the code lives in InvokeModel.csx. -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>

<ItemGroup>
<!-- The example uses a .csx extension, so it is outside the default *.cs glob
     and must be added to the compilation explicitly. -->
<Compile Include="InvokeModel.csx" />
</ItemGroup>

<ItemGroup>
<!-- AWS SDK packages that provide the AmazonBedrockRuntimeClient used by the example. -->
<PackageReference Include="AWSSDK.BedrockRuntime" Version="3.7.303" />
<PackageReference Include="AWSSDK.Core" Version="3.7.304.8" />
</ItemGroup>
</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

// snippet-start:[BedrockRuntime.dotnetv3.InvokeModel_AmazonTitanText]
// Use the native inference API to send a text message to Amazon Titan Text.

using Amazon;
using Amazon.BedrockRuntime;
using Amazon.BedrockRuntime.Model;
using System;
using System.IO;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;

// Create a Bedrock Runtime client in the AWS Region you want to use.
var runtimeClient = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1);

// Set the model ID, e.g., Titan Text Premier.
var modelId = "amazon.titan-text-premier-v1:0";

// Define the user message.
var prompt = "Describe the purpose of a 'hello world' program in one line.";

// Format the request payload using the model's native structure.
var payload = JsonSerializer.Serialize(new
{
inputText = prompt,
textGenerationConfig = new
{
maxTokenCount = 512,
temperature = 0.5
}
});

// Wrap the JSON payload in an InvokeModel request for the chosen model.
var invokeRequest = new InvokeModelRequest
{
ModelId = modelId,
Body = new MemoryStream(Encoding.UTF8.GetBytes(payload)),
ContentType = "application/json"
};

try
{
// Send the request to the Bedrock Runtime and wait for the response.
var response = await runtimeClient.InvokeModelAsync(invokeRequest);

// Decode the response body.
var responseJson = await JsonNode.ParseAsync(response.Body);

// Extract and print the generated text from the model's native response shape.
Console.WriteLine(responseJson["results"]?[0]?["outputText"] ?? "");
}
catch (AmazonBedrockRuntimeException e)
{
Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}");
Environment.Exit(1);
}

// snippet-end:[BedrockRuntime.dotnetv3.InvokeModel_AmazonTitanText]
Loading
Loading