Commit
system message
gyliu513 committed Sep 17, 2024
1 parent 0578cdf commit 8429bed
Showing 4 changed files with 71 additions and 83 deletions.
16 changes: 16 additions & 0 deletions aws/aws-model-list.py
@@ -0,0 +1,16 @@
from dotenv import load_dotenv
load_dotenv()

import boto3

# Initialize the Bedrock client (note the service_name is 'bedrock', not 'bedrock-runtime')
client = boto3.client(service_name="bedrock", region_name="us-west-2")

# List available foundation models
response = client.list_foundation_models()

# Print model IDs and their details
for model in response['modelSummaries']:
    print(f"Model ID: {model['modelId']}")
    print(f"Provider: {model.get('providerName', 'N/A')}")
    print("-" * 50)
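For accounts with many models the unfiltered listing gets long; list_foundation_models also takes optional filters. A minimal sketch, assuming the byProvider and byOutputModality filter parameters available in recent boto3 releases:

from dotenv import load_dotenv
load_dotenv()

import boto3

client = boto3.client(service_name="bedrock", region_name="us-west-2")

# Narrow the listing to text-capable Anthropic models (filter names assumed from the boto3 Bedrock API).
response = client.list_foundation_models(byProvider="Anthropic", byOutputModality="TEXT")

for model in response['modelSummaries']:
    print(f"Model ID: {model['modelId']}")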
2 changes: 1 addition & 1 deletion aws/bedrock1.py
@@ -15,7 +15,7 @@
from traceloop.sdk import Traceloop
from traceloop.sdk.decorators import task, workflow

Removed: Traceloop.init(app_name="joke_generation_service")
Added:   # Traceloop.init(app_name="joke_generation_service")
bedrock_runtime = boto3.client(service_name="bedrock-runtime", region_name="us-west-2")


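If the goal is to make tracing optional rather than remove it, one alternative (a sketch, not part of this commit) is to gate the call on the TRACELOOP_API_KEY environment variable the samples already load from .env:

import os

from traceloop.sdk import Traceloop

# Hypothetical toggle: only initialize tracing when a Traceloop API key is configured.
if os.getenv("TRACELOOP_API_KEY"):
    Traceloop.init(app_name="joke_generation_service")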
98 changes: 54 additions & 44 deletions aws/bedrock2.py
@@ -1,63 +1,73 @@
Unchanged header:

'''
Put following parameter to a .env file
TRACELOOP_API_KEY=
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
'''

from dotenv import load_dotenv
load_dotenv()

import boto3
import json

from traceloop.sdk import Traceloop
from traceloop.sdk.decorators import task, workflow

Removed (the Traceloop-decorated joke workflow that called Amazon Titan):

Traceloop.init(app_name="joke_generation_service")
bedrock_runtime = boto3.client(service_name="bedrock-runtime", region_name="us-west-2")

@task(name="gyliu_joke_creation")
def create_joke():
    messages = [
        {
            "role": "system",
            "content": "You are a helpful assistant that provides weather forecasts."
        },
        {
            "role": "user",
            "content": "What's the weather like today in New York City?"
        }
    ]

    # Prepare the payload
    payload = {
        "messages": messages,
        "max_tokens_to_sample": 150
    }

    body = json.dumps(payload)

    response = bedrock_runtime.invoke_model(
        body=body,
        modelId="amazon.titan-text-express-v1",
        accept="application/json",
        contentType="application/json"
    )

    response_body = json.loads(response.get('body').read())
    outputText = response_body.get('results')[0].get('outputText')

    text = outputText[outputText.index('\n')+1:]
    about_lambda = text.strip()
    return about_lambda

@workflow(name="gyliu_joke_generator")
def joke_workflow():
    print(create_joke())

joke_workflow()

Added (a standalone script that sends a top-level system message to Anthropic Claude through the model's native inference API):

# Use the native inference API to send a text message to Anthropic Claude.
from botocore.exceptions import ClientError

# Create a Bedrock Runtime client in the AWS Region of your choice.
client = boto3.client("bedrock-runtime", region_name="us-east-1")

# Set the model ID, e.g., Claude 3 Haiku.
model_id = "anthropic.claude-v2"

# Define the prompt for the model.
# prompt = "Describe the purpose of a 'hello world' program in one line."

# Prepare the messages
'''
messages = [
    {
        "role": "system",
        "content": [{"type": "text", "text": "You are a helpful assistant that provides information about AWS services."}],
    },
    {
        "role": "user",
        "content": [{"type": "text", "text": "Tell me about AWS Bedrock"}],
    }
]
native_request = {
    "anthropic_version": "bedrock-2023-05-31",
    "max_tokens": 512,
    "temperature": 0.5,
    "messages": messages,
}
'''

# Format the request payload using the model's native structure.
native_request = {
    "system": "You are a helpful assistant that provides information about AWS services.",
    "anthropic_version": "bedrock-2023-05-31",
    "max_tokens": 512,
    "temperature": 0.5,
    "messages": [
        {
            "role": "user",
            "content": [{"type": "text", "text": "Tell me about AWS Bedrock"}],
        }
    ],
}

# Convert the native request to JSON.
request = json.dumps(native_request)

try:
    # Invoke the model with the request.
    response = client.invoke_model(modelId=model_id, body=request)

except (ClientError, Exception) as e:
    print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}")
    exit(1)

# Decode the response body.
model_response = json.loads(response["body"].read())

# Extract and print the response text.
response_text = model_response["content"][0]["text"]
print(response_text)
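For comparison, the same system message can be supplied without the model's native payload by using the Bedrock Converse API, where the system prompt is a top-level parameter. A minimal sketch, assuming a boto3 version recent enough to expose the bedrock-runtime converse operation:

import boto3

client = boto3.client("bedrock-runtime", region_name="us-east-1")

# Converse API sketch: the system prompt is passed separately from the user messages.
response = client.converse(
    modelId="anthropic.claude-v2",
    system=[{"text": "You are a helpful assistant that provides information about AWS services."}],
    messages=[{"role": "user", "content": [{"text": "Tell me about AWS Bedrock"}]}],
    inferenceConfig={"maxTokens": 512, "temperature": 0.5},
)

print(response["output"]["message"]["content"][0]["text"])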
38 changes: 0 additions & 38 deletions aws/bedrock3.py

This file was deleted.
