Skip to content

Commit

Permalink
Merge pull request #1 from JakeWalker23/claude-1-vision
Browse files Browse the repository at this point in the history
Claude 1 vision
  • Loading branch information
JakeWalker23 authored Sep 13, 2024
2 parents ea76735 + 51d5ea2 commit c8c7ee6
Show file tree
Hide file tree
Showing 3 changed files with 93 additions and 3 deletions.
27 changes: 27 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Anthropic - Claude API fundamentals

### Context

This is a small tutorial covering the following basic Anthropic API fundamentals:

- Messages
- Models
- Parameters
- Streaming
- Vision

It covers enough to get started with LLM prompting. In upcoming additions we will add more real-world scenarios where LLMs can excel.



### Activate Virtual Environment:

```powershell
.\Scripts\activate
```


### To run:
```
python {file}.py
```
45 changes: 42 additions & 3 deletions stream.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,21 @@
from anthropic import Anthropic
from anthropic import AsyncAnthropic
from dotenv import load_dotenv
import os

# Load environment variables from a local .env file so the API key
# does not need to be exported in the shell beforehand.
load_dotenv()

anthropic_key = os.getenv('ANTHROPIC_API_KEY')

# Using Python

# Pass the key explicitly: previously `anthropic_key` was read but never
# used, and the SDK silently fell back to reading ANTHROPIC_API_KEY from
# the environment itself. Behavior is unchanged when the variable is set.
client = Anthropic(api_key=anthropic_key)

stream = client.messages.create(
messages=[
{
"role": "user",
"content": "Tell me about liverpool football club",
"content": "Can you give me some key details on the flop golf shot",
}
],
model="claude-3-haiku-20240307",
Expand All @@ -22,5 +25,41 @@
)

# Consume the streaming response event-by-event:
#  - message_start:        carries input-token usage for the request
#  - content_block_delta:  incremental text chunks of the reply
#  - message_delta:        carries output-token usage once generation ends
# Fix: the original text contained a stale, duplicated
# content_block_delta handler (a leftover diff artifact) that would have
# printed every text delta twice; only one branch is kept.
for event in stream:
    if event.type == "message_start":
        input_tokens = event.message.usage.input_tokens
        print("MESSAGE START EVENT", flush=True)
        print(f"Input tokens used: {input_tokens}", flush=True)
    elif event.type == "content_block_delta":
        # Print text as it arrives, with no trailing newline between chunks.
        print(event.delta.text, flush=True, end="")
    elif event.type == "message_delta":
        output_tokens = event.usage.output_tokens
        print("\n========================", flush=True)
        print("MESSAGE DELTA EVENT", flush=True)
        print(f"Output tokens used: {output_tokens}", flush=True)



# Using AsyncAnthropic

# client = AsyncAnthropic()

# async def streaming_with_helpers():
# async with client.messages.stream(
# max_tokens=1024,
# messages=[
# {
# "role": "user",
# "content": "Write me a sonnet about orchids"
# }
# ],
# model="claude-3-opus-20240229",
# ) as stream:
# async for text in stream.text_stream:
# print(text, end="", flush=True)

# final_message = await stream.get_final_message()
# print("\n\nSTREAMING IS DONE. HERE IS THE FINAL ACCUMULATED MESSAGE: ")
# print(final_message.to_json())


# await streaming_with_helpers()
24 changes: 24 additions & 0 deletions vision.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
from anthropic import Anthropic
from dotenv import load_dotenv
import os

# Load environment variables from a local .env file.
load_dotenv()

anthropic_key = os.getenv('ANTHROPIC_API_KEY')

# Pass the key explicitly — `anthropic_key` was previously loaded but never
# used (the SDK read ANTHROPIC_API_KEY from the environment on its own).
client = Anthropic(api_key=anthropic_key)

# NOTE(review): this file is named vision.py, but the message below contains
# only a text block — no image content is sent, so the vision capability is
# never exercised. Add an {"type": "image", "source": {...}} content block
# to actually test vision — TODO confirm intended scope.
response = client.messages.create(
    model="claude-3-haiku-20240307",
    max_tokens=500,
    messages=[
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "tell me a joke"},
            ]
        }
    ]
)

# The reply's first content block is the text answer.
print(response.content[0].text)

0 comments on commit c8c7ee6

Please sign in to comment.