Skip to content

Commit

Permalink
Groq Llama 3
Browse files Browse the repository at this point in the history
  • Loading branch information
ashpreetbedi committed Apr 19, 2024
1 parent e234587 commit 79708fe
Show file tree
Hide file tree
Showing 24 changed files with 450 additions and 322 deletions.
4 changes: 2 additions & 2 deletions cookbook/knowledge/arxiv_kb.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,15 @@
from phi.knowledge.arxiv import ArxivKnowledgeBase
from phi.vectordb.pgvector import PgVector2

from resources import vector_db # type: ignore
db_url = "postgresql+psycopg://ai:ai@localhost:5532/ai"

# Create a knowledge base with the ArXiv documents
knowledge_base = ArxivKnowledgeBase(
queries=["Generative AI", "Machine Learning"],
# Table name: ai.arxiv_documents
vector_db=PgVector2(
collection="arxiv_documents",
db_url=vector_db.get_db_connection_local(),
db_url=db_url,
),
)
# Load the knowledge base
Expand Down
4 changes: 2 additions & 2 deletions cookbook/knowledge/custom_references.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,11 @@
from phi.knowledge.pdf import PDFUrlKnowledgeBase
from phi.vectordb.pgvector import PgVector2

from resources import vector_db # type: ignore
db_url = "postgresql+psycopg://ai:ai@localhost:5532/ai"

knowledge_base = PDFUrlKnowledgeBase(
urls=["https://phi-public.s3.amazonaws.com/recipes/ThaiRecipes.pdf"],
vector_db=PgVector2(collection="recipes", db_url=vector_db.get_db_connection_local()),
vector_db=PgVector2(collection="recipes", db_url=db_url),
)
# Comment out after first run
# knowledge_base.load(recreate=False)
Expand Down
4 changes: 2 additions & 2 deletions cookbook/knowledge/json.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,15 @@
from phi.knowledge.json import JSONKnowledgeBase
from phi.vectordb.pgvector import PgVector2

from resources import vector_db # type: ignore
db_url = "postgresql+psycopg://ai:ai@localhost:5532/ai"


# Initialize the JSONKnowledgeBase
knowledge_base = JSONKnowledgeBase(
path=Path("data/docs"), # Table name: ai.json_documents
vector_db=PgVector2(
collection="json_documents",
db_url=vector_db.get_db_connection_local(),
db_url=db_url,
),
num_documents=5, # Number of documents to return on search
)
Expand Down
4 changes: 2 additions & 2 deletions cookbook/knowledge/pdf.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,15 @@
from phi.knowledge.pdf import PDFKnowledgeBase, PDFReader
from phi.vectordb.pgvector import PgVector2

from resources import vector_db # type: ignore
db_url = "postgresql+psycopg://ai:ai@localhost:5532/ai"

# Create a knowledge base with the PDFs from the data/pdfs directory
knowledge_base = PDFKnowledgeBase(
path="data/pdfs",
vector_db=PgVector2(
collection="pdf_documents",
# Can inspect database via psql e.g. "psql -h localhost -p 5432 -U ai -d ai"
db_url=vector_db.get_db_connection_local(),
db_url=db_url,
),
reader=PDFReader(chunk=True),
)
Expand Down
4 changes: 2 additions & 2 deletions cookbook/knowledge/pdf_url.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,11 @@
from phi.knowledge.pdf import PDFUrlKnowledgeBase
from phi.vectordb.pgvector import PgVector2

from resources import vector_db # type: ignore
db_url = "postgresql+psycopg://ai:ai@localhost:5532/ai"

knowledge_base = PDFUrlKnowledgeBase(
urls=["https://phi-public.s3.amazonaws.com/recipes/ThaiRecipes.pdf"],
vector_db=PgVector2(collection="recipes", db_url=vector_db.get_db_connection_local()),
vector_db=PgVector2(collection="recipes", db_url=db_url),
)
knowledge_base.load(recreate=False) # Comment out after first run

Expand Down
4 changes: 2 additions & 2 deletions cookbook/knowledge/text.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,15 @@
from phi.knowledge.text import TextKnowledgeBase
from phi.vectordb.pgvector import PgVector2

from resources import vector_db # type: ignore
db_url = "postgresql+psycopg://ai:ai@localhost:5532/ai"


# Initialize the TextKnowledgeBase
knowledge_base = TextKnowledgeBase(
path=Path("data/docs"), # Table name: ai.text_documents
vector_db=PgVector2(
collection="text_documents",
db_url=vector_db.get_db_connection_local(),
db_url=db_url,
),
num_documents=5, # Number of documents to return on search
)
Expand Down
4 changes: 2 additions & 2 deletions cookbook/knowledge/website.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from phi.vectordb.pgvector import PgVector2
from phi.assistant import Assistant

from resources import vector_db # type: ignore
db_url = "postgresql+psycopg://ai:ai@localhost:5532/ai"

# Create a knowledge base with the seed URLs
knowledge_base = WebsiteKnowledgeBase(
Expand All @@ -12,7 +12,7 @@
# Table name: ai.website_documents
vector_db=PgVector2(
collection="website_documents",
db_url=vector_db.get_db_connection_local(),
db_url=db_url,
),
)
# Load the knowledge base
Expand Down
4 changes: 2 additions & 2 deletions cookbook/knowledge/wikipedia_kb.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,15 @@
from phi.knowledge.wikipedia import WikipediaKnowledgeBase
from phi.vectordb.pgvector import PgVector2

from resources import vector_db # type: ignore
db_url = "postgresql+psycopg://ai:ai@localhost:5532/ai"

# Create a knowledge base with the PDFs from the data/pdfs directory
knowledge_base = WikipediaKnowledgeBase(
topics=["Manchester United", "Real Madrid"],
# Table name: ai.wikipedia_documents
vector_db=PgVector2(
collection="wikipedia_documents",
db_url=vector_db.get_db_connection_local(),
db_url=db_url,
),
)
# Load the knowledge base
Expand Down
37 changes: 37 additions & 0 deletions cookbook/llms/bedrock/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
# AWS Bedrock

> Note: Fork and clone this repository if needed

### 1. Create and activate a virtual environment

```shell
python3 -m venv ~/.venvs/aienv
source ~/.venvs/aienv/bin/activate
```

### 2. Export your AWS Credentials

```shell
export AWS_ACCESS_KEY_ID=***
export AWS_SECRET_ACCESS_KEY=***
```

### 3. Install libraries

```shell
pip install -U boto3 phidata
```

### 4. Run Assistant

- stream on

```shell
python cookbook/llms/bedrock/basic.py
```

- stream off

```shell
python cookbook/llms/bedrock/basic_stream_off.py
```
Empty file.
9 changes: 9 additions & 0 deletions cookbook/llms/bedrock/basic.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from phi.assistant import Assistant
from phi.llm.aws.claude import Claude

# Cookbook example: an Assistant backed by Claude on AWS Bedrock.
# Requires AWS credentials in the environment (see the bedrock README).
claude_llm = Claude(model="anthropic.claude-v2")

assistant = Assistant(
    llm=claude_llm,
    description="You help people with their health and fitness goals.",
    debug_mode=True,
)
# Non-streaming response, rendered as markdown.
assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True, stream=False)
8 changes: 8 additions & 0 deletions cookbook/llms/bedrock/basic_stream_off.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
from phi.assistant import Assistant
from phi.llm.aws.claude import Claude

# Cookbook example: an Assistant backed by Claude on AWS Bedrock, streaming off.
# Fix: this file lives in the bedrock cookbook, whose README exports only AWS
# credentials, but it imported the Anthropic-API Claude (phi.llm.anthropic)
# with an Anthropic model id — that would require ANTHROPIC_API_KEY and never
# touch Bedrock. Use the Bedrock Claude wrapper and a Bedrock model id,
# consistent with the sibling basic.py.
assistant = Assistant(
    llm=Claude(model="anthropic.claude-v2"),
    description="You help people with their health and fitness goals.",
)
assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True, stream=False)
4 changes: 2 additions & 2 deletions cookbook/llms/claude/README.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Claude function calling
# Anthropic Claude

[Models overview](https://docs.anthropic.com/claude/docs/models-overview)

Expand All @@ -11,7 +11,7 @@ python3 -m venv ~/.venvs/aienv
source ~/.venvs/aienv/bin/activate
```

### 2. Export your Anthropic API Key
### 2. Set your `ANTHROPIC_API_KEY`

```shell
export ANTHROPIC_API_KEY=xxx
Expand Down
55 changes: 10 additions & 45 deletions cookbook/llms/groq/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,70 +2,35 @@

> Note: Fork and clone this repository if needed
## RAG AI App with Groq & PgVector

1. Create and activate a virtual environment
### 1. Create a virtual environment

```shell
python3 -m venv ~/.venvs/aienv
source ~/.venvs/aienv/bin/activate
```

2. Export your Groq & OpenAI API Key

> Need to use OpenAI for embeddings as Groq doesn't support embeddings yet.
```shell
export GROQ_API_KEY=***
export OPENAI_API_KEY=sk-***
```

3. Install libraries

```shell
pip install -r cookbook/llms/groq/requirements.txt
```

4. Start pgvector

> Install [docker desktop](https://docs.docker.com/desktop/install/mac-install/) first.
- Run using a helper script
### 2. Install libraries

```shell
./cookbook/run_pgvector.sh
pip install -U groq phidata
```

- OR run using the docker run command

```shell
docker run -d \
-e POSTGRES_DB=ai \
-e POSTGRES_USER=ai \
-e POSTGRES_PASSWORD=ai \
-e PGDATA=/var/lib/postgresql/data/pgdata \
-v pgvolume:/var/lib/postgresql/data \
-p 5532:5432 \
--name pgvector \
phidata/pgvector:16
```
### 3. Run Assistants

5. Run RAG App
- basic

```shell
streamlit run cookbook/llms/groq/app.py
python cookbook/llms/groq/basic.py
```

## Build AI Assistants with Groq

1. Install libraries
- web search

```shell
pip install -U groq phidata
python cookbook/llms/groq/assistant.py
```

2. Run Assistant
- structured output

```shell
python cookbook/llms/groq/assistant.py
python cookbook/llms/groq/structured_output.py
```
2 changes: 1 addition & 1 deletion cookbook/llms/groq/basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from phi.llm.groq import Groq

assistant = Assistant(
llm=Groq(model="mixtral-8x7b-32768"),
llm=Groq(model="llama3-70b-8192"),
description="You help people with their health and fitness goals.",
)
assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True)
61 changes: 61 additions & 0 deletions cookbook/llms/groq/rag/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
# RAG with Groq & PgVector

> Note: Fork and clone this repository if needed

### 1. Create a virtual environment

```shell
python3 -m venv ~/.venvs/aienv
source ~/.venvs/aienv/bin/activate
```

### 2. Export your Groq & OpenAI API Key

> Need to use OpenAI for embeddings as Groq doesn't support embeddings yet.
```shell
export GROQ_API_KEY=***
export OPENAI_API_KEY=sk-***
```

### 3. Install libraries

```shell
pip install -r cookbook/llms/groq/rag/requirements.txt
```

### 4. Run PgVector

> Install [docker desktop](https://docs.docker.com/desktop/install/mac-install/) first.
- Run using a helper script

```shell
./cookbook/run_pgvector.sh
```

- OR run using the docker run command

```shell
docker run -d \
-e POSTGRES_DB=ai \
  -e POSTGRES_USER=ai \
-e POSTGRES_PASSWORD=ai \
-e PGDATA=/var/lib/postgresql/data/pgdata \
-v pgvolume:/var/lib/postgresql/data \
-p 5532:5432 \
--name pgvector \
phidata/pgvector:16
```

### 5. Run RAG App

```shell
streamlit run cookbook/llms/groq/rag/app.py
```

- Open [localhost:8501](http://localhost:8501) to view your RAG app.

### 6. Message on [discord](https://discord.gg/4MtYHHrgA8) if you have any questions

### 7. Star ⭐️ the project if you like it.
Empty file.
Loading

0 comments on commit 79708fe

Please sign in to comment.