Skip to content

Commit

Permalink
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #96 from vintasoftware/feat/docs
Browse files Browse the repository at this point in the history
Basic docs with tutorial
pamella authored Jun 19, 2024
2 parents d4b05f3 + 9d9d544 commit 84aabec
Showing 14 changed files with 1,033 additions and 121 deletions.
1 change: 1 addition & 0 deletions .editorconfig
Original file line number Diff line number Diff line change
@@ -15,6 +15,7 @@ indent_size = 2

[*.md]
trim_trailing_whitespace = false
indent_size = 4

[Makefile]
indent_style = tab
8 changes: 7 additions & 1 deletion django_ai_assistant/api/views.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from typing import List

from django.http import Http404
from django.shortcuts import get_object_or_404

from langchain_core.messages import message_to_dict
@@ -71,7 +72,12 @@ def create_thread(request, payload: ThreadSchemaIn):

@api.get("threads/{thread_id}/", response=ThreadSchema, url_name="thread_detail_update_delete")
def get_thread(request, thread_id: str):
thread = use_cases.get_single_thread(thread_id=thread_id, user=request.user, request=request)
try:
thread = use_cases.get_single_thread(
thread_id=thread_id, user=request.user, request=request
)
except Thread.DoesNotExist:
raise Http404("No %s matches the given query." % Thread._meta.object_name) from None
return thread


11 changes: 11 additions & 0 deletions django_ai_assistant/helpers/assistants.py
Original file line number Diff line number Diff line change
@@ -192,7 +192,9 @@ def get_contextualize_prompt(self) -> ChatPromptTemplate:
return ChatPromptTemplate.from_messages(
[
("system", contextualize_q_system_prompt),
# TODO: make history key configurable?
MessagesPlaceholder("history"),
# TODO: make input key configurable?
("human", "{input}"),
]
)
@@ -284,6 +286,15 @@ def invoke(self, *args, thread_id: int | None, **kwargs):
chain = self.as_chain(thread_id)
return chain.invoke(*args, **kwargs)

def run(self, message, thread_id: int | None, **kwargs):
return self.invoke(
{
"input": message,
},
thread_id=thread_id,
**kwargs,
)["output"]

def run_as_tool(self, message: str, **kwargs):
chain = self.as_chain(thread_id=None)
output = chain.invoke({"input": message}, **kwargs)
8 changes: 7 additions & 1 deletion django_ai_assistant/helpers/use_cases.py
Original file line number Diff line number Diff line change
@@ -17,6 +17,7 @@
can_delete_message,
can_delete_thread,
can_run_assistant,
can_view_thread,
)


@@ -98,7 +99,12 @@ def get_single_thread(
user: Any,
request: HttpRequest | None = None,
):
return Thread.objects.filter(created_by=user).get(id=thread_id)
thread = Thread.objects.get(id=thread_id)

if not can_view_thread(thread=thread, user=user, request=request):
raise AIUserNotAllowedError("User is not allowed to view this thread")

return thread


def get_threads(
14 changes: 14 additions & 0 deletions django_ai_assistant/permissions.py
Original file line number Diff line number Diff line change
@@ -25,6 +25,20 @@ def can_create_thread(
)


def can_view_thread(
    thread: Thread,
    user: Any,
    request: HttpRequest | None = None,
    **kwargs,
) -> bool:
    """Return whether ``user`` is allowed to view ``thread``.

    The actual decision is delegated to the project-configurable
    ``CAN_VIEW_THREAD_FN`` setting, invoked through ``app_settings.call_fn``.
    The configured callable receives the default keyword arguments derived
    from ``user`` and ``request`` (via ``_get_default_kwargs``), plus the
    ``thread`` being checked and any extra ``kwargs`` forwarded verbatim.
    """
    return app_settings.call_fn(
        "CAN_VIEW_THREAD_FN",
        **_get_default_kwargs(user, request),
        thread=thread,
        **kwargs,
    )


def can_update_thread(
thread: Thread,
user: Any,
32 changes: 32 additions & 0 deletions docs/get-started.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Get started

## Prerequisites

- Python: <a href="https://pypi.org/project/django-ai-assistant" target="_blank"><img src="https://img.shields.io/pypi/pyversions/django-ai-assistant.svg?color=%2334D058" alt="Supported Python versions"></a>
- Django: <a href="https://pypi.org/project/django-ai-assistant" target="_blank"><img src="https://img.shields.io/pypi/frameworkversions/django/django-ai-assistant.svg" alt="Supported Django versions"></a>

## How to install

Install Django AI Assistant package:

```bash
pip install django-ai-assistant
```

Add Django AI Assistant to your Django project's `INSTALLED_APPS`:

```python title="myproject/settings.py"
INSTALLED_APPS = [
...
'django_ai_assistant',
...
]
```

Run the migrations:

```bash
python manage.py migrate
```

Learn how to use the package in the [Tutorial](tutorial.md) section.
24 changes: 24 additions & 0 deletions docs/index.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Django AI Assistant

Implement powerful AI Assistants using Django.
Combine the power of Large Language Models with Django's productivity.

Regardless of the feasibility of AGI, AI assistants are (already!) a new paradigm for computation.
AI agents and assistants allow devs to easily build applications with smart decision logic
that would otherwise be too expensive to build and maintain.

The latest LLMs from major AI providers have a "killer feature" called Tool Calling,
which enables AI models to call provided methods from Django's side, and essentially
do anything a Django view can, such as accessing DBs, checking permissions, sending emails,
downloading and uploading media files, etc.

While users commonly interact with LLMs via conversations, AI Assistants can do a lot with any kind of string input, including JSON.
Your application's end users won't even realize that an LLM is doing the heavy lifting behind the scenes!
Some ideas for innovative AI assistants:

- A movie recommender chatbot that helps users manage their movie backlogs
- An autofill button for certain forms of your application
- Personalized email reminders that consider users' written preferences and the application's recent notifications
- A real-time audio guide for tourists that recommends attractions given the user's current location

We have an open-source example with some of those applications, but it's best to start with the [Get Started](get-started.md) guide.
396 changes: 396 additions & 0 deletions docs/tutorial.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,396 @@
# Tutorial

In this tutorial, you will learn how to use Django AI Assistant to supercharge your Django project with LLM capabilities.

## Prerequisites

Make sure you properly configured Django AI Assistant as described in the [Get Started](get-started.md) guide.

## Setting up API keys

The tutorial below uses OpenAI's gpt-4o model, so make sure you have `OPENAI_API_KEY` set as an environment variable for your Django project.
You can also use other models, keep reading to learn more. Just make sure their keys are properly set.

!!! note
An easy way to set environment variables is to use a `.env` file in your project's root directory and use `python-dotenv` to load them.
Our [example project](https://github.com/vintasoftware/django-ai-assistant/tree/main/example) uses this approach.

## What AI Assistants can do

AI Assistants are LLMs that can answer user queries as ChatGPT does, i.e. inputting and outputting strings.
But when integrated with Django, they can also do anything a Django view can, such as accessing the database,
checking permissions, sending emails, downloading and uploading media files, etc.
This is possible by defining "tools" the AI can use. These tools are methods in an AI Assistant class on the Django side.

## Defining an AI Assistant

### Registering

To create an AI Assistant, you need to:

1. Create an `ai_assistants.py` file;
2. Define a class that inherits from `AIAssistant` with the decorator `@register_assistant` over it;
3. Provide an `id`, a `name`, some `instructions` for the LLM (a system prompt), and a `model` name:

```python title="myapp/ai_assistants.py"
from django_ai_assistant import AIAssistant, register_assistant

@register_assistant
class WeatherAIAssistant(AIAssistant):
id = "weather_assistant"
name = "Weather Assistant"
instructions = "You are a weather bot."
model = "gpt-4o"
```

### Defining tools

Useful tools give abilities the LLM doesn't have out-of-the-box,
such as getting the current date and finding the current weather by calling an API.

Use the `@method_tool` decorator to define a tool method in the AI Assistant:

```{.python title="myapp/ai_assistants.py" hl_lines="15-22"}
from django.utils import timezone
from django_ai_assistant import AIAssistant, method_tool, register_assistant
import json
@register_assistant
class WeatherAIAssistant(AIAssistant):
id = "weather_assistant"
name = "Weather Assistant"
instructions = "You are a weather bot."
model = "gpt-4o"
def get_instructions(self):
return f"{self.instructions} Today is {timezone.now().isoformat()}."
@method_tool
def get_weather(self, location: str) -> str:
"""Fetch the current weather data for a location"""
return json.dumps({
"location": location,
"temperature": "25°C",
"weather": "sunny"
}) # imagine some weather API here, this is just a placeholder
```

The `get_weather` method is a tool that the AI Assistant can use to get the current weather for a location, when the user asks for it.
The tool method must be fully type-hinted (all parameters and return value), and it must include a descriptive docstring.
This is necessary for the LLM model to understand the tool's purpose.

A conversation with this Weather Assistant looks like this:

```txt
User: What's the weather in New York City?
AI: The weather in NYC is sunny with a temperature of 25°C.
```

!!! note
State of the art models such as gpt-4o can process JSON well.
    You can return a `json.dumps(api_output)` from a tool method and the model will be able to process it before responding to the user.

### Using Django logic in tools

You have access to the current request user in tools:

```{.python title="myapp/ai_assistants.py" hl_lines=13}
from django_ai_assistant import AIAssistant, method_tool, register_assistant
@register_assistant
class PersonalAIAssistant(AIAssistant):
id = "personal_assistant"
name = "Personal Assistant"
instructions = "You are a personal assistant."
model = "gpt-4o"
@method_tool
def get_current_user_username(self) -> str:
"""Get the username of the current user"""
return self._user.username
```

You can also add any Django logic to tools, such as querying the database:

```{.python title="myapp/ai_assistants.py" hl_lines=14-16}
from django_ai_assistant import AIAssistant, method_tool, register_assistant
import json
@register_assistant
class IssueManagementAIAssistant(AIAssistant):
id = "issue_mgmt_assistant"
name = "Issue Management Assistant"
instructions = "You are an issue management bot."
model = "gpt-4o"
@method_tool
def get_current_user_assigned_issues(self) -> str:
"""Get the issues assigned to the current user"""
return json.dumps({
"issues": list(Issue.objects.filter(assignee=self._user).values())
})
```

!!! warning
Make sure you only return to the LLM what the user can see, considering permissions and privacy.
Code the tools as if they were Django views.

### Using pre-implemented tools

Django AI Assistant works with [any LangChain-compatible tool](https://python.langchain.com/v0.2/docs/integrations/tools/).
Just override the `get_tools` method in your AI Assistant class to include the tools you want to use.

For example, you can use the `TavilySearch` tool to provide your AI Assistant with the ability to search the web
for information about upcoming movies.

First install dependencies:

```bash
pip install -U langchain-community tavily-python
```

Then, set the `TAVILY_API_KEY` environment variable. You'll need to sign up at [Tavily](https://tavily.com/).

Finally, add the tool to your AI Assistant class by overriding the `get_tools` method:

```{.python title="myapp/ai_assistants.py" hl_lines="2 20"}
from django_ai_assistant import AIAssistant, register_assistant
from langchain_community.tools.tavily_search import TavilySearchResults
@register_assistant
class MovieSearchAIAssistant(AIAssistant):
id = "movie_search_assistant" # noqa: A003
instructions = (
"You're a helpful movie search assistant. "
"Help the user find more information about movies. "
"Use the provided tools to search the web for upcoming movies. "
)
name = "Movie Search Assistant"
model = "gpt-4o"
def get_instructions(self):
return f"{self.instructions} Today is {timezone.now().isoformat()}."
def get_tools(self):
return [
TavilySearchResults(),
*super().get_tools(),
]
```

!!! note
As of now, Django AI Assistant is powered by [LangChain](https://python.langchain.com/v0.2/docs/introduction/),
but previous knowledge on LangChain is NOT necessary to use this library, at least for the main use cases.

## Using an AI Assistant

### Manually calling an AI Assistant

You can manually call an AI Assistant from anywhere in your Django application:

```python
from myapp.ai_assistants import WeatherAIAssistant

assistant = WeatherAIAssistant()
output = assistant.run("What's the weather in New York City?")
assert output == "The weather in NYC is sunny with a temperature of 25°C."
```

The constructor of `AIAssistant` receives `user`, `request`, `view` as optional parameters,
which can be used in the tools with `self._user`, `self._request`, `self._view`.
Also, any extra parameters passed in constructor are stored at `self._init_kwargs`.

### Threads of Messages

The django-ai-assistant app provides two models `Thread` and `Message` to store and retrieve conversations with AI Assistants.
LLMs are stateless by design, meaning they don't hold any context between calls. All they know is the current input.
But by using the `AIAssistant` class, the conversation state is stored in the database as multiple `Message`s of a `Thread`,
and automatically retrieved then passed to the LLM when calling the AI Assistant.

To create a `Thread`, you can use a helper from the `django_ai_assistant.use_cases` module. For example:

```{.python hl_lines="4 8"}
from django_ai_assistant.use_cases import create_thread, get_thread_messages
from myapp.ai_assistants import WeatherAIAssistant
thread = create_thread(name="Weather Chat", user=some_user)
assistant = WeatherAIAssistant()
assistant.run("What's the weather in New York City?", thread_id=thread.id)
messages = get_thread_messages(thread) # returns both user and AI messages
```

More CRUD helpers are available at `django_ai_assistant.use_cases` module. Check the API Reference for more information.
<!--- TODO: create API reference -->

### Using built-in API views

You can use the built-in API views to interact with AI Assistants via HTTP requests from any frontend,
such as a React application or a mobile app. Add the following to your Django project's `urls.py`:

```python title="myproject/urls.py"
from django.urls import include, path

urlpatterns = [
path("ai-assistant/", include("django_ai_assistant.urls")),
...
]
```

The built-in API supports retrieval of Assistants info, as well as CRUD for Threads and Messages.
It has an OpenAPI schema that you can explore at `ai-assistant/docs/`.

### Configuring permissions

The API uses the helpers from the `django_ai_assistant.use_cases` module, which have permission checks
to ensure the user can use a certain AI Assistant or do CRUD on Threads and Messages.

By default, any authenticated user can use any AI Assistant, and create a thread.
Users can manage both their own threads and the messages on them. Therefore, the default permissions are:

```python title="myproject/settings.py"
AI_ASSISTANT_CAN_CREATE_THREAD_FN = "django_ai_assistant.permissions.allow_all"
AI_ASSISTANT_CAN_VIEW_THREAD_FN = "django_ai_assistant.permissions.owns_thread"
AI_ASSISTANT_CAN_UPDATE_THREAD_FN = "django_ai_assistant.permissions.owns_thread"
AI_ASSISTANT_CAN_DELETE_THREAD_FN = "django_ai_assistant.permissions.owns_thread"
AI_ASSISTANT_CAN_CREATE_MESSAGE_FN = "django_ai_assistant.permissions.owns_thread"
AI_ASSISTANT_CAN_UPDATE_MESSAGE_FN = "django_ai_assistant.permissions.owns_thread"
AI_ASSISTANT_CAN_DELETE_MESSAGE_FN = "django_ai_assistant.permissions.owns_thread"
AI_ASSISTANT_CAN_RUN_ASSISTANT = "django_ai_assistant.permissions.allow_all"
```

You can override these settings in your Django project's `settings.py` to customize the permissions.

Thread permission signatures look like this:

```python
from django_ai_assistant.models import Thread
from django.http import HttpRequest

def check_custom_thread_permission(
thread: Thread,
user: Any,
request: HttpRequest | None = None) -> bool:
return ...
```

While Message permission signatures look like this:

```python
from django_ai_assistant.models import Thread, Message
from django.http import HttpRequest

def check_custom_message_permission(
message: Message,
thread: Thread,
user: Any,
request: HttpRequest | None = None) -> bool:
return ...
```

## Advanced usage

### Using other AI models

By default the supported models are OpenAI ones,
but you can use [any chat model from Langchain that supports Tool Calling](https://python.langchain.com/v0.2/docs/integrations/chat/#advanced-features) by overriding `get_llm`:

```python title="myapp/ai_assistants.py"
from django_ai_assistant import AIAssistant, register_assistant
from langchain_anthropic import ChatAnthropic

@register_assistant
class WeatherAIAssistant(AIAssistant):
id = "weather_assistant"
name = "Weather Assistant"
instructions = "You are a weather bot."
model = "claude-3-opus-20240229"

def get_llm(self):
model = self.get_model()
temperature = self.get_temperature()
model_kwargs = self.get_model_kwargs()
return ChatAnthropic(
model_name=model,
temperature=temperature,
model_kwargs=model_kwargs,
timeout=None,
max_retries=2,
)
```

### Composing AI Assistants

One AI Assistant can call another AI Assistant as a tool. This is useful for composing complex AI Assistants.
Use the `as_tool` method for that:

```{.python title="myapp/ai_assistants.py" hl_lines="15 17"}
@register_assistant
class SimpleAssistant(AIAssistant):
...
@register_assistant
class AnotherSimpleAssistant(AIAssistant):
...
@register_assistant
class ComplexAssistant(AIAssistant):
...
def get_tools(self) -> Sequence[BaseTool]:
return [
SimpleAssistant().as_tool(
description="Tool to <...add a meaningful description here...>"),
AnotherSimpleAssistant().as_tool(
description="Tool to <...add a meaningful description here...>"),
*super().get_tools(),
]
```

The `movies/ai_assistants.py` file in the [example project](https://github.com/vintasoftware/django-ai-assistant/tree/main/example)
shows an example of a composed AI Assistant that's able to recommend movies and manage the user's movie backlog.

### Retrieval Augmented Generation (RAG)

You can use RAG in your AI Assistants. RAG means using a retriever to fetch chunks of textual data from a pre-existing DB to give
context to the LLM. This context goes into the `{context}` placeholder in the `instructions` string, namely the system prompt.
This means the LLM will have access to a context your retriever logic provides when generating the response,
thereby improving the quality of the response by avoiding generic or off-topic answers.

For this to work, you must do the following in your AI Assistant:

1. Add a `{context}` placeholder in the `instructions` string;
2. Add `has_rag = True` as a class attribute;
3. Override the `get_retriever` method to return a [Langchain Retriever](https://python.langchain.com/v0.2/docs/how_to/#retrievers).

For example:

```{.python title="myapp/ai_assistants.py" hl_lines="12 16 18"}
from django_ai_assistant import AIAssistant, register_assistant
@register_assistant
class DocsAssistant(AIAssistant):
id = "docs_assistant" # noqa: A003
name = "Docs Assistant"
instructions = (
"You are an assistant for answering questions related to the provided context. "
"Use the following pieces of retrieved context to answer the user's question. "
"\n\n"
"---START OF CONTEXT---\n"
"{context}"
"---END OF CONTEXT---\n"
)
model = "gpt-4o"
has_rag = True
def get_retriever(self) -> BaseRetriever:
return ... # use a Langchain Retriever here
```

The `rag/ai_assistants.py` file in the [example project](https://github.com/vintasoftware/django-ai-assistant/tree/main/example)
shows an example of a RAG-powered AI Assistant that's able to answer questions about Django using the Django Documentation as context.

### Further configuration of AI Assistants

You can further configure the `AIAssistant` subclasses by overriding its public methods. Check the API Reference for more information.
<!--- TODO: create API reference -->
1 change: 0 additions & 1 deletion example/example/settings.py
Original file line number Diff line number Diff line change
@@ -157,7 +157,6 @@

# django-ai-assistant

OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
AI_ASSISTANT_CAN_CREATE_THREAD_FN = "django_ai_assistant.permissions.allow_all"
AI_ASSISTANT_CAN_VIEW_THREAD_FN = "django_ai_assistant.permissions.owns_thread"
AI_ASSISTANT_CAN_UPDATE_THREAD_FN = "django_ai_assistant.permissions.owns_thread"
14 changes: 7 additions & 7 deletions example/weather/ai_assistants.py
Original file line number Diff line number Diff line change
@@ -16,6 +16,13 @@ class WeatherAIAssistant(AIAssistant):
name = "Weather Assistant"
model = "gpt-4o"

def get_instructions(self):
# Warning: this will use the server's timezone
# See: https://docs.djangoproject.com/en/5.0/topics/i18n/timezones/#default-time-zone-and-current-time-zone
# In a real application, you should use the user's timezone
current_date_str = timezone.now().date().isoformat()
return f"You are a weather bot. Use the provided functions to answer questions. Today is: {current_date_str}."

@method_tool
def fetch_current_weather(self, location: str) -> dict:
"""Fetch the current weather data for a location"""
@@ -57,10 +64,3 @@ def who_am_i(self) -> str:
return self._user.username
else:
return "Anonymous"

def get_instructions(self):
# Warning: this will use the server's timezone
# See: https://docs.djangoproject.com/en/5.0/topics/i18n/timezones/#default-time-zone-and-current-time-zone
# In a real application, you should use the user's timezone
current_date_str = timezone.now().date().isoformat()
return f"You are a weather bot. Use the provided functions to answer questions. Today is: {current_date_str}."
48 changes: 48 additions & 0 deletions mkdocs.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
site_name: django-ai-assistant
site_description: Implement powerful AI assistants using Django

repo_name: vintasoftware/django-ai-assistant
repo_url: https://github.com/vintasoftware/django-ai-assistant/
edit_uri: blob/main/docs/

theme:
name: material
palette:
# Palette toggle for automatic mode
- media: "(prefers-color-scheme)"
toggle:
icon: material/brightness-auto
name: Switch to light mode

# Palette toggle for light mode
- media: "(prefers-color-scheme: light)"
scheme: default
toggle:
icon: material/brightness-7
name: Switch to dark mode

# Palette toggle for dark mode
- media: "(prefers-color-scheme: dark)"
scheme: slate
toggle:
icon: material/brightness-4
name: Switch to system preference

copyright: <a href="https://www.vintasoftware.com">Vinta Software</a>

markdown_extensions:
- admonition
- pymdownx.highlight:
use_pygments: true
- pymdownx.inlinehilite
- pymdownx.superfences
- pymdownx.snippets:
check_paths: true
- toc:
permalink: true
- attr_list

nav:
- Home: index.md
- Get Started: get-started.md
- Tutorial: tutorial.md
514 changes: 435 additions & 79 deletions poetry.lock

Large diffs are not rendered by default.

80 changes: 50 additions & 30 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -26,6 +26,11 @@ ipython = "^8.24.0"
pytest-asyncio = "^0.23.7"
pytest-recording = "^0.13.1"
coveralls = "^4.0.1"
mkdocs = "^1.6.0"
mkdocs-material = "^9.5.27"
pymdown-extensions = "^10.8.1"
markdown = "^3.6"
pygments = "^2.18.0"
model-bakery = "^1.18.1"

[tool.poetry.group.example.dependencies]
@@ -69,7 +74,7 @@ select = [
# flake8-no-pep420
"INP",
# Ruff-specific rules
"RUF"
"RUF",
]
exclude = [
".bzr",
@@ -97,33 +102,33 @@ exclude = [
"*/migrations/*",
]
ignore = [
# Disable eradicate (commented code removal)
"ERA001",
# Disable Conflicting lint rules,
# see https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
"W191",
"E501",
"E111",
"E117",
"D206",
"D300",
"Q000",
"Q001",
"Q002",
"Q003",
"COM812",
"COM819",
"ISC001",
"ISC002",
# Allow `except Exception`:
"BLE001",
# Disable unused `noqa` directive
"RUF100",
# Disable pyupgrade UP rules that conflict with django-ninja
"UP006",
"UP035",
"UP037",
"UP040",
# Disable eradicate (commented code removal)
"ERA001",
# Disable Conflicting lint rules,
# see https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
"W191",
"E501",
"E111",
"E117",
"D206",
"D300",
"Q000",
"Q001",
"Q002",
"Q003",
"COM812",
"COM819",
"ISC001",
"ISC002",
# Allow `except Exception`:
"BLE001",
# Disable unused `noqa` directive
"RUF100",
# Disable pyupgrade UP rules that conflict with django-ninja
"UP006",
"UP035",
"UP037",
"UP040",
]
line-length = 100
indent-width = 4
@@ -135,7 +140,14 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
ignore-overlong-task-comments = true

[tool.ruff.lint.isort]
section-order = ["future", "standard-library", "django", "third-party", "first-party", "local-folder"]
section-order = [
"future",
"standard-library",
"django",
"third-party",
"first-party",
"local-folder",
]
lines-after-imports = 2

[tool.ruff.lint.isort.sections]
@@ -155,7 +167,15 @@ lines-after-imports = 2
[tool.coverage.run]
branch = true
source = ["backend"]
omit = ["**/venv/*", "**/env/*", "**/virtualenvs/*", "**/node_modules/*", "**/migrations/*", "**/settings/*", "**/tests/*"]
omit = [
"**/venv/*",
"**/env/*",
"**/virtualenvs/*",
"**/node_modules/*",
"**/migrations/*",
"**/settings/*",
"**/tests/*",
]

[tool.pyright]
pythonVersion = "3.12"
3 changes: 1 addition & 2 deletions tests/settings.py
Original file line number Diff line number Diff line change
@@ -106,8 +106,7 @@

# django-ai-assistant

# Comment the OPENAI_API_KEY below and set one on .env.tests file at root when updating the VCRs:
OPENAI_API_KEY = "sk-fake-test-key-123"
# NOTE: set a OPENAI_API_KEY on .env.tests file at root when updating the VCRs.
AI_ASSISTANT_CAN_CREATE_THREAD_FN = "django_ai_assistant.permissions.allow_all"
AI_ASSISTANT_CAN_VIEW_THREAD_FN = "django_ai_assistant.permissions.owns_thread"
AI_ASSISTANT_CAN_UPDATE_THREAD_FN = "django_ai_assistant.permissions.owns_thread"

0 comments on commit 84aabec

Please sign in to comment.