diff --git a/.env.example b/.env.example
index 290025d..60a9f8d 100644
--- a/.env.example
+++ b/.env.example
@@ -7,3 +7,11 @@ EMAIL_PW="email-password"
 EMAIL_RECEIVER="email-to-receive-surf-report"
 COMMAND="localhost:8000"
 SUBJECT="Surf Report"
+
+GPT_PROMPT="With these conditions, give me a short surf report estimation for
+this specific spot. Talk about how this spot may be good or bad with the given height,
+period and swell direction. Recommend what board to ride, mention a nearby spot
+that may be better, and rate the surf with these conditions on a scale of 1-10.
+Please keep it short and don't repeat unnecessary data (like restating the surf height, etc.)"
+API_KEY=
+GPT_MODEL="gpt-3.5-turbo"
diff --git a/README.md b/README.md
index c643c8d..3fa3ca6 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,7 @@ Inspired by [wttr.in](https://github.com/chubin/wttr.in)
 
 [Documentation](https://ryansurf.github.io/cli-surf/) | [Discord](https://discord.gg/He2UpxRuJP)
 
-## Usage
+## 💻 Usage
 
 Using your browser or command-line interface you can access the service.
 
@@ -65,7 +65,7 @@ Wave Period: 9.8
 
 * `curl localhost:8000/help`
 
-## Setup
+## 🛠️ Setup
 
 ### How to Start Locally with `Poetry`
 To use cli-surf, clone the project locally and install the necessary dependencies via `poetry`.
@@ -137,7 +137,9 @@ Note that when starting with Docker, the `.env` file will be automatically creat
 | `EMAIL_RECEIVER` | The email that will receive the report (your personal email) |
 | `COMMAND` | The command that will be ran and shown in the email. Default = `localhost:8000` |
 | `SUBJECT` | The email's subject. Default = Surf Report |
-| `GPT_PROMPT` | Given the surf data (height, swell direction, etc.), you can tell the GPT what kind of report you would like. For example: `With this data, recommend what size board I should ride.` |
+| `GPT_PROMPT` | Given the surf data (height, swell direction, etc.), you can tell the GPT what kind of report you would like. For example: `With this data, recommend what size board I should ride and nearby surf spots that may be better with the given conditions.` |
+| `API_KEY` | Your OpenAI API key. Optional; the default GPT does not need an API key (but has slightly worse performance). Create one [here](https://platform.openai.com/api-keys) |
+| `GPT_MODEL` | The OpenAI GPT model. Default = `gpt-3.5-turbo` (if possible, using `gpt-4o` is recommended). Explore other models [here](https://platform.openai.com/docs/overview) |
 
 ### Email Server
 
@@ -171,7 +173,7 @@ You may need to change `IP_ADDRESS` in `.env` to match the ip of the host runnin
 
 Now, running `python3 server.py` will launch the website!
 
-## Contributing
+## 📈 Contributing
 
 Thank you for considering contributing to cli-surf!
 
diff --git a/src/api.py b/src/api.py
index 2d59dd9..3a1b2e5 100644
--- a/src/api.py
+++ b/src/api.py
@@ -190,6 +190,7 @@ def gather_data(lat, long, ocean):
         "Period": ocean_data[2],
         "UV Index": uv_index,
         "Forecast": json_forecast,
+        "Unit": ocean["unit"],
     }
     return data_dict
 
diff --git a/src/cli.py b/src/cli.py
index 8428594..530db22 100644
--- a/src/cli.py
+++ b/src/cli.py
@@ -9,6 +9,10 @@
 # Load environment variables from .env file
 env = settings.GPTSettings()
 gpt_prompt = env.GPT_PROMPT
+api_key = env.API_KEY
+model = env.GPT_MODEL
+
+gpt_info = [api_key, model]
 
 
 def run(lat=0, long=0):
@@ -40,7 +44,7 @@ def run(lat=0, long=0):
 
     # Non-JSON output
     if arguments["json_output"] == 0:
-        helper.print_outputs(city, data_dict, arguments, gpt_prompt)
+        helper.print_outputs(city, data_dict, arguments, gpt_prompt, gpt_info)
         return data_dict
     # JSON Output
     else:
diff --git a/src/gpt.py b/src/gpt.py
index bc6caf6..bf235cc 100644
--- a/src/gpt.py
+++ b/src/gpt.py
@@ -3,6 +3,7 @@
 """
 
 from g4f.client import Client
+from openai import OpenAI
 
 
 def simple_gpt(surf_summary, gpt_prompt):
@@ -17,3 +18,23 @@ def simple_gpt(surf_summary, gpt_prompt):
         messages=[{"role": "user", "content": surf_summary + gpt_prompt}],
     )
     return response.choices[0].message.content
+
+
+def openai_gpt(surf_summary, gpt_prompt, api_key, model):
+    """
+    Uses OpenAI's GPT; requires an API key
+    """
+    client = OpenAI(
+        # Key comes from the API_KEY variable in .env
+        api_key=api_key,
+    )
+    chat_completion = client.chat.completions.create(
+        messages=[
+            {
+                "role": "user",
+                "content": surf_summary + gpt_prompt,
+            }
+        ],
+        model=model,
+    )
+    return chat_completion.choices[0].message.content
diff --git a/src/helper.py b/src/helper.py
index 65fe3b6..7d373bd 100644
--- a/src/helper.py
+++ b/src/helper.py
@@ -32,7 +32,7 @@ def arguments_dictionary(lat, long, city, args):
         "decimal": extract_decimal(args),
         "forecast_days": get_forecast_days(args),
         "color": get_color(args),
-        "gpt": 0,
+        "gpt": 1,
     }
     return arguments
 
@@ -180,7 +180,7 @@
     return data_dict
 
 
-def print_outputs(city, data_dict, arguments, gpt_prompt):
+def print_outputs(city, data_dict, arguments, gpt_prompt, gpt_info):
     """
     Basically the main printing function, calls all the other
     printing functions
@@ -208,7 +208,7 @@
     )
     print_forecast(arguments, forecast)
     if arguments["gpt"] == 1:
-        gpt_response = print_gpt(data_dict, gpt_prompt)
+        gpt_response = print_gpt(data_dict, gpt_prompt, gpt_info)
         print(gpt_response)
 
 
@@ -250,17 +250,25 @@ def surf_summary(surf_data):
     height = surf_data["Height"]
     direction = surf_data["Direction"]
     period = surf_data["Period"]
+    unit = surf_data["Unit"]
     report = f"""
-    Today at {location}, the surf height is {height}, the direction of the
-    swell is {direction} degrees and the swell period is {period} seconds.
+    Today at {location}, the surf height is {height} {unit}, the direction
+    of the swell is {direction} degrees and the swell period is {period}
+    seconds.
""" return report -def print_gpt(surf_data, gpt_prompt): +def print_gpt(surf_data, gpt_prompt, gpt_info): """ Returns the GPT response """ summary = surf_summary(surf_data) - gpt_response = gpt.simple_gpt(summary, gpt_prompt) + api_key = gpt_info[0] + gpt_model = gpt_info[1] + minumum_key_length = 5 + if api_key is None or not api_key or len(api_key) < minumum_key_length: + gpt_response = gpt.simple_gpt(summary, gpt_prompt) + else: + gpt_response = gpt.openai_gpt(summary, gpt_prompt, api_key, gpt_model) return gpt_response diff --git a/src/settings.py b/src/settings.py index 41618db..d9087f5 100644 --- a/src/settings.py +++ b/src/settings.py @@ -58,3 +58,6 @@ class GPTSettings(CommonSettings): Recommend another nearby spot to surf at if you think it may be better. """ ) + + API_KEY: str + GPT_MODEL: str = Field(default="gpt-3.5-turbo")