Skip to content

Commit

Permalink
F
Browse files Browse the repository at this point in the history
  • Loading branch information
rmusser01 committed Dec 28, 2024
1 parent 9d78c72 commit 2f48ffc
Show file tree
Hide file tree
Showing 17 changed files with 630 additions and 443 deletions.
19 changes: 19 additions & 0 deletions App_Function_Libraries/Chat/Chat_Pipeline.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Chat_Pipeline.py
#
# Description: Functions for the prompt-modification pipeline, which rewrites or replaces
# prompts in complex ways while preserving the original intent and message.
#
# Imports
import os
#
# 3rd-party Libraries
#
# Local Imports
#
#######################################################################################################################
#
# Functions:

#
# End of Chat_Pipeline.py
#######################################################################################################################
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@
#
# Function Definitions


################################## Meta Prompt Engineering Functions ##############################################

# Function to generate prompt using metaprompt
def generate_prompt(api_endpoint, api_key, task, variables_str, temperature):
# Convert variables into a list from comma-separated input
Expand Down
24 changes: 24 additions & 0 deletions App_Function_Libraries/Researcher/Research_Manager.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Research_Manager.py
#
# Description: This file contains functions related to the research manager functionality
# Handles all functionality related to 'research', including the management of 'research' data, the retrieval of
# 'research' data, and final creation of 'research' reports.
#
# Imports
import os
#
# 3rd-party Libraries
#
# Local Imports
#
########################################################################################################################
#
# Functions:





#
# End of Research_Manager.py
#######################################################################################################################
Empty file.
5 changes: 5 additions & 0 deletions App_Function_Libraries/Utils/Utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -388,6 +388,8 @@ def load_and_log_configs():
kagi_search_api_key = config.get('Search-Engines', 'search_engine_api_key_kagi', fallback='')
# Searx Search Settings
search_engine_searx_api = config.get('Search-Engines', 'search_engine_searx_api', fallback='')
# Tavily Search Settings
tavily_search_api_key = config.get('Search-Engines', 'search_engine_api_key_tavily', fallback='')
# Yandex Search Settings
yandex_search_api_key = config.get('Search-Engines', 'search_engine_api_key_yandex', fallback='')
yandex_search_engine_id = config.get('Search-Engines', 'search_engine_id_yandex', fallback='')
Expand Down Expand Up @@ -532,6 +534,7 @@ def load_and_log_configs():
'google_enable_site_search' : google_enable_site_search,
'kagi_search_api_key': kagi_search_api_key,
'searx_search_api_url': search_engine_searx_api,
'tavily_search_api_key': tavily_search_api_key,
'yandex_search_api_key': yandex_search_api_key,
'yandex_search_engine_id': yandex_search_engine_id
}
Expand All @@ -544,6 +547,8 @@ def load_and_log_configs():

# LLM API endpoints selectable throughout the app.
global_api_endpoints = ["anthropic", "cohere", "google", "groq", "openai", "huggingface", "openrouter", "deepseek", "mistral", "custom_openai_api", "llama", "ooba", "kobold", "tabby", "vllm", "ollama", "aphrodite"]

# Web search engines selectable for search; presumably mirrors the engines
# dispatched by perform_websearch in WebSearch_APIs.py — verify when adding one.
global_search_engines = ["baidu", "bing", "brave", "duckduckgo", "google", "kagi", "searx", "tavily", "yandex"]

# Voice names accepted by OpenAI's text-to-speech API.
openai_tts_voices = ["alloy", "echo", "fable", "onyx", "nova", "shimmer"]

# Setup Default API Endpoint
Expand Down
56 changes: 49 additions & 7 deletions App_Function_Libraries/Web_Scraping/WebSearch_APIs.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
from requests import RequestException
from requests.adapters import HTTPAdapter
from urllib3 import Retry

#
# Local Imports
from App_Function_Libraries.Utils.Utils import loaded_config_data
Expand All @@ -26,9 +25,23 @@
# Functions:
# 1. perform_websearch
# 2. search_web_baidu
# 3. search_web_bing
# 4. search_web_brave
# 5. search_web_duckduckgo
# 6. search_web_google
# 7. search_web_kagi
# 8. search_web_serper
# 9. search_web_tavily
# 10. search_web_searx
# 11. search_web_yandex
# 12. parse_html_search_results_generic
#
#######################################################################################################################
#
# Functions:

# FIXME - parsing for results from each search engine
# FIXME - Create results dictionary format/specification

def perform_websearch(search_engine, search_query, content_country, search_lang, output_lang, result_count, date_range=None,
safesearch=None, site_blacklist=None, exactTerms=None, excludeTerms=None, filter=None, geolocation=None, search_result_language=None, sort_results_by=None):
Expand All @@ -51,7 +64,7 @@ def perform_websearch(search_engine, search_query, content_country, search_lang,
elif search_engine.lower() == "serper":
return search_web_serper()
elif search_engine.lower() == "tavily":
return search_web_tavily()
return search_web_tavily(search_query, result_count, site_blacklist)
elif search_engine.lower() == "searx":
return search_web_searx(search_query, language='auto', time_range='', safesearch=0, pageno=1, categories='general')
elif search_engine.lower() == "yandex":
Expand Down Expand Up @@ -536,7 +549,6 @@ def test_search_kagi():
# https://searx.space
# https://searx.github.io/searx/dev/search_api.html
def search_web_searx(search_query, language='auto', time_range='', safesearch=0, pageno=1, categories='general'):

# Check if API URL is configured
searx_url = loaded_config_data['search_engines']['searx_search_api_url']
if not searx_url:
Expand All @@ -560,7 +572,7 @@ def search_web_searx(search_query, language='auto', time_range='', safesearch=0,
# Perform the search request
try:
headers = {
'User-Agent': 'anything-llm'
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:133.0) Gecko/20100101 Firefox/133.0'
}

response = requests.get(search_url, headers=headers)
Expand Down Expand Up @@ -615,12 +627,42 @@ def test_search_serper():
######################### Tavily Search #########################
#
# https://github.com/YassKhazzan/openperplex_backend_os/blob/main/sources_searcher.py
def search_web_tavily():
pass
def search_web_tavily(search_query, result_count=10, site_whitelist=None, site_blacklist=None):
    """Perform a web search using the Tavily search API.

    Args:
        search_query: The query string to search for.
        result_count: Maximum number of results to request (default 10).
        site_whitelist: Optional list of domains to restrict results to
            (sent as Tavily's "include_domains").
        site_blacklist: Optional list of domains to exclude from results
            (sent as Tavily's "exclude_domains").

    Returns:
        dict: Parsed JSON response from Tavily on success.
        str: A human-readable error message on failure.

    NOTE(review): perform_websearch() currently calls this as
    search_web_tavily(search_query, result_count, site_blacklist), which
    positionally binds the blacklist to ``site_whitelist`` — confirm and fix
    the caller (or use keyword arguments there).
    """
    tavily_api_url = "https://api.tavily.com/search"

    # Fail fast with a clear message when no API key is configured; an empty
    # key would otherwise surface as an opaque HTTP error from the API.
    tavily_api_key = loaded_config_data['search_engines']['tavily_search_api_key']
    if not tavily_api_key:
        return "No Tavily API key found. Please set 'search_engine_api_key_tavily' in your config file."

    # The API key travels in the request body alongside the query.
    payload = {
        "api_key": tavily_api_key,
        "query": search_query,
        "max_results": result_count
    }

    # Add optional domain filters only when provided
    if site_whitelist:
        payload["include_domains"] = site_whitelist
    if site_blacklist:
        payload["exclude_domains"] = site_blacklist

    # Perform the search request
    try:
        headers = {
            'Content-Type': 'application/json',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:133.0) Gecko/20100101 Firefox/133.0'
        }

        # json= lets requests serialize the payload; the timeout prevents
        # hanging indefinitely on an unresponsive endpoint.
        response = requests.post(tavily_api_url, headers=headers, json=payload, timeout=30)
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as e:
        return f"There was an error searching for content. {str(e)}"


def test_search_tavily():
    # Smoke test: run a sample query against Tavily and dump the raw response.
    sample_query = "How can I bake a cherry cake?"
    search_results = search_web_tavily(sample_query)
    print(search_results)

######################### Yandex Search #########################
#
Expand Down
25 changes: 25 additions & 0 deletions Docs/Design/Chunking.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# Chunking

## Overview
- Chunking is the process of breaking down a document into smaller pieces, or "chunks". This is useful for a variety of reasons, such as:
-






### Types of Chunking


### Implementation in tldw
-



### Link Dump:
https://gleen.ai/blog/agentic-chunking-enhancing-rag-answers-for-completeness-and-accuracy/




3 changes: 3 additions & 0 deletions Docs/Design/Coding_Page.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,9 @@ https://github.com/abinthomasonline/repo2txt
https://github.com/charmandercha/ArchiDoc
https://pythontutor.com/c.html#mode=edit
https://pythontutor.com/articles/c-cpp-visualizer.html
https://gitingest.com/
https://gitdiagram.com/
https://www.ilograph.com/blog/posts/diagrams-ai-can-and-cannot-generate/#system-diagramming-with-ai



Expand Down
1 change: 1 addition & 0 deletions Docs/Design/Prompts.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,4 @@ https://github.com/LouisShark/chatgpt_system_prompt
https://github.com/microsoft/PromptWizard


https://medium.com/@camauger/crafting-effective-chatgpt-prompts-for-tabletop-roleplaying-games-a-step-by-step-guide-part-1-b81a791d278d
Loading

0 comments on commit 2f48ffc

Please sign in to comment.