Chat-with-Git-Repo.json (forked from aiplanethub/ai-stacks)
{"id":"25793490-ec2a-4b82-910d-731445eca690","data":{"nodes":[{"width":384,"height":625,"id":"GitLoader-Xj3Wb","type":"genericNode","position":{"x":-72.11167294878089,"y":288.56702971894737},"data":{"type":"GitLoader","node":{"template":{"branch":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"main","password":false,"name":"branch","display_name":"Branch","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"clone_url":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"https://github.com/adithyaGHegde/Computer-Vision-Posture-Analysis","password":false,"name":"clone_url","display_name":"Clone URL","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"file_filter":{"required":false,"placeholder":"","show":true,"multiline":false,"value":".py, .md","password":false,"name":"file_filter","display_name":"File extensions (comma-separated)","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"metadata":{"required":false,"placeholder":"","show":true,"multiline":false,"value":[{"":""}],"password":false,"name":"metadata","display_name":"Metadata","advanced":false,"dynamic":false,"info":"","type":"dict","list":false},"repo_path":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"/tmp/cv-project","password":false,"name":"repo_path","display_name":"Path to repository","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"_type":"GitLoader"},"description":"Load `Git` repository files.","base_classes":["Document"],"display_name":"GitLoader","custom_fields":{},"output_types":["Document"],"documentation":"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/git","beta":false,"error":null},"id":"GitLoader-Xj3Wb"},"selected":false,"positionAbsolute":{"x":-72.11167294878089,"y":288.56702971894737},"dragging":false},{"width":384,"height":563,"id":"CustomComponent-GQeV1","type":"genericNode","position":{"x":2219.6980942865225,"y":438.36282568197976},"data":{"type":"CustomComponent","node":{"template":{"code":{"dynamic":true,"required":true,"placeholder":"","show":true,"multiline":true,"value":"from typing import Optional, Union, Callable\nfrom genflow import CustomComponent\nfrom genflow.field_typing import (\n BasePromptTemplate,\n BaseLanguageModel,\n BaseMemory,\n Chain,\n BaseRetriever,\n)\nfrom langchain.chains import RetrievalQA\n\n\nclass RetrievalQAPromptComponent(CustomComponent):\n \"\"\"\n A custom component for implementing a RetrievalQA using Prompt.\n \"\"\"\n\n display_name: str = \"RetrievalQAPrompt\"\n description: str = \"Implementation of RetrievalQA using Prompt\"\n beta: bool = True\n\n CHAIN_TYPE_OPTIONS = [\n 'stuff','map_reduce','refine','map_rerank'\n ]\n\n def build_config(self):\n \"\"\"\n Builds the configuration for the component.\n Returns:\n - dict: A dictionary containing the configuration options for the component.\n \"\"\"\n return {\n \"chain_type\":{\"display\":\"chain_type\",\"value\":\"stuff\",\"options\":self.CHAIN_TYPE_OPTIONS,\"required\":True},\n \"llm\":{\"display_name\":\"LLM\",\"required\":True},\n \"prompt\":{\"display_name\":\"Prompt\",\"required\":True},\n \"memory\":{\"display_name\":\"Memory\"},\n \"retriever\":{\"display_name\":\"Retriever\",\"required\":True}\n }\n\n def build(\n self,\n llm:BaseLanguageModel,\n prompt: BasePromptTemplate,\n chain_type:str,\n retriever: BaseRetriever,\n memory: Optional[BaseMemory] = None,\n ) -> Chain:\n \"\"\"\n Builds the RetrievalQA with prompt\n Args:\n 
- llm: Large Language Models\n - chain_type: used to load a specific type of chain for question-answering\n - chain_type_kwargs: chain keywords argument to pass prompt\n - retriever: vector store to retrieve k relevant context information\n Returns:\n - Chain: The RetrievalQA chain with Prompt, Retriever and LLM\n \"\"\"\n return RetrievalQA.from_chain_type(llm=llm,\n chain_type=chain_type,\n chain_type_kwargs={\"prompt\":prompt},\n retriever=retriever,\n return_source_documents=True\n )","password":false,"name":"code","advanced":false,"type":"code","list":false},"_type":"CustomComponent","chain_type":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"stuff","password":false,"options":["stuff","map_reduce","refine","map_rerank"],"name":"chain_type","display_name":"chain_type","advanced":false,"dynamic":false,"info":"","type":"str","list":true},"llm":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"llm","display_name":"LLM","advanced":false,"dynamic":false,"info":"","type":"BaseLanguageModel","list":false},"memory":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"memory","display_name":"Memory","advanced":false,"dynamic":false,"info":"","type":"BaseMemory","list":false},"prompt":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"prompt","display_name":"Prompt","advanced":false,"dynamic":false,"info":"","type":"BasePromptTemplate","list":false},"retriever":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"retriever","display_name":"Retriever","advanced":false,"dynamic":false,"info":"","type":"BaseRetriever","list":false}},"description":"Implementation of RetrievalQA using Prompt","base_classes":["Chain"],"display_name":"RetrievalQAPrompt","custom_fields":{"chain_type":null,"llm":null,"memory":null,"prompt":null,"retriever":null},"output_types":["Chain"],"documentation":"","beta":true,"error":null},"id":"CustomComponent-GQeV1"},"selected":false,"dragging":false,"positionAbsolute":{"x":2219.6980942865225,"y":438.36282568197976}},{"width":384,"height":483,"id":"EnsembleRetriever-1oIIq","type":"genericNode","position":{"x":1672.716148153916,"y":928.3251904827271},"data":{"type":"EnsembleRetriever","node":{"template":{"code":{"dynamic":true,"required":true,"placeholder":"","show":false,"multiline":true,"value":"from typing import List\nfrom langchain.retrievers import BM25Retriever, EnsembleRetriever\nfrom langchain.schema import Document, BaseRetriever\nfrom langchain.vectorstores.base import VectorStore\n\nfrom genflow import CustomComponent\n\n\nclass EnsembleRetrieverComponent(CustomComponent):\n display_name: str = \"Ensemble Retriever\"\n description: str = \"The EnsembleRetriever takes a list of retrievers as input.\"\n documentation: str = (\n \"https://python.langchain.com/docs/modules/data_connection/retrievers/ensemble\"\n )\n beta = False\n\n def build_config(self):\n return {\n \"documents\": {\"display_name\": \"Documents\"},\n \"retrievers\": {\n \"display_name\": \"Retrievers\",\n },\n \"top_k\": {\n \"display_name\": \"Top K\",\n \"is_list\": False,\n \"required\": True,\n \"value\": 2,\n \"info\": \"The number of results to return.\",\n },\n \"weights\": {\n \"display_name\": \"Weights\",\n \"is_list\": False,\n \"required\": True,\n \"value\": \"0.5, 0.5\",\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n top_k: int,\n weights: str,\n documents: List[Document],\n retrievers: 
List[VectorStore],\n ) -> BaseRetriever:\n docs = [document.page_content for document in documents]\n bm25_retriever = BM25Retriever.from_texts(docs)\n bm25_retriever.k = top_k\n\n _retrievers = [\n _retriever.as_retriever(search_kwargs={\"k\": top_k})\n for _retriever in retrievers\n ]\n _retrievers.append(bm25_retriever)\n\n _weights = [\n int(_weight) if isinstance(_weight, int) else float(_weight)\n for _weight in weights.split(\",\")\n ]\n\n return EnsembleRetriever(retrievers=_retrievers, weights=_weights)\n","password":false,"name":"code","advanced":false,"type":"code","list":false},"_type":"CustomComponent","documents":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"documents","display_name":"Documents","advanced":false,"dynamic":false,"info":"","type":"Document","list":true},"retrievers":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"retrievers","display_name":"Retrievers","advanced":false,"dynamic":false,"info":"","type":"VectorStore","list":true},"top_k":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"4","password":false,"name":"top_k","display_name":"Top K","advanced":false,"dynamic":false,"info":"The number of results to return.","type":"int","list":false},"weights":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"0.5, 0.5","password":false,"name":"weights","display_name":"Weights","advanced":false,"dynamic":false,"info":"","type":"str","list":false}},"description":"The EnsembleRetriever takes a list of retrievers as input.","base_classes":["BaseRetriever"],"display_name":"Ensemble Retriever","custom_fields":{"documents":null,"retrievers":null,"top_k":null,"weights":null},"output_types":["EnsembleRetriever"],"documentation":"https://python.langchain.com/docs/modules/data_connection/retrievers/ensemble","beta":false,"error":null},"id":"EnsembleRetriever-1oIIq"},"selected":false,"positionAbsolute":{"x":1672.716148153916,"y":928.3251904827271},"dragging":false},{"width":384,"height":801,"id":"RecursiveCharacterTextSplitter-gdLr7","type":"genericNode","position":{"x":480.4673845915146,"y":238.11450874382763},"data":{"type":"RecursiveCharacterTextSplitter","node":{"template":{"code":{"dynamic":true,"required":true,"placeholder":"","show":false,"multiline":true,"value":"from typing import Optional\nfrom genflow import CustomComponent\nfrom langchain.schema import Document\nfrom genflow.utils.util import build_loader_repr_from_documents\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.genflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"documents\": {\n \"display_name\": \"Documents\",\n \"info\": \"The documents to split.\",\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n 
documents: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Document]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n from langchain.text_splitter import RecursiveCharacterTextSplitter\n\n if separators == \"\":\n separators = None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [x.encode().decode(\"unicode-escape\") for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n\n docs = splitter.split_documents(documents)\n self.repr_value = build_loader_repr_from_documents(docs)\n return docs\n","password":false,"name":"code","advanced":false,"type":"code","list":false},"_type":"CustomComponent","chunk_overlap":{"required":false,"placeholder":"","show":true,"multiline":false,"value":200,"password":false,"name":"chunk_overlap","display_name":"Chunk Overlap","advanced":false,"dynamic":false,"info":"The amount of overlap between chunks.","type":"int","list":false},"chunk_size":{"required":false,"placeholder":"","show":true,"multiline":false,"value":1000,"password":false,"name":"chunk_size","display_name":"Chunk Size","advanced":false,"dynamic":false,"info":"The maximum length of each chunk.","type":"int","list":false},"documents":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"documents","display_name":"Documents","advanced":false,"dynamic":false,"info":"The documents to split.","type":"Document","list":true},"separators":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"separators","display_name":"Separators","advanced":false,"dynamic":false,"info":"The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].","type":"str","list":true,"value":["'\\nclass '","'\\ndef '","'\\n\\tdef '","'\\n\\n'","'\\n'","' '","''"]}},"description":"Split text into chunks of a specified length.","base_classes":["Document"],"display_name":"Recursive Character Text Splitter","custom_fields":{"chunk_overlap":null,"chunk_size":null,"documents":null,"separators":null},"output_types":["RecursiveCharacterTextSplitter"],"documentation":"https://docs.genflow.org/components/text-splitters#recursivecharactertextsplitter","beta":true,"error":null},"id":"RecursiveCharacterTextSplitter-gdLr7"},"selected":false,"positionAbsolute":{"x":480.4673845915146,"y":238.11450874382763},"dragging":false},{"width":384,"height":735,"id":"AzureChatOpenAI-uO6Pp","type":"genericNode","position":{"x":1651.5965959918294,"y":-499.3027975709087},"data":{"type":"AzureChatOpenAI","node":{"template":{"code":{"dynamic":true,"required":true,"placeholder":"","show":false,"multiline":true,"value":"from typing import Optional\nfrom genflow.interface.custom import CustomComponent\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chat_models import 
AzureChatOpenAI\n\n\nclass AzureChatOpenAILLM(CustomComponent):\n display_name: str = \"AzureChatOpenAI\"\n description: str = \"Azure Chat Open AI Chat&Completion large language models.\"\n\n AZURE_OPENAI_MODELS = [\n \"gpt-4\",\n \"gpt-4-32k\",\n \"gpt-4-vision\",\n ]\n\n def build_config(self):\n return {\n \"model\": {\n \"display_name\": \"Model Name\",\n \"value\": \"gpt-4\",\n \"options\": self.AZURE_OPENAI_MODELS,\n \"required\": True,\n },\n \"api_key\": {\n \"display_name\": \"AzureChatOpenAI API Key\",\n \"required\": True,\n \"password\": True,\n },\n \"api_base\": {\n \"display_name\": \"AzureChatOpenAI API Base\",\n \"required\": True,\n },\n \"api_type\": {\"display_name\": \"AzureChatOpenAI API Type\", \"required\": True},\n \"azure_deployment\": {\n \"display_name\": \"Deployment Name\",\n \"required\": True,\n },\n \"api_version\": {\n \"display_name\": \"API Version\",\n \"value\": \"2023-07-01-preview\",\n \"required\": True,\n \"advanced\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"value\": 0.5,\n \"field_type\": \"float\",\n \"required\": False,\n },\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"value\": 512,\n \"required\": False,\n \"field_type\": \"int\",\n \"advanced\": True,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n model: str,\n api_base: str,\n api_type: str,\n api_key: str,\n azure_deployment: str,\n api_version: str = \"2023-05-15\",\n temperature: Optional[float] = 0.7,\n max_tokens: Optional[int] = 512,\n ) -> BaseLLM:\n try:\n output = AzureChatOpenAI(\n model_name=model,\n openai_api_base=api_base,\n openai_api_type=api_type,\n openai_api_key=api_key,\n openai_api_version=api_version,\n deployment_name=azure_deployment,\n temperature=temperature,\n max_tokens=max_tokens,\n )\n except Exception as e:\n raise ValueError(\"Could not connect to Azure ChatOpenAI model.\") from e\n return output\n","password":false,"name":"code","advanced":false,"type":"code","list":false},"_type":"CustomComponent","api_base":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"api_base","display_name":"AzureChatOpenAI API Base","advanced":false,"dynamic":false,"info":"","type":"str","list":false,"value":"https://gpt-res.openai.azure.com/"},"api_key":{"required":true,"placeholder":"","show":true,"multiline":false,"password":true,"name":"api_key","display_name":"AzureChatOpenAI API Key","advanced":false,"dynamic":false,"info":"","type":"str","list":false,"value":""},"api_type":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"api_type","display_name":"AzureChatOpenAI API Type","advanced":false,"dynamic":false,"info":"","type":"str","list":false,"value":"azure"},"api_version":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"2023-07-01-preview","password":false,"name":"api_version","display_name":"API Version","advanced":true,"dynamic":false,"info":"","type":"str","list":false},"azure_deployment":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"azure_deployment","display_name":"Deployment Name","advanced":false,"dynamic":false,"info":"","type":"str","list":false,"value":"gpt4-inference"},"max_tokens":{"required":false,"placeholder":"","show":true,"multiline":false,"value":512,"password":false,"name":"max_tokens","display_name":"Max 
Tokens","advanced":true,"dynamic":false,"info":"","type":"int","list":false},"model":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"gpt-4","password":false,"options":["gpt-4","gpt-4-32k","gpt-4-vision"],"name":"model","display_name":"Model Name","advanced":false,"dynamic":false,"info":"","type":"str","list":true},"temperature":{"required":false,"placeholder":"","show":true,"multiline":false,"value":0.5,"password":false,"name":"temperature","display_name":"Temperature","advanced":false,"dynamic":false,"info":"","type":"float","list":false}},"description":"Azure Chat Open AI Chat&Completion large language models.","base_classes":["BaseLanguageModel","BaseLLM"],"display_name":"AzureChatOpenAI","custom_fields":{"api_base":null,"api_key":null,"api_type":null,"api_version":null,"azure_deployment":null,"max_tokens":null,"model":null,"temperature":null},"output_types":["AzureChatOpenAI"],"documentation":"","beta":true,"error":null},"id":"AzureChatOpenAI-uO6Pp"},"selected":false,"positionAbsolute":{"x":1651.5965959918294,"y":-499.3027975709087},"dragging":false},{"width":384,"height":243,"id":"ChatPromptTemplate-zKVXz","type":"genericNode","position":{"x":1505.3617906360876,"y":470.8148013400168},"data":{"type":"ChatPromptTemplate","node":{"template":{"messages":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"messages","advanced":false,"dynamic":true,"info":"","type":"BaseMessagePromptTemplate","list":true},"output_parser":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"output_parser","advanced":false,"dynamic":true,"info":"","type":"BaseOutputParser","list":false},"input_types":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"input_types","advanced":false,"dynamic":true,"info":"","type":"dict","list":false},"input_variables":{"required":true,"placeholder":"","show":false,"multiline":false,"password":false,"name":"input_variables","advanced":false,"dynamic":true,"info":"","type":"str","list":true},"partial_variables":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"partial_variables","advanced":false,"dynamic":true,"info":"","type":"dict","list":false},"validate_template":{"required":false,"placeholder":"","show":false,"multiline":false,"value":false,"password":false,"name":"validate_template","advanced":false,"dynamic":true,"info":"","type":"bool","list":false},"_type":"ChatPromptTemplate"},"description":"A prompt template for chat models.","base_classes":["ChatPromptTemplate","BaseChatPromptTemplate","BasePromptTemplate"],"display_name":"ChatPromptTemplate","custom_fields":{},"output_types":[],"documentation":"https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts","beta":false,"error":null},"id":"ChatPromptTemplate-zKVXz"},"selected":false,"positionAbsolute":{"x":1505.3617906360876,"y":470.8148013400168},"dragging":false},{"width":384,"height":489,"id":"HumanMessagePromptTemplate-QWDS5","type":"genericNode","position":{"x":967.2636725313821,"y":317.3188638037386},"data":{"type":"HumanMessagePromptTemplate","node":{"template":{"additional_kwargs":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"additional_kwargs","advanced":true,"dynamic":true,"info":"","type":"dict","list":false},"prompt":{"required":true,"placeholder":"","show":true,"multiline":true,"value":"<|system|>>\nYou are an AI Coding Assistant that follows instructions extremely 
well. You have the context of a github codebase and you use this to help the user.\nPlease be truthful and give direct answers. Please say 'I don't know' if user query is not in CONTEXT\n\nCONTEXT: {context}\n</s>\n<|user|>\n{question}\n</s>\n<|assistant|>","password":false,"name":"prompt","advanced":false,"dynamic":true,"info":"","type":"prompt","list":false},"_type":"HumanMessagePromptTemplate","context":{"required":false,"placeholder":"","show":true,"multiline":true,"value":"","password":false,"name":"context","display_name":"context","advanced":false,"input_types":["Document","BaseOutputParser","Input"],"dynamic":false,"info":"","type":"str","list":false},"question":{"required":false,"placeholder":"","show":true,"multiline":true,"value":"","password":false,"name":"question","display_name":"question","advanced":false,"input_types":["Document","BaseOutputParser","Input"],"dynamic":false,"info":"","type":"str","list":false}},"description":"Human message prompt template. This is a message sent from the user.","base_classes":["HumanMessagePromptTemplate","BaseMessagePromptTemplate","BaseStringMessagePromptTemplate"],"name":"","display_name":"HumanMessagePromptTemplate","documentation":"https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts","custom_fields":{"prompt":["context","question"]},"output_types":[],"field_formatters":{},"beta":false,"error":null},"id":"HumanMessagePromptTemplate-QWDS5"},"selected":false,"positionAbsolute":{"x":967.2636725313821,"y":317.3188638037386},"dragging":false},{"width":384,"height":645,"id":"AzureOpenAIEmbeddings-kVn9L","type":"genericNode","position":{"x":313.3375653104332,"y":1085.1267062717836},"data":{"type":"AzureOpenAIEmbeddings","node":{"template":{"code":{"dynamic":true,"required":true,"placeholder":"","show":true,"multiline":true,"value":"from genflow import CustomComponent\nfrom langchain.embeddings.base import Embeddings\nfrom langchain.embeddings import AzureOpenAIEmbeddings\n\nclass AzureOpenAIEmbeddingsComponent(CustomComponent):\n display_name: str = \"AzureOpenAI Embeddings\"\n description: str = \"\"\"Access Azure OpenAI Embedding i.e., Text-Ada version 2\"\"\"\n documentation: str = \"https://python.langchain.com/docs/integrations/text_embedding/azureopenai\"\n beta = False\n\n def build_config(self):\n return {\n \"api_key\": {\n \"display_name\": \"AzureOpenAI API Key\",\n \"is_list\": False,\n \"required\": True,\n \"value\": \"\",\n },\n \"endpoint\": {\n \"display_name\": \"Azure Endpoint\",\n \"is_list\": False,\n \"required\": True,\n \"value\": \"\",\n },\n \"version\": {\n \"display_name\": \"OpenAI Version\",\n \"is_list\": False,\n \"required\": True,\n \"value\": \"\",\n },\n \"deployment_name\": {\n \"display_name\": \"Azure Deployment Name\",\n \"is_list\": False,\n \"required\": True,\n \"value\": \"\",\n },\n \"code\": {\"show\": True},\n }\n\n def build(self, api_key: str, endpoint: str, version: str, deployment_name: str) -> Embeddings:\n return AzureOpenAIEmbeddings(\n azure_deployment=deployment_name,\n azure_endpoint = endpoint,\n openai_api_key = api_key,\n openai_api_version=version\n )\n","password":false,"name":"code","advanced":false,"type":"code","list":false},"_type":"CustomComponent","api_key":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"d6d9522a01c74836907af2f3fd72ff85","password":false,"name":"api_key","display_name":"AzureOpenAI API 
Key","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"deployment_name":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"text-embed-marketplace","password":false,"name":"deployment_name","display_name":"Azure Deployment Name","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"endpoint":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"https://marketplace.openai.azure.com/","password":false,"name":"endpoint","display_name":"Azure Endpoint","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"version":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"2023-05-15","password":false,"name":"version","display_name":"OpenAI Version","advanced":false,"dynamic":false,"info":"","type":"str","list":false}},"description":"Access Azure OpenAI Embedding i.e., Text-Ada version 2","base_classes":["Embeddings"],"display_name":"AzureOpenAI Embeddings","custom_fields":{"api_key":null,"deployment_name":null,"endpoint":null,"version":null},"output_types":["AzureOpenAIEmbeddings"],"documentation":"https://python.langchain.com/docs/integrations/text_embedding/azureopenai","beta":false,"error":null},"id":"AzureOpenAIEmbeddings-kVn9L"},"selected":false,"positionAbsolute":{"x":313.3375653104332,"y":1085.1267062717836},"dragging":false},{"width":384,"height":291,"id":"Chroma-lOaKj","type":"genericNode","position":{"x":1048.0434271097317,"y":1114.082327948253},"data":{"type":"Chroma","node":{"template":{"code":{"dynamic":true,"required":true,"placeholder":"","show":false,"multiline":true,"value":"from typing import Optional, Union\nfrom genflow import CustomComponent\n\nfrom langchain.vectorstores.chroma import Chroma\nfrom langchain.schema import Document\nfrom langchain.vectorstores.base import VectorStore\nfrom langchain.schema import BaseRetriever\nfrom langchain.embeddings.base import Embeddings\nimport chromadb # type: ignore\n\n\nclass ChromaComponent(CustomComponent):\n \"\"\"\n A custom component for implementing a Vector Store using Chroma.\n \"\"\"\n\n display_name: str = \"Chroma\"\n description: str = \"Implementation of Vector Store using Chroma\"\n documentation = \"https://docs.aiplanet.com/components/vector-store#chroma\"\n beta: bool = True\n\n def build_config(self):\n \"\"\"\n Builds the configuration for the component.\n\n Returns:\n - dict: A dictionary containing the configuration options for the component.\n \"\"\"\n return {\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"value\": \"genflow\",\n \"required\": False,\n \"advanced\": True,\n },\n \"persist\": {\n \"display_name\": \"Persist\",\n \"value\": True,\n \"required\": False,\n \"advanced\": True,\n },\n \"persist_directory\": {\n \"display_name\": \"Persist Directory\",\n \"value\": \"/mnt/models/chroma\",\n \"required\": False,\n \"advanced\": True,\n },\n \"code\": {\"show\": False, \"display_name\": \"Code\"},\n \"documents\": {\"display_name\": \"Documents\", \"is_list\": True},\n \"embedding\": {\"display_name\": \"Embedding\"},\n \"chroma_server_cors_allow_origins\": {\n \"display_name\": \"Server CORS Allow Origins\",\n \"advanced\": True,\n },\n \"chroma_server_host\": {\"display_name\": \"Server Host\", \"advanced\": True},\n \"chroma_server_port\": {\"display_name\": \"Server Port\", \"advanced\": True},\n \"chroma_server_grpc_port\": {\n \"display_name\": \"Server gRPC Port\",\n \"advanced\": True,\n },\n \"chroma_server_ssl_enabled\": {\n \"display_name\": \"Server SSL 
Enabled\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n chroma_server_ssl_enabled: bool,\n collection_name: Optional[str] = \"genflow\",\n persist: Optional[bool] = True,\n persist_directory: Optional[str] = \"/mnt/models/chroma\",\n embedding: Optional[Embeddings] = None,\n documents: Optional[Document] = None,\n chroma_server_cors_allow_origins: Optional[str] = None,\n chroma_server_host: Optional[str] = None,\n chroma_server_port: Optional[int] = None,\n chroma_server_grpc_port: Optional[int] = None,\n ) -> Union[VectorStore, BaseRetriever]:\n \"\"\"\n Builds the Vector Store or BaseRetriever object.\n\n Args:\n - collection_name (str): The name of the collection.\n - persist_directory (Optional[str]): The directory to persist the Vector Store to.\n - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server.\n - persist (bool): Whether to persist the Vector Store or not.\n - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store.\n - documents (Optional[Document]): The documents to use for the Vector Store.\n - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.\n - chroma_server_host (Optional[str]): The host for the Chroma server.\n - chroma_server_port (Optional[int]): The port for the Chroma server.\n - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.\n\n Returns:\n - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object.\n \"\"\"\n\n # Chroma settings\n chroma_settings = None\n\n if chroma_server_host is not None:\n chroma_settings = chromadb.config.Settings(\n chroma_server_cors_allow_origins=chroma_server_cors_allow_origins\n or None,\n chroma_server_host=chroma_server_host,\n chroma_server_port=chroma_server_port or None,\n chroma_server_grpc_port=chroma_server_grpc_port or None,\n chroma_server_ssl_enabled=chroma_server_ssl_enabled,\n )\n\n # If documents, then we need to create a Chroma instance using .from_documents\n if documents is not None and embedding is not None:\n return Chroma.from_documents(\n documents=documents, # type: ignore\n persist_directory=persist_directory if persist else None,\n collection_name=collection_name,\n embedding=embedding,\n client_settings=chroma_settings,\n )\n\n if embedding is not None:\n return Chroma(\n persist_directory=persist_directory,\n client_settings=chroma_settings,\n embedding_function=embedding,\n collection_name=collection_name,\n )\n\n return Chroma(\n persist_directory=persist_directory, client_settings=chroma_settings\n )\n","password":false,"name":"code","advanced":false,"type":"code","list":false},"_type":"CustomComponent","chroma_server_cors_allow_origins":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"chroma_server_cors_allow_origins","display_name":"Server CORS Allow Origins","advanced":true,"dynamic":false,"info":"","type":"str","list":false},"chroma_server_grpc_port":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"chroma_server_grpc_port","display_name":"Server gRPC Port","advanced":true,"dynamic":false,"info":"","type":"int","list":false},"chroma_server_host":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"chroma_server_host","display_name":"Server 
Host","advanced":true,"dynamic":false,"info":"","type":"str","list":false},"chroma_server_port":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"chroma_server_port","display_name":"Server Port","advanced":true,"dynamic":false,"info":"","type":"int","list":false},"chroma_server_ssl_enabled":{"required":true,"placeholder":"","show":true,"multiline":false,"value":false,"password":false,"name":"chroma_server_ssl_enabled","display_name":"Server SSL Enabled","advanced":true,"dynamic":false,"info":"","type":"bool","list":false},"collection_name":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"genflow","password":false,"name":"collection_name","display_name":"Collection Name","advanced":true,"dynamic":false,"info":"","type":"str","list":false},"documents":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"documents","display_name":"Documents","advanced":false,"dynamic":false,"info":"","type":"Document","list":true},"embedding":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"embedding","display_name":"Embedding","advanced":false,"dynamic":false,"info":"","type":"Embeddings","list":false},"persist":{"required":false,"placeholder":"","show":true,"multiline":false,"value":true,"password":false,"name":"persist","display_name":"Persist","advanced":true,"dynamic":false,"info":"","type":"bool","list":false},"persist_directory":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"/mnt/models/chroma","password":false,"name":"persist_directory","display_name":"Persist Directory","advanced":true,"dynamic":false,"info":"","type":"str","list":false}},"description":"Implementation of Vector Store using Chroma","base_classes":["VectorStore","BaseRetriever"],"display_name":"Chroma","custom_fields":{"chroma_server_cors_allow_origins":null,"chroma_server_grpc_port":null,"chroma_server_host":null,"chroma_server_port":null,"chroma_server_ssl_enabled":null,"collection_name":null,"documents":null,"embedding":null,"persist":null,"persist_directory":null},"output_types":["Chroma"],"documentation":"https://docs.aiplanet.com/components/vector-store#chroma","beta":true,"error":null},"id":"Chroma-lOaKj"},"selected":false,"dragging":false,"positionAbsolute":{"x":1048.0434271097317,"y":1114.082327948253}}],"edges":[{"source":"EnsembleRetriever-1oIIq","sourceHandle":"{œbaseClassesœ:[œBaseRetrieverœ],œdataTypeœ:œEnsembleRetrieverœ,œidœ:œEnsembleRetriever-1oIIqœ}","target":"CustomComponent-GQeV1","targetHandle":"{œfieldNameœ:œretrieverœ,œidœ:œCustomComponent-GQeV1œ,œinputTypesœ:null,œtypeœ:œBaseRetrieverœ}","data":{"targetHandle":{"fieldName":"retriever","id":"CustomComponent-GQeV1","inputTypes":null,"type":"BaseRetriever"},"sourceHandle":{"baseClasses":["BaseRetriever"],"dataType":"EnsembleRetriever","id":"EnsembleRetriever-1oIIq"}},"style":{"stroke":"#555"},"className":"stroke-gray-900 
stroke-connection","animated":false,"id":"reactflow__edge-EnsembleRetriever-1oIIq{œbaseClassesœ:[œBaseRetrieverœ],œdataTypeœ:œEnsembleRetrieverœ,œidœ:œEnsembleRetriever-1oIIqœ}-CustomComponent-GQeV1{œfieldNameœ:œretrieverœ,œidœ:œCustomComponent-GQeV1œ,œinputTypesœ:null,œtypeœ:œBaseRetrieverœ}"},{"source":"RecursiveCharacterTextSplitter-gdLr7","sourceHandle":"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-gdLr7œ}","target":"EnsembleRetriever-1oIIq","targetHandle":"{œfieldNameœ:œdocumentsœ,œidœ:œEnsembleRetriever-1oIIqœ,œinputTypesœ:null,œtypeœ:œDocumentœ}","data":{"targetHandle":{"fieldName":"documents","id":"EnsembleRetriever-1oIIq","inputTypes":null,"type":"Document"},"sourceHandle":{"baseClasses":["Document"],"dataType":"RecursiveCharacterTextSplitter","id":"RecursiveCharacterTextSplitter-gdLr7"}},"style":{"stroke":"#555"},"className":"stroke-gray-900 stroke-connection","animated":false,"id":"reactflow__edge-RecursiveCharacterTextSplitter-gdLr7{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-gdLr7œ}-EnsembleRetriever-1oIIq{œfieldNameœ:œdocumentsœ,œidœ:œEnsembleRetriever-1oIIqœ,œinputTypesœ:null,œtypeœ:œDocumentœ}","selected":false},{"source":"AzureChatOpenAI-uO6Pp","sourceHandle":"{œbaseClassesœ:[œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œAzureChatOpenAIœ,œidœ:œAzureChatOpenAI-uO6Ppœ}","target":"CustomComponent-GQeV1","targetHandle":"{œfieldNameœ:œllmœ,œidœ:œCustomComponent-GQeV1œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}","data":{"targetHandle":{"fieldName":"llm","id":"CustomComponent-GQeV1","inputTypes":null,"type":"BaseLanguageModel"},"sourceHandle":{"baseClasses":["BaseLanguageModel","BaseLLM"],"dataType":"AzureChatOpenAI","id":"AzureChatOpenAI-uO6Pp"}},"style":{"stroke":"#555"},"className":"stroke-gray-900 stroke-connection","animated":false,"id":"reactflow__edge-AzureChatOpenAI-uO6Pp{œbaseClassesœ:[œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œAzureChatOpenAIœ,œidœ:œAzureChatOpenAI-uO6Ppœ}-CustomComponent-GQeV1{œfieldNameœ:œllmœ,œidœ:œCustomComponent-GQeV1œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}"},{"source":"GitLoader-Xj3Wb","sourceHandle":"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œGitLoaderœ,œidœ:œGitLoader-Xj3Wbœ}","target":"RecursiveCharacterTextSplitter-gdLr7","targetHandle":"{œfieldNameœ:œdocumentsœ,œidœ:œRecursiveCharacterTextSplitter-gdLr7œ,œinputTypesœ:null,œtypeœ:œDocumentœ}","data":{"targetHandle":{"fieldName":"documents","id":"RecursiveCharacterTextSplitter-gdLr7","inputTypes":null,"type":"Document"},"sourceHandle":{"baseClasses":["Document"],"dataType":"GitLoader","id":"GitLoader-Xj3Wb"}},"style":{"stroke":"#555"},"className":"stroke-gray-900 
stroke-connection","animated":false,"id":"reactflow__edge-GitLoader-Xj3Wb{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œGitLoaderœ,œidœ:œGitLoader-Xj3Wbœ}-RecursiveCharacterTextSplitter-gdLr7{œfieldNameœ:œdocumentsœ,œidœ:œRecursiveCharacterTextSplitter-gdLr7œ,œinputTypesœ:null,œtypeœ:œDocumentœ}"},{"source":"ChatPromptTemplate-zKVXz","sourceHandle":"{œbaseClassesœ:[œChatPromptTemplateœ,œBaseChatPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œChatPromptTemplateœ,œidœ:œChatPromptTemplate-zKVXzœ}","target":"CustomComponent-GQeV1","targetHandle":"{œfieldNameœ:œpromptœ,œidœ:œCustomComponent-GQeV1œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}","data":{"targetHandle":{"fieldName":"prompt","id":"CustomComponent-GQeV1","inputTypes":null,"type":"BasePromptTemplate"},"sourceHandle":{"baseClasses":["ChatPromptTemplate","BaseChatPromptTemplate","BasePromptTemplate"],"dataType":"ChatPromptTemplate","id":"ChatPromptTemplate-zKVXz"}},"style":{"stroke":"#555"},"className":"stroke-gray-900 stroke-connection","animated":false,"id":"reactflow__edge-ChatPromptTemplate-zKVXz{œbaseClassesœ:[œChatPromptTemplateœ,œBaseChatPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œChatPromptTemplateœ,œidœ:œChatPromptTemplate-zKVXzœ}-CustomComponent-GQeV1{œfieldNameœ:œpromptœ,œidœ:œCustomComponent-GQeV1œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}"},{"source":"HumanMessagePromptTemplate-QWDS5","sourceHandle":"{œbaseClassesœ:[œHumanMessagePromptTemplateœ,œBaseMessagePromptTemplateœ,œBaseStringMessagePromptTemplateœ],œdataTypeœ:œHumanMessagePromptTemplateœ,œidœ:œHumanMessagePromptTemplate-QWDS5œ}","target":"ChatPromptTemplate-zKVXz","targetHandle":"{œfieldNameœ:œmessagesœ,œidœ:œChatPromptTemplate-zKVXzœ,œinputTypesœ:null,œtypeœ:œBaseMessagePromptTemplateœ}","data":{"targetHandle":{"fieldName":"messages","id":"ChatPromptTemplate-zKVXz","inputTypes":null,"type":"BaseMessagePromptTemplate"},"sourceHandle":{"baseClasses":["HumanMessagePromptTemplate","BaseMessagePromptTemplate","BaseStringMessagePromptTemplate"],"dataType":"HumanMessagePromptTemplate","id":"HumanMessagePromptTemplate-QWDS5"}},"style":{"stroke":"#555"},"className":"stroke-gray-900 stroke-connection","animated":false,"id":"reactflow__edge-HumanMessagePromptTemplate-QWDS5{œbaseClassesœ:[œHumanMessagePromptTemplateœ,œBaseMessagePromptTemplateœ,œBaseStringMessagePromptTemplateœ],œdataTypeœ:œHumanMessagePromptTemplateœ,œidœ:œHumanMessagePromptTemplate-QWDS5œ}-ChatPromptTemplate-zKVXz{œfieldNameœ:œmessagesœ,œidœ:œChatPromptTemplate-zKVXzœ,œinputTypesœ:null,œtypeœ:œBaseMessagePromptTemplateœ}"},{"source":"AzureOpenAIEmbeddings-kVn9L","sourceHandle":"{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œAzureOpenAIEmbeddingsœ,œidœ:œAzureOpenAIEmbeddings-kVn9Lœ}","target":"Chroma-lOaKj","targetHandle":"{œfieldNameœ:œembeddingœ,œidœ:œChroma-lOaKjœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}","data":{"targetHandle":{"fieldName":"embedding","id":"Chroma-lOaKj","inputTypes":null,"type":"Embeddings"},"sourceHandle":{"baseClasses":["Embeddings"],"dataType":"AzureOpenAIEmbeddings","id":"AzureOpenAIEmbeddings-kVn9L"}},"style":{"stroke":"#555"},"className":"stroke-foreground 
stroke-connection","animated":false,"id":"reactflow__edge-AzureOpenAIEmbeddings-kVn9L{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œAzureOpenAIEmbeddingsœ,œidœ:œAzureOpenAIEmbeddings-kVn9Lœ}-Chroma-lOaKj{œfieldNameœ:œembeddingœ,œidœ:œChroma-lOaKjœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}"},{"source":"RecursiveCharacterTextSplitter-gdLr7","sourceHandle":"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-gdLr7œ}","target":"Chroma-lOaKj","targetHandle":"{œfieldNameœ:œdocumentsœ,œidœ:œChroma-lOaKjœ,œinputTypesœ:null,œtypeœ:œDocumentœ}","data":{"targetHandle":{"fieldName":"documents","id":"Chroma-lOaKj","inputTypes":null,"type":"Document"},"sourceHandle":{"baseClasses":["Document"],"dataType":"RecursiveCharacterTextSplitter","id":"RecursiveCharacterTextSplitter-gdLr7"}},"style":{"stroke":"#555"},"className":"stroke-foreground stroke-connection","animated":false,"id":"reactflow__edge-RecursiveCharacterTextSplitter-gdLr7{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-gdLr7œ}-Chroma-lOaKj{œfieldNameœ:œdocumentsœ,œidœ:œChroma-lOaKjœ,œinputTypesœ:null,œtypeœ:œDocumentœ}"},{"source":"Chroma-lOaKj","sourceHandle":"{œbaseClassesœ:[œVectorStoreœ,œBaseRetrieverœ],œdataTypeœ:œChromaœ,œidœ:œChroma-lOaKjœ}","target":"EnsembleRetriever-1oIIq","targetHandle":"{œfieldNameœ:œretrieversœ,œidœ:œEnsembleRetriever-1oIIqœ,œinputTypesœ:null,œtypeœ:œVectorStoreœ}","data":{"targetHandle":{"fieldName":"retrievers","id":"EnsembleRetriever-1oIIq","inputTypes":null,"type":"VectorStore"},"sourceHandle":{"baseClasses":["VectorStore","BaseRetriever"],"dataType":"Chroma","id":"Chroma-lOaKj"}},"style":{"stroke":"#555"},"className":"stroke-foreground stroke-connection","animated":false,"id":"reactflow__edge-Chroma-lOaKj{œbaseClassesœ:[œVectorStoreœ,œBaseRetrieverœ],œdataTypeœ:œChromaœ,œidœ:œChroma-lOaKjœ}-EnsembleRetriever-1oIIq{œfieldNameœ:œretrieversœ,œidœ:œEnsembleRetriever-1oIIqœ,œinputTypesœ:null,œtypeœ:œVectorStoreœ}"}],"viewport":{"x":200.99465441412679,"y":-82.22656354703582,"zoom":0.554335820887459}},"description":"Allows asking queries related to python code (and markdown) by giving a github link.","name":"Chat with github python code","flow_type":"chat"}