From a8d2a25ed8d280a10158b5b4a17ee7e07bce73eb Mon Sep 17 00:00:00 2001
From: Ran Li
Date: Thu, 11 Jul 2024 11:09:59 +0800
Subject: [PATCH] feat: docs update

---
 README.md                                 |  5 +-
 .../cases/example/open_interpreter.yaml   | 23 +++++++
 dockerfiles/compose/open_interpreter.yaml | 64 +++++++++++++++++++
 3 files changed, 90 insertions(+), 2 deletions(-)
 create mode 100644 configs/client_configs/cases/example/open_interpreter.yaml
 create mode 100644 dockerfiles/compose/open_interpreter.yaml

diff --git a/README.md b/README.md
index c13d4fb..43f02b4 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,8 @@
-【Documentation | Paper (comming soon)】
+【Documentation | Paper】
 
 ---
@@ -30,7 +31,7 @@ Imagine if AI agents could collaborate like humans do on the internet. That's th
 - 🗣️ **Adaptive Conversation Flow**: The conversation flow is autonomously managed to keep agent conversations structured but flexible.
 - 🔄 **Scalable and Extensible**: Easy to add new types of agents or tackle different kinds of tasks.
 
-For more details, please refer to our paper.
+For more details, please refer to [our paper](https://arxiv.org/abs/2407.07061).

diff --git a/configs/client_configs/cases/example/open_interpreter.yaml b/configs/client_configs/cases/example/open_interpreter.yaml
new file mode 100644
index 0000000..84a0425
--- /dev/null
+++ b/configs/client_configs/cases/example/open_interpreter.yaml
@@ -0,0 +1,23 @@
+server:
+  port: 7788
+  hostname: ioa-server
+
+tool_agent:
+  image_name: open-interpreter:latest
+  container_name: open-interpreter-agent
+  agent_type: OpenInterpreter
+  agent_name: Open Interpreter
+  desc: This is Open Interpreter. It has the ability to execute code, control the terminal, and interact with online tools.
+  port: 7070
+comm:
+  name: Open Interpreter
+  desc: |-
+    This is Open Interpreter, which is an AI that can execute code, control the terminal, and interact with online tools. Tasks related to code snippet writing and execution can be assigned to this assistant.
+  type: Thing Assistant
+  support_nested_teams: false
+
+  llm:
+    llm_type: openai-chat
+    model: gpt-4-1106-preview
+    # model: gpt-4-1106
+    temperature: 0.1
diff --git a/dockerfiles/compose/open_interpreter.yaml b/dockerfiles/compose/open_interpreter.yaml
new file mode 100644
index 0000000..5439a09
--- /dev/null
+++ b/dockerfiles/compose/open_interpreter.yaml
@@ -0,0 +1,64 @@
+# Set the current version of compose yml file
+version: "3"
+
+services:
+  Server:
+    image: ioa-server:latest
+    build:
+      context: ../../
+      dockerfile: dockerfiles/server.Dockerfile
+    container_name: ioa-server
+    env_file:
+      - .env
+    environment:
+      - OPENAI_API_KEY
+      #- OPENAI_BASE_URL
+    volumes:
+      - ${DOCKER_VOLUME_DIRECTORY:-.}/volumes/sqlite:/app/database
+      - ../../configs/server_configs:/app/configs
+    ports:
+      - 7788:7788
+    stdin_open: true
+    tty: true
+
+  OpenInterpreter:
+    image: ioa-client:latest
+    build:
+      context: ../../
+      dockerfile: dockerfiles/client.Dockerfile
+    container_name: open-interpreter-client
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+      - ./volumes/openInterpreter/log:/app/tool_agent_log
+      - ${DOCKER_VOLUME_DIRECTORY:-.}/volumes/sqlite:/app/database
+      - ./volumes/openai_response_log:${OPENAI_RESPONSE_LOG_PATH}
+      - ../../configs/client_configs:/app/configs
+    env_file:
+      - .env
+    environment:
+      - CUSTOM_CONFIG=configs/cases/open_instruction/open_interpreter.yaml
+    ports:
+      - 5051:5050
+    depends_on:
+      - Server
+    stdin_open: true
+    tty: true
+
+  ServerFrontend:
+    image: ioa-server-frontend:latest
+    build:
+      context: ../../
+      dockerfile: dockerfiles/server_frontend.Dockerfile
+    container_name: server_frontend
+    ports:
+      - 80:80
+    depends_on:
+      - Server
+    stdin_open: true
+    tty: true
+
+networks:
+  default:
+    name: agent_network
+    external: true
+    # driver: bridge
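
A note for anyone running this locally: the compose file expects `OPENAI_API_KEY`, `DOCKER_VOLUME_DIRECTORY`, and `OPENAI_RESPONSE_LOG_PATH` to come from the referenced `.env` file, and it mounts `../../configs/client_configs` into the client container at `/app/configs`, so `CUSTOM_CONFIG` is resolved against that mount. The committed value points at `configs/cases/open_instruction/open_interpreter.yaml`; if the intent is to load the case file added in this patch, the entry would presumably be `configs/cases/example/open_interpreter.yaml` instead. The fragment below is a minimal sketch of that assumed variant, not part of the committed patch.

```yaml
# Minimal sketch (assumption): point the client at the case config created in this patch.
# The ../../configs/client_configs -> /app/configs mount means files under client_configs
# are referenced as configs/... inside the container.
services:
  OpenInterpreter:
    image: ioa-client:latest
    volumes:
      - ../../configs/client_configs:/app/configs
    environment:
      # host path: configs/client_configs/cases/example/open_interpreter.yaml
      - CUSTOM_CONFIG=configs/cases/example/open_interpreter.yaml
```

With that mapping in place, the client's `tool_agent` and `comm` settings come from the new case file, which in turn reaches the server at `ioa-server:7788`, matching the `container_name` and published port declared for the `Server` service above.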