diff --git a/.github/workflows/doc_page.yml b/.github/workflows/doc_page.yml
new file mode 100644
index 0000000..7314eac
--- /dev/null
+++ b/.github/workflows/doc_page.yml
@@ -0,0 +1,52 @@
+# Simple workflow for deploying static content to GitHub Pages
+name: Deploy static content to Pages
+
+on:
+ # Runs on pushes targeting the default branch
+ push:
+ branches: ["main"]
+
+ # Allows you to run this workflow manually from the Actions tab
+ workflow_dispatch:
+
+# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
+permissions:
+ contents: read
+ pages: write
+ id-token: write
+
+# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
+# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
+concurrency:
+ group: "pages"
+ cancel-in-progress: false
+
+jobs:
+ # Single deploy job since we're just deploying
+ deploy:
+ environment:
+ name: github-pages
+ url: ${{ steps.deployment.outputs.page_url }}
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+
+ - name: Install dependencies
+        run: pip install -U sphinx sphinx-readable-theme
+
+      - name: Setup Pages
+        uses: actions/configure-pages@v3
+
+      - name: Build documentation with Sphinx
+        run: |
+          cd docs
+          make html
+
+ - name: Upload artifact
+ uses: actions/upload-pages-artifact@v2
+ with:
+ path: 'docs/build/html/'
+ - name: Deploy to GitHub Pages
+ id: deployment
+ uses: actions/deploy-pages@v2
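
The job assumes the sphinx-quickstart layout implied by the paths above (sources in ``docs/source``, HTML output in ``docs/build/html``). A rough local check of the same build before pushing:

    pip install -U sphinx sphinx-readable-theme
    cd docs && make html
    # open docs/build/html/index.html to preview the generated site
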
diff --git a/docs/source/_static/octogen_logo.png b/docs/source/_static/octogen_logo.png
new file mode 100644
index 0000000..434a1ff
Binary files /dev/null and b/docs/source/_static/octogen_logo.png differ
diff --git a/docs/source/conf.py b/docs/source/conf.py
index a36d081..f2bb14a 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -8,8 +8,8 @@
import sphinx_readable_theme
-project = 'octopus'
-copyright = '2023, imotai'
+project = 'octogen'
+copyright = '2023, octogen.dev'
author = 'imotai'
# -- General configuration ---------------------------------------------------
@@ -25,4 +25,6 @@
# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
+html_theme = "alabaster"
html_static_path = ['_static']
+html_logo = "_static/octogen_logo.png"
diff --git a/docs/source/getstarted.rst b/docs/source/getstarted.rst
new file mode 100644
index 0000000..74816a1
--- /dev/null
+++ b/docs/source/getstarted.rst
@@ -0,0 +1,50 @@
+Getting Started
+===============
+
+Octogen is an open-source code interpreter powered by GPT-3.5/4 and Codellama.
+
+Requirements
+------------
+
+Octogen works with macOS, Linux, and Windows.
+Octogen requires the following environment:
+
+- Python 3.10.0 and above.
+- `Pip <https://pip.pypa.io/>`_
+- `Docker Desktop <https://www.docker.com/products/docker-desktop/>`_
+
+To use CodeLlama, your host must have at least 8 CPUs and 16 GB of RAM.
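+
+You can check the installed versions from a terminal::
+
+    $ python3 --version
+    $ pip --version
+    $ docker --version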
+
+Install
+-------
+
+First, install the ``og_up`` tool::
+
+ $ pip install og_up
+
+Next, run ``og_up`` to set up the Octogen service and CLI::
+
+ $ og_up
+
+You can choose from the following options:
+
+- OpenAI
+- Azure OpenAI
+- CodeLlama
+- Octogen(beta) agent services
+
+If you opt for CodeLlama, Octogen will download the model from huggingface.co automatically.
+If installing the Octogen Terminal CLI takes longer than expected,
+consider switching to a different pip mirror.
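+
+For example, you can point pip at a different index when installing (the mirror URL below is only a placeholder)::
+
+    $ pip install og_up -i https://<your-mirror>/simple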
+
+Finally, open your terminal and run the ``og`` command; you will see the following output::
+
+ Welcome to use octogen❤️ . To ask a programming question, simply type your question and press esc + enter
+ You can use /help to look for help
+
+ [1]🎧>
+
+
+How to use
+----------
+
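+Type a programming question at the prompt and press esc + enter to submit it; the agent writes and runs code to answer it.
+For example (the question below is only an illustration, and the output depends on the model you selected)::
+
+    [1]🎧> write a python function that checks whether a number is prime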
diff --git a/docs/source/index.rst b/docs/source/index.rst
index d754aaa..aa96a17 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -1,16 +1,16 @@
-.. octopus documentation master file, created by
+.. octogen documentation master file, created by
sphinx-quickstart on Thu Sep 7 23:35:56 2023.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
-Welcome to Octopus's documentation!
+Welcome to Octogen's documentation!
===================================
.. toctree::
:maxdepth: 2
:caption: Contents:
- installation.rst
+ getstarted.rst
Indices and tables
==================
diff --git a/docs/source/installation.rst b/docs/source/installation.rst
deleted file mode 100644
index 2483388..0000000
--- a/docs/source/installation.rst
+++ /dev/null
@@ -1,30 +0,0 @@
-Installation
-============
-
-You can install octopus to your local computer and cloud server
-
-Requirements
-------------
-
-Octopus works with macOs, Linux and Windows.
-Octopus requires the following enviroment
-
-- Python 3.10.0 and above.
-- Pip
-- Docker
-
-
-Install on local computer
--------------------------
-
-To use codellama, your host must have at least 8 CPUs and 16 GB of RAM
-
-the first step, install octopus_up tool
-
- pip install octopus_up
-
-the second step, use octopus_up to setup the octopus service and cli
-
- octopus_up
-
-
diff --git a/up/src/og_up/up.py b/up/src/og_up/up.py
index 1964833..a95d7fd 100644
--- a/up/src/og_up/up.py
+++ b/up/src/og_up/up.py
@@ -199,21 +199,21 @@ def choose_api_service(console):
1. OpenAI, Kernel, Agent and Cli will be installed
2. Azure OpenAI, Kernel, Agent and Cli will be installed
3. Codellama, Model Server, Kernel, Agent and Cli will be installed
-4. Octogen, Only Cli will be installed
+4. Octogen(beta), Only Cli will be installed
"""
console.print(Markdown(mk))
choice = Prompt.ask("Choices", choices=["1", "2", "3", "4"], default="1:OpenAI")
if choice == "1":
- key = Prompt.ask("Enter OpenAI Key")
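+        # password=True keeps the key from being echoed as it is typed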
+ key = Prompt.ask("Enter OpenAI Key", password=True)
model = Prompt.ask("Enter OpenAI Model", default="gpt-3.5-turbo-16k-0613")
return choice, key, model, ""
elif choice == "2":
- key = Prompt.ask("Enter Azure OpenAI Key")
+ key = Prompt.ask("Enter Azure OpenAI Key", password=True)
deployment = Prompt.ask("Enter Azure OpenAI Deployment")
api_base = Prompt.ask("Enter Azure OpenAI Base")
return choice, key, deployment, api_base
elif choice == "4":
- key = Prompt.ask("Enter Octogen Key")
+ key = Prompt.ask("Enter Octogen Key", password=True)
api_base = "https://agent.octogen.dev"
return choice, key, "" , api_base
return choice, "", "", ""
@@ -326,6 +326,12 @@ def start_service(
is_codellama="1",
model_filename="",
):
+
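+    # show a spinner entry for this step while the octogen service is (re)started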
+ spinner = Spinner("dots", style="status.spinner", speed=1.0, text="")
+ step = "Start octogen service"
+ output = ""
+ segments.append((spinner, step, ""))
+ refresh(live, segments)
stop_service("octogen")
# TODO stop the exist service
full_name = f"{image_name}:{version}"
@@ -352,7 +358,7 @@ def start_service(
result_code = code
output += chunk
pass
-
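+    # remove the spinner entry; a final status line is appended below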
+ segments.pop()
if result_code == 0:
segments.append(("✅", "Start octogen service", ""))
else: