Skip to content

Commit

Permalink
Merge pull request #123 from dbpunk-labs/121-feature-podman-support
Browse files Browse the repository at this point in the history
feat: podman support
  • Loading branch information
imotai authored Oct 7, 2023
2 parents 9fc446b + 6d38211 commit 06fb7a5
Show file tree
Hide file tree
Showing 6 changed files with 89 additions and 48 deletions.
4 changes: 3 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
Requirement
* python 3.10 and above
* pip
* [docker](https://www.docker.com/products/docker-desktop/) 24.0.0 and above, docker desktop is recommended
* [docker](https://www.docker.com/products/docker-desktop/) 24.0.0 and above, or [podman](https://podman.io/)

> To deploy Octogen, the user needs permission to run Docker commands.
> To use codellama, your host must have at least 8 CPUs and 16 GB of RAM.
Expand All @@ -52,6 +52,8 @@ og_up
> If you opt for CodeLlama, Octogen will automatically download it from huggingface.co.
> In case the installation of the Octogen Terminal CLI is taking longer than expected, you might want to consider switching to a different pip mirror.
The default container engine is Docker. Use Podman by passing the flag `--use_podman`.

3. Open your terminal and execute the command `og`, you will see the following output

```
Expand Down
4 changes: 2 additions & 2 deletions chat/src/og_terminal/terminal_chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,10 +213,10 @@ def handle_action_end(segments, respond, images, values):
has_error = "✅" if not respond.on_agent_action_end.has_error else "❌"
old_value = values.pop()
segment = segments.pop()
if not images:
if not images and not has_error:
images.extend(respond.on_agent_action_end.output_files)
values.append(old_value)
segments.append((len(values) - 1, has_error, segment[2]))

# add the next steps loading
spinner = Spinner("dots", style="status.spinner", speed=1.0, text="")
values.append(("text", "", []))
Expand Down
1 change: 1 addition & 0 deletions docker/start_all.sh
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ sleep 3
AGENT_RPC_KEY=$(cat ${ROOT_DIR}/agent/.env | grep admin_key | tr -d '\r' | cut -d "=" -f 2)
KERNEL_RPC_KEY=$(cat ${ROOT_DIR}/kernel/.env | grep rpc_key | tr -d '\r' | cut -d "=" -f 2)
og_agent_setup --kernel_endpoint=127.0.0.1:9527 --kernel_api_key=${KERNEL_RPC_KEY} --agent_endpoint=127.0.0.1:9528 --admin_key=${AGENT_RPC_KEY}
og_agent_setup --kernel_endpoint=127.0.0.1:9527 --kernel_api_key=${KERNEL_RPC_KEY} --agent_endpoint=127.0.0.1:9528 --admin_key=${AGENT_RPC_KEY}

while true
do
Expand Down
10 changes: 6 additions & 4 deletions docs/source/getstarted.rst
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ Octopus requires the following environment

- Python 3.10.0 and above.
- `Pip <https://pip.pypa.io/en/stable/installation/>`_
- `Docker Desktop 24.0.0 and above <https://www.docker.com/products/docker-desktop/>`_
- `Docker Desktop 24.0.0 and above <https://www.docker.com/products/docker-desktop/>`_ or `Podman <https://podman.io/docs/installation>`_

To use codellama, your host must have at least 8 CPUs and 16 GB of RAM

Expand All @@ -28,10 +28,12 @@ the second step, use ``og_up`` to setup the octopus service and cli::

You have the option to select from

- OpenAI, [apply](https://platform.openai.com/account/api-keys) the openai api key
- Azure OpenAI, [apply](https://azure.microsoft.com/en-us/products/ai-services/openai-service) the azure api key
- OpenAI, `apply <https://platform.openai.com/account/api-keys>`_ the openai api key
- Azure OpenAI, `apply <https://azure.microsoft.com/en-us/products/ai-services/openai-service>`_ the azure api key
- CodeLlama
- Octogen(beta) agent services, [apply](https://www.octogen.dev/) the octogen agent service key
- Octogen(beta) agent services, `apply <https://www.octogen.dev/>`_ the octogen agent service key

The default container engine is Docker; use Podman by passing the flag ``--use_podman``.

If you opt for CodeLlama, Octogen will automatically download it from huggingface.co.
In case the installation of the Octogen Terminal CLI is taking longer than expected,
Expand Down
116 changes: 76 additions & 40 deletions up/src/og_up/up.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,28 @@ def refresh(
live.refresh()


def check_the_env(live, segments, need_docker=True):
def check_container_vender(vender):
    """Verify that the given container engine (docker or podman) is usable.

    Runs ``<vender> version --help`` to confirm the binary is installed and
    that its ``version`` command advertises json output support, then runs
    ``<vender> ps`` to confirm the engine is actually up.

    Returns a ``(ok, message)`` tuple: ``(True, "ok")`` on success,
    otherwise ``(False, <reason>)``.
    """
    exit_code = 0
    chunks = []
    for rc, chunk in run_with_realtime_print([vender, "version", "--help"]):
        exit_code = rc
        chunks.append(chunk)
    if exit_code != 0:
        return False, f"{vender} is required"
    # an engine too old to support json output will not mention it here
    if "json" not in "".join(chunks).lower():
        return False, f"Upgrade the {vender} to support json format"
    # `ps` only succeeds when the engine is running and reachable
    exit_code = 0
    for rc, _ in run_with_realtime_print([vender, "ps"]):
        exit_code = rc
    if exit_code != 0:
        return False, f"{vender} is not running"
    return True, "ok"


def check_the_env(live, segments, need_container=True, use_podman=False):
# check the python version
spinner = Spinner("dots", style="status.spinner", speed=1.0, text="")
step = "Check the environment"
Expand All @@ -134,32 +155,14 @@ def check_the_env(live, segments, need_docker=True):
segments.append(("❌", "Check the environment", "Python3.10 is required"))
refresh(live, segments)
return False, "Python3.10 is required"
if need_docker:
command = ["docker", "version", "--help"]
all_output = ""
result_code = 0
for code, output in run_with_realtime_print(command):
result_code = code
all_output += output
if result_code != 0:
old_segment = segments.pop()
segments.append(("❌", "Check the environment", "Docker is required"))
refresh(live, segments)
return False, "Docker is required"
if all_output.find("json") < 0:
old_segment = segments.pop()
segments.append(("❌", "Check the environment", "Please upgrade the docker"))
refresh(live, segments)
return False, "Upgrade the docker"
command = ["docker", "ps"]
result_code = 0
for code, _ in run_with_realtime_print(command):
result_code = code
if result_code != 0:
if need_container:
vender = "docker" if not use_podman else "podman"
result, msg = check_container_vender(vender)
if not result:
old_segment = segments.pop()
segments.append(("❌", "Check the environment", "Docker is not running"))
segments.append(("❌", "Check the environment", msg))
refresh(live, segments)
return False, "Docker is not running"
return False, msg
old_segment = segments.pop()
segments.append(("✅", "Check the environment", ""))
refresh(live, segments)
Expand Down Expand Up @@ -225,21 +228,27 @@ def download_model(
return result_code


def load_docker_image(version, image_name, live, segments, chunk_size=1024):
def load_docker_image(
version, image_name, live, segments, chunk_size=1024, use_podman=False
):
"""
download the image file and load it into docker
"""
full_name = f"{image_name}:{version}"
full_name = (
f"{image_name}:{version}"
if not use_podman
else f"docker.io/{image_name}:{version}"
)
spinner = Spinner("dots", style="status.spinner", speed=1.0, text="")
step = "Pull octogen image"
segments.append((spinner, step, ""))
refresh(live, segments)
return_code = 0
output = ""
for code, msg in run_with_realtime_print(command=["docker", "pull", full_name]):
vender = "docker" if not use_podman else "podman"
for code, msg in run_with_realtime_print(command=[vender, "pull", full_name]):
return_code = code
output += msg

old_segment = segments.pop()
if return_code == 0:
segments.append(("✅", old_segment[1], full_name))
Expand All @@ -250,7 +259,7 @@ def load_docker_image(version, image_name, live, segments, chunk_size=1024):


def choose_api_service(console):
mk = """Choose your favourite LLM
mk = """Choose your favourite Large Language Model
1. OpenAI, Kernel, Agent and Cli will be installed
2. Azure OpenAI, Kernel, Agent and Cli will be installed
3. Codellama, Llama.cpp Model Server, Kernel, Agent and Cli will be installed
Expand Down Expand Up @@ -352,22 +361,30 @@ def generate_kernel_env(live, segments, install_dir, rpc_key):
refresh(live, segments)


def stop_service(name):
command = ["docker", "ps", "-f", f"name={name}", "--format", "json"]
def stop_service(name, use_podman=False):
vender = "docker" if not use_podman else "podman"
command = [vender, "ps", "-f", f"name={name}", "--format", "json"]
output = ""
for _, chunk in run_with_realtime_print(command=command):
output += chunk
pass
if output:
if use_podman and output:
rows = json.loads(output.strip())
for row in rows:
id = row["Id"]
command = [vender, "kill", id]
for _, chunk in run_with_realtime_print(command=command):
pass
elif output:
for line in output.split(os.linesep):
if not line:
break
row = json.loads(line.strip())
id = row["ID"]
command = ["docker", "kill", id]
command = [vender, "kill", id]
for _, chunk in run_with_realtime_print(command=command):
pass
command = ["docker", "container", "rm", name]
command = [vender, "container", "rm", name]
for _, chunk in run_with_realtime_print(command=command):
pass

Expand All @@ -380,17 +397,19 @@ def start_service(
version,
is_codellama="1",
model_filename="",
use_podman=False,
):
spinner = Spinner("dots", style="status.spinner", speed=1.0, text="")
step = "Start octogen service"
output = ""
vender = "docker" if not use_podman else "podman"
segments.append((spinner, step, ""))
refresh(live, segments)
stop_service("octogen")
stop_service("octogen", use_podman=use_podman)
# TODO stop the exist service
full_name = f"{image_name}:{version}"
command = [
"docker",
vender,
"run",
"--name",
"octogen",
Expand Down Expand Up @@ -441,6 +460,7 @@ def start_octogen_for_openai(
version,
api_key,
model,
use_podman=False,
):
generate_agent_openai(live, segments, install_dir, admin_key, api_key, model)
if (
Expand All @@ -451,6 +471,7 @@ def start_octogen_for_openai(
image_name,
version,
is_codellama="0",
use_podman=use_podman,
)
== 0
):
Expand All @@ -476,6 +497,7 @@ def start_octogen_for_azure_openai(
api_key,
model,
api_base,
use_podman=False,
):
generate_agent_azure_openai(
live, segments, install_dir, admin_key, api_key, model, api_base
Expand Down Expand Up @@ -513,6 +535,7 @@ def start_octogen_for_codellama(
image_name,
version,
socks_proxy="",
use_podman=False,
):
"""
start the octogen service for codellama
Expand Down Expand Up @@ -567,6 +590,11 @@ def start_octogen_for_codellama(
default="codellama-7b-instruct.Q5_K_S.gguf",
help="the model filename in model repo",
)
@click.option(
"--use_podman",
is_flag=True,
help="use podman as the container engine",
)
def init_octogen(
image_name,
repo_name,
Expand All @@ -576,6 +604,7 @@ def init_octogen(
socks_proxy,
codellama_repo,
model_filename,
use_podman,
):
if cli_dir.find("~") == 0:
real_cli_dir = cli_dir.replace("~", os.path.expanduser("~"))
Expand All @@ -593,7 +622,7 @@ def init_octogen(
with Live(Group(*segments), console=console) as live:
run_install_cli(live, segments)
if choice == "4":
check_result, _ = check_the_env(live, segments, need_docker=False)
check_result, _ = check_the_env(live, segments, need_container=False)
if not check_result:
segments.append(("❌", "Setup octogen failed", ""))
refresh(live, segments)
Expand All @@ -602,7 +631,9 @@ def init_octogen(
segments.append(("👍", "Setup octogen done", ""))
refresh(live, segments)
return
check_result, _ = check_the_env(live, segments, need_docker=True)
check_result, _ = check_the_env(
live, segments, need_container=True, use_podman=use_podman
)
if not check_result:
segments.append(("❌", "Setup octogen failed", ""))
refresh(live, segments)
Expand All @@ -612,7 +643,9 @@ def init_octogen(
else:
version = get_latest_release_version(repo_name, live, segments)

code = load_docker_image(version, image_name, live, segments)
code = load_docker_image(
version, image_name, live, segments, use_podman=use_podman
)
if code != 0:
return
kernel_key = random_str(32)
Expand All @@ -632,6 +665,7 @@ def init_octogen(
image_name,
version,
socks_proxy,
use_podman=use_podman,
)
elif choice == "2":
# start azure openai
Expand All @@ -647,6 +681,7 @@ def init_octogen(
key,
model,
api_base,
use_podman=use_podman,
)
else:
# start for openai
Expand All @@ -661,4 +696,5 @@ def init_octogen(
version,
key,
model,
use_podman=use_podman,
)
2 changes: 1 addition & 1 deletion up/tests/up_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ def test_check_the_env_win():
console = Console()
segments = []
with Live(Group(*segments), console=console) as live:
result, msg = check_the_env(live, segments, need_docker=False)
result, msg = check_the_env(live, segments, need_container=False)
assert result


Expand Down

0 comments on commit 06fb7a5

Please sign in to comment.