Merge branch 'master' into swiftyos/secrt-871-monitor-page-crashing-on-due-to-number-of-requests
Swiftyos authored Sep 23, 2024
2 parents 5c811a7 + fc51176 commit c388385
Showing 14 changed files with 424 additions and 390 deletions.
40 changes: 40 additions & 0 deletions .dockerignore
@@ -0,0 +1,40 @@
# Ignore everything by default, selectively add things to context
classic/run

# AutoGPT
!classic/original_autogpt/autogpt/
!classic/original_autogpt/pyproject.toml
!classic/original_autogpt/poetry.lock
!classic/original_autogpt/README.md
!classic/original_autogpt/tests/

# Benchmark
!classic/benchmark/agbenchmark/
!classic/benchmark/pyproject.toml
!classic/benchmark/poetry.lock
!classic/benchmark/README.md

# Forge
!classic/forge/
!classic/forge/pyproject.toml
!classic/forge/poetry.lock
!classic/forge/README.md

# Frontend
!classic/frontend/build/web/

# Platform
!autogpt_platform/

# Explicitly re-ignore some folders
.*
**/__pycache__

autogpt_platform/frontend/.next/
autogpt_platform/frontend/node_modules
autogpt_platform/frontend/.env.example
autogpt_platform/frontend/.env.local
autogpt_platform/backend/.env
autogpt_platform/backend/.venv/

autogpt_platform/market/.env
6 changes: 3 additions & 3 deletions autogpt_platform/README.md
@@ -27,7 +27,7 @@ To run the AutoGPT Platform, follow these steps:
6. Run the following command:

```
-docker compose -f docker-compose.combined.yml up -d
+docker compose up -d
```

@@ -46,8 +46,8 @@ To run the AutoGPT Platform, follow these steps:

Here are some useful Docker Compose commands for managing your AutoGPT Platform:

-- `docker compose -f docker-compose.combined.yml up -d`: Start the services in detached mode.
-- `docker compose -f docker-compose.combined.yml stop`: Stop the running services without removing them.
+- `docker compose up -d`: Start the services in detached mode.
+- `docker compose stop`: Stop the running services without removing them.
- `docker compose rm`: Remove stopped service containers.
- `docker compose build`: Build or rebuild services.
- `docker compose down`: Stop and remove containers, networks, and volumes.
2 changes: 1 addition & 1 deletion autogpt_platform/backend/backend/blocks/llm.py
@@ -320,7 +320,7 @@ def llm_call(input_data: AIStructuredResponseGeneratorBlock.Input) -> str:
if output_name == "response":
return output_data["response"]
else:
-raise output_data
+raise RuntimeError(output_data)
raise ValueError("Failed to get a response from the LLM.")

def run(self, input_data: Input) -> BlockOutput:
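For context on the one-line fix above: in Python, `raise` only accepts exception instances or classes, so `raise output_data` fails with `TypeError: exceptions must derive from BaseException` whenever `output_data` is a plain string or dict, hiding the real error payload. Wrapping it in `RuntimeError` keeps the payload visible. A minimal sketch of the difference (illustrative only, not the block's actual code; `handle_output` is a made-up helper):

```python
# Illustrative sketch only -- handle_output is a hypothetical stand-in for the
# block's output handling, showing why the payload must be wrapped.
def handle_output(output_name: str, output_data):
    if output_name == "response":
        return output_data
    # `raise output_data` would fail here with
    # "TypeError: exceptions must derive from BaseException"
    # because output_data is a plain value, not an exception.
    raise RuntimeError(output_data)

try:
    handle_output("error", {"message": "model refused the request"})
except RuntimeError as exc:
    print(f"LLM call failed: {exc}")  # payload survives as the exception message
```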
2 changes: 1 addition & 1 deletion autogpt_platform/backend/backend/blocks/time_blocks.py
@@ -23,7 +23,7 @@ def __init__(self):
{"trigger": "Hello", "format": "{time}"},
],
test_output=[
("time", time.strftime("%H:%M:%S")),
("time", lambda _: time.strftime("%H:%M:%S")),
],
)

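The change above swaps a literal expected value for a callable. A literal `time.strftime(...)` is evaluated once when the test table is built, so the expected timestamp can be stale by the time the block actually runs; deferring the call keeps it current. A rough sketch of the idea, assuming the test harness evaluates callable expectations at comparison time (the exact harness contract is not shown in this diff):

```python
# Rough sketch, not the platform's test harness. Assumes callables in the
# expected-output table are evaluated when the output is checked.
import time

def check(expected, actual) -> bool:
    if callable(expected):
        # Deferred: the expectation is computed at comparison time.
        return actual == expected(actual)
    # Literal: frozen when the test table was built.
    return actual == expected

stale = time.strftime("%H:%M:%S")                   # evaluated now, may drift
fresh = lambda _actual: time.strftime("%H:%M:%S")   # evaluated at check time

print(check(fresh, time.strftime("%H:%M:%S")))  # True (barring a second rollover)
```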
12 changes: 6 additions & 6 deletions autogpt_platform/backend/backend/data/execution.py
@@ -396,19 +396,19 @@ def merge_execution_input(data: BlockInput) -> BlockInput:

# Merge all input with <input_name>_$_<index> into a single list.
items = list(data.items())
-list_input: list[Any] = []

for key, value in items:
if LIST_SPLIT not in key:
continue
name, index = key.split(LIST_SPLIT)
if not index.isdigit():
-list_input.append((name, value, 0))
-else:
-list_input.append((name, value, int(index)))
+raise ValueError(f"Invalid key: {key}, #{index} index must be an integer.")

-for name, value, _ in sorted(list_input, key=lambda x: x[2]):
data[name] = data.get(name, [])
-data[name].append(value)
+if int(index) >= len(data[name]):
+    # Pad list with empty string on missing indices.
+    data[name].extend([""] * (int(index) - len(data[name]) + 1))
+data[name][int(index)] = value

# Merge all input with <input_name>_#_<index> into a single dict.
for key, value in items:
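For readers skimming the hunk above: inputs named `<input_name>_$_<index>` are now merged directly into a list at `data[<input_name>]`, non-numeric indices raise instead of being silently coerced to 0, and gaps are padded with empty strings so out-of-order indices land in the right slot. A standalone sketch of that behavior (assuming `LIST_SPLIT` is the `_$_` separator named in the surrounding comments):

```python
# Standalone sketch of the list-merge behavior shown in this hunk.
# Assumption: LIST_SPLIT is the "_$_" separator named in the comments.
from typing import Any

LIST_SPLIT = "_$_"

def merge_list_input(data: dict[str, Any]) -> dict[str, Any]:
    for key, value in list(data.items()):
        if LIST_SPLIT not in key:
            continue
        name, index = key.split(LIST_SPLIT)
        if not index.isdigit():
            raise ValueError(f"Invalid key: {key}, #{index} index must be an integer.")
        data[name] = data.get(name, [])
        if int(index) >= len(data[name]):
            # Pad with empty strings so sparse or out-of-order indices still
            # land in the right slot.
            data[name].extend([""] * (int(index) - len(data[name]) + 1))
        data[name][int(index)] = value
    return data

merged = merge_list_input({"values_$_2": "c", "values_$_0": "a"})
print(merged["values"])  # ['a', '', 'c']
```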
8 changes: 8 additions & 0 deletions autogpt_platform/backend/backend/executor/manager.py
@@ -69,20 +69,28 @@ def __init__(
self.prefix = f"[ExecutionManager|uid:{user_id}|gid:{graph_id}|nid:{node_id}]|geid:{graph_eid}|nid:{node_eid}|{block_name}]"

def info(self, msg: str, **extra):
+msg = self._wrap(msg, **extra)
logger.info(msg, extra={"json_fields": {**self.metadata, **extra}})

def warning(self, msg: str, **extra):
+msg = self._wrap(msg, **extra)
logger.warning(msg, extra={"json_fields": {**self.metadata, **extra}})

def error(self, msg: str, **extra):
+msg = self._wrap(msg, **extra)
logger.error(msg, extra={"json_fields": {**self.metadata, **extra}})

def debug(self, msg: str, **extra):
+msg = self._wrap(msg, **extra)
logger.debug(msg, extra={"json_fields": {**self.metadata, **extra}})

def exception(self, msg: str, **extra):
+msg = self._wrap(msg, **extra)
logger.exception(msg, extra={"json_fields": {**self.metadata, **extra}})

+def _wrap(self, msg: str, **extra):
+    return f"{self.prefix} {msg} {extra}"


T = TypeVar("T")
ExecutionStream = Generator[NodeExecution, None, None]
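The additions above route every log level through `_wrap`, so the plain-text line carries the execution-context prefix plus the extra key/values, while the same metadata is also attached as structured fields under `extra={"json_fields": ...}` for JSON-oriented log handlers. A simplified sketch of the pattern (the two-channel idea and the `json_fields` key come from the diff; the constructor here is a stand-in, not the real class):

```python
# Simplified sketch of the wrapping pattern; the real logger builds its prefix
# and metadata from user, graph, and node execution IDs.
import logging

logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)

class ExecutionLogger:
    def __init__(self, **metadata):
        self.metadata = metadata
        self.prefix = "[ExecutionManager|" + "|".join(f"{k}:{v}" for k, v in metadata.items()) + "]"

    def _wrap(self, msg: str, **extra) -> str:
        # Human-readable line: context prefix + message + extra key/values.
        return f"{self.prefix} {msg} {extra}"

    def info(self, msg: str, **extra):
        # Structured handlers can read the same data from extra["json_fields"].
        logger.info(self._wrap(msg, **extra), extra={"json_fields": {**self.metadata, **extra}})

log = ExecutionLogger(uid="user-1", gid="graph-2", nid="node-3")
log.info("node executed", duration_ms=42)
# -> [ExecutionManager|uid:user-1|gid:graph-2|nid:node-3] node executed {'duration_ms': 42}
```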
147 changes: 0 additions & 147 deletions autogpt_platform/docker-compose.combined.yml

This file was deleted.
