From caeba80bce6fe1ce725d5f52ea80804078105f72 Mon Sep 17 00:00:00 2001 From: TincoNomad Date: Mon, 4 Nov 2024 16:24:41 -0500 Subject: [PATCH 1/4] =?UTF-8?q?=F0=9F=93=9Ddocs:=20sync=20with=20examples?= =?UTF-8?q?=20files,=20small=20updates?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/examples/advanced.md | 103 +++++++++++++++++++------- docs/examples/async.md | 98 +++++++++++++++--------- docs/examples/basic.md | 35 ++++++--- docs/examples/files.md | 102 +++++++++++++++---------- examples/big_upload_from_url.py | 30 +++++--- examples/docker_parallel_execution.py | 69 +++++++++++------ examples/plot_dataset.py | 11 +-- examples/stream_chunk_timing.py | 77 ++++++++++++++++--- mkdocs.yml | 11 ++- 9 files changed, 368 insertions(+), 168 deletions(-) diff --git a/docs/examples/advanced.md b/docs/examples/advanced.md index 3efe8c0..76622ea 100644 --- a/docs/examples/advanced.md +++ b/docs/examples/advanced.md @@ -1,46 +1,80 @@ # Advanced Examples +For detailed information about security and architecture, see: + +- [Why Sandboxing is Important](../concepts/architecture.md#why-is-sandboxing-important) +- [Implementation Comparison](../concepts/implementations.md#implementation-comparison) + ## Custom Kernel Usage ```python from codeboxapi import CodeBox codebox = CodeBox() - # Execute bash commands - result = codebox.exec("ls -la", kernel="bash") - print(result.text) - # Install system packages with timeout - result = codebox.exec( - "apt-get update && apt-get install -y ffmpeg", - kernel="bash", - timeout=300 - ) - print(result.text) + +# Execute bash commands +result = codebox.exec("ls -la", kernel="bash") +print(result.text) + +# Create and run Python scripts via bash +result = codebox.exec("echo \"print('Running from file')\" > script.py", kernel="bash") +result = codebox.exec("python script.py", kernel="bash") ``` ## File Streaming with Chunks ```python from codeboxapi import CodeBox -codebox = CodeBox() -# Upload large file with streaming -with open("large_file.dat", "rb") as f: - file = codebox.upload("remote_file.dat", f) -print(f"Uploaded file size: {file.get_size()} bytes") -# Download with streaming and custom chunk size -for chunk in codebox.stream_download("remote_file.dat"): - process_chunk(chunk) +from codeboxapi import ExecChunk +import time + +def sync_stream_exec(cb: CodeBox) -> None: + chunks: list[tuple[ExecChunk, float]] = [] + t0 = time.perf_counter() + for chunk in cb.stream_exec( + "import time;\nfor i in range(3): time.sleep(1); print(i)" + ): + chunks.append((chunk, time.perf_counter() - t0)) + print(f"{chunks[-1][1]:.5f}: {chunk}") ``` +Reference: `stream_chunk_timing.py` lines 41-50 -## Docker Implementation +## Docker Parallel Processing +> Note: Docker must be installed and running on your system to use these features. 
+> Requirements: +> - Docker must be installed and running (start Docker Desktop or docker daemon) +> - Port 8069 must be available +> - User must have permissions to run Docker commands ```python +import asyncio from codeboxapi import CodeBox -# Using DockerBox with custom port and image -codebox = CodeBox(api_key="docker") -# Execute code in container -result = codebox.exec("import sys; print(sys.version)") -print(result.text) + +async def train_model(codebox: CodeBox, data_split: int) -> dict: + # Install required packages + await codebox.ainstall("pandas") + await codebox.ainstall("scikit-learn") + + # Execute training code + result = await codebox.aexec(f""" + import pandas as pd + from sklearn.model_selection import train_test_split + from sklearn.linear_model import LinearRegression + + # Training code with split {data_split} + X_train, X_test = train_test_split(X, y, random_state={data_split}) + """) + return {"split": data_split, "output": result.text, "errors": result.errors} + +# Run multiple instances in parallel +codeboxes = [CodeBox(api_key="docker") for _ in range(4)] +tasks = [train_model(codebox, i) for i, codebox in enumerate(codeboxes)] +results = await asyncio.gather(*tasks) ``` +Reference: `docker_parallel_execution.py` lines 17-62 + +For more details on Docker implementation, see: +- [Docker Implementation](../concepts/implementations.md#dockerbox) +- [Data Structures](../concepts/data_structures.md) ## Error Handling @@ -48,5 +82,24 @@ print(result.text) from codeboxapi import CodeBox codebox = CodeBox() -codebox.exec("import non_existent_package") + +# Handle execution errors +result = codebox.exec("import non_existent_package") +if result.errors: + print("Error occurred:", result.errors[0]) + +# Handle runtime errors with try/except +result = codebox.exec(""" +try: + 1/0 +except Exception as e: + print(f"Error: {str(e)}") +""") +print(result.text) ``` +Reference: `getting_started.py` lines 79-81 + +For more advanced usage patterns, see: + +- [Components Overview](../concepts/components.md) +- [API Types Reference](../api/types.md) diff --git a/docs/examples/async.md b/docs/examples/async.md index b4bd45f..2221bca 100644 --- a/docs/examples/async.md +++ b/docs/examples/async.md @@ -1,57 +1,83 @@ # Async CodeBox API -## Parallel Execution +For detailed information about async operations, see: -Run multiple CodeBoxes in parallel: +- [Core Methods](../api/codebox.md#core-methods) +- [Data Structures](../concepts/data_structures.md) +## Basic Async Operations ```python -import asyncio from codeboxapi import CodeBox -async def process_codebox(id: int): +async def async_examples(): codebox = CodeBox() - # Execute code - result = await codebox.aexec(f"print('CodeBox {id}')") - print(result.text) - - # Install package - await codebox.ainstall("pandas") - # Run computation - result = await codebox.aexec( - "import pandas as pd; print(pd.__version__)" - ) - return result.text + # Async Code Execution + result = await codebox.aexec("print('Async Hello!')") + print(result.text) -async def main(): - # Run 5 CodeBoxes in parallel - results = await asyncio.gather( - *[process_codebox(i) for i in range(5)] - ) - print(f"Results: {results}") + # Async File Operations + await codebox.aupload("async_file.txt", b"Async content") + downloaded = await codebox.adownload("async_file.txt") + print("File content:", downloaded.get_content()) + + # Async Package Installation + await codebox.ainstall("requests") +``` +Reference: `async_example.py` lines 6-18 -asyncio.run(main()) +## Async 
Streaming +```python +async def async_stream_exec(cb: CodeBox) -> None: + chunks: list[tuple[ExecChunk, float]] = [] + t0 = time.perf_counter() + async for chunk in cb.astream_exec( + "import time;\nfor i in range(3): time.sleep(1); print(i)" + ): + chunks.append((chunk, time.perf_counter() - t0)) + print(f"{chunks[-1][1]:.5f}: {chunk}") ``` +Reference: `stream_chunk_timing.py` lines 53-62 -## Async File Operations with Progress +## Docker Parallel Processing +> Note: Docker must be installed and running on your system to use these features. +> Requirements: +> - Docker must be installed and running (start Docker Desktop or docker daemon) +> - Port 8069 must be available +> - User must have permissions to run Docker commands ```python import asyncio -from tqdm import tqdm from codeboxapi import CodeBox -async def upload_with_progress(codebox, filename: str): - total_size = os.path.getsize(filename) - with tqdm(total=total_size, desc="Uploading") as pbar: - async with aiofiles.open(filename, "rb") as f: - file = await codebox.aupload(filename, f) - pbar.update(total_size) - return file +async def train_model(codebox: CodeBox, data_split: int) -> dict: + # Install required packages + await codebox.ainstall("pandas") + await codebox.ainstall("scikit-learn") + + # Execute training code + result = await codebox.aexec(f""" + import pandas as pd + from sklearn.model_selection import train_test_split + from sklearn.linear_model import LinearRegression + + # Training code with split {data_split} + X_train, X_test = train_test_split(X, y, random_state={data_split}) + """) + return {"split": data_split, "output": result.text, "errors": result.errors} async def main(): - codebox = CodeBox() - file = await upload_with_progress(codebox, "large_file.dat") - print(f"Uploaded: {file.path}, Size: {await file.aget_size()}") - -asyncio.run(main()) + # Create multiple Docker instances + num_parallel = 4 + codeboxes = [CodeBox(api_key="docker") for _ in range(num_parallel)] + + # Create and execute tasks + tasks = [train_model(codebox, i) for i, codebox in enumerate(codeboxes)] + results = await asyncio.gather(*tasks) ``` +Reference: `docker_parallel_execution.py` lines 17-80 + +For more details on async implementations, see: + +- [Implementation Overview](../concepts/implementations.md) +- [API Reference](../api/codebox.md#core-methods) \ No newline at end of file diff --git a/docs/examples/basic.md b/docs/examples/basic.md index 8861f89..70c8e67 100644 --- a/docs/examples/basic.md +++ b/docs/examples/basic.md @@ -1,22 +1,39 @@ # Basic Usage -Run code in a new CodeBox: +For detailed information about CodeBox concepts and architecture, see: +- [What is CodeBox?](../index.md#what-is-codebox) +- [Core Components](../concepts/architecture.md#core-components) + +## Simple Execution: ```python from codeboxapi import CodeBox - codebox = CodeBox() -codebox.exec("print('Hello World!')") -``` - -Run async code: +# Basic execution +result = codebox.exec("print('Hello World!')") +print(result.text) +# Error handling +result = codebox.exec("1/0") +if result.errors: + print("Error:", result.errors[0]) +``` +## Async Execution ```python from codeboxapi import CodeBox - -codebox = CodeBox() +import asyncio async def main(): - await codebox.aexec("print('Hello World!')") + codebox = CodeBox() + result = await codebox.aexec("print('Hello World!')") + print(result.text) + +if __name__ == "__main__": + asyncio.run(main()) ``` + +For more details on configuration and setup, see: + +- [Quick Start Guide](../quickstart.md) +- [API 
Reference](../api/codebox.md) \ No newline at end of file diff --git a/docs/examples/files.md b/docs/examples/files.md index 63059de..f9bf407 100644 --- a/docs/examples/files.md +++ b/docs/examples/files.md @@ -1,57 +1,79 @@ # File Operations Examples -## Basic File Operations +For detailed information about file operations, see: + +- [RemoteFile Class](../api/types.md#remotefile-class) +- [File Operations Guide](../guides/files.md) +## Basic File Operations ```python from codeboxapi import CodeBox codebox = CodeBox() # Upload text file -file = codebox.upload("data.txt", "Hello World!") -print(f"File size: {file.get_size()} bytes") - -# Upload binary data -binary_data = b"Binary content" -file = codebox.upload("data.bin", binary_data) - -# List all files -for file in codebox.list_files(): - print(f"- {file.path}: {file.get_size()} bytes") - -# Download and save locally -remote_file = codebox.download("data.txt") -remote_file.save("local_data.txt") +codebox.upload("example.txt", b"Hello from CodeBox!") + +# Download a file +downloaded = codebox.download("example.txt") +content = downloaded.get_content() +print("Content:", content) + +# List files +files = codebox.list_files() +print("\nFiles:", "\n".join(f.__repr__() for f in files)) ``` +Reference: `getting_started.py` lines 13-24 + +## URL Downloads +```python +from codeboxapi import CodeBox -## Streaming Operations +def url_upload(codebox: CodeBox, url: str) -> None: + codebox.exec(""" +import requests +import os +def download_file_from_url(url: str) -> None: + response = requests.get(url, stream=True) + response.raise_for_status() + file_name = url.split('/')[-1] + file_path = './' + file_name + with open(file_path, 'wb') as file: + for chunk in response.iter_content(chunk_size=8192): + if chunk: + file.write(chunk) + """) + codebox.exec(f"download_file_from_url('{url}')") +``` +Reference: `big_upload_from_url.py` lines 4-19 + +## File Conversions ```python from codeboxapi import CodeBox -import aiofiles -# Synchronous streaming codebox = CodeBox() -# Stream upload -with open("large_file.dat", "rb") as f: - codebox.upload("remote_file.dat", f.read()) - -# Stream download with progress -from tqdm import tqdm -total_size = file.get_size() -with tqdm(total=total_size) as pbar: - with open("downloaded.dat", "wb") as f: - for chunk in codebox.stream_download("remote_file.dat"): - f.write(chunk) - pbar.update(len(chunk)) - -# Asynchronous streaming -async def stream_example(): - codebox = CodeBox() - - async with aiofiles.open("large_file.dat", "rb") as f: - file = await codebox.aupload("remote_file.dat", f) - - async for chunk in codebox.astream_download("remote_file.dat"): - await process_chunk(chunk) + +# Upload dataset csv +csv_bytes = httpx.get( + "https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data" +).content +codebox.upload("iris.csv", csv_bytes) + +# Install required packages +codebox.install("pandas") +codebox.install("openpyxl") + +# Convert dataset csv to excel +output = codebox.exec( + "import pandas as pd\n\n" + "df = pd.read_csv('iris.csv', header=None)\n\n" + "df.to_excel('iris.xlsx', index=False)\n" +) ``` +Reference: `file_conversion.py` lines 7-23 + +For more details on file handling, see: + +- [Data Structures](../concepts/data_structures.md#remotefile) +- [API Methods](../api/codebox.md#file-operations) \ No newline at end of file diff --git a/examples/big_upload_from_url.py b/examples/big_upload_from_url.py index 750a1eb..16753a7 100644 --- a/examples/big_upload_from_url.py +++ 
b/examples/big_upload_from_url.py @@ -2,37 +2,47 @@ def url_upload(codebox: CodeBox, url: str) -> None: - codebox.exec( - """ + codebox.exec(""" import requests +import os def download_file_from_url(url: str) -> None: response = requests.get(url, stream=True) response.raise_for_status() file_name = url.split('/')[-1] - with open('./' + file_name, 'wb') as file: + file_path = './' + file_name + with open(file_path, 'wb') as file: for chunk in response.iter_content(chunk_size=8192): if chunk: file.write(chunk) - """ - ) - print(codebox.exec(f"download_file_from_url('{url}')")) + """) + codebox.exec(f"download_file_from_url('{url}')") codebox = CodeBox() +# First download url_upload( codebox, "https://codeboxapistorage.blob.core.windows.net/bucket/data-test.arrow", ) -print(codebox.list_files()) +# Second download url_upload( codebox, "https://codeboxapistorage.blob.core.windows.net/bucket/data-train.arrow", ) + +# List files in sandbox print(codebox.list_files()) -codebox.exec("import os") -print(codebox.exec("print(os.listdir())")) -print(codebox.exec("print([(f, os.path.getsize(f)) for f in os.listdir('.')])")) +# File verification with sizes +result = codebox.exec(""" +try: + import os + files = [(f, os.path.getsize(f)) for f in os.listdir('.')] + print(files) +except Exception as e: + print(f"Internal error: {str(e)}") +""") +print(result.text) diff --git a/examples/docker_parallel_execution.py b/examples/docker_parallel_execution.py index 9ed911f..21cec68 100644 --- a/examples/docker_parallel_execution.py +++ b/examples/docker_parallel_execution.py @@ -1,4 +1,13 @@ +""" +This script demonstrates parallel model training using multiple Docker instances. +Requirements: +- Docker must be running +- Port 8069 must be available +- Dataset 'advertising.csv' must exist in examples/assets/ +""" + import asyncio +import subprocess import time from pathlib import Path @@ -54,30 +63,44 @@ async def train_model(codebox: CodeBox, data_split: int) -> dict: async def main(): - # Create multiple Docker instances - num_parallel = 4 - codeboxes = [CodeBox(api_key="docker") for _ in range(num_parallel)] - - # Create tasks for different data splits - tasks = [] - for i, codebox in enumerate(codeboxes): - task = asyncio.create_task(train_model(codebox, i)) - tasks.append(task) - - # Execute and time the parallel processing - start_time = time.perf_counter() - results = await asyncio.gather(*tasks) - end_time = time.perf_counter() - - # Print results - print(f"\nParallel execution completed in {end_time - start_time:.2f} seconds\n") - for result in results: - if not result["errors"]: - print(f"Results for {result['split']}:") - print(result["output"]) - print("-" * 50) + try: + # Create multiple Docker instances + num_parallel = 4 + codeboxes = [CodeBox(api_key="docker") for _ in range(num_parallel)] + + # Create tasks for different data splits + tasks = [] + for i, codebox in enumerate(codeboxes): + task = asyncio.create_task(train_model(codebox, i)) + tasks.append(task) + + # Execute and time the parallel processing + start_time = time.perf_counter() + results = await asyncio.gather(*tasks) + end_time = time.perf_counter() + + # Print results + print( + f"\nParallel execution completed in {end_time - start_time:.2f} seconds\n" + ) + for result in results: + if not result["errors"]: + print(f"Results for {result['split']}:") + print(result["output"]) + print("-" * 50) + else: + print(f"Error in split {result['split']}:", result["errors"]) + + except subprocess.CalledProcessError as e: + if e.returncode == 
125: + print("\nError: Docker is not running. Please start Docker and try again.") + print( + "You can verify Docker status by running 'docker ps' in your terminal." + ) else: - print(f"Error in split {result['split']}:", result["errors"]) + print(f"\nError executing Docker command: {e}") + except Exception as e: + print(f"\nUnexpected error: {e}") if __name__ == "__main__": diff --git a/examples/plot_dataset.py b/examples/plot_dataset.py index 48f272b..eb38d9a 100644 --- a/examples/plot_dataset.py +++ b/examples/plot_dataset.py @@ -9,27 +9,24 @@ codebox = CodeBox(api_key="local") -# download the iris dataset +# download and upload iris dataset silently iris_csv_bytes = httpx.get( "https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data" ).content - -# upload the dataset to the codebox codebox.upload("iris.csv", iris_csv_bytes) -# dataset analysis code +# run the analysis file_path = Path("examples/assets/dataset_code.txt") - -# run the code output = codebox.exec(file_path) if output.images: img_bytes = base64.b64decode(output.images[0]) img_buffer = BytesIO(img_bytes) - + # Display the image img = Image.open(img_buffer) img.show() + print("Image displayed in a new window") elif output.errors: print("Error:", output.errors) diff --git a/examples/stream_chunk_timing.py b/examples/stream_chunk_timing.py index 7a08422..a0e4736 100644 --- a/examples/stream_chunk_timing.py +++ b/examples/stream_chunk_timing.py @@ -1,9 +1,43 @@ +""" +This example demonstrates the timing differences between sync and async execution +in different CodeBox modes. + +To test different modes, set CODEBOX_API_KEY in your environment to: +- 'local' for single instance local execution (default) +- 'docker' for local parallel execution (requires Docker setup) +- Your API key from https://codeboxapi.com/pricing for remote execution + +Requirements for different modes: +- Local: No additional setup needed +- Docker: + * Docker must be installed and running (start Docker Desktop or docker daemon) + * Port 8069 must be available + * User must have permissions to run Docker commands + * If you get error 125, check: + - Is Docker running? Start Docker Desktop or docker daemon + - Is port 8069 in use? Try stopping other services + - Do you have Docker permissions? Run 'docker ps' to verify +- Remote: Valid API key from https://codeboxapi.com + +Note: LocalBox (CODEBOX_API_KEY='local') only allows one instance at a time. +""" + import asyncio +import os +import subprocess import time from codeboxapi import CodeBox, ExecChunk +def check_docker_running() -> bool: + try: + subprocess.run(["docker", "ps"], capture_output=True, check=True) + return True + except (subprocess.CalledProcessError, FileNotFoundError): + return False + + def sync_stream_exec(cb: CodeBox) -> None: chunks: list[tuple[ExecChunk, float]] = [] t0 = time.perf_counter() @@ -28,17 +62,36 @@ async def async_stream_exec(cb: CodeBox) -> None: print(f"{t:.5f}: {chunk}") -print("remote") -cb = CodeBox() -sync_stream_exec(cb) -asyncio.run(async_stream_exec(cb)) +api_key = os.getenv("CODEBOX_API_KEY", "local") +display_key = "remote API key" if api_key.startswith("sk-") else f"'{api_key}'" +print(f"Running with CODEBOX_API_KEY={display_key}\n") + +if api_key == "docker" and not check_docker_running(): + print("Error: Docker is not running!") + print("Please:") + print("1. Start Docker Desktop (or docker daemon)") + print("2. Wait a few seconds for Docker to initialize") + print("3. Run 'docker ps' to verify Docker is running") + print("4. 
Try this example again") + exit(1) -print("local") -local = CodeBox(api_key="local") -sync_stream_exec(local) -asyncio.run(async_stream_exec(local)) +if api_key == "local": + # LocalBox only allows one instance + print("Testing single LocalBox instance:") + cb = CodeBox() + sync_stream_exec(cb) + asyncio.run(async_stream_exec(cb)) +else: + # Docker and Remote modes allow multiple instances + mode = "Remote" if api_key.startswith("sk-") else api_key.capitalize() + print(f"Testing multiple {mode} instances:\n") + + print("Instance 1:") + cb1 = CodeBox() + sync_stream_exec(cb1) + asyncio.run(async_stream_exec(cb1)) -print("docker") -docker = CodeBox(api_key="docker") -sync_stream_exec(docker) -asyncio.run(async_stream_exec(docker)) + print("\nInstance 2:") + cb2 = CodeBox() + sync_stream_exec(cb2) + asyncio.run(async_stream_exec(cb2)) diff --git a/mkdocs.yml b/mkdocs.yml index c153243..4190681 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -14,6 +14,7 @@ theme: - navigation.sections - navigation.expand - navigation.top + - navigation.footer - search.suggest - search.highlight - content.code.copy @@ -22,19 +23,17 @@ theme: nav: - Introduction: - Overview: index.md - - Quick Start: quickstart.md - - API Reference: - - Settings: api/settings.md - - CodeBox Class: api/codebox.md - - Exceptions: api/exceptions.md + - Quick Start: quickstart.md - Core Concepts: - Architecture: concepts/architecture.md - Base Components: concepts/components.md + - API Reference: + - CodeBox Class: api/codebox.md + - Types: api/types.md - User Guide: - Basic Usage: guides/basic.md - File Operations: guides/files.md - Package Management: guides/packages.md - - Examples: - Basic Examples: examples/basic.md - File Handling: examples/files.md From 61520d87d6d52394ae2ab80cd1d27f519a44ae2a Mon Sep 17 00:00:00 2001 From: TincoNomad Date: Wed, 13 Nov 2024 00:30:56 -0500 Subject: [PATCH 2/4] =?UTF-8?q?=F0=9F=A4=96task:=20docker=20tutorial=20fin?= =?UTF-8?q?ish?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/tutorial/advanced_features.md | 319 ++++++++++++++++++++++++++++ docs/tutorial/core_functionality.md | 186 ++++++++++++++++ docs/tutorial/error_handler.md | 155 ++++++++++++++ docs/tutorial/setup.md | 96 +++++++++ mkdocs.yml | 5 + 5 files changed, 761 insertions(+) create mode 100644 docs/tutorial/advanced_features.md create mode 100644 docs/tutorial/core_functionality.md create mode 100644 docs/tutorial/error_handler.md create mode 100644 docs/tutorial/setup.md diff --git a/docs/tutorial/advanced_features.md b/docs/tutorial/advanced_features.md new file mode 100644 index 0000000..6d8d08f --- /dev/null +++ b/docs/tutorial/advanced_features.md @@ -0,0 +1,319 @@ +# Part 4: Advanced Features and Visualization Enhancements + +Let's implement advanced features including parallel processing for multiple stocks and enhanced visualizations. + +> [!NOTE] +> For this part we are going to use Docker, so `api_key` on `main.py` and `parallel_analyzer.py` must be **"docker"** + +> [!WARNING] +> Make sure Docker is running before executing the code. 
You can verify this with `docker info`
+
+### First, let's create `src/parallel_analyzer.py`:
+
+```python
+from codeboxapi import CodeBox
+import asyncio
+from typing import List, Dict
+
+class AnalysisError(Exception):
+    def __init__(self, phase, message):
+        self.phase = phase
+        self.message = message
+        super().__init__(f"{phase}: {message}")
+
+class ParallelStockAnalyzer:
+    def __init__(self, num_workers=2):
+        print(f"✓ Starting {num_workers} workers")
+        try:
+            self.workers = [
+                CodeBox(
+                    api_key="docker",
+                    factory_id="shroominic/codebox:latest"
+                ) for _ in range(num_workers)
+            ]
+        except Exception as e:
+            raise AnalysisError("Initialization", str(e))
+
+    async def setup_workers(self):
+        max_retries = 3
+        retry_delay = 2
+
+        for attempt in range(max_retries):
+            try:
+                for i, worker in enumerate(self.workers):
+                    print(f"✓ Setting up worker {i}")
+                    await worker.ainstall("pandas")
+                    await asyncio.sleep(1)
+                    await worker.ainstall("yfinance")
+                    await asyncio.sleep(1)
+                    await worker.ainstall("ta")
+                break
+            except Exception as e:
+                if attempt == max_retries - 1:
+                    raise AnalysisError("Setup", f"Error: {str(e)}")
+                await asyncio.sleep(retry_delay)
+
+        setup_code = """
+import yfinance as yf
+import pandas as pd
+import numpy as np
+import ta
+
+def analyze_stock(symbol: str, period: str) -> dict:
+    stock = yf.Ticker(symbol)
+    data = stock.history(period=period)
+
+    # Calculate technical indicators
+    rsi = ta.momentum.RSIIndicator(data['Close']).rsi()
+    macd = ta.trend.MACD(data['Close'])
+    bb = ta.volatility.BollingerBands(data['Close'])
+
+    result = {
+        'symbol': symbol,
+        'last_price': float(data['Close'].iloc[-1]),
+        'volume': int(data['Volume'].iloc[-1]),
+        'rsi': float(rsi.iloc[-1]),
+        'macd': float(macd.macd().iloc[-1]),
+        'bb_upper': float(bb.bollinger_hband().iloc[-1]),
+        'bb_lower': float(bb.bollinger_lband().iloc[-1])
+    }
+    return result
+"""
+        setup_tasks = [worker.aexec(setup_code) for worker in self.workers]
+        await asyncio.gather(*setup_tasks)
+
+    async def analyze_stocks(self, symbols: List[str]) -> List[Dict]:
+        print(f"\n=== Analyzing symbols: {symbols} ===")
+
+        tasks = []
+        for i, symbol in enumerate(symbols):
+            worker = self.workers[i % len(self.workers)]
+            worker_id = i % len(self.workers)
+            print(f"✓ Assigning {symbol} to worker {worker_id}")
+            code = f"""
+try:
+    result = analyze_stock('{symbol}', '1y')
+    print(f"STOCK_RESULT|{symbol}|{{result}}")
+    result
+except Exception as e:
+    print(f"❌ Error: {{str(e)}}")
+    raise
+"""
+            tasks.append(worker.aexec(code))
+
+        results = await asyncio.gather(*tasks, return_exceptions=True)
+        processed_results = []
+
+        for symbol, r in zip(symbols, results):
+            try:
+                if isinstance(r, Exception):
+                    print(f"❌ Error in {symbol}: {str(r)}")
+                    continue
+
+                if r and hasattr(r, 'text'):
+                    for line in r.text.split('\n'):
+                        if "STOCK_RESULT|" in line:
+                            try:
+                                _, symbol_from_result, result_str = line.split("|")
+                                result_dict = eval(result_str)
+                                if result_dict['symbol'] == symbol_from_result:
+                                    processed_results.append(result_dict)
+                                    print(f"✓ {symbol} processed")
+                                    break
+                            except Exception as e:
+                                print(f"❌ Error processing {symbol}: {str(e)}")
+            except Exception as e:
+                print(f"❌ Error with {symbol}: {str(e)}")
+
+        print(f"\n=== Results: {len(processed_results)}/{len(symbols)} ===")
+        return processed_results
+```
+
+This implementation follows the Docker parallel processing pattern shown in:
+
+- [Docker Parallel Processing](../examples/async.md#docker-parallel-processing)
+
+
+### Now let's enhance our visualization capabilities in `src/advanced_visualization.py`:
+
+```python
+from codeboxapi import CodeBox
+from typing import List, Dict
+import asyncio
+
+class DockerResourceError(Exception):
+    def __init__(self, message: str):
+        self.message = message
+        super().__init__(self.message)
+
+async def create_market_dashboard(codebox: CodeBox, analysis_results: List[Dict]):
+    print("\n=== Generating Dashboard ===")
+
+    try:
+        setup_code = """
+import pandas as pd
+import matplotlib.pyplot as plt
+plt.style.use('seaborn')
+"""
+        await codebox.aexec(setup_code)
+
+        plot_code = f"""
+df = pd.DataFrame({analysis_results})
+plt.figure(figsize=(10, 5))
+plt.bar(df['symbol'], df['last_price'])
+plt.title('Price by Symbol')
+plt.ylabel('Price')
+plt.savefig('market_dashboard.png')
+plt.close()
+"""
+        result = await codebox.aexec(plot_code)
+        print("✓ Dashboard generated: market_dashboard.png")
+        return result
+
+    except Exception as e:
+        print(f"❌ Error in visualization: {str(e)}")
+        raise
+```
+
+### Let's add a Docker resource error to `error_handlers.py`:
+
+```python
+from typing import Optional
+
+class StockAnalysisError(Exception):
+    """Base exception for stock analysis errors"""
+    pass
+
+class DataFetchError(StockAnalysisError):
+    def __init__(self, symbol: str, message: Optional[str] = None):
+        self.symbol = symbol
+        self.message = message or f"Failed to fetch data for symbol {symbol}"
+        super().__init__(self.message)
+
+class AnalysisError(StockAnalysisError):
+    def __init__(self, error_type: str, details: str):
+        self.error_type = error_type
+        self.details = details
+        super().__init__(f"{error_type}: {details}")
+
+class DockerResourceError(StockAnalysisError):
+    def __init__(self, message: Optional[str] = None):
+        self.message = message or "Docker is running out of resources"
+        super().__init__(self.message)
+```
+
+### Finally, let's update our main file to use all these features:
+
+```python
+from src.parallel_analyzer import ParallelStockAnalyzer
+from src.advanced_visualization import create_market_dashboard
+from codeboxapi import CodeBox
+from src.error_handlers import AnalysisError, DockerResourceError
+import asyncio
+
+async def setup_environment():
+    try:
+        analyzer = ParallelStockAnalyzer(num_workers=1)
+        viz_box = CodeBox(
+            api_key="docker",
+            factory_id="shroominic/codebox:latest"
+        )
+        print("✓ Environment initialized")
+        return analyzer, viz_box
+    except Exception as e:
+        print(f"❌ Error in configuration: {str(e)}")
+        raise
+
+async def install_dependencies(analyzer, viz_box):
+    try:
+        await analyzer.setup_workers()
+        await viz_box.ainstall("pandas")
+        await asyncio.sleep(1)
+        await viz_box.ainstall("matplotlib")
+        await asyncio.sleep(1)
+        await viz_box.ainstall("seaborn")
+        print("✓ Dependencies installed")
+    except Exception as e:
+        print(f"❌ Error in dependencies: {str(e)}")
+        raise
+
+async def main():
+    print("\n=== Starting analysis process ===")
+    analyzer = None
+    viz_box = None
+
+    try:
+        print("\n--- Initial configuration ---")
+        analyzer, viz_box = await setup_environment()
+        await install_dependencies(analyzer, viz_box)
+
+        symbols = ['AAPL', 'MSFT']
+        print(f"\n--- Analyzing symbols: {symbols} ---")
+
+        results = await analyzer.analyze_stocks(symbols)
+        print("\n=== Final results ===")
+        print(f"Number of results: {len(results)}")
+        print(f"Content: {results}")
+
+        if not results:
+            raise AnalysisError("Analysis", "No results obtained")
+
+        print("Creating dashboard...")
+        try:
+            await create_market_dashboard(viz_box, results)
+            print("Analysis completed successfully")
+        except DockerResourceError as e:
+            print(f"DOCKER RESOURCE ERROR: {str(e)}")
+            print("Suggestion: Try freeing up Docker resources or increasing Docker limits")
+            raise
+
+    except Exception as e:
+        print(f"Detailed error: {str(e)}")
+        raise
+    finally:
+        if analyzer:
+            for worker in analyzer.workers:
+                try:
+                    await worker.astop()
+                except Exception as e:
+                    print(f"Error closing worker: {str(e)}")
+
+if __name__ == "__main__":
+    asyncio.run(main())
+```
+
+This implementation leverages the async functionality shown in:
+
+- [Async Operations](../examples/async.md#basic-async-operations)
+
+And follows the plotting patterns demonstrated in:
+
+- [Plotting with Matplotlib](../examples/getting_started.md#plotting-with-matplotlib)
+
+### Running the Project
+
+After implementing all the components, ensure Docker is running and that you have the necessary permissions to create containers.
+
+You can either open Docker Desktop or start the Docker daemon manually, for example on Linux:
+```bash
+sudo systemctl start docker
+```
+
+Run the following command to check that Docker is running:
+
+```bash
+docker info
+```
+
+To run the project, use the following command:
+
+```bash
+# Run the project
+python -m src.main
+```
+
+This will execute the stock analysis with error handling and data persistence enabled.
\ No newline at end of file
diff --git a/docs/tutorial/core_functionality.md b/docs/tutorial/core_functionality.md
new file mode 100644
index 0000000..2d2fd79
--- /dev/null
+++ b/docs/tutorial/core_functionality.md
@@ -0,0 +1,186 @@
+# Part 2: Implementing Core Functionality
+
+Let's implement the core functionality for our stock market analysis tool. We'll create the main modules and implement basic features.
+
+> [!NOTE]
+> Make sure you've completed the setup steps from Part 1 before continuing.
+
+> [!TIP]
+> If you're starting from this section, check `setup.md` for the complete project structure and prerequisites.
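+
+In this part we will build three modules: the `StockAnalyzer` class in `src/main.py`, a comparison plotting helper in `src/visualization.py`, and standalone technical analysis functions in `src/analysis.py`.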
+ +### First, let's update our `src/main.py`: + +```python +from codeboxapi import CodeBox +import asyncio +from pathlib import Path + +class StockAnalyzer: + def __init__(self, data_dir: str = "data"): + self.codebox = CodeBox(api_key="local") + self.data_dir = Path(data_dir) + self.data_dir.mkdir(exist_ok=True) + + async def setup_environment(self): + # Install required packages + await self.codebox.ainstall("yfinance", "pandas", "numpy", "matplotlib", "ta") + + # Initialize the environment with helper functions + setup_code = """ + import yfinance as yf + import pandas as pd + import numpy as np + import matplotlib.pyplot as plt + import ta + + def fetch_stock_data(symbol, period='1y'): + stock = yf.Ticker(symbol) + data = stock.history(period=period) + return data + + def add_technical_indicators(df): + # Add RSI + df['RSI'] = ta.momentum.RSIIndicator(df['Close']).rsi() + # Add MACD + macd = ta.trend.MACD(df['Close']) + df['MACD'] = macd.macd() + df['MACD_Signal'] = macd.macd_signal() + # Add Bollinger Bands + bollinger = ta.volatility.BollingerBands(df['Close']) + df['BB_High'] = bollinger.bollinger_hband() + df['BB_Low'] = bollinger.bollinger_lband() + return df + """ + await self.codebox.aexec(setup_code) + + async def analyze_stock(self, symbol: str, period: str = '1y'): + analysis_code = f""" + # Fetch and process data + data = fetch_stock_data('{symbol}', period='{period}') + data = add_technical_indicators(data) + + # Create analysis plots + fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(15, 10)) + + # Price and Bollinger Bands + ax1.plot(data.index, data['Close'], label='Close Price') + ax1.plot(data.index, data['BB_High'], 'r--', label='BB Upper') + ax1.plot(data.index, data['BB_Low'], 'g--', label='BB Lower') + ax1.set_title(f'{symbol} Price and Bollinger Bands') + ax1.legend() + + # RSI and MACD + ax2.plot(data.index, data['RSI'], label='RSI') + ax2.plot(data.index, data['MACD'], label='MACD') + ax2.plot(data.index, data['MACD_Signal'], label='Signal') + ax2.axhline(y=70, color='r', linestyle='--') + ax2.axhline(y=30, color='g', linestyle='--') + ax2.set_title('Technical Indicators') + ax2.legend() + + plt.tight_layout() + plt.show() + + # Save data to CSV + data.to_csv('data/{symbol}_analysis.csv') + """ + result = await self.codebox.aexec(analysis_code) + return result + +async def main(): + analyzer = StockAnalyzer() + await analyzer.setup_environment() + await analyzer.analyze_stock('AAPL') + +if __name__ == "__main__": + asyncio.run(main()) +``` + +This implementation follows the patterns shown in the documentation: + +- [Simple Execution](../examples/basic.md#simple-execution) + + +And uses async functionality as shown in: + +- [Async Operations](../examples/async.md#basic-async-operations) + + +### Let's also create `src/visualization.py` for additional plotting functions: + +```python +from codeboxapi import CodeBox + +async def create_comparison_plot(codebox: CodeBox, symbols: list[str], period: str = '1y'): + plot_code = f""" + plt.figure(figsize=(15, 8)) + + for symbol in {symbols}: + data = fetch_stock_data(symbol, period='{period}') + # Normalize prices to percentage changes + normalized = data['Close'] / data['Close'].iloc[0] * 100 + plt.plot(data.index, normalized, label=symbol) + + plt.title('Stock Price Comparison (Normalized)') + plt.xlabel('Date') + plt.ylabel('Price (%)') + plt.legend() + plt.grid(True) + plt.show() + """ + return await codebox.aexec(plot_code) +``` + +This implementation leverages CodeBox's ability to handle matplotlib visualizations as 
shown in: + +- [Plotting with Matplotlib](../examples/getting_started.md#plotting-with-matplotlib) + + +### Let's create `src/analysis.py` for technical analysis functions: + +```python +from typing import Dict +import yfinance as yf +import pandas as pd +import ta + +def analyze_single_stock(symbol: str, period: str = '1y') -> Dict: + """Performs technical analysis on a single stock""" + stock = yf.Ticker(symbol) + data = stock.history(period=period) + + # Calculate technical indicators + rsi = ta.momentum.RSIIndicator(data['Close']).rsi() + macd = ta.trend.MACD(data['Close']) + bb = ta.volatility.BollingerBands(data['Close']) + + return { + 'symbol': symbol, + 'last_price': float(data['Close'].iloc[-1]), + 'volume': int(data['Volume'].iloc[-1]), + 'rsi': float(rsi.iloc[-1]), + 'macd': float(macd.macd().iloc[-1]), + 'bb_upper': float(bb.bollinger_hband().iloc[-1]), + 'bb_lower': float(bb.bollinger_lband().iloc[-1]) + } + +def calculate_technical_indicators(df: pd.DataFrame) -> pd.DataFrame: + """Add technical indicators to a dataframe""" + df = df.copy() + + # RSI + df['RSI'] = ta.momentum.RSIIndicator(df['Close']).rsi() + + # MACD + macd = ta.trend.MACD(df['Close']) + df['MACD'] = macd.macd() + df['MACD_Signal'] = macd.macd_signal() + + # Bollinger Bands + bollinger = ta.volatility.BollingerBands(df['Close']) + df['BB_High'] = bollinger.bollinger_hband() + df['BB_Low'] = bollinger.bollinger_lband() + + return df +``` + diff --git a/docs/tutorial/error_handler.md b/docs/tutorial/error_handler.md new file mode 100644 index 0000000..566b937 --- /dev/null +++ b/docs/tutorial/error_handler.md @@ -0,0 +1,155 @@ +# Part 3: Error Handling and Data Persistence + +Let's enhance our stock analysis tool with proper error handling and data persistence. + +### First, let's create `src/error_handlers.py` with custom exceptions: + +```python +from typing import Optional, List + +class StockAnalysisError(Exception): + """Base exception for stock analysis errors""" + pass + +class DataFetchError(StockAnalysisError): + def __init__(self, symbol: str, message: Optional[str] = None): + self.symbol = symbol + self.message = message or f"Failed to fetch data for symbol {symbol}" + super().__init__(self.message) + +class AnalysisError(StockAnalysisError): + def __init__(self, error_type: str, details: str): + self.error_type = error_type + self.details = details + super().__init__(f"{error_type}: {details}") +``` + +### Now, let's update our `src/main.py` with error handling and data persistence: + +```python +from codeboxapi import CodeBox +import asyncio +from pathlib import Path +import json +from datetime import datetime +from .error_handlers import DataFetchError, AnalysisError + +class StockAnalyzer: + def __init__(self, data_dir: str = "data"): + self.codebox = CodeBox(api_key="local") + self.data_dir = Path(data_dir) + self.data_dir.mkdir(exist_ok=True) + self.analysis_history_file = self.data_dir / "analysis_history.json" + + async def load_analysis_history(self) -> dict: + if self.analysis_history_file.exists(): + return json.loads(self.analysis_history_file.read_text()) + return {} + + async def save_analysis_history(self, symbol: str, analysis_data: dict): + history = await self.load_analysis_history() + history[symbol] = { + "timestamp": datetime.now().isoformat(), + "data": analysis_data + } + self.analysis_history_file.write_text(json.dumps(history, indent=2)) + + async def analyze_stock(self, symbol: str, period: str = '1y'): + try: + # Check if data exists and is recent + history = await 
self.load_analysis_history() + if symbol in history: + last_analysis = datetime.fromisoformat(history[symbol]["timestamp"]) + if (datetime.now() - last_analysis).days < 1: + print(f"Using cached analysis for {symbol}") + return history[symbol]["data"] + + # Verify symbol exists + verify_code = f""" + import yfinance as yf + try: + ticker = yf.Ticker('{symbol}') + info = ticker.info + if not info: + raise ValueError(f"Invalid symbol: {symbol}") + print("Symbol verified successfully") + except Exception as e: + print(f"Error: {str(e)}") + raise + """ + result = await self.codebox.aexec(verify_code) + if "Error" in result.text: + raise DataFetchError(symbol, result.text) + + # Perform analysis + analysis_code = f""" + try: + data = fetch_stock_data('{symbol}', period='{period}') + analysis_result = {{ + 'symbol': '{symbol}', + 'period': '{period}', + 'last_price': float(data['Close'].iloc[-1]), + 'volume': int(data['Volume'].iloc[-1]), + 'change_percent': float(data['Close'].pct_change().iloc[-1] * 100), + }} + print(json.dumps(analysis_result)) + except Exception as e: + print(f"Analysis Error: {str(e)}") + raise + """ + result = await self.codebox.aexec(analysis_code) + + if "Analysis Error" in result.text: + raise AnalysisError("Calculation", result.text) + + analysis_data = json.loads(result.text) + await self.save_analysis_history(symbol, analysis_data) + return analysis_data + + except Exception as e: + if isinstance(e, (DataFetchError, AnalysisError)): + raise + raise AnalysisError("Unknown", str(e)) + +async def main(): + analyzer = StockAnalyzer() + await analyzer.setup_environment() + try: + await analyzer.analyze_stock('AAPL') + except (DataFetchError, AnalysisError) as e: + print(f"Error during analysis: {str(e)}") + except Exception as e: + print(f"Unexpected error: {str(e)}") + +if __name__ == "__main__": + asyncio.run(main()) +``` + +This implementation follows error handling patterns shown in: + +- [Error Handling](../examples/advanced.md#error-handling) + + +And file operations patterns from: + +- [File Operations](../examples/getting_started.md#file-operations) + + +### Let's also create a utility function for data persistence in `src/utils.py`: + +```python +from pathlib import Path +import json +from typing import Dict, Any + +async def save_json_data(file_path: Path, data: Dict[str, Any]): + """Safely save JSON data with error handling""" + try: + temp_path = file_path.with_suffix('.tmp') + temp_path.write_text(json.dumps(data, indent=2)) + temp_path.replace(file_path) + except Exception as e: + if temp_path.exists(): + temp_path.unlink() + raise IOError(f"Failed to save data: {str(e)}") +``` \ No newline at end of file diff --git a/docs/tutorial/setup.md b/docs/tutorial/setup.md new file mode 100644 index 0000000..571b7b9 --- /dev/null +++ b/docs/tutorial/setup.md @@ -0,0 +1,96 @@ +# Building a Stock Market Analysis Tool with CodeBox API + +Let's create a medium-complexity project that analyzes stock market data using CodeBox API. We'll divide it into 4 parts. 
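+
+The four parts are: project setup, core functionality, error handling and data persistence, and advanced features with Docker-based parallel processing and enhanced visualizations.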
+ +## Part 1: Project Setup + +### Create project structure +First, let's create our project structure: + +```bash +mkdir stock-analysis-codebox +cd stock-analysis-codebox +python -m venv venv +``` +### Activate virtual environment +#### On Windows: +```bash +venv\Scripts\activate +``` +#### On Unix or MacOS: +```bash +source venv/bin/activate +``` + +### Create project structure +```bash +mkdir src +mkdir data +touch README.md +touch requirements.txt +touch src/__init__.py +touch src/main.py +touch src/analysis.py +touch src/visualization.py +``` + +### Initialize git repository +```bash +git init +``` + +### Create a `.gitignore` file: +```text +venv/ +__pycache__/ +.env +*.pyc +.codebox/ +data/*.csv +``` + +### Update `requirements.txt`: +```text +codeboxapi +jupyter-kernel-gateway +ipython +pandas +yfinance +``` + +### Prerequisites +Before running the project, ensure: +- Python 3.7+ is installed +- Docker is installed and running (required for parallel processing) + - Start Docker Desktop (Windows/Mac) or docker daemon (Linux) + - Verify Docker is running with: `docker ps` +- Port 8069 is available +- User has permissions to run Docker commands + +### Install dependencies: +```bash +pip install -r requirements.txt +``` + +### Create a basic `README.md`: +```markdown +# Stock Market Analysis with CodeBox + +A Python project that demonstrates the usage of CodeBox API for analyzing stock market data. + +## Features +- Download stock market data using yfinance +- Perform technical analysis +- Generate visualizations +- Export results to various formats + +## Setup +1. Clone the repository +2. Create virtual environment: `python -m venv venv` +3. Activate virtual environment +4. Install dependencies: `pip install -r requirements.txt` +``` + +This setup follows the basic project structure shown in the documentation: + +- [Installation](../quickstart.md#installation) diff --git a/mkdocs.yml b/mkdocs.yml index 4190681..88aba0e 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -27,6 +27,11 @@ nav: - Core Concepts: - Architecture: concepts/architecture.md - Base Components: concepts/components.md + - Tutorial: + - Setup: tutorial/setup.md + - Core Functionality: tutorial/core_functionality.md + - Error Handler: tutorial/error_handler.md + - Advanced Features: tutorial/advanced_features.md - API Reference: - CodeBox Class: api/codebox.md - Types: api/types.md From 03a1b8e78f6a3c3289daf4bd3b5a5e39c31cc6eb Mon Sep 17 00:00:00 2001 From: TincoNomad Date: Wed, 13 Nov 2024 20:13:32 -0500 Subject: [PATCH 3/4] =?UTF-8?q?=F0=9F=A4=96task:=20examples=20docs=20updat?= =?UTF-8?q?ed?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/examples/advanced.md | 178 +++++++++++++++++++++++++++----------- docs/examples/async.md | 78 +++++++++++++++-- docs/examples/basic.md | 26 ++++++ docs/examples/files.md | 159 ++++++++++++++++++++++++---------- docs/guides/packages.md | 4 +- docs/index.md | 15 +--- docs/quickstart.md | 97 ++++++++++++--------- 7 files changed, 393 insertions(+), 164 deletions(-) diff --git a/docs/examples/advanced.md b/docs/examples/advanced.md index 76622ea..6c62a93 100644 --- a/docs/examples/advanced.md +++ b/docs/examples/advanced.md @@ -5,101 +5,179 @@ For detailed information about security and architecture, see: - [Why Sandboxing is Important](../concepts/architecture.md#why-is-sandboxing-important) - [Implementation Comparison](../concepts/implementations.md#implementation-comparison) -## Custom Kernel Usage +> [!NOTE] Update **main.py** with 
each example and run it with +>```bash +>python main.py +>``` + +## Example 1: Basic Usage with Custom Kernels ```python from codeboxapi import CodeBox -codebox = CodeBox() -# Execute bash commands -result = codebox.exec("ls -la", kernel="bash") -print(result.text) +def main(): + codebox = CodeBox() + + # Execute bash commands + result = codebox.exec("ls -la", kernel="bash") + print("Bash command result:", result.text) -# Create and run Python scripts via bash -result = codebox.exec("echo \"print('Running from file')\" > script.py", kernel="bash") -result = codebox.exec("python script.py", kernel="bash") + # Create and run Python scripts via bash + result = codebox.exec('echo "print(\'Running from file\')" > script.py', kernel="bash") + result = codebox.exec("python script.py", kernel="bash") + print("Script result:", result.text) + +if __name__ == "__main__": + main() +``` +### return +```bash +Bash command result: total 16 +drwxr-xr-x 4 renzotincopa staff 128 Nov 13 17:52 . +drwxr-xr-x 7 renzotincopa staff 224 Nov 13 13:19 .. +-rw-r--r-- 1 renzotincopa staff 13 Nov 13 17:52 async_file.txt +-rw-r--r-- 1 renzotincopa staff 11 Nov 13 13:29 data.csv + +Script result: Running from file ``` -## File Streaming with Chunks +## Example 2: File Streaming with Chunks ```python from codeboxapi import CodeBox -from codeboxapi import ExecChunk + +def main(): + codebox = CodeBox(verbose=True) + + code = """ import time +t0 = time.time() +for i in range(3): + elapsed = time.time() - t0 + print(f"{elapsed:.5f}: {i}") + time.sleep(1) +""" + + print("Starting streaming example...") -def sync_stream_exec(cb: CodeBox) -> None: - chunks: list[tuple[ExecChunk, float]] = [] - t0 = time.perf_counter() - for chunk in cb.stream_exec( - "import time;\nfor i in range(3): time.sleep(1); print(i)" - ): - chunks.append((chunk, time.perf_counter() - t0)) - print(f"{chunks[-1][1]:.5f}: {chunk}") + result = codebox.exec(code) + for chunk in result.chunks: + print(chunk.content, end='') + + +if __name__ == "__main__": + main() ``` -Reference: `stream_chunk_timing.py` lines 41-50 +### return +```bash +Starting streaming example... +0.00015: 0 +1.00524: 1 +2.01015: 2 +``` + +## Example 3: Docker Parallel Processing -## Docker Parallel Processing -> Note: Docker must be installed and running on your system to use these features. 
> Requirements: > - Docker must be installed and running (start Docker Desktop or docker daemon) > - Port 8069 must be available > - User must have permissions to run Docker commands ```python -import asyncio from codeboxapi import CodeBox +import asyncio async def train_model(codebox: CodeBox, data_split: int) -> dict: # Install required packages await codebox.ainstall("pandas") await codebox.ainstall("scikit-learn") - # Execute training code result = await codebox.aexec(f""" import pandas as pd from sklearn.model_selection import train_test_split from sklearn.linear_model import LinearRegression - # Training code with split {data_split} - X_train, X_test = train_test_split(X, y, random_state={data_split}) + # Training simulation + print(f'Training model with split {data_split}') """) return {"split": data_split, "output": result.text, "errors": result.errors} -# Run multiple instances in parallel -codeboxes = [CodeBox(api_key="docker") for _ in range(4)] -tasks = [train_model(codebox, i) for i, codebox in enumerate(codeboxes)] -results = await asyncio.gather(*tasks) +async def main(): + try: + # Run multiple instances in parallel + codeboxes = [CodeBox(api_key="docker") for _ in range(4)] + tasks = [train_model(codebox, i) for i, codebox in enumerate(codeboxes)] + results = await asyncio.gather(*tasks) + + for result in results: + print(f"Result from split {result['split']}:", result['output']) + + except Exception as e: + print(f"Error during execution: {str(e)}") + +if __name__ == "__main__": + asyncio.run(main()) ``` -Reference: `docker_parallel_execution.py` lines 17-62 -For more details on Docker implementation, see: -- [Docker Implementation](../concepts/implementations.md#dockerbox) -- [Data Structures](../concepts/data_structures.md) +### return +```bash +Result from split 0: Training model with split 0 +Result from split 1: Training model with split 1 +Result from split 2: Training model with split 2 +Result from split 3: Training model with split 3 +``` -## Error Handling +## Example 4: Error Handling ```python from codeboxapi import CodeBox -codebox = CodeBox() - -# Handle execution errors -result = codebox.exec("import non_existent_package") -if result.errors: - print("Error occurred:", result.errors[0]) - -# Handle runtime errors with try/except -result = codebox.exec(""" -try: - 1/0 -except Exception as e: - print(f"Error: {str(e)}") -""") -print(result.text) +def main(): + codebox = CodeBox() + + print("Example 1: Handling package import error") + # Handle execution errors + result = codebox.exec("import non_existent_package") + if result.errors: + print("Error occurred:", result.errors[0]) + + print("\nExample 2: Handling runtime error") + # Handle runtime errors with try/except + result = codebox.exec(""" + try: + 1/0 + except Exception as e: + print(f"Error: {str(e)}") + """) + print("Result:", result.text) + + print("\nExample 3: Handling syntax error") + result = codebox.exec("print('Hello' print('World')") + if result.errors: + print("Syntax error:", result.errors[0]) + +if __name__ == "__main__": + main() +``` + +### return +```bash +Example 1: Handling package import error +Error occurred: No module named 'non_existent_package' + +Example 2: Handling runtime error +Result: Error: division by zero + + +Example 3: Handling syntax error +Syntax error: '(' was never closed (, line 1) ``` -Reference: `getting_started.py` lines 79-81 + +## Additional Resources For more advanced usage patterns, see: - [Components Overview](../concepts/components.md) - [API Types 
Reference](../api/types.md) +- [Docker Implementation](../concepts/implementations.md#dockerbox) +- [Data Structures](../concepts/data_structures.md) diff --git a/docs/examples/async.md b/docs/examples/async.md index 2221bca..7e38d21 100644 --- a/docs/examples/async.md +++ b/docs/examples/async.md @@ -5,9 +5,19 @@ For detailed information about async operations, see: - [Core Methods](../api/codebox.md#core-methods) - [Data Structures](../concepts/data_structures.md) + ## Basic Async Operations + +Install aiofiles: + +```bash +pip install aiofiles +``` +Update main.py with the following examples: + ```python from codeboxapi import CodeBox +import asyncio async def async_examples(): codebox = CodeBox() @@ -23,19 +33,57 @@ async def async_examples(): # Async Package Installation await codebox.ainstall("requests") + +if __name__ == "__main__": + asyncio.run(async_examples()) +``` + +Then run the example with: + +```bash +python main.py +``` + +### Result: + +```bash +Async Hello! +File content: Async content ``` + Reference: `async_example.py` lines 6-18 -## Async Streaming + +## Async Streaming: + +Update again main.py with the following example: ```python +import asyncio +from codeboxapi import CodeBox + async def async_stream_exec(cb: CodeBox) -> None: - chunks: list[tuple[ExecChunk, float]] = [] - t0 = time.perf_counter() - async for chunk in cb.astream_exec( - "import time;\nfor i in range(3): time.sleep(1); print(i)" - ): - chunks.append((chunk, time.perf_counter() - t0)) - print(f"{chunks[-1][1]:.5f}: {chunk}") + result = await cb.aexec(""" +import time +import asyncio +t0 = time.perf_counter() +for i in range(3): + await asyncio.sleep(1) + print(f"{time.perf_counter() - t0:.5f}: {i}") +""") + print(f"Complete result:\n{result.text}") + +if __name__ == "__main__": + codebox = CodeBox(api_key="local") + asyncio.run(async_stream_exec(codebox)) +``` + +### Result: + +```bash +Complete result: +1.00121: 0 +2.00239: 1 +3.00352: 2 ``` Reference: `stream_chunk_timing.py` lines 53-62 @@ -46,6 +94,13 @@ Reference: `stream_chunk_timing.py` lines 53-62 > - Port 8069 must be available > - User must have permissions to run Docker commands +Install tenacity: + +```bash +pip install tenacity +``` +Then, update main file: + ```python import asyncio from codeboxapi import CodeBox @@ -75,6 +130,13 @@ async def main(): tasks = [train_model(codebox, i) for i, codebox in enumerate(codeboxes)] results = await asyncio.gather(*tasks) ``` + +### Result: + +```bash +[{'split': 0, 'output': 'Score for split 0: 1.0\n', 'errors': []}, {'split': 1, 'output': 'Score for split 1: 1.0\n', 'errors': []}, {'split': 2, 'output': 'Score for split 2: 1.0\n', 'errors': []}] +``` + Reference: `docker_parallel_execution.py` lines 17-80 For more details on async implementations, see: diff --git a/docs/examples/basic.md b/docs/examples/basic.md index 70c8e67..fe54574 100644 --- a/docs/examples/basic.md +++ b/docs/examples/basic.md @@ -5,6 +5,18 @@ For detailed information about CodeBox concepts and architecture, see: - [What is CodeBox?](../index.md#what-is-codebox) - [Core Components](../concepts/architecture.md#core-components) +## Setings: + +Install codebox and dependencies, follow [Quick Start Guide](../quickstart.md). + +- Create a main.py to run the examples codes + +- Run the code with: + +```bash +python main.py +``` + ## Simple Execution: ```python from codeboxapi import CodeBox @@ -19,6 +31,14 @@ result = codebox.exec("1/0") if result.errors: print("Error:", result.errors[0]) ``` +### Result: + +```bash +Hello World! 
+ +Error: division by zero +``` + ## Async Execution ```python from codeboxapi import CodeBox @@ -33,6 +53,12 @@ if __name__ == "__main__": asyncio.run(main()) ``` +### Result: + +```bash +Hello World! +``` + For more details on configuration and setup, see: - [Quick Start Guide](../quickstart.md) diff --git a/docs/examples/files.md b/docs/examples/files.md index f9bf407..b86f183 100644 --- a/docs/examples/files.md +++ b/docs/examples/files.md @@ -1,79 +1,144 @@ -# File Operations Examples +# File Operations For detailed information about file operations, see: - [RemoteFile Class](../api/types.md#remotefile-class) - [File Operations Guide](../guides/files.md) +> [!NOTE] Update **main.py** with each example and run it with +>```bash +>python main.py +>``` + ## Basic File Operations + ```python from codeboxapi import CodeBox -codebox = CodeBox() +def main(): + codebox = CodeBox() + + # Upload text file + codebox.upload("example.txt", b"Hello from CodeBox!") + + # Download a file + downloaded = codebox.download("example.txt") + content = downloaded.get_content() + print("Content:", content) -# Upload text file -codebox.upload("example.txt", b"Hello from CodeBox!") + # List files + files = codebox.list_files() + print("\nFiles:", "\n".join(f.__repr__() for f in files)) -# Download a file -downloaded = codebox.download("example.txt") -content = downloaded.get_content() -print("Content:", content) +if __name__ == "__main__": + main() +``` + +### Result: +```bash +Content: Hello from CodeBox! -# List files -files = codebox.list_files() -print("\nFiles:", "\n".join(f.__repr__() for f in files)) +Files: +RemoteFile(name='example.txt', size=20) ``` -Reference: `getting_started.py` lines 13-24 ## URL Downloads + ```python from codeboxapi import CodeBox -def url_upload(codebox: CodeBox, url: str) -> None: - codebox.exec(""" -import requests -import os - -def download_file_from_url(url: str) -> None: - response = requests.get(url, stream=True) - response.raise_for_status() - file_name = url.split('/')[-1] - file_path = './' + file_name - with open(file_path, 'wb') as file: - for chunk in response.iter_content(chunk_size=8192): - if chunk: - file.write(chunk) - """) - codebox.exec(f"download_file_from_url('{url}')") +def main(): + codebox = CodeBox() + + def url_upload(url: str) -> None: + codebox.exec(""" + import requests + import os + + def download_file_from_url(url: str) -> None: + response = requests.get(url, stream=True) + response.raise_for_status() + file_name = url.split('/')[-1] + file_path = './' + file_name + with open(file_path, 'wb') as file: + for chunk in response.iter_content(chunk_size=8192): + if chunk: + file.write(chunk) + """) + codebox.exec(f"download_file_from_url('{url}')") + + # Usage example + url = "https://example.com/file.txt" + url_upload(url) + print("File uploaded successfully") + +if __name__ == "__main__": + main() +``` + +### Result: +```bash +File uploaded successfully ``` -Reference: `big_upload_from_url.py` lines 4-19 ## File Conversions + ```python from codeboxapi import CodeBox +import httpx -codebox = CodeBox() +def main(): + codebox = CodeBox() -# Upload dataset csv -csv_bytes = httpx.get( - "https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data" -).content -codebox.upload("iris.csv", csv_bytes) + # Upload csv dataset + csv_bytes = httpx.get( + "https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data" + ).content + codebox.upload("iris.csv", csv_bytes) -# Install required packages -codebox.install("pandas") 
-codebox.install("openpyxl") + # Install required packages and verify installation + codebox.exec(""" + try: + import pandas + import openpyxl + print("Required packages already installed") + except ImportError: + print("Installing required packages...") + !pip install pandas openpyxl + print("Packages installed successfully") + """) + + # Convert csv dataset to excel + result = codebox.exec( + "import pandas as pd\n" + "df = pd.read_csv('iris.csv', header=None)\n" + "df.to_excel('iris.xlsx', index=False)\n" + "'iris.xlsx'" + ) + + if result.errors: + print("Error:", result.errors[0]) + else: + # List all files to verify conversion + for file in codebox.list_files(): + print(f"File: {file.path} (Size: {file.get_size()} bytes)") + +if __name__ == "__main__": + main() +``` -# Convert dataset csv to excel -output = codebox.exec( - "import pandas as pd\n\n" - "df = pd.read_csv('iris.csv', header=None)\n\n" - "df.to_excel('iris.xlsx', index=False)\n" -) +### Result: +```bash +File: async_file.txt (Size: 4096 bytes) +File: data.csv (Size: 4096 bytes) +File: example.txt (Size: 4096 bytes) +File: iris.csv (Size: 8192 bytes) +File: iris.xlsx (Size: 12288 bytes) +File: script.py (Size: 4096 bytes) ``` -Reference: `file_conversion.py` lines 7-23 -For more details on file handling, see: +For more details about file handling, see: - [Data Structures](../concepts/data_structures.md#remotefile) -- [API Methods](../api/codebox.md#file-operations) \ No newline at end of file +- [API Methods](../api/codebox.md#file-operations) +- [Quick Start Guide](../quickstart.md) \ No newline at end of file diff --git a/docs/guides/packages.md b/docs/guides/packages.md index 0db1b8b..0bd2fd7 100644 --- a/docs/guides/packages.md +++ b/docs/guides/packages.md @@ -9,10 +9,10 @@ from codeboxapi import CodeBox codebox = CodeBox() # Install a single package -codebox.install("pandas") +nstall("pandas") # Install multiple packages -codebox.install("numpy", "matplotlib") +nstall("numpy", "matplotlib") # Install specific versions codebox.install("requests==2.28.1") diff --git a/docs/index.md b/docs/index.md index 8b18436..9716c49 100644 --- a/docs/index.md +++ b/docs/index.md @@ -21,20 +21,7 @@ CodeBox is a cloud infrastructure designed to run and test Python code in an iso - 🐳 **Docker**: Fully local parallel execution - 🏭 **Factories**: Create fully custom environments -## Quick Start - -```python -from codeboxapi import CodeBox -codebox = CodeBox() -# Execute Python code -result = codebox.exec("print('Hello World!')") -print(result.text) -# Install packages -codebox.install("pandas", "numpy") -# Handle files -codebox.upload("data.csv", "1,2,3\n4,5,6") -files = codebox.list_files() -``` +[Get started with CodeBox →](quickstart.md) ## Use Cases diff --git a/docs/quickstart.md b/docs/quickstart.md index d5e6283..90491b9 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -9,52 +9,14 @@ pip install codeboxapi This will install the `codeboxapi` package and all dependencies. -For local development without an API key, you will also need to install `jupyter-kernel-gateway`: +For local development without an API key, you will also need to install: ```bash -pip install jupyter-kernel-gateway -``` - -## Jupyter Setup for Local Development - -After installing `jupyter-kernel-gateway`, you can start using CodeBox locally without an API key. The LocalBox implementation will automatically manage the Jupyter kernel for you. 
- -Note: Make sure you have IPython installed in your environment: - -```bash -pip install ipython -``` - -## Local Development +pip install jupyter-kernel-gateway ipython -CodeBox provides a local execution environment using IPython for development and testing: +pip install matplotlib -```python -from codeboxapi import CodeBox - -# Local execution (no API key needed) -with CodeBox(api_key="local") as codebox: - # Execute Python code - result = codebox.exec("print('Hello World!')") - print(result.text) - # Use matplotlib (automatically handles display) - result = codebox.exec(""" -import matplotlib.pyplot as plt -plt.plot([1, 2, 3], [1, 2, 3]) -plt.show() -""") - # Work with files in local .codebox directory - codebox.upload("data.csv", "1,2,3\n4,5,6") - files = codebox.list_files() - # Install packages locally - codebox.install("pandas") -``` - -You can also specify a custom working directory: - -```python -with CodeBox(api_key="local", codebox_cwd="./my_workspace") as codebox: - codebox.exec("print('Working in custom directory')") +pip install typing-extensions ``` ## API Key Configuration @@ -67,7 +29,7 @@ codebox = CodeBox(api_key="your-api-key") Or via environment variable: -```python +```bash export CODEBOX_API_KEY="your-api-key" ``` @@ -89,3 +51,52 @@ codebox = CodeBox(api_key="docker") ```python codebox = CodeBox(api_key="your-api-key") ``` + +## Running Your First Example + +1. Create a file `main.py`: + +```python +from codeboxapi import CodeBox + +def main(): + codebox = CodeBox(api_key="local") + # Basic example + result = codebox.exec("print('Hello World!')") + print("Basic result:", result.text) + + # Example with matplotlib + result = codebox.exec(""" +import matplotlib.pyplot as plt +plt.plot([1, 2, 3], [1, 2, 3]) +plt.title('Example plot') +plt.show() +""") + print("Plot generated:", len(result.images) > 0) + + # Example with files + codebox.upload("data.csv", "1,2,3\n4,5,6") + files = codebox.list_files() + print("Files in the directory:", files) + +if __name__ == "__main__": + main() +``` + +2. Run the example: +```bash +python main.py +``` + +3. You should see: +``` +Basic result: Hello World! + +Plot generated: True +Files in the directory: [RemoteFile(data.csv, 4096 bytes)] +``` + +The example demonstrates: +- Basic Python code execution +- Generating plots with matplotlib +- Handling files in the CodeBox environment From 15b7947f29caef1705a0ffb0723208f0d29817d7 Mon Sep 17 00:00:00 2001 From: TincoNomad Date: Thu, 14 Nov 2024 13:25:32 -0500 Subject: [PATCH 4/4] =?UTF-8?q?=F0=9F=A4=96task:all=20tutorial=20completed?= =?UTF-8?q?=20&=20examples=20checked?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docs/tutorial/advanced_features.md | 64 ++++++- docs/tutorial/conclusion.md | 76 ++++++++ docs/tutorial/core_functionality.md | 258 ++++++++++++++------------ docs/tutorial/error_handler.md | 270 +++++++++++++++++++--------- mkdocs.yml | 1 + 5 files changed, 465 insertions(+), 204 deletions(-) create mode 100644 docs/tutorial/conclusion.md diff --git a/docs/tutorial/advanced_features.md b/docs/tutorial/advanced_features.md index 6d8d08f..3555015 100644 --- a/docs/tutorial/advanced_features.md +++ b/docs/tutorial/advanced_features.md @@ -8,7 +8,18 @@ Let's implement advanced features including parallel processing for multiple sto > [!WARNING] > Make sure Docker is running before executing the code. 
You can verify this with `docker info`
 
-### First, let's create `src/parallel_analyzer.py`:
+## Setup:
+
+Let's create the files we are going to use:
+
+```bash
+touch src/parallel_analyzer.py src/advanced_visualization.py
+```
+
+### Let's update `src/parallel_analyzer.py`:
+
+
+**Code:**
 
 ```python
 from codeboxapi import CodeBox
@@ -137,7 +148,10 @@ This implementation follows the Docker parallel processing pattern shown in:
 
 - [Docker Parallel Processing](../examples/async.md#docker-parallel-processing)
 
-### Now let's enhance our visualization capabilities in `src/advanced_visualization.py`:
+### Now let's update `src/advanced_visualization.py`:
+
+
+**Code:**
 
 ```python
 from codeboxapi import CodeBox
@@ -180,6 +194,7 @@ plt.close()
 
 ### Let's add a Docker resource error to error_handlers.py:
 
+**Code:**
 
 ```python
 from typing import Optional
@@ -207,16 +222,17 @@ class DockerResourceError(StockAnalysisError):
 
 ### Finally, let's update our main file that uses all these features:
 
+**Code:**
 
 ```python
-from src.parallel_analyzer import ParallelStockAnalyzer
-from src.advanced_visualization import create_market_dashboard
+from parallel_analyzer import ParallelStockAnalyzer
+from advanced_visualization import create_market_dashboard
 from codeboxapi import CodeBox
-from src.error_handlers import AnalysisError, DockerResourceError
+from error_handlers import AnalysisError, DockerResourceError
 import asyncio
 
 async def setup_environment():
     try:
-        analyzer = ParallelStockAnalyzer(num_workers=1)
+        analyzer = ParallelStockAnalyzer(num_workers=2)
         viz_box = CodeBox(
             api_key="docker",
             factory_id="shroominic/codebox:latest"
@@ -277,7 +293,7 @@ async def main():
         if analyzer:
             for worker in analyzer.workers:
                 try:
-                    await worker.astop()
+                    pass
                 except Exception as e:
                     print(f"Error closing worker: {str(e)}")
 
@@ -316,4 +332,36 @@ For running the project, you can run the following command:
 
 python -m src.main
 ```
 
-This will execute the stock analysis with error handling and data persistence enabled.
\ No newline at end of file
+This will execute the stock analysis with error handling and data persistence enabled.
+
+### Result
+
+```bash
+=== Starting analysis process ===
+
+--- Initial configuration ---
+✓ Starting 2 workers
+✓ Environment initialized
+✓ Configuring worker 0
+✓ Configuring worker 1
+✓ Dependencies installed
+
+--- Analyzing symbols: ['AAPL', 'MSFT'] ---
+
+=== Analyzing symbols: ['AAPL', 'MSFT'] ===
+✓ Assigning AAPL to worker 0
+✓ Assigning MSFT to worker 1
+✓ AAPL processed
+✓ MSFT processed
+
+=== Results: 2/2 ===
+
+=== Final results ===
+Number of results: 2
+Content: [{'symbol': 'AAPL', 'last_price': 228.16690063476562, 'volume': 18166019, 'rsi': 52.12271959387232, 'macd': -0.7860965218539206, 'bb_upper': 237.43318064638424, 'bb_lower': 219.23226429990484}, {'symbol': 'MSFT', 'last_price': 424.18499755859375, 'volume': 12428920, 'rsi': 53.061782246735525, 'macd': 0.3959305431284861, 'bb_upper': 435.9261503363394, 'bb_lower': 406.8953523492075}]
+Creating dashboard...
+
+=== Generating Dashboard ===
+✓ Dashboard generated: market_dashboard.png
+Analysis completed successfully
+```
\ No newline at end of file
diff --git a/docs/tutorial/conclusion.md b/docs/tutorial/conclusion.md
new file mode 100644
index 0000000..4140354
--- /dev/null
+++ b/docs/tutorial/conclusion.md
@@ -0,0 +1,76 @@
+# Tutorial Conclusion: Building a Stock Market Analysis Tool
+
+## What We've Built
+
+Throughout this tutorial series, we've created a comprehensive stock market analysis tool using CodeBox API. 
The project demonstrates several advanced Python programming concepts and best practices. + +## Key Features Implemented + +1. **Core Analysis Functionality** + - Technical indicators calculation (RSI, MACD, Bollinger Bands) + - Stock data fetching with yfinance + - Basic visualization + ```markdown:docs/tutorial/core_functionality.md + startLine: 70 + endLine: 114 + ``` + +2. **Error Handling & Data Persistence** + - Custom exception hierarchy + - Graceful error handling + - Data persistence with JSON + ```markdown:docs/tutorial/error_handler.md + startLine: 20 + endLine: 35 + ``` + +3. **Parallel Processing** + - Multiple worker instances + - Docker-based parallel analysis + - Efficient resource management + ```markdown:docs/tutorial/advanced_features.md + startLine: 35 + endLine: 98 + ``` + +4. **Advanced Visualization** + - Interactive dashboards + - Multiple chart types + - Data export capabilities + +## Best Practices Demonstrated + +- Proper project structure +- Comprehensive error handling +- Asynchronous programming +- Docker integration +- Code modularity +- Type hints usage +- Documentation + +## Next Steps + +To further enhance this project, consider: + +1. Adding more technical indicators +2. Implementing real-time data updates +3. Creating a web interface +4. Adding machine learning predictions +5. Implementing portfolio management features + +## Resources + +For more information, refer to: + +- [CodeBox API Documentation](https://docs.codebox.com) +- [yfinance Documentation](https://pypi.org/project/yfinance/) +- [Technical Analysis Library Documentation](https://technical-analysis-library-python.readthedocs.io/) + +## Final Notes + +This project serves as a foundation for building more complex financial analysis tools. The modular structure allows for easy extensions and modifications based on specific needs. + +Remember to always follow proper error handling practices and consider performance implications when dealing with real-time financial data. + + +# **Congratulations! You've completed the tutorial. You can now build more complex financial analysis tools with CodeBox.** \ No newline at end of file diff --git a/docs/tutorial/core_functionality.md b/docs/tutorial/core_functionality.md index 2d2fd79..811358c 100644 --- a/docs/tutorial/core_functionality.md +++ b/docs/tutorial/core_functionality.md @@ -8,127 +8,54 @@ Let's implement the core functionality for our stock market analysis tool. We'll > [!TIP] > If you're starting from this section, check `setup.md` for the complete project structure and prerequisites. 
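Before wiring things into CodeBox, it can help to see the indicator math on its own. The snippet below is a minimal, standalone sketch (it is not one of the tutorial files) showing how the RSI, MACD, and Bollinger Band columns used throughout this part are computed; it assumes `yfinance`, `pandas`, and `ta` are installed in your local environment:

```python
# Standalone sanity check -- not part of the tutorial project files.
# Assumes: pip install yfinance pandas ta
import yfinance as yf
import ta

# Fetch one year of daily prices for a sample ticker
df = yf.Ticker("AAPL").history(period="1y")

# The same indicator columns that src/analysis.py will add inside the CodeBox sandbox
df["RSI"] = ta.momentum.RSIIndicator(df["Close"]).rsi()
macd = ta.trend.MACD(df["Close"])
df["MACD"] = macd.macd()
df["MACD_Signal"] = macd.macd_signal()
bollinger = ta.volatility.BollingerBands(df["Close"])
df["BB_High"] = bollinger.bollinger_hband()
df["BB_Low"] = bollinger.bollinger_lband()

print(df[["Close", "RSI", "MACD", "BB_High", "BB_Low"]].tail())
```

The tutorial files below compute the same columns, but inside the CodeBox sandbox rather than on the host.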
-### First, let's update our `src/main.py`: +## Let's add the code to our files -```python -from codeboxapi import CodeBox -import asyncio -from pathlib import Path - -class StockAnalyzer: - def __init__(self, data_dir: str = "data"): - self.codebox = CodeBox(api_key="local") - self.data_dir = Path(data_dir) - self.data_dir.mkdir(exist_ok=True) - - async def setup_environment(self): - # Install required packages - await self.codebox.ainstall("yfinance", "pandas", "numpy", "matplotlib", "ta") - - # Initialize the environment with helper functions - setup_code = """ - import yfinance as yf - import pandas as pd - import numpy as np - import matplotlib.pyplot as plt - import ta - - def fetch_stock_data(symbol, period='1y'): - stock = yf.Ticker(symbol) - data = stock.history(period=period) - return data - - def add_technical_indicators(df): - # Add RSI - df['RSI'] = ta.momentum.RSIIndicator(df['Close']).rsi() - # Add MACD - macd = ta.trend.MACD(df['Close']) - df['MACD'] = macd.macd() - df['MACD_Signal'] = macd.macd_signal() - # Add Bollinger Bands - bollinger = ta.volatility.BollingerBands(df['Close']) - df['BB_High'] = bollinger.bollinger_hband() - df['BB_Low'] = bollinger.bollinger_lband() - return df - """ - await self.codebox.aexec(setup_code) - - async def analyze_stock(self, symbol: str, period: str = '1y'): - analysis_code = f""" - # Fetch and process data - data = fetch_stock_data('{symbol}', period='{period}') - data = add_technical_indicators(data) - - # Create analysis plots - fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(15, 10)) - - # Price and Bollinger Bands - ax1.plot(data.index, data['Close'], label='Close Price') - ax1.plot(data.index, data['BB_High'], 'r--', label='BB Upper') - ax1.plot(data.index, data['BB_Low'], 'g--', label='BB Lower') - ax1.set_title(f'{symbol} Price and Bollinger Bands') - ax1.legend() - - # RSI and MACD - ax2.plot(data.index, data['RSI'], label='RSI') - ax2.plot(data.index, data['MACD'], label='MACD') - ax2.plot(data.index, data['MACD_Signal'], label='Signal') - ax2.axhline(y=70, color='r', linestyle='--') - ax2.axhline(y=30, color='g', linestyle='--') - ax2.set_title('Technical Indicators') - ax2.legend() - - plt.tight_layout() - plt.show() - - # Save data to CSV - data.to_csv('data/{symbol}_analysis.csv') - """ - result = await self.codebox.aexec(analysis_code) - return result - -async def main(): - analyzer = StockAnalyzer() - await analyzer.setup_environment() - await analyzer.analyze_stock('AAPL') - -if __name__ == "__main__": - asyncio.run(main()) -``` - -This implementation follows the patterns shown in the documentation: - -- [Simple Execution](../examples/basic.md#simple-execution) - - -And uses async functionality as shown in: - -- [Async Operations](../examples/async.md#basic-async-operations) - - -### Let's also create `src/visualization.py` for additional plotting functions: +### First, let's update `src/visualization.py`: +**Code:** ```python from codeboxapi import CodeBox +import pandas as pd -async def create_comparison_plot(codebox: CodeBox, symbols: list[str], period: str = '1y'): + +async def create_technical_analysis_plot(codebox: CodeBox, symbol: str, df: pd.DataFrame) -> str: + """Creates a technical analysis plot and returns the base64 encoded image""" plot_code = f""" - plt.figure(figsize=(15, 8)) - - for symbol in {symbols}: - data = fetch_stock_data(symbol, period='{period}') - # Normalize prices to percentage changes - normalized = data['Close'] / data['Close'].iloc[0] * 100 - plt.plot(data.index, normalized, label=symbol) - 
plt.title('Stock Price Comparison (Normalized)')
-    plt.xlabel('Date')
-    plt.ylabel('Price (%)')
-    plt.legend()
-    plt.grid(True)
-    plt.show()
-    """
-    return await codebox.aexec(plot_code)
+import matplotlib
+matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+import io
+import base64
+
+fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(15, 10))
+
+ax1.plot(df.index, df['Close'], label='Close Price')
+ax1.plot(df.index, df['BB_High'], 'r--', label='BB Upper')
+ax1.plot(df.index, df['BB_Low'], 'g--', label='BB Lower')
+ax1.set_title('{symbol} Price and Bollinger Bands')
+ax1.legend()
+
+ax2.plot(df.index, df['RSI'], label='RSI')
+ax2.plot(df.index, df['MACD'], label='MACD')
+ax2.plot(df.index, df['MACD_Signal'], label='Signal')
+ax2.axhline(y=70, color='r', linestyle='--')
+ax2.axhline(y=30, color='g', linestyle='--')
+ax2.set_title('Technical Indicators')
+ax2.legend()
+
+plt.tight_layout()
+
+# Save figure to memory buffer
+buf = io.BytesIO()
+plt.savefig(buf, format='png')
+plt.close()
+
+# Convert to base64
+buf.seek(0)
+img_base64 = base64.b64encode(buf.read()).decode('utf-8')
+print(f"IMAGE_BASE64|{{img_base64}}")
+"""
+    return plot_code
 ```
 
 This implementation leverages CodeBox's ability to handle matplotlib visualizations as shown in:
 
 - [Plotting with Matplotlib](../examples/getting_started.md#plotting-with-matplotlib)
 
-### Let's create `src/analysis.py` for technical analysis functions:
+### Then, update `src/analysis.py` for technical analysis functions:
+
+**Code:**
 
 ```python
 from typing import Dict
@@ -184,3 +113,100 @@ def calculate_technical_indicators(df: pd.DataFrame) -> pd.DataFrame:
 
     return df
 ```
+### Lastly, update `src/main.py`:
+
+**Code:**
+
+```python
+from codeboxapi import CodeBox
+import asyncio
+from pathlib import Path
+from PIL import Image
+import base64
+from io import BytesIO
+from visualization import create_technical_analysis_plot
+
+class StockAnalyzer:
+    def __init__(self, data_dir: str = "data"):
+        self.codebox = CodeBox(api_key="local")
+        self.data_dir = Path(data_dir)
+        self.data_dir.mkdir(exist_ok=True)
+
+    async def analyze_stock(self, symbol: str, period: str = '1y'):
+        try:
+            with open('src/analysis.py', 'r') as file:
+                analysis_code = file.read()
+
+            # Prepare the data
+            setup_code = f"""
+import yfinance as yf
+import pandas as pd
+import ta
+
+{analysis_code}
+
+df = yf.Ticker('{symbol}').history(period='{period}')
+df = calculate_technical_indicators(df)
+"""
+            await self.codebox.aexec(setup_code)
+
+            # Generate the plot
+            plot_code = await create_technical_analysis_plot(self.codebox, symbol, "df")
+
+            # Execute the plotting code
+            temp_file = self.data_dir / "temp_analysis.py"
+            temp_file.write_text(plot_code)
+
+            with open(temp_file, 'rb') as f:
+                self.codebox.upload("analysis_code.py", f.read())
+
+            output = await self.codebox.aexec("exec(open('analysis_code.py').read())")
+
+            if output and hasattr(output, 'text'):
+                for line in output.text.split('\n'):
+                    if line.startswith('IMAGE_BASE64|'):
+                        base64_str = line.split('|')[1].strip()
+                        img_data = base64.b64decode(base64_str)
+                        img = Image.open(BytesIO(img_data))
+                        img.save(str(self.data_dir / f'{symbol}_analysis.png'))
+                        img.show()
+            print(f"✓ Analysis for {symbol} completed")
+
+            return output
+        except Exception as e:
+            print(f"❌ Error: {str(e)}")
+            raise
+
+async def main():
+    analyzer = StockAnalyzer()
+    print("\n=== Analyzing AAPL ===")
+    await analyzer.analyze_stock('AAPL')
+
+if __name__ == "__main__": 
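+    # Entry point: build the analyzer and run the example AAPL analysis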
    asyncio.run(main())
+```
+
+This implementation follows the patterns shown in the documentation:
+
+- [Simple Execution](../examples/basic.md#simple-execution)
+
+
+And uses async functionality as shown in:
+
+- [Async Operations](../examples/async.md#basic-async-operations)
+
+## Running the code
+
+```bash
+python src/main.py
+```
+
+### Result
+
+```bash
+=== Analyzing AAPL ===
+✓ Analysis for AAPL completed
+```
+And a new window will open with the analysis plot.
+
+**Now let's continue with Error Handling**
\ No newline at end of file
diff --git a/docs/tutorial/error_handler.md b/docs/tutorial/error_handler.md
index 566b937..50ec5dd 100644
--- a/docs/tutorial/error_handler.md
+++ b/docs/tutorial/error_handler.md
@@ -1,11 +1,21 @@
 # Part 3: Error Handling and Data Persistence
 
-Let's enhance our stock analysis tool with proper error handling and data persistence.
+This tutorial shows how to implement proper error handling and data persistence in your stock analysis tool.
 
-### First, let's create `src/error_handlers.py` with custom exceptions:
+> [!NOTE]
+> Make sure you have completed Part 2 before continuing.
+
+## Custom Exceptions
+
+First, let's create our custom exceptions in `src/error_handlers.py`:
+
+```bash
+touch src/error_handlers.py
+```
+**Code:**
 ```python
-from typing import Optional, List
+from typing import Optional
 
 class StockAnalysisError(Exception):
     """Base exception for stock analysis errors"""
@@ -24,108 +34,194 @@ class AnalysisError(StockAnalysisError):
         super().__init__(f"{error_type}: {details}")
 ```
 
+
+
+### Let's also create utility functions for data persistence in `src/utils.py`:
+
+```bash
+touch src/utils.py
+```
+
+**Code:**
+
+```python
+from pathlib import Path
+import json
+from typing import Dict, Any
+
+async def save_json_data(file_path: Path, data: Dict[str, Any]):
+    """Safely save JSON data with error handling"""
+    try:
+        temp_path = file_path.with_suffix('.tmp')
+        temp_path.write_text(json.dumps(data, indent=2))
+        temp_path.replace(file_path)
+    except Exception as e:
+        if temp_path.exists():
+            temp_path.unlink()
+        raise IOError(f"Failed to save data: {str(e)}")
+
+async def load_json_data(file_path: Path) -> Dict[str, Any]:
+    """Load previously saved JSON data, returning an empty dict if none exists.
+
+    Note: main.py below imports this helper; a minimal implementation is shown here.
+    """
+    try:
+        if not file_path.exists():
+            return {}
+        data = json.loads(file_path.read_text())
+        print(f"✓ Retrieved data from: {file_path}")
+        return data
+    except Exception as e:
+        raise IOError(f"Failed to load data: {str(e)}")
+```
+
+
 ### Now, let's update our `src/main.py` with error handling and data persistence:
 
+**Code:**
+
 ```python
 from codeboxapi import CodeBox
 import asyncio
 from pathlib import Path
-import json
+from PIL import Image
+import base64
+from io import BytesIO
 from datetime import datetime
-from .error_handlers import DataFetchError, AnalysisError
+from visualization import create_technical_analysis_plot
+from error_handlers import DataFetchError, AnalysisError
+from utils import save_json_data, load_json_data
 
 class StockAnalyzer:
     def __init__(self, data_dir: str = "data"):
         self.codebox = CodeBox(api_key="local")
         self.data_dir = Path(data_dir)
         self.data_dir.mkdir(exist_ok=True)
-        self.analysis_history_file = self.data_dir / "analysis_history.json"
-
-    async def load_analysis_history(self) -> dict:
-        if self.analysis_history_file.exists():
-            return json.loads(self.analysis_history_file.read_text())
-        return {}
-
-    async def save_analysis_history(self, symbol: str, analysis_data: dict):
-        history = await self.load_analysis_history()
-        history[symbol] = {
-            "timestamp": datetime.now().isoformat(),
-            "data": analysis_data
-        }
-        self.analysis_history_file.write_text(json.dumps(history, indent=2))
+        self.history_file = self.data_dir / "analysis_history.json"
 
     async def analyze_stock(self, symbol: str, period: str = '1y'):
+        print("\nUsing error handlers from error_handlers.py...")
         try:
-            # Check 
if data exists and is recent - history = await self.load_analysis_history() - if symbol in history: - last_analysis = datetime.fromisoformat(history[symbol]["timestamp"]) - if (datetime.now() - last_analysis).days < 1: - print(f"Using cached analysis for {symbol}") - return history[symbol]["data"] + # First try to load previous analysis + print("\nTrying to load previous analysis using utils.py...") + previous_data = await load_json_data(self.history_file) + if previous_data.get("last_analysis", {}).get("symbol") == symbol: + print(f"Found previous analysis for {symbol}") # Verify symbol exists verify_code = f""" import yfinance as yf try: ticker = yf.Ticker('{symbol}') - info = ticker.info - if not info: - raise ValueError(f"Invalid symbol: {symbol}") - print("Symbol verified successfully") + if not ticker.info: + raise ValueError(f"Symbol not found: {symbol}") except Exception as e: - print(f"Error: {str(e)}") - raise + raise DataFetchError('{symbol}', str(e)) """ - result = await self.codebox.aexec(verify_code) - if "Error" in result.text: - raise DataFetchError(symbol, result.text) - - # Perform analysis - analysis_code = f""" + await self.codebox.aexec(verify_code) + + with open('src/analysis.py', 'r') as file: + analysis_code = file.read() + + # Setup analysis environment + setup_code = f""" + import yfinance as yf + import pandas as pd + import ta + {analysis_code} try: - data = fetch_stock_data('{symbol}', period='{period}') - analysis_result = {{ - 'symbol': '{symbol}', - 'period': '{period}', - 'last_price': float(data['Close'].iloc[-1]), - 'volume': int(data['Volume'].iloc[-1]), - 'change_percent': float(data['Close'].pct_change().iloc[-1] * 100), - }} - print(json.dumps(analysis_result)) + df = yf.Ticker('{symbol}').history(period='{period}') + if df.empty: + raise DataFetchError('{symbol}', 'No data available') + df = calculate_technical_indicators(df) except Exception as e: - print(f"Analysis Error: {str(e)}") - raise + raise AnalysisError('Data Processing', str(e)) """ - result = await self.codebox.aexec(analysis_code) + await self.codebox.aexec(setup_code) - if "Analysis Error" in result.text: - raise AnalysisError("Calculation", result.text) - - analysis_data = json.loads(result.text) - await self.save_analysis_history(symbol, analysis_data) - return analysis_data + # Obtener datos básicos del stock + basic_info_code = f""" + ticker = yf.Ticker('{symbol}') + info = ticker.info + last_price = info.get('regularMarketPrice', 0) + volume = info.get('regularMarketVolume', 0) + prev_close = info.get('previousClose', 0) + change_percent = ((last_price - prev_close) / prev_close * 100) if prev_close else 0 + + analysis_result = {{ + "symbol": '{symbol}', + "last_price": last_price, + "volume": volume, + "change_percent": round(change_percent, 2) + }} + print(f"Analysis successful: {{analysis_result}}") + """ + await self.codebox.aexec(basic_info_code) + # Generate plot + try: + plot_code = await create_technical_analysis_plot(self.codebox, symbol, "df") + temp_file = self.data_dir / "temp_analysis.py" + temp_file.write_text(plot_code) + + with open(temp_file, 'rb') as f: + self.codebox.upload("analysis_code.py", f.read()) + + output = await self.codebox.aexec("exec(open('analysis_code.py').read())") + + if output and hasattr(output, 'text'): + for line in output.text.split('\n'): + if line.startswith('IMAGE_BASE64|'): + base64_str = line.split('|')[1].strip() + img_data = base64.b64decode(base64_str) + img = Image.open(BytesIO(img_data)) + img_path = self.data_dir / 
f'{symbol}_analysis.png' + img.save(str(img_path)) + + # Save analysis metadata + print("\nSaving analysis using utils.py...") + analysis_data = { + "last_analysis": { + "symbol": symbol, + "timestamp": datetime.now().isoformat(), + "image_path": str(img_path) + } + } + await save_json_data(self.history_file, analysis_data) + + print(f"✓ Analysis for {symbol} completed") + return output + + except Exception as e: + raise AnalysisError("Visualization", str(e)) + + except DataFetchError as e: + print(f"✗ DataFetchError from error_handlers.py: {str(e)}") + raise + except AnalysisError as e: + print(f"✗ AnalysisError from error_handlers.py: {str(e)}") + raise + except IOError as e: + print(f"✗ IOError from utils.py: {str(e)}") + raise AnalysisError("IO", str(e)) except Exception as e: - if isinstance(e, (DataFetchError, AnalysisError)): - raise raise AnalysisError("Unknown", str(e)) async def main(): analyzer = StockAnalyzer() - await analyzer.setup_environment() + + print("\n=== Testing Invalid Symbol ===") + try: + await analyzer.analyze_stock('INVALID') + except DataFetchError as e: + print(f"✗ Caught expected error: {str(e)}") + + print("\n=== Testing Valid Symbol ===") try: - await analyzer.analyze_stock('AAPL') - except (DataFetchError, AnalysisError) as e: - print(f"Error during analysis: {str(e)}") + result = await analyzer.analyze_stock('AAPL') + if result: + print("✓ Analysis completed successfully") except Exception as e: - print(f"Unexpected error: {str(e)}") + print(f"✗ Unexpected error: {e}") if __name__ == "__main__": asyncio.run(main()) ``` -This implementation follows error handling patterns shown in: +This implementation demonstrates: + +- Custom exception hierarchy +- Proper error handling and propagation +- Clear error messages +- Graceful handling of both valid and invalid cases + +The code follows patterns shown in: - [Error Handling](../examples/advanced.md#error-handling) @@ -135,21 +231,35 @@ And file operations patterns from: - [File Operations](../examples/getting_started.md#file-operations) -### Let's also create a utility function for data persistence in `src/utils.py`: +## Running the Example -```python -from pathlib import Path -import json -from typing import Dict, Any +Execute the script: +```bash +python src/main.py +``` -async def save_json_data(file_path: Path, data: Dict[str, Any]): - """Safely save JSON data with error handling""" - try: - temp_path = file_path.with_suffix('.tmp') - temp_path.write_text(json.dumps(data, indent=2)) - temp_path.replace(file_path) - except Exception as e: - if temp_path.exists(): - temp_path.unlink() - raise IOError(f"Failed to save data: {str(e)}") -``` \ No newline at end of file +### Result + +```bash +=== Testing Invalid Symbol === + +Using error handlers from error_handlers.py... + +Trying to load previous analysis using utils.py... +✓ Retrieved data from: data/analysis_history.json + +=== Testing Valid Symbol === + +Using error handlers from error_handlers.py... + +Trying to load previous analysis using utils.py... +✓ Retrieved data from: data/analysis_history.json +Found previous analysis for AAPL + +Saving analysis using utils.py... 
✓ Data saved to: data/analysis_history.json
+✓ Analysis for AAPL completed
+✓ Analysis completed successfully
+```
+
+**Now let's continue with Parallel Analysis and Docker usage**
\ No newline at end of file
diff --git a/mkdocs.yml b/mkdocs.yml
index 88aba0e..10e274a 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -32,6 +32,7 @@ nav:
       - Core Functionality: tutorial/core_functionality.md
       - Error Handler: tutorial/error_handler.md
       - Advanced Features: tutorial/advanced_features.md
+      - Conclusion: tutorial/conclusion.md
   - API Reference:
     - CodeBox Class: api/codebox.md
     - Types: api/types.md