From 6af03b2d636a2185a133cf909d4fdc353d06a871 Mon Sep 17 00:00:00 2001 From: V4LER11 Date: Mon, 4 Nov 2024 18:11:19 +0000 Subject: [PATCH 001/185] splits integrations.yaml into inegrations.d/*.yaml files Signed-off-by: V4LER11 added /v1/integrations Signed-off-by: V4LER11 json schema: added descriptions Signed-off-by: V4LER11 preserving order in schema (hard one) Signed-off-by: V4LER11 oops Signed-off-by: V4LER11 POST v1/integrations-save Signed-off-by: V4LER11 reading integrations from both integrations.yaml and integrations.d/*.yaml Signed-off-by: V4LER11 GET v1/integrations-icons Signed-off-by: V4LER11 enable / disable integrations (integrations-enabled.yaml) Signed-off-by: V4LER11 simplified code Signed-off-by: V4LER11 chrome: window_size -> (window_width, window_height) --- Cargo.toml | 2 +- src/http/routers/v1.rs | 6 +- src/http/routers/v1/integrations.rs | 206 +++++++++++++++++++++ src/integrations/integr.rs | 42 +++++ src/integrations/integr_chrome.rs | 86 ++++++++- src/integrations/integr_github.rs | 57 ++++-- src/integrations/integr_gitlab.rs | 49 ++++- src/integrations/integr_pdb.rs | 68 +++++-- src/integrations/integr_postgres.rs | 52 +++++- src/integrations/mod.rs | 265 ++++++++++++++++++++++++---- src/tools/tools_description.rs | 28 ++- src/yaml_configs/create_configs.rs | 100 +++++++++-- 12 files changed, 856 insertions(+), 105 deletions(-) create mode 100644 src/http/routers/v1/integrations.rs create mode 100644 src/integrations/integr.rs diff --git a/Cargo.toml b/Cargo.toml index 002734b8e..639856b7a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -107,4 +107,4 @@ image = "0.25.2" headless_chrome = "1.0.15" nix = { version = "0.29.0", features = ["signal"] } resvg = "0.44.0" -async-tar = "0.5.0" \ No newline at end of file +async-tar = "0.5.0" diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index a31f95877..5049dc194 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -38,7 +38,7 @@ use crate::http::routers::v1::system_prompt::handle_v1_system_prompt; use crate::http::routers::v1::vecdb::{handle_v1_vecdb_search, handle_v1_vecdb_status}; #[cfg(feature="vecdb")] use crate::http::routers::v1::handlers_memdb::{handle_mem_query, handle_mem_add, handle_mem_erase, handle_mem_update_used, handle_mem_block_until_vectorized, handle_mem_list, handle_ongoing_update_or_create, handle_ongoing_dump}; - +use crate::http::routers::v1::integrations::{handle_v1_integrations, handle_v1_integrations_icons, handle_v1_integrations_save}; use crate::http::utils::telemetry_wrapper; pub mod code_completion; @@ -66,6 +66,7 @@ mod patch; pub mod handlers_memdb; #[cfg(feature="vecdb")] pub mod vecdb; +mod integrations; pub fn make_v1_router() -> Router { @@ -89,6 +90,9 @@ pub fn make_v1_router() -> Router { .route("/tools", telemetry_get!(handle_v1_tools)) .route("/tools-check-if-confirmation-needed", telemetry_post!(handle_v1_tools_check_if_confirmation_needed)) .route("/tools-execute", telemetry_post!(handle_v1_tools_execute)) + .route("/integrations", telemetry_get!(handle_v1_integrations)) + .route("/integrations-save", telemetry_post!(handle_v1_integrations_save)) + .route("/integrations-icons", telemetry_get!(handle_v1_integrations_icons)) .route("/lsp-initialize", telemetry_post!(handle_v1_lsp_initialize)) .route("/lsp-did-changed", telemetry_post!(handle_v1_lsp_did_change)) diff --git a/src/http/routers/v1/integrations.rs b/src/http/routers/v1/integrations.rs new file mode 100644 index 000000000..ae4cd1fb2 --- /dev/null +++ b/src/http/routers/v1/integrations.rs @@ -0,0 
+1,206 @@ +use std::path::PathBuf; +use std::sync::Arc; +use axum::Extension; +use axum::http::{Response, StatusCode}; +use tokio::sync::RwLock as ARwLock; +use hyper::Body; +use serde::{Deserialize, Serialize}; +use serde_json::{json, Value}; +use url::Url; +use std::fs; +use std::io::Read; +#[allow(deprecated)] +use base64::encode; +use indexmap::IndexMap; +use reqwest::Client; +use tokio::fs as async_fs; +use tracing::info; + +use crate::custom_error::ScratchError; +use crate::global_context::GlobalContext; +use crate::integrations::{get_empty_integrations, get_integration_path, get_integrations, json_for_integration, validate_integration_value}; +use crate::yaml_configs::create_configs::{integrations_enabled_cfg, read_yaml_into_value, write_yaml_value}; + + +#[derive(Serialize, Deserialize)] +struct IntegrationItem { + name: String, + enabled: bool, + schema: Option, + value: Option, +} + +#[derive(Serialize)] +struct IntegrationIcon { + name: String, + value: String, +} + +async fn load_integration_schema_and_json( + gcx: Arc>, +) -> Result, String> { + let integrations = get_empty_integrations(); + let cache_dir = gcx.read().await.cache_dir.clone(); + let integrations_yaml_value = read_yaml_into_value(&cache_dir.join("integrations.yaml")).await?; + + let mut results = IndexMap::new(); + for (i_name, i) in integrations.iter() { + let path = get_integration_path(&cache_dir, &i_name); + let j_value = json_for_integration(&path, integrations_yaml_value.get(&i_name), &i).await?; + results.insert(i_name.clone(), (i.to_schema_json(), j_value)); + } + + Ok(results) +} + +async fn get_image_base64( + cache_dir: &PathBuf, + icon_name: &str, + icon_url: &str, +) -> Result { + let assets_path = cache_dir.join("assets/integrations"); + + // Parse the URL to get the file extension + let url = Url::parse(icon_url).map_err(|e| e.to_string())?; + let extension = url + .path_segments() + .and_then(|segments| segments.last()) + .and_then(|name| name.split('.').last()) + .unwrap_or("png"); // Default to "png" if no extension is found + + let file_path = assets_path.join(format!("{}.{}", icon_name, extension)); + + // Check if the file already exists + if file_path.exists() { + info!("Using image from cache: {}", file_path.display()); + let mut file = fs::File::open(&file_path).map_err(|e| e.to_string())?; + let mut buffer = Vec::new(); + file.read_to_end(&mut buffer).map_err(|e| e.to_string())?; + #[allow(deprecated)] + let b64_image = encode(&buffer); + let image_str = format!("data:{};base64,{}", extension, b64_image); + return Ok(image_str); + } + + // Create the cache directory if it doesn't exist + async_fs::create_dir_all(&assets_path).await.map_err(|e| e.to_string())?; + + // Download the image + info!("Downloading image from {}", icon_url); + let client = Client::new(); + let response = client.get(icon_url).send().await.map_err(|e| e.to_string())?; + let bytes = response.bytes().await.map_err(|e| e.to_string())?; + + // Save the image to the cache directory + async_fs::write(&file_path, &bytes).await.map_err(|e| e.to_string())?; + + // Return the base64 string + #[allow(deprecated)] + let b64_image = encode(&bytes); + let image_str = format!("data:{};base64,{}", extension, b64_image); + Ok(image_str) +} + +pub async fn handle_v1_integrations_icons( + Extension(gcx): Extension>>, + _: hyper::body::Bytes, +) -> axum::response::Result, ScratchError> { + let cache_dir = gcx.read().await.cache_dir.clone(); + let integrations = get_integrations(gcx.clone()).await.map_err(|e|{ + 
ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to load integrations: {}", e)) + })?; + + let mut results = vec![]; + for (i_name, i) in integrations.iter() { + let image_base64 = get_image_base64(&cache_dir, i_name, &i.icon_link()).await.map_err(|e|{ + ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to get image: {}", e)) + })?; + results.push(IntegrationIcon { + name: i_name.clone(), + value: image_base64, + }); + } + + let payload = serde_json::to_string_pretty(&json!(results)).expect("Failed to serialize results"); + Ok(Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "application/json") + .body(Body::from(payload)) + .unwrap()) +} + +pub async fn handle_v1_integrations( + Extension(gcx): Extension>>, + _: hyper::body::Bytes, +) -> axum::response::Result, ScratchError> { + let schemas_and_json_dict = load_integration_schema_and_json(gcx.clone()).await.map_err(|e|{ + ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to load integrations: {}", e)) + })?; + + let cache_dir = gcx.read().await.cache_dir.clone(); + let enabled_path = cache_dir.join("integrations-enabled.yaml"); + let enabled_mapping = match integrations_enabled_cfg(&enabled_path).await { + serde_yaml::Value::Mapping(map) => map, + _ => serde_yaml::Mapping::new(), + }; + + let mut items = vec![]; + for (name, (schema, value)) in schemas_and_json_dict { + let item = IntegrationItem { + name: name.clone(), + enabled: enabled_mapping.get(&name).and_then(|v| v.as_bool()).unwrap_or(false), + schema: Some(schema), + value: Some(value), + }; + + items.push(item); + } + + let payload = serde_json::to_string_pretty(&json!(items)).expect("Failed to serialize items"); + Ok(Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "application/json") + .body(Body::from(payload)) + .unwrap()) +} + + +pub async fn handle_v1_integrations_save( + Extension(gcx): Extension>>, + body_bytes: hyper::body::Bytes, +) -> axum::response::Result, ScratchError> { + let post = serde_json::from_slice::(&body_bytes) + .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; + + let cache_dir = gcx.read().await.cache_dir.clone(); + let enabled_path = cache_dir.join("integrations-enabled.yaml"); + let mut enabled_value = integrations_enabled_cfg(&enabled_path).await; + if let serde_yaml::Value::Mapping(ref mut map) = enabled_value { + map.insert(serde_yaml::Value::String(post.name.clone()), serde_yaml::Value::Bool(post.enabled)); + } else { + return Err(ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to parse {:?} as YAML::Mapping", enabled_path))); + } + write_yaml_value(&enabled_path, &enabled_value).await + .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to write YAML: {}", e)))?; + + if let Some(post_value) = &post.value { + let yaml_value: serde_yaml::Value = serde_json::to_string(post_value).map_err(|e|e.to_string()) + .and_then(|s|serde_yaml::from_str(&s).map_err(|e|e.to_string())) + .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("ERROR converting JSON to YAML: {}", e)))?; + + let yaml_value = validate_integration_value(&post.name, yaml_value).await + .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("ERROR validating integration value: {}", e)))?; + + let path = get_integration_path(&cache_dir, &post.name); + + write_yaml_value(&path, &yaml_value).await.map_err(|e|{ + 
ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to write YAML: {}", e)) + })?; + } + + Ok(Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "application/json") + .body(Body::from(format!("Integration {} updated", post.name))) + .unwrap()) +} diff --git a/src/integrations/integr.rs b/src/integrations/integr.rs new file mode 100644 index 000000000..435059010 --- /dev/null +++ b/src/integrations/integr.rs @@ -0,0 +1,42 @@ +use schemars::{schema_for, JsonSchema}; +use serde::Serialize; +use serde::de::DeserializeOwned; +use crate::tools::tools_description::Tool; + + +pub trait Integration: Send + Sync { + fn name(&self) -> String; + fn update_from_json(&mut self, value: &serde_json::Value) -> Result<(), String>; + fn from_yaml_validate_to_json(&self, value: &serde_yaml::Value) -> Result; + fn to_tool(&self) -> Box; + fn to_json(&self) -> Result; + fn to_schema_json(&self) -> serde_json::Value; + fn default_value(&self) -> String; + fn icon_link(&self) -> String; +} + +pub fn json_schema() -> Result { + let schema = schema_for!(T); + let mut json_schema = serde_json::to_value(&schema).map_err(|e| e.to_string())?; + + // Reorder properties in the json_schema based on the order of dummy_instance + let dummy_instance: T = T::default(); + let serialized_value = serde_json::to_value(&dummy_instance).unwrap(); + + // schemars breaks order. Instead, reordering in Value + if let serde_json::Value::Object(ref mut schema_map) = json_schema { + if let Some(serde_json::Value::Object(ref mut properties)) = schema_map.get_mut("properties") { + if let serde_json::Value::Object(dummy_map) = serialized_value { + let mut ordered_properties = serde_json::Map::new(); + for key in dummy_map.keys() { + if let Some(value) = properties.remove(key) { + ordered_properties.insert(key.clone(), value); + } + } + *properties = ordered_properties; + } + } + } + + Ok(json_schema) +} diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 4f4075169..e2f4e5359 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -20,18 +20,30 @@ use headless_chrome::{Browser, LaunchOptions, Tab}; use headless_chrome::browser::tab::point::Point; use headless_chrome::protocol::cdp::Page; use headless_chrome::protocol::cdp::Emulation; +use schemars::JsonSchema; use serde::{Deserialize, Serialize}; +use crate::integrations::integr::{json_schema, Integration}; -#[derive(Clone, Serialize, Deserialize, Debug)] +#[derive(Clone, Serialize, Deserialize, Debug, JsonSchema, Default)] pub struct IntegrationChrome { + #[schemars(description = "Path to the Chrome binary or WebSocket URL for remote debugging.")] pub chrome_path: Option, - pub window_size: Option>, + #[schemars(description = "Window width for the Chrome browser.")] + pub window_width: Option, + #[schemars(description = "Window height for the Chrome browser.")] + pub window_height: Option, + #[schemars(description = "Idle timeout for the Chrome browser in seconds.")] pub idle_browser_timeout: Option, #[serde(default = "default_headless")] pub headless: bool, } +#[derive(Default)] +pub struct ToolChrome { + pub integration_chrome: IntegrationChrome, +} + fn default_headless() -> bool { true } pub struct ToolChrome { @@ -65,17 +77,40 @@ impl IntegrationSession for ChromeSession fn is_expired(&self) -> bool { false } } -impl ToolChrome { - pub fn new_from_yaml(v: &serde_yaml::Value, supports_clicks: bool,) -> Result { - let integration_chrome = 
serde_yaml::from_value::(v.clone()).map_err(|e| { +impl Integration for ToolChrome { + fn name(&self) -> String { + "chrome".to_string() + } + + fn update_from_json(&mut self, value: &Value) -> Result<(), String> { + let integration_github = serde_json::from_value::(value.clone()) + .map_err(|e|e.to_string())?; + self.integration_chrome = integration_github; + Ok(()) + } + + fn from_yaml_validate_to_json(&self, value: &serde_yaml::Value) -> Result { + let integration_github = serde_yaml::from_value::(value.clone()).map_err(|e| { let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); format!("{}{}", e.to_string(), location) })?; - Ok(Self { - integration_chrome, - supports_clicks, - }) + serde_json::to_value(&integration_github).map_err(|e| e.to_string()) + } + + fn to_tool(&self) -> Box { + Box::new(ToolChrome {integration_chrome: self.integration_chrome.clone()}) as Box + } + + fn to_json(&self) -> Result { + serde_json::to_value(&self.integration_chrome).map_err(|e| e.to_string()) } + + fn to_schema_json(&self) -> Value { + json_schema::().unwrap() + } + + fn default_value(&self) -> String { DEFAULT_CHROME_INTEGRATION_YAML.to_string() } + fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/732/732205.png".to_string() } } #[async_trait] @@ -170,6 +205,25 @@ async fn setup_chrome_session( session_hashmap_key: &String, ) -> Result, String> { let mut setup_log = vec![]; + if !is_chrome_session_active(&session_hashmap_key, gcx.clone()).await { + let mut is_connection = false; + if let Some(chrome_path) = args.chrome_path.clone() { + is_connection = chrome_path.starts_with("ws://"); + } + + let window_size = if args.window_width.is_some() && args.window_height.is_some() { + Some((args.window_width.unwrap(), args.window_height.unwrap())) + } else if args.window_width.is_some() { + Some((args.window_width.unwrap(), args.window_width.unwrap())) + } else { + None + }; + + let mut idle_browser_timeout = Duration::from_secs(600); + if let Some(timeout) = args.idle_browser_timeout.clone() { + idle_browser_timeout = Duration::from_secs(timeout as u64); + } + } let session_entry = { let gcx_locked = gcx.read().await; @@ -534,3 +588,17 @@ async fn screenshot_jpeg_base64(tab: &Arc, capture_beyond_viewport: bool) - MultimodalElement::new("image/jpeg".to_string(), jpeg_data) } + +const DEFAULT_CHROME_INTEGRATION_YAML: &str = r#" +# Chrome integration + +# This can be path to your chrome binary. 
You can install with "npx @puppeteer/browsers install chrome@stable", read +# more here https://developer.chrome.com/blog/chrome-for-testing/?utm_source=Fibery&utm_medium=iframely +#chrome_path: "/Users/me/my_path/chrome/mac_arm-130.0.6723.69/chrome-mac-arm64/Google Chrome for Testing.app/Contents/MacOS/Google Chrome for Testing" +# Or you can give it ws:// path, read more here https://developer.chrome.com/docs/devtools/remote-debugging/local-server/ +# In that case start chrome with --remote-debugging-port +# chrome_path: "ws://127.0.0.1:6006/" +# window_width: 1024 +# window_height: 768 +# idle_browser_timeout: 600 +"#; diff --git a/src/integrations/integr_github.rs b/src/integrations/integr_github.rs index 6cc800334..ada22999a 100644 --- a/src/integrations/integr_github.rs +++ b/src/integrations/integr_github.rs @@ -3,6 +3,7 @@ use std::collections::HashMap; use tokio::sync::Mutex as AMutex; use tokio::process::Command; use async_trait::async_trait; +use schemars::JsonSchema; use tracing::{error, info}; use serde::{Deserialize, Serialize}; @@ -11,28 +12,57 @@ use crate::call_validation::{ContextEnum, ChatMessage, ChatContent}; use crate::tools::tools_description::Tool; use serde_json::Value; +use crate::integrations::integr::{json_schema, Integration}; -#[derive(Clone, Serialize, Deserialize, Debug)] +#[derive(Clone, Serialize, Deserialize, Debug, JsonSchema, Default)] #[allow(non_snake_case)] pub struct IntegrationGitHub { + #[schemars(description = "Path to the GitHub CLI binary.")] pub gh_binary_path: Option, + #[schemars(description = "GitHub token for authentication.")] pub GH_TOKEN: String, } +#[derive(Default)] pub struct ToolGithub { - integration_github: IntegrationGitHub, + pub integration_github: IntegrationGitHub, } -impl ToolGithub { - pub fn new_from_yaml(gh_config: &serde_yaml::Value) -> Result { - let integration_github = serde_yaml::from_value::(gh_config.clone()) - .map_err(|e| { - let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); - format!("{}{}", e.to_string(), location) - })?; - Ok(Self { integration_github }) +impl Integration for ToolGithub { + fn name(&self) -> String { + "github".to_string() } + + fn update_from_json(&mut self, value: &Value) -> Result<(), String> { + let integration_github = serde_json::from_value::(value.clone()) + .map_err(|e|e.to_string())?; + self.integration_github = integration_github; + Ok(()) + } + + fn from_yaml_validate_to_json(&self, value: &serde_yaml::Value) -> Result { + let integration_github = serde_yaml::from_value::(value.clone()).map_err(|e| { + let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); + format!("{}{}", e.to_string(), location) + })?; + serde_json::to_value(&integration_github).map_err(|e| e.to_string()) + } + + fn to_tool(&self) -> Box { + Box::new(ToolGithub {integration_github: self.integration_github.clone()}) as Box + } + + fn to_json(&self) -> Result { + serde_json::to_value(&self.integration_github).map_err(|e| e.to_string()) + } + + fn to_schema_json(&self) -> Value { + json_schema::().unwrap() + } + + fn default_value(&self) -> String { DEFAULT_GITHUB_INTEGRATION_YAML.to_string() } + fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/25/25231.png".to_string() } } #[async_trait] @@ -122,3 +152,10 @@ fn parse_command_args(args: &HashMap) -> Result, Stri Ok(parsed_args) } + +const DEFAULT_GITHUB_INTEGRATION_YAML: &str = r#" +# GitHub integration + +# 
GH_TOKEN: "GH_xxx" # To get a token, check out https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens +# gh_binary_path: "/opt/homebrew/bin/gh" # Uncomment to set a custom path for the gh binary, defaults to "gh" +"#; diff --git a/src/integrations/integr_gitlab.rs b/src/integrations/integr_gitlab.rs index 154ffa095..257369f8a 100644 --- a/src/integrations/integr_gitlab.rs +++ b/src/integrations/integr_gitlab.rs @@ -3,6 +3,7 @@ use std::collections::HashMap; use tokio::sync::Mutex as AMutex; use tokio::process::Command; use async_trait::async_trait; +use schemars::JsonSchema; use tracing::{error, info}; use serde::{Deserialize, Serialize}; @@ -11,27 +12,56 @@ use crate::call_validation::{ContextEnum, ChatMessage}; use crate::tools::tools_description::Tool; use serde_json::Value; +use crate::integrations::integr::{json_schema, Integration}; -#[derive(Clone, Serialize, Deserialize, Debug)] +#[derive(Clone, Serialize, Deserialize, Debug, JsonSchema, Default)] #[allow(non_snake_case)] pub struct IntegrationGitLab { + #[schemars(description = "Path to the GitLab CLI binary.")] pub glab_binary_path: Option, + #[schemars(description = "GitLab token for authentication.")] pub GITLAB_TOKEN: String, } +#[derive(Default)] pub struct ToolGitlab { - integration_gitlab: IntegrationGitLab, + pub integration_gitlab: IntegrationGitLab, } -impl ToolGitlab { - pub fn new_from_yaml(v: &serde_yaml::Value) -> Result { - let integration_gitlab = serde_yaml::from_value::(v.clone()).map_err(|e| { +impl Integration for ToolGitlab{ + fn name(&self) -> String { + "gitlab".to_string() + } + + fn update_from_json(&mut self, value: &Value) -> Result<(), String> { + let integration_gitlab = serde_json::from_value::(value.clone()) + .map_err(|e|e.to_string())?; + self.integration_gitlab = integration_gitlab; + Ok(()) + } + + fn from_yaml_validate_to_json(&self, value: &serde_yaml::Value) -> Result { + let integration_gitlab = serde_yaml::from_value::(value.clone()).map_err(|e| { let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); format!("{}{}", e.to_string(), location) })?; - Ok(Self { integration_gitlab }) + serde_json::to_value(&integration_gitlab).map_err(|e| e.to_string()) + } + + fn to_tool(&self) -> Box { + Box::new(ToolGitlab {integration_gitlab: self.integration_gitlab.clone()}) as Box } + + fn to_json(&self) -> Result { + serde_json::to_value(&self.integration_gitlab).map_err(|e| e.to_string()) + } + + fn to_schema_json(&self) -> Value { + json_schema::().unwrap() + } + fn default_value(&self) -> String { DEFAULT_GITLAB_INTEGRATION_YAML.to_string() } + fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/5968/5968853.png".to_string() } } #[async_trait] @@ -121,3 +151,10 @@ fn parse_command_args(args: &HashMap) -> Result, Stri Ok(parsed_args) } + +const DEFAULT_GITLAB_INTEGRATION_YAML: &str = r#" +# GitLab integration: install on mac using "brew install glab" + +# GITLAB_TOKEN: "glpat-xxx" # To get a token, check out https://docs.gitlab.com/ee/user/profile/personal_access_tokens +# glab_binary_path: "/opt/homebrew/bin/glab" # Uncomment to set a custom path for the glab binary, defaults to "glab" +"#; diff --git a/src/integrations/integr_pdb.rs b/src/integrations/integr_pdb.rs index e1e2845d5..d516f13c6 100644 --- a/src/integrations/integr_pdb.rs +++ b/src/integrations/integr_pdb.rs @@ -9,6 +9,7 @@ use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; use 
tokio::process::{Command, Child, ChildStdin, ChildStdout, ChildStderr}; use tokio::time::Duration; use async_trait::async_trait; +use schemars::JsonSchema; use tracing::{error, info}; use serde::{Deserialize, Serialize}; @@ -16,18 +17,24 @@ use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ContextEnum, ChatMessage, ChatContent}; use crate::integrations::sessions::{IntegrationSession, get_session_hashmap_key}; use crate::global_context::GlobalContext; +use crate::integrations::integr::{json_schema, Integration}; use crate::tools::tools_description::{Tool, ToolDesc, ToolParam}; use crate::integrations::process_io_utils::{first_n_chars, last_n_chars, last_n_lines, write_to_stdin_and_flush, blocking_read_until_token_or_timeout}; + const SESSION_TIMEOUT_AFTER_INACTIVITY: Duration = Duration::from_secs(30 * 60); const PDB_TOKEN: &str = "(Pdb)"; -#[derive(Clone, Serialize, Deserialize, Debug)] + +#[derive(Clone, Serialize, Deserialize, Debug, JsonSchema, Default)] pub struct IntegrationPdb { + #[schemars(description = "Path to the Python binary.")] pub python_path: Option, } + +#[derive(Default)] pub struct ToolPdb { - integration_pdb: IntegrationPdb, + pub integration_pdb: IntegrationPdb, } pub struct PdbSession { @@ -56,14 +63,39 @@ impl IntegrationSession for PdbSession } } -impl ToolPdb { - pub fn new_from_yaml(v: &serde_yaml::Value) -> Result { - let integration_pdb = serde_yaml::from_value::(v.clone()).map_err(|e| { +impl Integration for ToolPdb { + fn name(&self) -> String { + "pdb".to_string() + } + + fn update_from_json(&mut self, value: &Value) -> Result<(), String> { + let integration_pdb = serde_json::from_value::(value.clone()) + .map_err(|e|e.to_string())?; + self.integration_pdb = integration_pdb; + Ok(()) + } + + fn from_yaml_validate_to_json(&self, value: &serde_yaml::Value) -> Result { + let integration_github = serde_yaml::from_value::(value.clone()).map_err(|e| { let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); format!("{}{}", e.to_string(), location) })?; - Ok(Self { integration_pdb }) + serde_json::to_value(&integration_github).map_err(|e| e.to_string()) + } + + fn to_tool(&self) -> Box { + Box::new(ToolPdb {integration_pdb: self.integration_pdb.clone()}) as Box + } + + fn to_json(&self) -> Result { + serde_json::to_value(&self.integration_pdb).map_err(|e| e.to_string()) + } + + fn to_schema_json(&self) -> Value { + json_schema::().unwrap() } + fn default_value(&self) -> String { DEFAULT_PDB_INTEGRATION_YAML.to_string() } + fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/919/919852.png".to_string() } } #[async_trait] @@ -119,6 +151,15 @@ impl Tool for ToolPdb { Ok(tool_answer(output, tool_call_id)) } + fn command_to_match_against_confirm_deny( + &self, + args: &HashMap, + ) -> Result { + let commmand = parse_command(args)?; // todo: fix typo "commmand" + let command_args = split_command(&commmand)?; + Ok(command_args.join(" ")) + } + fn tool_description(&self) -> ToolDesc { ToolDesc { name: "pdb".to_string(), @@ -135,15 +176,6 @@ impl Tool for ToolPdb { parameters_required: vec!["command".to_string()], } } - - fn command_to_match_against_confirm_deny( - &self, - args: &HashMap, - ) -> Result { - let commmand = parse_command(args)?; - let command_args = split_command(&commmand)?; - Ok(command_args.join(" ")) - } } fn parse_command(args: &HashMap) -> Result { @@ -306,3 +338,9 @@ fn format_error(error_title: &str, error: &str) -> String 
"".to_string() } } + +const DEFAULT_PDB_INTEGRATION_YAML: &str = r#" +# Python debugger + +# python_path: "/opt/homebrew/bin/python3" # Uncomment to set a custom python path, defaults to "python3" +"#; diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index 7f759bca9..bc544e645 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -8,28 +8,61 @@ use serde_json::Value; use serde_yaml; use std::collections::HashMap; use std::sync::Arc; +use schemars::JsonSchema; use tokio::process::Command; use tokio::sync::Mutex as AMutex; +use crate::integrations::integr::{json_schema, Integration}; -#[derive(Clone, Serialize, Deserialize, Debug)] +#[derive(Clone, Serialize, Deserialize, Debug, JsonSchema, Default)] pub struct IntegrationPostgres { + #[schemars(description = "Path to the psql binary.")] pub psql_binary_path: Option, + #[schemars(description = "Connection string for the PSQL database.")] pub connection_string: String, } +#[derive(Default)] pub struct ToolPostgres { - integration_postgres: IntegrationPostgres, + pub integration_postgres: IntegrationPostgres, } -impl ToolPostgres { - pub fn new_from_yaml(v: &serde_yaml::Value) -> Result { - let integration_postgres = serde_yaml::from_value::(v.clone()).map_err(|e| { +impl Integration for ToolPostgres { + fn name(&self) -> String { + "postgres".to_string() + } + + fn update_from_json(&mut self, value: &Value) -> Result<(), String> { + let integration_postgres = serde_json::from_value::(value.clone()) + .map_err(|e|e.to_string())?; + self.integration_postgres = integration_postgres; + Ok(()) + } + + fn from_yaml_validate_to_json(&self, value: &serde_yaml::Value) -> Result { + let integration_github = serde_yaml::from_value::(value.clone()).map_err(|e| { let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); format!("{}{}", e.to_string(), location) })?; - Ok(Self { integration_postgres }) + serde_json::to_value(&integration_github).map_err(|e| e.to_string()) + } + + fn to_tool(&self) -> Box { + Box::new(ToolPostgres {integration_postgres: self.integration_postgres.clone()}) as Box + } + + fn to_json(&self) -> Result { + serde_json::to_value(&self.integration_postgres).map_err(|e| e.to_string()) + } + + fn to_schema_json(&self) -> Value { + json_schema::().unwrap() } + fn default_value(&self) -> String { DEFAULT_POSTGRES_INTEGRATION_YAML.to_string() } + fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/5968/5968342.png".to_string() } +} + +impl ToolPostgres { async fn run_psql_command(&self, query: &str) -> Result { let psql_command = self.integration_postgres.psql_binary_path.as_deref().unwrap_or("psql"); @@ -110,3 +143,10 @@ impl Tool for ToolPostgres { unsafe { &mut DEFAULT_USAGE } } } + +const DEFAULT_POSTGRES_INTEGRATION_YAML: &str = r#" +# Postgres database + +# psql_binary_path: "/path/to/psql" # Uncomment to set a custom path for the psql binary, defaults to "psql" +# connection_string: "postgresql://username:password@localhost/dbname" # To get a connection string, check out https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING +"#; diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index d004f0247..b7a82fffa 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -1,3 +1,22 @@ +use std::path::PathBuf; +use std::sync::Arc; +use indexmap::IndexMap; +use serde_json::json; +use tracing::{info, warn}; +use tokio::sync::{Mutex as 
AMutex, RwLock as ARwLock}; + +use crate::global_context::GlobalContext; +use crate::integrations::integr::Integration; +use crate::integrations::integr_chrome::ToolChrome; +use crate::integrations::integr_github::ToolGithub; +use crate::integrations::integr_gitlab::ToolGitlab; +use crate::integrations::integr_pdb::ToolPdb; +use crate::integrations::integr_postgres::ToolPostgres; +use crate::tools::tools_description::Tool; +use crate::yaml_configs::create_configs::{integrations_enabled_cfg, read_yaml_into_value}; + +pub mod sessions; +pub mod process_io_utils; pub mod integr_github; pub mod integr_gitlab; pub mod integr_pdb; @@ -6,6 +25,217 @@ pub mod docker; pub mod sessions; pub mod process_io_utils; pub mod integr_postgres; +mod integr; + + +<<<<<<< HEAD +// hint: when adding integration, update: +// DEFAULT_INTEGRATION_VALUES, INTEGRATION_ICONS, integrations_paths, validate_integration_value, load_integration_tools, load_integration_schema_and_json +======= +// when adding integration, update: get_empty_integrations (2 occurrences) +>>>>>>> 9b1345a1 (simplified code) + + +pub fn get_empty_integrations() -> IndexMap> { + let integration_names = ["github", "gitlab", "pdb", "postgres", "chrome"]; + let mut integrations = IndexMap::new(); + for i_name in integration_names { + let i = match i_name { + "github" => Box::new(ToolGithub {..Default::default()} ) as Box, + "gitlab" => Box::new(ToolGitlab {..Default::default()} ) as Box, + "pdb" => Box::new(ToolPdb {..Default::default()} ) as Box, + "postgres" => Box::new(ToolPostgres {..Default::default()} ) as Box, + "chrome" => Box::new(ToolChrome {..Default::default()} ) as Box, + _ => panic!("Unknown integration name: {}", i_name) + }; + integrations.insert(i_name.to_string(), i); + } + integrations +} + +pub fn get_integration_path(cache_dir: &PathBuf, name: &str) -> PathBuf { + cache_dir.join("integrations.d").join(format!("{}.yaml", name)) +} + +pub async fn get_integrations( + gcx: Arc>, +) -> Result>, String> { + let integrations = get_empty_integrations(); + let cache_dir = gcx.read().await.cache_dir.clone(); + + let integrations_yaml_value = read_yaml_into_value(&cache_dir.join("integrations.yaml")).await?; + + let mut results = IndexMap::new(); + for (i_name, mut i) in integrations { + let path = get_integration_path(&cache_dir, &i_name); + let j_value = json_for_integration(&path, integrations_yaml_value.get(&i_name), &i).await?; + + if j_value.get("detail").is_some() { + warn!("failed to load integration {}: {}", i_name, j_value.get("detail").unwrap()); + } else { + if let Err(e) = i.update_from_json(&j_value) { + warn!("failed to load integration {}: {}", i_name, e); + }; + } + results.insert(i_name.clone(), i); + } + + Ok(results) +} + +pub async fn validate_integration_value(name: &str, value: serde_yaml::Value) -> Result { + let integrations = get_empty_integrations(); + + match integrations.get(name) { + Some(i) => { + let j_value = i.from_yaml_validate_to_json(&value)?; + let yaml_value = serde_yaml::to_value(&j_value).map_err(|e| e.to_string())?; + Ok(yaml_value) + }, + None => Err(format!("Integration {} is not defined", name)) + } +} + +pub async fn load_integration_tools( + gcx: Arc>, +) -> IndexMap>>> { + let paths = integrations_paths(gcx.clone()).await; + let integrations_yaml_value = { + let cache_dir = gcx.read().await.cache_dir.clone(); + let yaml_path = cache_dir.join("integrations.yaml"); + read_yaml_into_value(&yaml_path).await? 
+ }; + let cache_dir = gcx.read().await.cache_dir.clone(); + let enabled_path = cache_dir.join("integrations-enabled.yaml"); + let enabled = match integrations_enabled_cfg(&enabled_path).await { + serde_yaml::Value::Mapping(map) => map.into_iter().filter_map(|(k, v)| { + if let (serde_yaml::Value::String(key), serde_yaml::Value::Bool(value)) = (k, v) { + Some((key, value)) + } else { + None + } + }).collect::>(), + _ => std::collections::HashMap::new(), + }; + + let integrations = get_integrations(gcx.clone()).await?; + + let mut tools = IndexMap::new(); + for (i_name, i) in integrations.iter() { + if !enabled.get(i_name).unwrap_or(&false) { + info!("Integration {} is disabled", i_name); + continue; + } + let tool = i.to_tool(); + tools.insert(i_name.clone(), Arc::new(AMutex::new(tool))); + } + Ok(tools) +} + +pub async fn json_for_integration( + yaml_path: &PathBuf, + value_from_integrations: Option<&serde_yaml::Value>, + integration: &Box, +) -> Result { + let tool_name = integration.name().clone(); + + let value = if yaml_path.exists() { + match read_yaml_into_value(yaml_path).await { + Ok(value) => integration.from_yaml_validate_to_json(&value).unwrap_or_else(|e| { + let e = format!("Problem converting integration to JSON: {}", e); + json!({"detail": e.to_string()}) + }), + Err(e) => { + let e = format!("Problem reading YAML from {}: {}", yaml_path.display(), e); + json!({"detail": e.to_string()}) + } + } + } else { + json!({"detail": format!("Cannot read {}. Probably, file does not exist", yaml_path.display())}) + }; + + let value_from_integrations = value_from_integrations.map_or(json!({"detail": format!("tool {tool_name} is not defined in integrations.yaml")}), |value| { + integration.from_yaml_validate_to_json(value).unwrap_or_else(|e| { + let e = format!("Problem converting integration to JSON: {}", e); + json!({"detail": e.to_string()}) + }) + }); + + match (value.get("detail"), value_from_integrations.get("detail")) { + (None, None) => { + Err(format!("Tool {tool_name} exists in both {tool_name}.yaml and integrations.yaml. 
Consider removing one of them.")) + }, + (Some(_), None) => { + Ok(value_from_integrations) + }, + (None, Some(_)) => { + Ok(value) + } + (Some(_), Some(_)) => { + Ok(value) + } + } +<<<<<<< HEAD + + Ok(()) +} + +async fn load_tool_from_yaml( + yaml_path: Option<&PathBuf>, + tool_constructor: fn(&serde_yaml::Value) -> Result, + value_from_integrations: Option<&serde_yaml::Value>, + enabled: Option<&bool>, + integrations: &mut IndexMap>>>, +) -> Result<(), String> { + let yaml_path = yaml_path.as_ref().expect("No yaml path"); + let tool_name = yaml_path.file_stem().expect("No file name").to_str().expect("No file name").to_string(); + if !enabled.unwrap_or(&false) { + info!("Integration {} is disabled", tool_name); + return Ok(()); + } + let tool = if yaml_path.exists() { + match read_yaml_into_value(yaml_path).await { + Ok(value) => { + match tool_constructor(&value) { + Ok(tool) => { + // integrations.insert(tool_name, Arc::new(AMutex::new(Box::new(tool) as Box))); + Some(tool) + } + Err(e) => { + warn!("Problem in {}: {}", yaml_path.display(), e); + None + } + } + } + Err(e) => { + warn!("Problem reading {:?}: {}", yaml_path, e); + None + } + } + } else { + None + }; + + let tool_from_integrations = value_from_integrations + .and_then(|value| match tool_constructor(&value) { + Ok(tool) => Some(tool), + Err(_) => None + }); + + match (tool, tool_from_integrations) { + (Some(_), Some(_)) => { + return Err(format!("Tool {tool_name} exists in both {tool_name}.yaml and integrations.yaml. Consider removing one of them.")); + }, + (Some(tool), None) | (None, Some(tool)) => { + integrations.insert(tool_name.clone(), Arc::new(AMutex::new(Box::new(tool) as Box))); + }, + _ => {} + } + + Ok(()) +======= +>>>>>>> 9b1345a1 (simplified code) +} pub const INTEGRATIONS_DEFAULT_YAML: &str = r#"# This file is used to configure integrations in Refact Agent. # If there is a syntax error in this file, no integrations will work. @@ -29,41 +259,6 @@ commands_deny: - "glab auth token*" -# GitHub integration -#github: -# GH_TOKEN: "GH_xxx" # To get a token, check out https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens -# gh_binary_path: "/opt/homebrew/bin/gh" # Uncomment to set a custom path for the gh binary, defaults to "gh" - - -# GitLab integration: install on mac using "brew install glab" -#gitlab: -# GITLAB_TOKEN: "glpat-xxx" # To get a token, check out https://docs.gitlab.com/ee/user/profile/personal_access_tokens -# glab_binary_path: "/opt/homebrew/bin/glab" # Uncomment to set a custom path for the glab binary, defaults to "glab" - - -# Python debugger -#pdb: -# python_path: "/opt/homebrew/bin/python3" # Uncomment to set a custom python path, defaults to "python3" - - -# Chrome web browser -chrome: - # This can be path to your chrome binary. 
You can install with "npx @puppeteer/browsers install chrome@stable", read - # more here https://developer.chrome.com/blog/chrome-for-testing/?utm_source=Fibery&utm_medium=iframely - #chrome_path: "/Users/me/my_path/chrome/mac_arm-130.0.6723.69/chrome-mac-arm64/Google Chrome for Testing.app/Contents/MacOS/Google Chrome for Testing" - # Or you can give it ws:// path, read more here https://developer.chrome.com/docs/devtools/remote-debugging/local-server/ - # In that case start chrome with --remote-debugging-port - chrome_path: "ws://127.0.0.1:6006/" - window_size: [1024, 768] - idle_browser_timeout: 600 - - -# Postgres database -#postgres: -# psql_binary_path: "/path/to/psql" # Uncomment to set a custom path for the psql binary, defaults to "psql" -# connection_string: "postgresql://username:password@localhost/dbname" # To get a connection string, check out https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING - - # Command line: things you can call and immediately get an answer #cmdline: # run_make: diff --git a/src/tools/tools_description.rs b/src/tools/tools_description.rs index 872cf8e76..57c01551d 100644 --- a/src/tools/tools_description.rs +++ b/src/tools/tools_description.rs @@ -1,25 +1,23 @@ use indexmap::IndexMap; use std::collections::HashMap; -use std::path::PathBuf; use std::sync::Arc; use serde_json::{Value, json}; use serde::{Deserialize, Serialize}; use async_trait::async_trait; use tokio::sync::RwLock as ARwLock; use tokio::sync::Mutex as AMutex; +use tracing::error; use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ChatUsage, ContextEnum}; use crate::global_context::GlobalContext; -use crate::integrations::integr_github::ToolGithub; -use crate::integrations::integr_gitlab::ToolGitlab; -use crate::integrations::integr_pdb::ToolPdb; -use crate::integrations::integr_chrome::ToolChrome; -use crate::integrations::integr_postgres::ToolPostgres; + +use crate::integrations::load_integration_tools; +use crate::yaml_configs::create_configs::read_yaml_into_value; use crate::integrations::docker::integr_docker::ToolDocker; #[derive(Serialize, Deserialize, Debug, Clone)] -pub struct CommandsRequireConfirmationConfig { // todo: fix typo +pub struct CommandsRequireConfirmationConfig { pub commands_need_confirmation: Vec, pub commands_deny: Vec, } @@ -143,6 +141,14 @@ pub async fn tools_merged_and_filtered( } // #[cfg(feature="vecdb")] // tools_all.insert("knowledge".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_knowledge::ToolGetKnowledge{}) as Box))); + // match load_integration_tools(gcx.clone()).await { + // Ok(integrations) => { + // tools_all.extend(integrations); + // } + // Err(e) => error!("Failed to load integrations: {}", e), + // } + // #[cfg(feature="vecdb")] + // tools_all.insert("knowledge".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_knowledge::ToolGetKnowledge{}) as Box))); } if let Some(cmdline) = integrations_value.get("cmdline") { @@ -155,6 +161,12 @@ pub async fn tools_merged_and_filtered( tools_all.extend(cmdline_tools); } + // let integrations = load_integration_tools(gcx.clone()).await; + // tools_all.extend(integrations); + // #[cfg(feature="vecdb")] + // tools_all.insert("knowledge".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_knowledge::ToolGetKnowledge{}) as Box))); + // } + let mut filtered_tools = IndexMap::new(); for (tool_name, tool_arc) in tools_all { let tool_locked = tool_arc.lock().await; @@ -457,7 +469,7 @@ pub struct ToolDictDeserialize { } pub 
async fn tool_description_list_from_yaml( - tools: indexmap::IndexMap>>>, + tools: IndexMap>>>, turned_on: &Vec, allow_experimental: bool, ) -> Result, String> { diff --git a/src/yaml_configs/create_configs.rs b/src/yaml_configs/create_configs.rs index 380a12130..b2bae4a34 100644 --- a/src/yaml_configs/create_configs.rs +++ b/src/yaml_configs/create_configs.rs @@ -5,40 +5,77 @@ use tokio::fs::File; use tokio::io::AsyncWriteExt; use sha2::{Sha256, Digest}; use serde_yaml; -use std::path::Path; - +use std::path::{Path, PathBuf}; +use tracing::{error, warn}; use crate::global_context::GlobalContext; +use crate::integrations::{get_empty_integrations, get_integration_path}; const DEFAULT_CHECKSUM_FILE: &str = "default-checksums.yaml"; -pub async fn yaml_configs_try_create_all(gcx: Arc>) -> String -{ + +pub async fn yaml_configs_try_create_all(gcx: Arc>) -> String { let mut results = Vec::new(); + let cache_dir = gcx.read().await.cache_dir.clone(); + let files = vec![ ("bring-your-own-key.yaml", crate::caps::BRING_YOUR_OWN_KEY_SAMPLE), ("customization.yaml", crate::yaml_configs::customization_compiled_in::COMPILED_IN_INITIAL_USER_YAML), ("privacy.yaml", crate::privacy_compiled_in::COMPILED_IN_INITIAL_PRIVACY_YAML), ("integrations.yaml", crate::integrations::INTEGRATIONS_DEFAULT_YAML), ]; + for (file_name, content) in files { - match _yaml_file_exists_or_create(gcx.clone(), file_name, content).await { - Ok(result) => results.push(result), - Err(e) => { - tracing::warn!("{}", e); - results.push(format!("Error processing {}: {}", file_name, e)); + let file_path = cache_dir.join(file_name); + if let Err(e) = _yaml_file_exists_or_create(gcx.clone(), &file_path, content).await { + warn!("{}", e); + results.push(format!("Error processing {:?}: {}", file_path, e)); + } else { + results.push(file_path.to_string_lossy().to_string()); + } + } + + let integrations_d = cache_dir.join("integrations.d"); + if let Err(e) = tokio::fs::create_dir_all(&integrations_d).await { + warn!("Failed to create directory {:?}: {}", integrations_d, e); + results.push(format!("Error creating directory {:?}: {}", integrations_d, e)); + } + let integrations_enabled = cache_dir.join("integrations-enabled.yaml"); + let integrations = get_empty_integrations(); + + for (file_name, content) in integrations.iter().map(|(k, v)| (k.clone(), v.default_value())) { + let file_path = get_integration_path(&cache_dir, &file_name); + if let Err(e) = _yaml_file_exists_or_create(gcx.clone(), &file_path, &content).await { + warn!("{}", e); + results.push(format!("Error processing {:?}: {}", file_path, e)); + } else { + results.push(file_path.to_string_lossy().to_string()); + } + let integr_name = file_path.file_stem().unwrap().to_string_lossy().to_string(); + let mut enabled_cfg = integrations_enabled_cfg(&integrations_enabled).await; + if let None = enabled_cfg.get(&integr_name) { + if let serde_yaml::Value::Mapping(ref mut map) = enabled_cfg { + map.insert(serde_yaml::Value::String(integr_name), serde_yaml::Value::Bool(false)); + } + if let Err(e) = write_yaml_value(&integrations_enabled, &enabled_cfg).await { + error!("Failed to write {}: {}", integrations_enabled.display(), e); + panic!("{}", e); } } } - results[0].clone() // path to bring-your-own-key.yaml, relied upon by first run procedure -} + results.get(0).cloned().unwrap_or_default() +} -async fn _yaml_file_exists_or_create(gcx: Arc>, config_name: &str, the_default: &str) -> Result +async fn _yaml_file_exists_or_create( + gcx: Arc>, + config_path: &PathBuf, + the_default: &str +) -> 
Result { let cache_dir = gcx.read().await.cache_dir.clone(); - let config_path = cache_dir.join(config_name); let config_path_str = config_path.to_string_lossy().to_string(); + let config_name = config_path.file_name().ok_or_else(|| format!("{} is not a file", config_path.display()))?.to_string_lossy().to_string(); let checksums_dict = read_checksums(&cache_dir).await?; @@ -50,7 +87,7 @@ async fn _yaml_file_exists_or_create(gcx: Arc>, config_na return Ok(config_path_str); } let existing_checksum = calculate_checksum(&existing_content); - if existing_checksum == checksums_dict.get(config_name).map(|s| s.as_str()).unwrap_or("") { + if existing_checksum == checksums_dict.get(&config_name).map(|s| s.as_str()).unwrap_or("") { tracing::info!("\n * * * detected that {} is a default config from a previous version of this binary, no changes made by human, overwrite * * *\n", config_path.display()); } else { // normal exit, config changed by user @@ -101,3 +138,38 @@ async fn update_checksum(cache_dir: &Path, config_name: String, checksum: &str) .map_err(|e| format!("failed to write {}: {}", DEFAULT_CHECKSUM_FILE, e))?; Ok(()) } + +pub async fn integrations_enabled_cfg( + integrations_enabled_path: &PathBuf, +) -> serde_yaml::Value { + read_yaml_into_value(integrations_enabled_path).await.unwrap_or_else(|_| serde_yaml::Value::Mapping(Default::default())) +} + +pub async fn read_yaml_into_value(yaml_path: &PathBuf) -> Result { + let file = std::fs::File::open(&yaml_path).map_err( + |e| format!("Failed to open {}: {}", yaml_path.display(), e) + )?; + + let reader = std::io::BufReader::new(file); + serde_yaml::from_reader(reader).map_err( + |e| { + let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); + format!("Failed to parse {}{}: {}", yaml_path.display(), location, e) + } + ) +} + +pub async fn write_yaml_value(path: &Path, value: &serde_yaml::Value) -> Result<(), String> { + let content = serde_yaml::to_string(value).map_err(|e| format!("Failed to serialize YAML: {}", e))?; + + let mut file = tokio::fs::OpenOptions::new() + .write(true) + .truncate(true) + .create(true) + .open(path) + .await + .map_err(|e| format!("Failed to open file {}: {}", path.display(), e))?; + + AsyncWriteExt::write_all(&mut file, content.as_bytes()).await + .map_err(|e| format!("Failed to write to file {}: {}", path.display(), e)) +} From 521081ebd9a8beaa2334acd96fe5d31c410d1ee3 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Fri, 15 Nov 2024 08:02:59 +0100 Subject: [PATCH 002/185] renames 1 yaml_schema 1 WIP move things around in /v1 remove "ongoing' latest and greatest WIP in confuguration prompts config_chat.rs working /chat-configuration merge fix merge fix merge cleanup rename a file chat_client: propogate error fix successfully creates config to postgres in a django project introduce %CURRENT_CONFIG% warnings WIP leave only postgres rename to v1_integrations.rs remove integrations_list.rs back all_integrations_with_icon v1 /v1/integrations-all-with-icons test_integration_schemas project_path everywhere /v1/integration-save works cleanup ooops mkdir first improve project dir matching rename to split_path_into_project_and_integration debug integration_config_save, especially "available" oops psql_binary_path setting up postgres works --- .../refact/chat_client.py | 3 +- src/call_validation.rs | 17 +- src/global_context.rs | 3 + src/http/routers/v1.rs | 45 +- src/http/routers/v1/chat.rs | 58 ++- src/http/routers/v1/handlers_memdb.rs | 49 -- 
src/http/routers/v1/integrations.rs | 206 --------- src/http/routers/v1/system_prompt.rs | 2 +- src/http/routers/v1/v1_integrations.rs | 132 ++++++ src/integrations/config_chat.rs | 76 +++ src/integrations/integr.rs | 42 -- src/integrations/integr_abstract.rs | 6 + src/integrations/integr_chrome.rs | 71 +-- src/integrations/integr_github.rs | 25 +- src/integrations/integr_gitlab.rs | 29 +- src/integrations/integr_pdb.rs | 39 +- src/integrations/integr_postgres.rs | 170 +++++-- src/integrations/mod.rs | 433 +++++++++--------- src/integrations/running_integrations.rs | 57 +++ src/integrations/setting_up_integrations.rs | 300 ++++++++++++ src/integrations/yaml_schema.rs | 61 +++ src/main.rs | 3 +- src/subchat.rs | 3 +- src/tools/tool_patch_aux/tickets_parsing.rs | 2 +- src/tools/tools_description.rs | 78 ++-- src/vecdb/vdb_highlev.rs | 116 ++--- src/yaml_configs/create_configs.rs | 90 ++-- src/yaml_configs/customization_compiled_in.rs | 51 ++- src/yaml_configs/customization_loader.rs | 3 +- 29 files changed, 1280 insertions(+), 890 deletions(-) delete mode 100644 src/http/routers/v1/integrations.rs create mode 100644 src/http/routers/v1/v1_integrations.rs create mode 100644 src/integrations/config_chat.rs delete mode 100644 src/integrations/integr.rs create mode 100644 src/integrations/integr_abstract.rs create mode 100644 src/integrations/running_integrations.rs create mode 100644 src/integrations/setting_up_integrations.rs create mode 100644 src/integrations/yaml_schema.rs diff --git a/python_binding_and_cmdline/refact/chat_client.py b/python_binding_and_cmdline/refact/chat_client.py index 594220663..aea27ae83 100644 --- a/python_binding_and_cmdline/refact/chat_client.py +++ b/python_binding_and_cmdline/refact/chat_client.py @@ -275,12 +275,13 @@ async def ask_using_http( if not end_of_http_chunk: continue line_str = buffer.decode('utf-8').strip() - buffer = b"" if not line_str: + buffer = b"" continue if not line_str.startswith("data: "): print("unrecognized streaming data (1):", line_str) continue + buffer = b"" line_str = line_str[6:] if line_str == "[DONE]": break diff --git a/src/call_validation.rs b/src/call_validation.rs index 6c8ac53c1..6f37ef3c0 100644 --- a/src/call_validation.rs +++ b/src/call_validation.rs @@ -137,11 +137,22 @@ pub struct ChatUsage { pub struct ChatMessage { pub role: String, pub content: ChatContent, + #[serde(default, skip_serializing_if="is_none")] pub tool_calls: Option>, + #[serde(default, skip_serializing_if="is_empty_string")] pub tool_call_id: String, + #[serde(default, skip_serializing_if="is_none")] pub usage: Option, } +fn is_none(opt: &Option) -> bool { + opt.is_none() +} + +fn is_empty_string(something: &String) -> bool { + something.is_empty() +} + #[derive(Debug, Serialize, Deserialize, Clone)] pub struct SubchatParameters { pub subchat_model: String, @@ -154,7 +165,7 @@ pub struct SubchatParameters { pub subchat_max_new_tokens: usize, } -#[derive(Debug, Deserialize, Clone)] +#[derive(Debug, Deserialize, Clone, Default)] pub struct ChatPost { pub messages: Vec, #[serde(default)] @@ -179,10 +190,12 @@ pub struct ChatPost { pub subchat_tool_parameters: IndexMap, // tool_name: {model, allowed_context, temperature} #[serde(default="PostprocessSettings::new")] pub postprocess_parameters: PostprocessSettings, - #[allow(dead_code)] + // #[allow(dead_code)] #[serde(default)] pub chat_id: String, #[serde(default)] + pub current_config_file: String, + #[serde(default)] pub style: Option, } diff --git a/src/global_context.rs b/src/global_context.rs index 
98ab17f69..eeeb8c22b 100644 --- a/src/global_context.rs +++ b/src/global_context.rs @@ -139,6 +139,7 @@ pub struct GlobalContext { pub http_client: reqwest::Client, pub http_client_slowdown: Arc, pub cache_dir: PathBuf, + pub config_dir: PathBuf, pub caps: Option>>, pub caps_reading_lock: Arc>, pub caps_last_error: String, @@ -300,6 +301,7 @@ pub async fn block_until_signal( pub async fn create_global_context( cache_dir: PathBuf, + config_dir: PathBuf, ) -> (Arc>, std::sync::mpsc::Receiver, Arc, CommandLine) { let cmdline = CommandLine::from_args(); let (ask_shutdown_sender, ask_shutdown_receiver) = std::sync::mpsc::channel::(); @@ -320,6 +322,7 @@ pub async fn create_global_context( http_client, http_client_slowdown: Arc::new(Semaphore::new(2)), cache_dir, + config_dir, caps: None, caps_reading_lock: Arc::new(AMutex::::new(false)), caps_last_error: String::new(), diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index 5049dc194..bc9a6d749 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -18,7 +18,7 @@ use crate::http::routers::v1::at_commands::{handle_v1_command_completion, handle use crate::http::routers::v1::at_tools::{handle_v1_tools, handle_v1_tools_check_if_confirmation_needed, handle_v1_tools_execute}; use crate::http::routers::v1::caps::handle_v1_caps; use crate::http::routers::v1::caps::handle_v1_ping; -use crate::http::routers::v1::chat::{handle_v1_chat, handle_v1_chat_completions}; +use crate::http::routers::v1::chat::{handle_v1_chat, handle_v1_chat_completions, handle_v1_chat_configuration}; use crate::http::routers::v1::dashboard::get_dashboard_plots; use crate::http::routers::v1::docker::{handle_v1_docker_container_action, handle_v1_docker_container_list}; use crate::http::routers::v1::graceful_shutdown::handle_v1_graceful_shutdown; @@ -37,8 +37,8 @@ use crate::http::routers::v1::system_prompt::handle_v1_system_prompt; #[cfg(feature="vecdb")] use crate::http::routers::v1::vecdb::{handle_v1_vecdb_search, handle_v1_vecdb_status}; #[cfg(feature="vecdb")] -use crate::http::routers::v1::handlers_memdb::{handle_mem_query, handle_mem_add, handle_mem_erase, handle_mem_update_used, handle_mem_block_until_vectorized, handle_mem_list, handle_ongoing_update_or_create, handle_ongoing_dump}; -use crate::http::routers::v1::integrations::{handle_v1_integrations, handle_v1_integrations_icons, handle_v1_integrations_save}; +use crate::http::routers::v1::handlers_memdb::{handle_mem_query, handle_mem_add, handle_mem_erase, handle_mem_update_used, handle_mem_block_until_vectorized, handle_mem_list}; +use crate::http::routers::v1::v1_integrations::{handle_v1_integration_get, handle_v1_integration_save, handle_v1_integrations}; use crate::http::utils::telemetry_wrapper; pub mod code_completion; @@ -66,33 +66,29 @@ mod patch; pub mod handlers_memdb; #[cfg(feature="vecdb")] pub mod vecdb; -mod integrations; +mod v1_integrations; pub fn make_v1_router() -> Router { let builder = Router::new() .route("/ping", telemetry_get!(handle_v1_ping)) + .route("/graceful-shutdown", telemetry_get!(handle_v1_graceful_shutdown)) .route("/code-completion", telemetry_post!(handle_v1_code_completion_web)) .route("/code-lens", telemetry_post!(handle_v1_code_lens)) .route("/chat", telemetry_post!(handle_v1_chat)) .route("/chat/completions", telemetry_post!(handle_v1_chat_completions)) // standard + .route("/chat-configuration", telemetry_post!(handle_v1_chat_configuration)) + .route("/telemetry-network", telemetry_post!(handle_v1_telemetry_network)) .route("/snippet-accepted", 
telemetry_post!(handle_v1_snippet_accepted)) .route("/caps", telemetry_get!(handle_v1_caps)) - .route("/graceful-shutdown", telemetry_get!(handle_v1_graceful_shutdown)) - - .route("/at-command-completion", telemetry_post!(handle_v1_command_completion)) - .route("/at-command-preview", telemetry_post!(handle_v1_command_preview)) .route("/tools", telemetry_get!(handle_v1_tools)) .route("/tools-check-if-confirmation-needed", telemetry_post!(handle_v1_tools_check_if_confirmation_needed)) .route("/tools-execute", telemetry_post!(handle_v1_tools_execute)) - .route("/integrations", telemetry_get!(handle_v1_integrations)) - .route("/integrations-save", telemetry_post!(handle_v1_integrations_save)) - .route("/integrations-icons", telemetry_get!(handle_v1_integrations_icons)) .route("/lsp-initialize", telemetry_post!(handle_v1_lsp_initialize)) .route("/lsp-did-changed", telemetry_post!(handle_v1_lsp_did_change)) @@ -100,32 +96,42 @@ pub fn make_v1_router() -> Router { .route("/lsp-remove-folder", telemetry_post!(handle_v1_lsp_remove_folder)) .route("/lsp-set-active-document", telemetry_post!(handle_v1_set_active_document)) - .route("/get-dashboard-plots", telemetry_get!(get_dashboard_plots)) - .route("/ast-file-symbols", telemetry_post!(handle_v1_ast_file_symbols)) .route("/ast-file-dump", telemetry_post!(handle_v1_ast_file_dump)) .route("/ast-status", telemetry_get!(handle_v1_ast_status)) .route("/rag-status", telemetry_get!(handle_v1_rag_status)) .route("/config-path", telemetry_get!(handle_v1_config_path)) - // experimental + .route("/customization", telemetry_get!(handle_v1_customization)) .route("/sync-files-extract-tar", telemetry_post!(handle_v1_sync_files_extract_tar)) - .route("/code-completion-prompt", telemetry_post!(handle_v1_code_completion_prompt)) + .route("/system-prompt", telemetry_post!(handle_v1_system_prompt)) // because it works remotely - .route("/system-prompt", telemetry_post!(handle_v1_system_prompt)) + .route("/at-command-completion", telemetry_post!(handle_v1_command_completion)) + .route("/at-command-preview", telemetry_post!(handle_v1_command_preview)) + + .route("/fullpath", telemetry_post!(handle_v1_fullpath)) + + .route("/integrations", telemetry_get!(handle_v1_integrations)) + .route("/integration-get", telemetry_post!(handle_v1_integration_get)) + .route("/integration-save", telemetry_post!(handle_v1_integration_save)) .route("/docker-container-list", telemetry_post!(handle_v1_docker_container_list)) .route("/docker-container-action", telemetry_post!(handle_v1_docker_container_action)) .route("/patch-single-file-from-ticket", telemetry_post!(handle_v1_patch_single_file_from_ticket)) + // .route("/patch-apply-all", telemetry_post!(handle_v1_patch_single_file_from_ticket)) + + // experimental + .route("/get-dashboard-plots", telemetry_get!(get_dashboard_plots)) + + .route("/code-completion-prompt", telemetry_post!(handle_v1_code_completion_prompt)) .route("/subchat", telemetry_post!(handle_v1_subchat)) .route("/subchat-single", telemetry_post!(handle_v1_subchat_single)) - - .route("/fullpath", telemetry_post!(handle_v1_fullpath)); + ; #[cfg(feature="vecdb")] let builder = builder @@ -137,8 +143,7 @@ pub fn make_v1_router() -> Router { .route("/mem-update-used", telemetry_post!(handle_mem_update_used)) .route("/mem-block-until-vectorized", telemetry_get!(handle_mem_block_until_vectorized)) .route("/mem-list", telemetry_get!(handle_mem_list)) - .route("/ongoing-update", telemetry_post!(handle_ongoing_update_or_create)) - .route("/ongoing-dump", 
telemetry_get!(handle_ongoing_dump)); + ; builder.layer(CorsLayer::very_permissive()) } diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index b725104df..3337cc1d6 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -48,18 +48,41 @@ pub async fn lookup_chat_scratchpad( pub async fn handle_v1_chat_completions( // standard openai-style handler - Extension(global_context): Extension, + Extension(gcx): Extension, body_bytes: hyper::body::Bytes, ) -> Result, ScratchError> { - chat(global_context, body_bytes, false).await + let mut chat_post = serde_json::from_slice::(&body_bytes).map_err(|e| { + info!("chat handler cannot parse input:\n{:?}", body_bytes); + ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) + })?; + let mut messages = deserialize_messages_from_post(&chat_post.messages)?; + _chat(gcx, &mut chat_post, &mut messages, false).await +} + +pub async fn handle_v1_chat_configuration( + Extension(gcx): Extension, + body_bytes: hyper::body::Bytes, +) -> Result, ScratchError> { + let mut chat_post = serde_json::from_slice::(&body_bytes).map_err(|e| { + info!("chat handler cannot parse input:\n{:?}", body_bytes); + ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) + })?; + let mut messages = deserialize_messages_from_post(&chat_post.messages)?; + crate::integrations::config_chat::mix_config_messages(gcx.clone(), &mut messages, &chat_post.current_config_file).await; + _chat(gcx, &mut chat_post, &mut messages, true).await } pub async fn handle_v1_chat( // less-standard openai-style handler that sends role="context_*" messages first, rewrites the user message - Extension(global_context): Extension, + Extension(gcx): Extension, body_bytes: hyper::body::Bytes, ) -> Result, ScratchError> { - chat(global_context, body_bytes, true).await + let mut chat_post: ChatPost = serde_json::from_slice::(&body_bytes).map_err(|e| { + info!("chat handler cannot parse input:\n{:?}", body_bytes); + ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) + })?; + let mut messages = deserialize_messages_from_post(&chat_post.messages)?; + _chat(gcx, &mut chat_post, &mut messages, true).await } pub fn deserialize_messages_from_post(messages: &Vec) -> Result, ScratchError> { @@ -73,17 +96,12 @@ pub fn deserialize_messages_from_post(messages: &Vec) -> Resu Ok(messages) } -async fn chat( - global_context: SharedGlobalContext, - body_bytes: hyper::body::Bytes, +async fn _chat( + gcx: SharedGlobalContext, + chat_post: &mut ChatPost, + messages: &mut Vec, allow_at: bool, ) -> Result, ScratchError> { - let mut chat_post = serde_json::from_slice::(&body_bytes).map_err(|e| { - info!("chat handler cannot parse input:\n{:?}", body_bytes); - ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) - })?; - let mut messages = deserialize_messages_from_post(&chat_post.messages)?; - // converts tools into openai style if let Some(tools) = &mut chat_post.tools { for tool in tools { @@ -93,7 +111,7 @@ async fn chat( } } - let caps = crate::global_context::try_load_caps_quickly_if_not_present(global_context.clone(), 0).await?; + let caps = crate::global_context::try_load_caps_quickly_if_not_present(gcx.clone(), 0).await?; let (model_name, scratchpad_name, scratchpad_patch, n_ctx, supports_tools, supports_multimodality, supports_clicks) = lookup_chat_scratchpad( caps.clone(), &chat_post, @@ -154,21 +172,21 @@ async fn chat( } } - let docker_tool_maybe = docker_tool_load(global_context.clone()).await + 
let docker_tool_maybe = docker_tool_load(gcx.clone()).await .map_err(|e| info!("No docker tool available: {e}")).ok().map(Arc::new); let run_chat_threads_inside_container = docker_tool_maybe.clone() .map(|docker_tool| docker_tool.integration_docker.run_chat_threads_inside_container) .unwrap_or(false); - let should_execute_remotely = run_chat_threads_inside_container && !global_context.read().await.cmdline.inside_container; + let should_execute_remotely = run_chat_threads_inside_container && !gcx.read().await.cmdline.inside_container; if should_execute_remotely { - docker_container_check_status_or_start(global_context.clone(), docker_tool_maybe.clone(), &chat_post.chat_id).await + docker_container_check_status_or_start(gcx.clone(), docker_tool_maybe.clone(), &chat_post.chat_id).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; } // chat_post.stream = Some(false); // for debugging 400 errors that are hard to debug with streaming (because "data: " is not present and the error message is ignored by the library) let mut scratchpad = scratchpads::create_chat_scratchpad( - global_context.clone(), + gcx.clone(), caps, model_name.clone(), &chat_post, @@ -184,7 +202,7 @@ async fn chat( )?; // if !chat_post.chat_id.is_empty() { // let cache_dir = { - // let gcx_locked = global_context.read().await; + // let gcx_locked = gcx.read().await; // gcx_locked.cache_dir.clone() // }; // let notes_dir_path = cache_dir.join("chats"); @@ -196,7 +214,7 @@ async fn chat( // let _ = std::fs::write(¬es_path, serde_json::to_string_pretty(&chat_post.messages).unwrap()); // } let mut ccx = AtCommandsContext::new( - global_context.clone(), + gcx.clone(), n_ctx, CHAT_TOP_N, false, diff --git a/src/http/routers/v1/handlers_memdb.rs b/src/http/routers/v1/handlers_memdb.rs index 0a7d21320..a89b44c2e 100644 --- a/src/http/routers/v1/handlers_memdb.rs +++ b/src/http/routers/v1/handlers_memdb.rs @@ -1,7 +1,6 @@ use std::sync::Arc; use tokio::sync::RwLock as ARwLock; use serde_json::json; -use indexmap::IndexMap; use axum::Extension; use axum::response::Result; @@ -184,51 +183,3 @@ pub async fn handle_mem_list( Ok(response) } -#[derive(Deserialize)] -struct OngoingUpdateRequest { - goal: String, - ongoing_progress: IndexMap, - ongoing_action_new_sequence: IndexMap, - ongoing_output: IndexMap>, -} - -pub async fn handle_ongoing_update_or_create( - Extension(gcx): Extension>>, - body_bytes: hyper::body::Bytes, -) -> Result, ScratchError> { - let post: OngoingUpdateRequest = serde_json::from_slice(&body_bytes).map_err(|e| { - tracing::info!("cannot parse input:\n{:?}", body_bytes); - ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) - })?; - let vec_db = gcx.read().await.vec_db.clone(); - - crate::vecdb::vdb_highlev::ongoing_update_or_create( - vec_db, - post.goal, - post.ongoing_progress, - post.ongoing_action_new_sequence, - post.ongoing_output, - ).await.map_err(|e| { - ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("{}", e)) - })?; - let response = Response::builder() - .header("Content-Type", "application/json") - .body(Body::from(serde_json::to_string(&json!({"success": true})).unwrap())) - .unwrap(); - Ok(response) -} - -pub async fn handle_ongoing_dump( - Extension(gcx): Extension>>, - _body_bytes: hyper::body::Bytes, -) -> Result, ScratchError> { - let vec_db = gcx.read().await.vec_db.clone(); - let output = crate::vecdb::vdb_highlev::ongoing_dump(vec_db).await.map_err(|e| { - ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("{}", e)) - })?; - 
let response = Response::builder() - .header("Content-Type", "text/plain") - .body(Body::from(output)) - .unwrap(); - Ok(response) -} diff --git a/src/http/routers/v1/integrations.rs b/src/http/routers/v1/integrations.rs deleted file mode 100644 index ae4cd1fb2..000000000 --- a/src/http/routers/v1/integrations.rs +++ /dev/null @@ -1,206 +0,0 @@ -use std::path::PathBuf; -use std::sync::Arc; -use axum::Extension; -use axum::http::{Response, StatusCode}; -use tokio::sync::RwLock as ARwLock; -use hyper::Body; -use serde::{Deserialize, Serialize}; -use serde_json::{json, Value}; -use url::Url; -use std::fs; -use std::io::Read; -#[allow(deprecated)] -use base64::encode; -use indexmap::IndexMap; -use reqwest::Client; -use tokio::fs as async_fs; -use tracing::info; - -use crate::custom_error::ScratchError; -use crate::global_context::GlobalContext; -use crate::integrations::{get_empty_integrations, get_integration_path, get_integrations, json_for_integration, validate_integration_value}; -use crate::yaml_configs::create_configs::{integrations_enabled_cfg, read_yaml_into_value, write_yaml_value}; - - -#[derive(Serialize, Deserialize)] -struct IntegrationItem { - name: String, - enabled: bool, - schema: Option, - value: Option, -} - -#[derive(Serialize)] -struct IntegrationIcon { - name: String, - value: String, -} - -async fn load_integration_schema_and_json( - gcx: Arc>, -) -> Result, String> { - let integrations = get_empty_integrations(); - let cache_dir = gcx.read().await.cache_dir.clone(); - let integrations_yaml_value = read_yaml_into_value(&cache_dir.join("integrations.yaml")).await?; - - let mut results = IndexMap::new(); - for (i_name, i) in integrations.iter() { - let path = get_integration_path(&cache_dir, &i_name); - let j_value = json_for_integration(&path, integrations_yaml_value.get(&i_name), &i).await?; - results.insert(i_name.clone(), (i.to_schema_json(), j_value)); - } - - Ok(results) -} - -async fn get_image_base64( - cache_dir: &PathBuf, - icon_name: &str, - icon_url: &str, -) -> Result { - let assets_path = cache_dir.join("assets/integrations"); - - // Parse the URL to get the file extension - let url = Url::parse(icon_url).map_err(|e| e.to_string())?; - let extension = url - .path_segments() - .and_then(|segments| segments.last()) - .and_then(|name| name.split('.').last()) - .unwrap_or("png"); // Default to "png" if no extension is found - - let file_path = assets_path.join(format!("{}.{}", icon_name, extension)); - - // Check if the file already exists - if file_path.exists() { - info!("Using image from cache: {}", file_path.display()); - let mut file = fs::File::open(&file_path).map_err(|e| e.to_string())?; - let mut buffer = Vec::new(); - file.read_to_end(&mut buffer).map_err(|e| e.to_string())?; - #[allow(deprecated)] - let b64_image = encode(&buffer); - let image_str = format!("data:{};base64,{}", extension, b64_image); - return Ok(image_str); - } - - // Create the cache directory if it doesn't exist - async_fs::create_dir_all(&assets_path).await.map_err(|e| e.to_string())?; - - // Download the image - info!("Downloading image from {}", icon_url); - let client = Client::new(); - let response = client.get(icon_url).send().await.map_err(|e| e.to_string())?; - let bytes = response.bytes().await.map_err(|e| e.to_string())?; - - // Save the image to the cache directory - async_fs::write(&file_path, &bytes).await.map_err(|e| e.to_string())?; - - // Return the base64 string - #[allow(deprecated)] - let b64_image = encode(&bytes); - let image_str = format!("data:{};base64,{}", 
extension, b64_image); - Ok(image_str) -} - -pub async fn handle_v1_integrations_icons( - Extension(gcx): Extension>>, - _: hyper::body::Bytes, -) -> axum::response::Result, ScratchError> { - let cache_dir = gcx.read().await.cache_dir.clone(); - let integrations = get_integrations(gcx.clone()).await.map_err(|e|{ - ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to load integrations: {}", e)) - })?; - - let mut results = vec![]; - for (i_name, i) in integrations.iter() { - let image_base64 = get_image_base64(&cache_dir, i_name, &i.icon_link()).await.map_err(|e|{ - ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to get image: {}", e)) - })?; - results.push(IntegrationIcon { - name: i_name.clone(), - value: image_base64, - }); - } - - let payload = serde_json::to_string_pretty(&json!(results)).expect("Failed to serialize results"); - Ok(Response::builder() - .status(StatusCode::OK) - .header("Content-Type", "application/json") - .body(Body::from(payload)) - .unwrap()) -} - -pub async fn handle_v1_integrations( - Extension(gcx): Extension>>, - _: hyper::body::Bytes, -) -> axum::response::Result, ScratchError> { - let schemas_and_json_dict = load_integration_schema_and_json(gcx.clone()).await.map_err(|e|{ - ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to load integrations: {}", e)) - })?; - - let cache_dir = gcx.read().await.cache_dir.clone(); - let enabled_path = cache_dir.join("integrations-enabled.yaml"); - let enabled_mapping = match integrations_enabled_cfg(&enabled_path).await { - serde_yaml::Value::Mapping(map) => map, - _ => serde_yaml::Mapping::new(), - }; - - let mut items = vec![]; - for (name, (schema, value)) in schemas_and_json_dict { - let item = IntegrationItem { - name: name.clone(), - enabled: enabled_mapping.get(&name).and_then(|v| v.as_bool()).unwrap_or(false), - schema: Some(schema), - value: Some(value), - }; - - items.push(item); - } - - let payload = serde_json::to_string_pretty(&json!(items)).expect("Failed to serialize items"); - Ok(Response::builder() - .status(StatusCode::OK) - .header("Content-Type", "application/json") - .body(Body::from(payload)) - .unwrap()) -} - - -pub async fn handle_v1_integrations_save( - Extension(gcx): Extension>>, - body_bytes: hyper::body::Bytes, -) -> axum::response::Result, ScratchError> { - let post = serde_json::from_slice::(&body_bytes) - .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; - - let cache_dir = gcx.read().await.cache_dir.clone(); - let enabled_path = cache_dir.join("integrations-enabled.yaml"); - let mut enabled_value = integrations_enabled_cfg(&enabled_path).await; - if let serde_yaml::Value::Mapping(ref mut map) = enabled_value { - map.insert(serde_yaml::Value::String(post.name.clone()), serde_yaml::Value::Bool(post.enabled)); - } else { - return Err(ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to parse {:?} as YAML::Mapping", enabled_path))); - } - write_yaml_value(&enabled_path, &enabled_value).await - .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to write YAML: {}", e)))?; - - if let Some(post_value) = &post.value { - let yaml_value: serde_yaml::Value = serde_json::to_string(post_value).map_err(|e|e.to_string()) - .and_then(|s|serde_yaml::from_str(&s).map_err(|e|e.to_string())) - .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("ERROR converting JSON to YAML: {}", e)))?; - - let yaml_value = 
validate_integration_value(&post.name, yaml_value).await - .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("ERROR validating integration value: {}", e)))?; - - let path = get_integration_path(&cache_dir, &post.name); - - write_yaml_value(&path, &yaml_value).await.map_err(|e|{ - ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to write YAML: {}", e)) - })?; - } - - Ok(Response::builder() - .status(StatusCode::OK) - .header("Content-Type", "application/json") - .body(Body::from(format!("Integration {} updated", post.name))) - .unwrap()) -} diff --git a/src/http/routers/v1/system_prompt.rs b/src/http/routers/v1/system_prompt.rs index e36e29c85..a54ec8a1a 100644 --- a/src/http/routers/v1/system_prompt.rs +++ b/src/http/routers/v1/system_prompt.rs @@ -39,4 +39,4 @@ pub async fn handle_v1_system_prompt( .status(StatusCode::OK) .body(Body::from(serde_json::to_string(&result).unwrap())) .unwrap()) -} \ No newline at end of file +} diff --git a/src/http/routers/v1/v1_integrations.rs b/src/http/routers/v1/v1_integrations.rs new file mode 100644 index 000000000..7d561b495 --- /dev/null +++ b/src/http/routers/v1/v1_integrations.rs @@ -0,0 +1,132 @@ +use std::sync::Arc; +use axum::Extension; +use axum::http::{Response, StatusCode}; +use hyper::Body; +use serde::Deserialize; +// use url::Url; +// #[allow(deprecated)] +// use base64::encode; +// use indexmap::IndexMap; +use tokio::sync::RwLock as ARwLock; + +use crate::custom_error::ScratchError; +use crate::global_context::GlobalContext; +// use crate::integrations::{get_empty_integrations, get_integration_path}; +// use crate::yaml_configs::create_configs::{integrations_enabled_cfg, read_yaml_into_value, write_yaml_value}; + + +pub async fn handle_v1_integrations( + Extension(gcx): Extension>>, + _: hyper::body::Bytes, +) -> axum::response::Result, ScratchError> { + let with_icons = crate::integrations::setting_up_integrations::integrations_all_with_icons(gcx.clone()).await; + let payload = serde_json::to_string_pretty(&with_icons).map_err(|e| { + ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to serialize payload: {}", e)) + })?; + Ok(Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "application/json") + .body(Body::from(payload)) + .unwrap()) +} + +#[derive(Deserialize)] +struct IntegrationGetPost { + pub integr_config_path: String, +} + +pub async fn handle_v1_integration_get( + Extension(_gcx): Extension>>, + body_bytes: hyper::body::Bytes, +) -> axum::response::Result, ScratchError> { + let post = serde_json::from_slice::(&body_bytes) + .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; + + let the_get = crate::integrations::setting_up_integrations::integration_config_get( + post.integr_config_path, + ).await.map_err(|e|{ + ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to load integrations: {}", e)) + })?; + + let payload = serde_json::to_string_pretty(&the_get).map_err(|e| { + ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to serialize payload: {}", e)) + })?; + Ok(Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "application/json") + .body(Body::from(payload)) + .unwrap()) +} + +#[derive(Deserialize)] +struct IntegrationSavePost { + pub integr_config_path: String, + pub integr_values: serde_json::Value, +} + +pub async fn handle_v1_integration_save( + Extension(gcx): Extension>>, + body_bytes: hyper::body::Bytes, +) -> axum::response::Result, 
ScratchError> { + let post = serde_json::from_slice::(&body_bytes) + .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; + + crate::integrations::setting_up_integrations::integration_config_save(&post.integr_config_path, &post.integr_values).await.map_err(|e| { + ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("{}", e)) + })?; + + Ok(Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "application/json") + .body(Body::from(format!(""))) + .unwrap()) +} + + +// async fn get_image_base64( +// cache_dir: &PathBuf, +// icon_name: &str, +// icon_url: &str, +// ) -> Result { +// let assets_path = cache_dir.join("assets/integrations"); + +// // Parse the URL to get the file extension +// let url = Url::parse(icon_url).map_err(|e| e.to_string())?; +// let extension = url +// .path_segments() +// .and_then(|segments| segments.last()) +// .and_then(|name| name.split('.').last()) +// .unwrap_or("png"); // Default to "png" if no extension is found + +// let file_path = assets_path.join(format!("{}.{}", icon_name, extension)); + +// // Check if the file already exists +// if file_path.exists() { +// info!("Using image from cache: {}", file_path.display()); +// let mut file = fs::File::open(&file_path).map_err(|e| e.to_string())?; +// let mut buffer = Vec::new(); +// file.read_to_end(&mut buffer).map_err(|e| e.to_string())?; +// #[allow(deprecated)] +// let b64_image = encode(&buffer); +// let image_str = format!("data:{};base64,{}", extension, b64_image); +// return Ok(image_str); +// } + +// // Create the cache directory if it doesn't exist +// async_fs::create_dir_all(&assets_path).await.map_err(|e| e.to_string())?; + +// // Download the image +// info!("Downloading image from {}", icon_url); +// let client = Client::new(); +// let response = client.get(icon_url).send().await.map_err(|e| e.to_string())?; +// let bytes = response.bytes().await.map_err(|e| e.to_string())?; + +// // Save the image to the cache directory +// async_fs::write(&file_path, &bytes).await.map_err(|e| e.to_string())?; + +// // Return the base64 string +// #[allow(deprecated)] +// let b64_image = encode(&bytes); +// let image_str = format!("data:{};base64,{}", extension, b64_image); +// Ok(image_str) +// } diff --git a/src/integrations/config_chat.rs b/src/integrations/config_chat.rs new file mode 100644 index 000000000..85e1a2dd1 --- /dev/null +++ b/src/integrations/config_chat.rs @@ -0,0 +1,76 @@ +use std::sync::Arc; +use std::fs; +use tokio::sync::RwLock as ARwLock; +use std::collections::HashMap; + +use crate::global_context::GlobalContext; +use crate::call_validation::{ChatContent, ChatMessage, ContextFile}; + + +pub async fn mix_config_messages( + gcx: Arc>, + messages: &mut Vec, + current_config_file: &String, +) { + let config_dir = gcx.read().await.config_dir.clone(); + let file_path = config_dir.join("integrations.d"); + + let mut context_file_vec = Vec::new(); + if let Ok(entries) = fs::read_dir(&file_path) { + for entry in entries { + if let Ok(entry) = entry { + let path = entry.path(); + if path.extension().and_then(|s| s.to_str()) == Some("yaml") { + if let Ok(file_content) = fs::read_to_string(&path) { + let context_file = ContextFile { + file_name: path.to_string_lossy().to_string(), + file_content, + line1: 0, + line2: 0, + symbols: vec![], + gradient_type: -1, + usefulness: 100.0, + }; + context_file_vec.push(context_file); + } + } + } + } + } + let custom: crate::yaml_configs::customization_loader::CustomizationYaml = match 
crate::yaml_configs::customization_loader::load_customization(gcx, true).await {
+        Ok(x) => x,
+        Err(why) => {
+            tracing::error!("Failed to load customization.yaml, will use compiled-in default for the configurator system prompt:\n{:?}", why);
+            crate::yaml_configs::customization_loader::load_and_mix_with_users_config(
+                crate::yaml_configs::customization_compiled_in::COMPILED_IN_INITIAL_USER_YAML,
+                "", "", true, true, &HashMap::new(),
+            ).unwrap()
+        }
+    };
+    let sp: &crate::yaml_configs::customization_loader::SystemPrompt = custom.system_prompts.get("configurator").unwrap();
+
+    // let json_vec = context_file_vec.iter().map(|p| serde_json::json!(p)).collect::>();
+    messages.insert(0, ChatMessage {
+        role: "context_file".to_string(),
+        content: ChatContent::SimpleText(serde_json::to_string(&context_file_vec).unwrap()),
+        tool_calls: None,
+        tool_call_id: String::new(),
+        usage: None,
+    });
+    messages.insert(0, ChatMessage {
+        role: "system".to_string(),
+        content: ChatContent::SimpleText(sp.text.clone()),
+        tool_calls: None,
+        tool_call_id: String::new(),
+        usage: None,
+    });
+
+    for msg in messages.iter_mut() {
+        if let ChatContent::SimpleText(ref mut content) = msg.content {
+            *content = content.replace("%CURRENT_CONFIG%", current_config_file);
+        }
+    }
+
+    tracing::info!("config chat messages:\n{:#?}", messages);
+}
+
diff --git a/src/integrations/integr.rs b/src/integrations/integr.rs
deleted file mode 100644
index 435059010..000000000
--- a/src/integrations/integr.rs
+++ /dev/null
@@ -1,42 +0,0 @@
-use schemars::{schema_for, JsonSchema};
-use serde::Serialize;
-use serde::de::DeserializeOwned;
-use crate::tools::tools_description::Tool;
-
-
-pub trait Integration: Send + Sync {
-    fn name(&self) -> String;
-    fn update_from_json(&mut self, value: &serde_json::Value) -> Result<(), String>;
-    fn from_yaml_validate_to_json(&self, value: &serde_yaml::Value) -> Result;
-    fn to_tool(&self) -> Box;
-    fn to_json(&self) -> Result;
-    fn to_schema_json(&self) -> serde_json::Value;
-    fn default_value(&self) -> String;
-    fn icon_link(&self) -> String;
-}
-
-pub fn json_schema() -> Result {
-    let schema = schema_for!(T);
-    let mut json_schema = serde_json::to_value(&schema).map_err(|e| e.to_string())?;
-
-    // Reorder properties in the json_schema based on the order of dummy_instance
-    let dummy_instance: T = T::default();
-    let serialized_value = serde_json::to_value(&dummy_instance).unwrap();
-
-    // schemars breaks order.
Instead, reordering in Value - if let serde_json::Value::Object(ref mut schema_map) = json_schema { - if let Some(serde_json::Value::Object(ref mut properties)) = schema_map.get_mut("properties") { - if let serde_json::Value::Object(dummy_map) = serialized_value { - let mut ordered_properties = serde_json::Map::new(); - for key in dummy_map.keys() { - if let Some(value) = properties.remove(key) { - ordered_properties.insert(key.clone(), value); - } - } - *properties = ordered_properties; - } - } - } - - Ok(json_schema) -} diff --git a/src/integrations/integr_abstract.rs b/src/integrations/integr_abstract.rs new file mode 100644 index 000000000..09e75462b --- /dev/null +++ b/src/integrations/integr_abstract.rs @@ -0,0 +1,6 @@ +pub trait IntegrationTrait: Send + Sync { + fn integr_schema(&self) -> &str; + fn integr_settings_apply(&mut self, value: &serde_json::Value) -> Result<(), String>; + fn integr_settings_as_json(&self) -> serde_json::Value; + fn integr_upgrade_to_tool(&self) -> Box; +} diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index e2f4e5359..71e819218 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -20,35 +20,26 @@ use headless_chrome::{Browser, LaunchOptions, Tab}; use headless_chrome::browser::tab::point::Point; use headless_chrome::protocol::cdp::Page; use headless_chrome::protocol::cdp::Emulation; -use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use crate::integrations::integr::{json_schema, Integration}; +use crate::integrations::integr_abstract::Integration; -#[derive(Clone, Serialize, Deserialize, Debug, JsonSchema, Default)] +#[derive(Clone, Serialize, Deserialize, Debug, Default)] pub struct IntegrationChrome { - #[schemars(description = "Path to the Chrome binary or WebSocket URL for remote debugging.")] pub chrome_path: Option, - #[schemars(description = "Window width for the Chrome browser.")] pub window_width: Option, - #[schemars(description = "Window height for the Chrome browser.")] pub window_height: Option, - #[schemars(description = "Idle timeout for the Chrome browser in seconds.")] pub idle_browser_timeout: Option, #[serde(default = "default_headless")] pub headless: bool, } -#[derive(Default)] -pub struct ToolChrome { - pub integration_chrome: IntegrationChrome, -} - fn default_headless() -> bool { true } +#[derive(Debug, Default)] pub struct ToolChrome { - integration_chrome: IntegrationChrome, - supports_clicks: bool, + pub integration_chrome: IntegrationChrome, + pub supports_clicks: bool, } struct ChromeSession { @@ -78,18 +69,14 @@ impl IntegrationSession for ChromeSession } impl Integration for ToolChrome { - fn name(&self) -> String { - "chrome".to_string() - } - - fn update_from_json(&mut self, value: &Value) -> Result<(), String> { + fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { let integration_github = serde_json::from_value::(value.clone()) .map_err(|e|e.to_string())?; self.integration_chrome = integration_github; Ok(()) } - fn from_yaml_validate_to_json(&self, value: &serde_yaml::Value) -> Result { + fn integr_yaml2json(&self, value: &serde_yaml::Value) -> Result { let integration_github = serde_yaml::from_value::(value.clone()).map_err(|e| { let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); format!("{}{}", e.to_string(), location) @@ -97,19 +84,18 @@ impl Integration for ToolChrome { serde_json::to_value(&integration_github).map_err(|e| e.to_string()) } - 
fn to_tool(&self) -> Box { - Box::new(ToolChrome {integration_chrome: self.integration_chrome.clone()}) as Box + fn integr_upgrade_to_tool(&self) -> Box { + Box::new(ToolChrome { + integration_chrome: self.integration_chrome.clone(), + supports_clicks: false} + ) as Box } - fn to_json(&self) -> Result { + fn integr_settings_as_json(&self) -> Result { serde_json::to_value(&self.integration_chrome).map_err(|e| e.to_string()) } - fn to_schema_json(&self) -> Value { - json_schema::().unwrap() - } - - fn default_value(&self) -> String { DEFAULT_CHROME_INTEGRATION_YAML.to_string() } + fn integr_settings_default(&self) -> String { DEFAULT_CHROME_INTEGRATION_YAML.to_string() } fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/732/732205.png".to_string() } } @@ -205,25 +191,6 @@ async fn setup_chrome_session( session_hashmap_key: &String, ) -> Result, String> { let mut setup_log = vec![]; - if !is_chrome_session_active(&session_hashmap_key, gcx.clone()).await { - let mut is_connection = false; - if let Some(chrome_path) = args.chrome_path.clone() { - is_connection = chrome_path.starts_with("ws://"); - } - - let window_size = if args.window_width.is_some() && args.window_height.is_some() { - Some((args.window_width.unwrap(), args.window_height.unwrap())) - } else if args.window_width.is_some() { - Some((args.window_width.unwrap(), args.window_width.unwrap())) - } else { - None - }; - - let mut idle_browser_timeout = Duration::from_secs(600); - if let Some(timeout) = args.idle_browser_timeout.clone() { - idle_browser_timeout = Duration::from_secs(timeout as u64); - } - } let session_entry = { let gcx_locked = gcx.read().await; @@ -241,10 +208,12 @@ async fn setup_chrome_session( } } - let window_size = match args.window_size.as_deref() { - Some([width, height]) => Some((*width, *height)), - Some([size]) => Some((*size, *size)), - _ => None, + let window_size = if args.window_width.is_some() && args.window_height.is_some() { + Some((args.window_width.unwrap(), args.window_height.unwrap())) + } else if args.window_width.is_some() { + Some((args.window_width.unwrap(), args.window_width.unwrap())) + } else { + None }; let idle_browser_timeout = args.idle_browser_timeout diff --git a/src/integrations/integr_github.rs b/src/integrations/integr_github.rs index ada22999a..d05ca8789 100644 --- a/src/integrations/integr_github.rs +++ b/src/integrations/integr_github.rs @@ -3,7 +3,6 @@ use std::collections::HashMap; use tokio::sync::Mutex as AMutex; use tokio::process::Command; use async_trait::async_trait; -use schemars::JsonSchema; use tracing::{error, info}; use serde::{Deserialize, Serialize}; @@ -12,15 +11,13 @@ use crate::call_validation::{ContextEnum, ChatMessage, ChatContent}; use crate::tools::tools_description::Tool; use serde_json::Value; -use crate::integrations::integr::{json_schema, Integration}; +use crate::integrations::integr_abstract::Integration; -#[derive(Clone, Serialize, Deserialize, Debug, JsonSchema, Default)] +#[derive(Clone, Serialize, Deserialize, Debug, Default)] #[allow(non_snake_case)] pub struct IntegrationGitHub { - #[schemars(description = "Path to the GitHub CLI binary.")] pub gh_binary_path: Option, - #[schemars(description = "GitHub token for authentication.")] pub GH_TOKEN: String, } @@ -30,18 +27,14 @@ pub struct ToolGithub { } impl Integration for ToolGithub { - fn name(&self) -> String { - "github".to_string() - } - - fn update_from_json(&mut self, value: &Value) -> Result<(), String> { + fn integr_settings_apply(&mut self, value: &Value) -> 
Result<(), String> { let integration_github = serde_json::from_value::(value.clone()) .map_err(|e|e.to_string())?; self.integration_github = integration_github; Ok(()) } - fn from_yaml_validate_to_json(&self, value: &serde_yaml::Value) -> Result { + fn integr_yaml2json(&self, value: &serde_yaml::Value) -> Result { let integration_github = serde_yaml::from_value::(value.clone()).map_err(|e| { let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); format!("{}{}", e.to_string(), location) @@ -49,19 +42,15 @@ impl Integration for ToolGithub { serde_json::to_value(&integration_github).map_err(|e| e.to_string()) } - fn to_tool(&self) -> Box { + fn integr_upgrade_to_tool(&self) -> Box { Box::new(ToolGithub {integration_github: self.integration_github.clone()}) as Box } - fn to_json(&self) -> Result { + fn integr_settings_as_json(&self) -> Result { serde_json::to_value(&self.integration_github).map_err(|e| e.to_string()) } - fn to_schema_json(&self) -> Value { - json_schema::().unwrap() - } - - fn default_value(&self) -> String { DEFAULT_GITHUB_INTEGRATION_YAML.to_string() } + fn integr_settings_default(&self) -> String { DEFAULT_GITHUB_INTEGRATION_YAML.to_string() } fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/25/25231.png".to_string() } } diff --git a/src/integrations/integr_gitlab.rs b/src/integrations/integr_gitlab.rs index 257369f8a..eb6e844e3 100644 --- a/src/integrations/integr_gitlab.rs +++ b/src/integrations/integr_gitlab.rs @@ -3,24 +3,20 @@ use std::collections::HashMap; use tokio::sync::Mutex as AMutex; use tokio::process::Command; use async_trait::async_trait; -use schemars::JsonSchema; use tracing::{error, info}; use serde::{Deserialize, Serialize}; +use serde_json::Value; use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ContextEnum, ChatMessage}; - use crate::tools::tools_description::Tool; -use serde_json::Value; -use crate::integrations::integr::{json_schema, Integration}; +use crate::integrations::integr_abstract::Integration; -#[derive(Clone, Serialize, Deserialize, Debug, JsonSchema, Default)] +#[derive(Clone, Serialize, Deserialize, Debug, Default)] #[allow(non_snake_case)] pub struct IntegrationGitLab { - #[schemars(description = "Path to the GitLab CLI binary.")] pub glab_binary_path: Option, - #[schemars(description = "GitLab token for authentication.")] pub GITLAB_TOKEN: String, } @@ -30,18 +26,14 @@ pub struct ToolGitlab { } impl Integration for ToolGitlab{ - fn name(&self) -> String { - "gitlab".to_string() - } - - fn update_from_json(&mut self, value: &Value) -> Result<(), String> { + fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { let integration_gitlab = serde_json::from_value::(value.clone()) .map_err(|e|e.to_string())?; self.integration_gitlab = integration_gitlab; Ok(()) } - fn from_yaml_validate_to_json(&self, value: &serde_yaml::Value) -> Result { + fn integr_yaml2json(&self, value: &serde_yaml::Value) -> Result { let integration_gitlab = serde_yaml::from_value::(value.clone()).map_err(|e| { let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); format!("{}{}", e.to_string(), location) @@ -49,18 +41,15 @@ impl Integration for ToolGitlab{ serde_json::to_value(&integration_gitlab).map_err(|e| e.to_string()) } - fn to_tool(&self) -> Box { + fn integr_upgrade_to_tool(&self) -> Box { Box::new(ToolGitlab {integration_gitlab: 
self.integration_gitlab.clone()}) as Box } - fn to_json(&self) -> Result { + fn integr_settings_as_json(&self) -> Result { serde_json::to_value(&self.integration_gitlab).map_err(|e| e.to_string()) } - - fn to_schema_json(&self) -> Value { - json_schema::().unwrap() - } - fn default_value(&self) -> String { DEFAULT_GITLAB_INTEGRATION_YAML.to_string() } + + fn integr_settings_default(&self) -> String { DEFAULT_GITLAB_INTEGRATION_YAML.to_string() } fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/5968/5968853.png".to_string() } } diff --git a/src/integrations/integr_pdb.rs b/src/integrations/integr_pdb.rs index d516f13c6..39a300b1b 100644 --- a/src/integrations/integr_pdb.rs +++ b/src/integrations/integr_pdb.rs @@ -9,7 +9,6 @@ use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; use tokio::process::{Command, Child, ChildStdin, ChildStdout, ChildStderr}; use tokio::time::Duration; use async_trait::async_trait; -use schemars::JsonSchema; use tracing::{error, info}; use serde::{Deserialize, Serialize}; @@ -17,7 +16,7 @@ use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ContextEnum, ChatMessage, ChatContent}; use crate::integrations::sessions::{IntegrationSession, get_session_hashmap_key}; use crate::global_context::GlobalContext; -use crate::integrations::integr::{json_schema, Integration}; +use crate::integrations::integr_abstract::Integration; use crate::tools::tools_description::{Tool, ToolDesc, ToolParam}; use crate::integrations::process_io_utils::{first_n_chars, last_n_chars, last_n_lines, write_to_stdin_and_flush, blocking_read_until_token_or_timeout}; @@ -26,15 +25,14 @@ const SESSION_TIMEOUT_AFTER_INACTIVITY: Duration = Duration::from_secs(30 * 60); const PDB_TOKEN: &str = "(Pdb)"; -#[derive(Clone, Serialize, Deserialize, Debug, JsonSchema, Default)] -pub struct IntegrationPdb { - #[schemars(description = "Path to the Python binary.")] +#[derive(Clone, Serialize, Deserialize, Debug, Default)] +pub struct SettingsPdb { pub python_path: Option, } #[derive(Default)] pub struct ToolPdb { - pub integration_pdb: IntegrationPdb, + pub settings_pdb: SettingsPdb, } pub struct PdbSession { @@ -64,37 +62,30 @@ impl IntegrationSession for PdbSession } impl Integration for ToolPdb { - fn name(&self) -> String { - "pdb".to_string() - } - - fn update_from_json(&mut self, value: &Value) -> Result<(), String> { - let integration_pdb = serde_json::from_value::(value.clone()) + fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { + let settings_pdb = serde_json::from_value::(value.clone()) .map_err(|e|e.to_string())?; - self.integration_pdb = integration_pdb; + self.settings_pdb = settings_pdb; Ok(()) } - fn from_yaml_validate_to_json(&self, value: &serde_yaml::Value) -> Result { - let integration_github = serde_yaml::from_value::(value.clone()).map_err(|e| { + fn integr_yaml2json(&self, value: &serde_yaml::Value) -> Result { + let integration_github = serde_yaml::from_value::(value.clone()).map_err(|e| { let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); format!("{}{}", e.to_string(), location) })?; serde_json::to_value(&integration_github).map_err(|e| e.to_string()) } - fn to_tool(&self) -> Box { - Box::new(ToolPdb {integration_pdb: self.integration_pdb.clone()}) as Box + fn integr_upgrade_to_tool(&self) -> Box { + Box::new(ToolPdb {settings_pdb: self.settings_pdb.clone()}) as Box } - fn to_json(&self) -> Result { - 
serde_json::to_value(&self.integration_pdb).map_err(|e| e.to_string()) + fn integr_settings_as_json(&self) -> Result { + serde_json::to_value(&self.settings_pdb).map_err(|e| e.to_string()) } - fn to_schema_json(&self) -> Value { - json_schema::().unwrap() - } - fn default_value(&self) -> String { DEFAULT_PDB_INTEGRATION_YAML.to_string() } + fn integr_settings_default(&self) -> String { DEFAULT_PDB_INTEGRATION_YAML.to_string() } fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/919/919852.png".to_string() } } @@ -115,7 +106,7 @@ impl Tool for ToolPdb { }; let session_hashmap_key = get_session_hashmap_key("pdb", &chat_id); - let python_command = self.integration_pdb.python_path.clone().unwrap_or_else(|| "python3".to_string()); + let python_command = self.settings_pdb.python_path.clone().unwrap_or_else(|| "python3".to_string()); if command_args.windows(2).any(|w| w == ["-m", "pdb"]) { let output = start_pdb_session(&python_command, &mut command_args, &session_hashmap_key, gcx.clone(), 10).await?; diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index bc544e645..c75a3d11f 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -5,69 +5,71 @@ use crate::tools::tools_description::Tool; use async_trait::async_trait; use serde::{Deserialize, Serialize}; use serde_json::Value; -use serde_yaml; use std::collections::HashMap; use std::sync::Arc; -use schemars::JsonSchema; use tokio::process::Command; use tokio::sync::Mutex as AMutex; -use crate::integrations::integr::{json_schema, Integration}; +use crate::integrations::integr_abstract::IntegrationTrait; -#[derive(Clone, Serialize, Deserialize, Debug, JsonSchema, Default)] -pub struct IntegrationPostgres { - #[schemars(description = "Path to the psql binary.")] - pub psql_binary_path: Option, - #[schemars(description = "Connection string for the PSQL database.")] - pub connection_string: String, +#[derive(Clone, Serialize, Deserialize, Debug, Default)] +pub struct SettingsPostgres { + pub psql_binary_path: String, + pub host: String, + pub port: usize, + pub user: String, + pub password: String, + pub database: String, } #[derive(Default)] pub struct ToolPostgres { - pub integration_postgres: IntegrationPostgres, + pub settings_postgres: SettingsPostgres, } -impl Integration for ToolPostgres { - fn name(&self) -> String { - "postgres".to_string() - } - - fn update_from_json(&mut self, value: &Value) -> Result<(), String> { - let integration_postgres = serde_json::from_value::(value.clone()) - .map_err(|e|e.to_string())?; - self.integration_postgres = integration_postgres; +impl IntegrationTrait for ToolPostgres { + fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { + match serde_json::from_value::(value.clone()) { + Ok(settings_postgres) => self.settings_postgres = settings_postgres, + Err(e) => { + tracing::error!("Failed to apply settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } Ok(()) } - fn from_yaml_validate_to_json(&self, value: &serde_yaml::Value) -> Result { - let integration_github = serde_yaml::from_value::(value.clone()).map_err(|e| { - let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); - format!("{}{}", e.to_string(), location) - })?; - serde_json::to_value(&integration_github).map_err(|e| e.to_string()) + fn integr_settings_as_json(&self) -> Value { + serde_json::to_value(&self.settings_postgres).unwrap() } - fn to_tool(&self) -> 
Box { - Box::new(ToolPostgres {integration_postgres: self.integration_postgres.clone()}) as Box + fn integr_upgrade_to_tool(&self) -> Box { + Box::new(ToolPostgres { + settings_postgres: self.settings_postgres.clone() + }) as Box } - fn to_json(&self) -> Result { - serde_json::to_value(&self.integration_postgres).map_err(|e| e.to_string()) + fn integr_schema(&self) -> &str + { + POSTGRES_INTEGRATION_SCHEMA } - fn to_schema_json(&self) -> Value { - json_schema::().unwrap() - } - fn default_value(&self) -> String { DEFAULT_POSTGRES_INTEGRATION_YAML.to_string() } - fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/5968/5968342.png".to_string() } + // fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/5968/5968342.png".to_string() } } impl ToolPostgres { - async fn run_psql_command(&self, query: &str) -> Result { - let psql_command = self.integration_postgres.psql_binary_path.as_deref().unwrap_or("psql"); + let mut psql_command = self.settings_postgres.psql_binary_path.clone(); + if psql_command.is_empty() { + psql_command = "psql".to_string(); + } let output_future = Command::new(psql_command) - .arg(&self.integration_postgres.connection_string) + .env("PGPASSWORD", &self.settings_postgres.password) + .env("PGHOST", &self.settings_postgres.host) + .env("PGUSER", &self.settings_postgres.user) + .env("PGPORT", &format!("{}", self.settings_postgres.port)) + .env("PGDATABASE", &self.settings_postgres.database) + .arg("-v") .arg("ON_ERROR_STOP=1") .arg("-c") .arg(query) @@ -75,7 +77,7 @@ impl ToolPostgres { if let Ok(output) = tokio::time::timeout(tokio::time::Duration::from_millis(10_000), output_future).await { if output.is_err() { let err_text = format!("{}", output.unwrap_err()); - tracing::error!("psql didn't work:\n{}\n{}\n{}", self.integration_postgres.connection_string, query, err_text); + tracing::error!("psql didn't work:\n{}\n{}", query, err_text); return Err(format!("psql failed:\n{}", err_text)); } let output = output.unwrap(); @@ -84,11 +86,11 @@ impl ToolPostgres { } else { // XXX: limit stderr, can be infinite let stderr_string = String::from_utf8_lossy(&output.stderr); - tracing::error!("psql didn't work:\n{}\n{}\n{}", self.integration_postgres.connection_string, query, stderr_string); + tracing::error!("psql didn't work:\n{}\n{}", query, stderr_string); Err(format!("psql failed:\n{}", stderr_string)) } } else { - tracing::error!("psql timed out:\n{}\n{}", self.integration_postgres.connection_string, query); + tracing::error!("psql timed out:\n{}", query); Err("psql command timed out".to_string()) } } @@ -144,9 +146,87 @@ impl Tool for ToolPostgres { } } -const DEFAULT_POSTGRES_INTEGRATION_YAML: &str = r#" -# Postgres database - -# psql_binary_path: "/path/to/psql" # Uncomment to set a custom path for the psql binary, defaults to "psql" -# connection_string: "postgresql://username:password@localhost/dbname" # To get a connection string, check out https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING +// const DEFAULT_POSTGRES_INTEGRATION_YAML: &str = r#" +// postgres: +// enable: true +// psql_binary_path: "/path/to/psql" +// host: "my_postgres_for_django" +// user: "vasya1337" +// password: "$POSTGRES_PASSWORD" +// db: "mydjango" +// available: +// on_your_laptop: +// - project_pattern: "*web_workspace/project1" +// db: "mydjango2" +// enable: true +// when_isolated: +// user: "vasya1338" +// enable: true +// docker: +// my_postgres_for_django: +// image: "postgres:13" +// environment: +// POSTGRES_DB: "mydjango" 
+// POSTGRES_USER: "vasya1337"
+// POSTGRES_PASSWORD: "$POSTGRES_PASSWORD"
+// "#;
+
+
+pub const POSTGRES_INTEGRATION_SCHEMA: &str = r#"
fields:
  host:
    f_type: string
    f_desc: "Connect to this host, for example 127.0.0.1 or a docker container name."
    f_placeholder: marketing_db_container
  port:
    f_type: int
    f_desc: "Which port to use."
    f_default: "5432"
  user:
    f_type: string
    f_placeholder: john_doe
  password:
    f_type: string
    f_default: "$POSTGRES_PASSWORD"
    smartlinks:
      - sl_label: "Open passwords.yaml"
        sl_goto: "EDITOR:passwords.yaml"
  database:
    f_type: string
    f_placeholder: marketing_db
  psql_binary_path:
    f_type: string
    f_desc: "If it can't find a path to `psql`, you can provide it here; leave blank if not sure."
    f_placeholder: "psql"
available:
  on_your_laptop_possible: true
  when_isolated_possible: true
smartlinks:
  - sl_label: "Test"
    sl_chat:
      - role: "user"
        content: |
          šŸ”§ The postgres tool should be visible now. To test the tool, list the tables available, briefly describe the tables and express
          satisfaction and relief if it works, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt.
          The current config file is %CURRENT_CONFIG%.
docker:
  new_container_default:
    image: "postgres:13"
    environment:
      POSTGRES_DB: "marketing_db"
      POSTGRES_USER: "john_doe"
      POSTGRES_PASSWORD: "$POSTGRES_PASSWORD"
  smartlinks:
    - sl_label: "Add Database Container"
      sl_chat:
        - role: "user"
          content: |
            šŸ”§ Your job is to create a new section under "docker" that will define a new postgres container, inside the current config file %CURRENT_CONFIG%. Follow the system prompt.
"#;
+
+
+// available:
+// on_your_laptop:
+// possible: true
+// when_isolated:
+// possible: true
diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs
index b7a82fffa..3fce4b461 100644
--- a/src/integrations/mod.rs
+++ b/src/integrations/mod.rs
@@ -1,241 +1,222 @@
-use std::path::PathBuf;
-use std::sync::Arc;
-use indexmap::IndexMap;
-use serde_json::json;
-use tracing::{info, warn};
-use tokio::sync::{Mutex as AMutex, RwLock as ARwLock};
-
-use crate::global_context::GlobalContext;
-use crate::integrations::integr::Integration;
-use crate::integrations::integr_chrome::ToolChrome;
-use crate::integrations::integr_github::ToolGithub;
-use crate::integrations::integr_gitlab::ToolGitlab;
-use crate::integrations::integr_pdb::ToolPdb;
-use crate::integrations::integr_postgres::ToolPostgres;
-use crate::tools::tools_description::Tool;
-use crate::yaml_configs::create_configs::{integrations_enabled_cfg, read_yaml_into_value};
+// use std::path::PathBuf;
+// use std::sync::Arc;
+// use indexmap::IndexMap;
+// use tokio::sync::{Mutex as AMutex, RwLock as ARwLock};
+
+// use crate::global_context::GlobalContext;
+// use crate::tools::tools_description::Tool;
+// use crate::yaml_configs::create_configs::{integrations_enabled_cfg, read_yaml_into_value};
+
+
+pub mod integr_abstract;
+// pub mod integr_github;
+// pub mod integr_gitlab;
+// pub mod integr_pdb;
+// pub mod integr_chrome;
+pub mod integr_postgres;
-pub mod sessions;
 pub mod process_io_utils;
-pub mod integr_github;
-pub mod integr_gitlab;
-pub mod integr_pdb;
-pub mod integr_chrome;
 pub mod docker;
 pub mod sessions;
-pub mod process_io_utils;
-pub mod integr_postgres;
-mod integr;
-
-
-<<<<<<< HEAD
-// hint: when adding integration, update:
-// DEFAULT_INTEGRATION_VALUES, INTEGRATION_ICONS, integrations_paths, validate_integration_value,
load_integration_tools, load_integration_schema_and_json -======= -// when adding integration, update: get_empty_integrations (2 occurrences) ->>>>>>> 9b1345a1 (simplified code) - - -pub fn get_empty_integrations() -> IndexMap> { - let integration_names = ["github", "gitlab", "pdb", "postgres", "chrome"]; - let mut integrations = IndexMap::new(); - for i_name in integration_names { - let i = match i_name { - "github" => Box::new(ToolGithub {..Default::default()} ) as Box, - "gitlab" => Box::new(ToolGitlab {..Default::default()} ) as Box, - "pdb" => Box::new(ToolPdb {..Default::default()} ) as Box, - "postgres" => Box::new(ToolPostgres {..Default::default()} ) as Box, - "chrome" => Box::new(ToolChrome {..Default::default()} ) as Box, - _ => panic!("Unknown integration name: {}", i_name) - }; - integrations.insert(i_name.to_string(), i); - } - integrations -} - -pub fn get_integration_path(cache_dir: &PathBuf, name: &str) -> PathBuf { - cache_dir.join("integrations.d").join(format!("{}.yaml", name)) -} - -pub async fn get_integrations( - gcx: Arc>, -) -> Result>, String> { - let integrations = get_empty_integrations(); - let cache_dir = gcx.read().await.cache_dir.clone(); - - let integrations_yaml_value = read_yaml_into_value(&cache_dir.join("integrations.yaml")).await?; - - let mut results = IndexMap::new(); - for (i_name, mut i) in integrations { - let path = get_integration_path(&cache_dir, &i_name); - let j_value = json_for_integration(&path, integrations_yaml_value.get(&i_name), &i).await?; - - if j_value.get("detail").is_some() { - warn!("failed to load integration {}: {}", i_name, j_value.get("detail").unwrap()); - } else { - if let Err(e) = i.update_from_json(&j_value) { - warn!("failed to load integration {}: {}", i_name, e); - }; - } - results.insert(i_name.clone(), i); +pub mod config_chat; +pub mod yaml_schema; +pub mod setting_up_integrations; +pub mod running_integrations; + +use integr_abstract::IntegrationTrait; + + +pub fn integration_from_name(n: &str) -> Result, String> +{ + match n { + // "github" => Ok(Box::new(ToolGithub { ..Default::default() }) as Box), + // "gitlab" => Ok(Box::new(ToolGitlab { ..Default::default() }) as Box), + // "pdb" => Ok(Box::new(ToolPdb { ..Default::default() }) as Box), + "postgres" => Ok(Box::new(integr_postgres::ToolPostgres { ..Default::default() }) as Box), + // "chrome" => Ok(Box::new(ToolChrome { ..Default::default() }) as Box), + _ => Err(format!("Unknown integration name: {}", n)), } - - Ok(results) } -pub async fn validate_integration_value(name: &str, value: serde_yaml::Value) -> Result { - let integrations = get_empty_integrations(); - - match integrations.get(name) { - Some(i) => { - let j_value = i.from_yaml_validate_to_json(&value)?; - let yaml_value = serde_yaml::to_value(&j_value).map_err(|e| e.to_string())?; - Ok(yaml_value) - }, - None => Err(format!("Integration {} is not defined", name)) - } +pub fn icon_from_name(n: &str) -> String +{ + // match n { + // // "github" => Box::new(ToolGithub { ..Default::default() }) as Box, + // // "gitlab" => Box::new(ToolGitlab { ..Default::default() }) as Box, + // // "pdb" => Box::new(ToolPdb { ..Default::default() }) as Box, + // "postgres" => Box::new(integr_postgres::ToolPostgres { ..Default::default() }) as Box, + // // "chrome" => Box::new(ToolChrome { ..Default::default() }) as Box, + // _ => panic!("Unknown integration name: {}", n), + // } + return "".to_string(); } -pub async fn load_integration_tools( - gcx: Arc>, -) -> IndexMap>>> { - let paths = 
integrations_paths(gcx.clone()).await; - let integrations_yaml_value = { - let cache_dir = gcx.read().await.cache_dir.clone(); - let yaml_path = cache_dir.join("integrations.yaml"); - read_yaml_into_value(&yaml_path).await? - }; - let cache_dir = gcx.read().await.cache_dir.clone(); - let enabled_path = cache_dir.join("integrations-enabled.yaml"); - let enabled = match integrations_enabled_cfg(&enabled_path).await { - serde_yaml::Value::Mapping(map) => map.into_iter().filter_map(|(k, v)| { - if let (serde_yaml::Value::String(key), serde_yaml::Value::Bool(value)) = (k, v) { - Some((key, value)) - } else { - None - } - }).collect::>(), - _ => std::collections::HashMap::new(), - }; - - let integrations = get_integrations(gcx.clone()).await?; - - let mut tools = IndexMap::new(); - for (i_name, i) in integrations.iter() { - if !enabled.get(i_name).unwrap_or(&false) { - info!("Integration {} is disabled", i_name); - continue; - } - let tool = i.to_tool(); - tools.insert(i_name.clone(), Arc::new(AMutex::new(tool))); - } - Ok(tools) +pub fn integrations_list() -> Vec<&'static str> { + vec![ + // "github", + // "gitlab", + // "pdb", + "postgres", + // "chrome" + ] } -pub async fn json_for_integration( - yaml_path: &PathBuf, - value_from_integrations: Option<&serde_yaml::Value>, - integration: &Box, -) -> Result { - let tool_name = integration.name().clone(); - - let value = if yaml_path.exists() { - match read_yaml_into_value(yaml_path).await { - Ok(value) => integration.from_yaml_validate_to_json(&value).unwrap_or_else(|e| { - let e = format!("Problem converting integration to JSON: {}", e); - json!({"detail": e.to_string()}) - }), - Err(e) => { - let e = format!("Problem reading YAML from {}: {}", yaml_path.display(), e); - json!({"detail": e.to_string()}) - } - } - } else { - json!({"detail": format!("Cannot read {}. Probably, file does not exist", yaml_path.display())}) - }; - - let value_from_integrations = value_from_integrations.map_or(json!({"detail": format!("tool {tool_name} is not defined in integrations.yaml")}), |value| { - integration.from_yaml_validate_to_json(value).unwrap_or_else(|e| { - let e = format!("Problem converting integration to JSON: {}", e); - json!({"detail": e.to_string()}) - }) - }); - - match (value.get("detail"), value_from_integrations.get("detail")) { - (None, None) => { - Err(format!("Tool {tool_name} exists in both {tool_name}.yaml and integrations.yaml. 
Consider removing one of them.")) - }, - (Some(_), None) => { - Ok(value_from_integrations) - }, - (None, Some(_)) => { - Ok(value) - } - (Some(_), Some(_)) => { - Ok(value) - } - } -<<<<<<< HEAD - Ok(()) -} -async fn load_tool_from_yaml( - yaml_path: Option<&PathBuf>, - tool_constructor: fn(&serde_yaml::Value) -> Result, - value_from_integrations: Option<&serde_yaml::Value>, - enabled: Option<&bool>, - integrations: &mut IndexMap>>>, -) -> Result<(), String> { - let yaml_path = yaml_path.as_ref().expect("No yaml path"); - let tool_name = yaml_path.file_stem().expect("No file name").to_str().expect("No file name").to_string(); - if !enabled.unwrap_or(&false) { - info!("Integration {} is disabled", tool_name); - return Ok(()); - } - let tool = if yaml_path.exists() { - match read_yaml_into_value(yaml_path).await { - Ok(value) => { - match tool_constructor(&value) { - Ok(tool) => { - // integrations.insert(tool_name, Arc::new(AMutex::new(Box::new(tool) as Box))); - Some(tool) - } - Err(e) => { - warn!("Problem in {}: {}", yaml_path.display(), e); - None - } - } - } - Err(e) => { - warn!("Problem reading {:?}: {}", yaml_path, e); - None - } - } - } else { - None - }; - - let tool_from_integrations = value_from_integrations - .and_then(|value| match tool_constructor(&value) { - Ok(tool) => Some(tool), - Err(_) => None - }); - - match (tool, tool_from_integrations) { - (Some(_), Some(_)) => { - return Err(format!("Tool {tool_name} exists in both {tool_name}.yaml and integrations.yaml. Consider removing one of them.")); - }, - (Some(tool), None) | (None, Some(tool)) => { - integrations.insert(tool_name.clone(), Arc::new(AMutex::new(Box::new(tool) as Box))); - }, - _ => {} - } - - Ok(()) -======= ->>>>>>> 9b1345a1 (simplified code) -} +// pub fn get_integration_path(cache_dir: &PathBuf, name: &str) -> PathBuf { +// cache_dir.join("integrations.d").join(format!("{}.yaml", name)) +// } + + +// pub async fn validate_integration_value(name: &str, value: serde_yaml::Value) -> Result { +// let integrations = get_empty_integrations(); +// match integrations.get(name) { +// Some(i) => { +// let j_value: serde_json::Value = i.integr_yaml2json(&value)?; +// let yaml_value: serde_yaml::Value = serde_yaml::to_value(&j_value).map_err(|e| e.to_string())?; +// Ok(yaml_value) +// }, +// None => Err(format!("Integration {} is not defined", name)) +// } +// } + +// pub async fn load_integration_tools( +// gcx: Arc>, +// ) -> IndexMap>>> { +// let paths = integrations_paths(gcx.clone()).await; +// let integrations_yaml_value = { +// let cache_dir = gcx.read().await.cache_dir.clone(); +// let yaml_path = cache_dir.join("integrations.yaml"); +// read_yaml_into_value(&yaml_path).await? 
+// }; +// let cache_dir = gcx.read().await.cache_dir.clone(); +// // let enabled_path = cache_dir.join("integrations-enabled.yaml"); +// // let enabled = match integrations_enabled_cfg(&enabled_path).await { +// // serde_yaml::Value::Mapping(map) => map.into_iter().filter_map(|(k, v)| { +// // if let (serde_yaml::Value::String(key), serde_yaml::Value::Bool(value)) = (k, v) { +// // Some((key, value)) +// // } else { +// // None +// // } +// // }).collect::>(), +// // _ => std::collections::HashMap::new(), +// // }; + +// let integrations = get_integrations(gcx.clone()).await?; + +// let mut tools = IndexMap::new(); +// for (i_name, i) in integrations.iter() { +// // if !enabled.get(i_name).unwrap_or(&false) { +// // info!("Integration {} is disabled", i_name); +// // continue; +// // } +// let tool = i.integr_upgrade_to_tool(); +// tools.insert(i_name.clone(), Arc::new(AMutex::new(tool))); +// } +// Ok(tools) +// } + +// pub async fn json_for_integration( +// yaml_path: &PathBuf, +// value_from_integrations: Option<&serde_yaml::Value>, +// integration: &Box, +// ) -> Result { +// let tool_name = integration.integr_name().clone(); + +// let value = if yaml_path.exists() { +// match read_yaml_into_value(yaml_path).await { +// Ok(value) => integration.integr_yaml2json(&value).unwrap_or_else(|e| { +// let e = format!("Problem converting integration to JSON: {}", e); +// json!({"detail": e.to_string()}) +// }), +// Err(e) => { +// let e = format!("Problem reading YAML from {}: {}", yaml_path.display(), e); +// json!({"detail": e.to_string()}) +// } +// } +// } else { +// json!({"detail": format!("Cannot read {}. Probably, file does not exist", yaml_path.display())}) +// }; + +// let value_from_integrations = value_from_integrations.map_or(json!({"detail": format!("tool {tool_name} is not defined in integrations.yaml")}), |value| { +// integration.integr_yaml2json(value).unwrap_or_else(|e| { +// let e = format!("Problem converting integration to JSON: {}", e); +// json!({"detail": e.to_string()}) +// }) +// }); + +// match (value.get("detail"), value_from_integrations.get("detail")) { +// (None, None) => { +// Err(format!("Tool {tool_name} exists in both {tool_name}.yaml and integrations.yaml. 
Consider removing one of them.")) +// }, +// (Some(_), None) => { +// Ok(value_from_integrations) +// }, +// (None, Some(_)) => { +// Ok(value) +// } +// (Some(_), Some(_)) => { +// Ok(value) +// } +// } + +// Ok(()) +// } + +// async fn load_tool_from_yaml( +// yaml_path: Option<&PathBuf>, +// tool_constructor: fn(&serde_yaml::Value) -> Result, +// value_from_integrations: Option<&serde_yaml::Value>, +// enabled: Option<&bool>, +// integrations: &mut IndexMap>>>, +// ) -> Result<(), String> { +// let yaml_path = yaml_path.as_ref().expect("No yaml path"); +// let tool_name = yaml_path.file_stem().expect("No file name").to_str().expect("No file name").to_string(); +// if !enabled.unwrap_or(&false) { +// tracing::info!("Integration {} is disabled", tool_name); +// return Ok(()); +// } +// let tool = if yaml_path.exists() { +// match read_yaml_into_value(yaml_path).await { +// Ok(value) => { +// match tool_constructor(&value) { +// Ok(tool) => { +// // integrations.insert(tool_name, Arc::new(AMutex::new(Box::new(tool) as Box))); +// Some(tool) +// } +// Err(e) => { +// tracing::warn!("Problem in {}: {}", yaml_path.display(), e); +// None +// } +// } +// } +// Err(e) => { +// tracing::warn!("Problem reading {:?}: {}", yaml_path, e); +// None +// } +// } +// } else { +// None +// }; + +// let tool_from_integrations = value_from_integrations +// .and_then(|value| match tool_constructor(&value) { +// Ok(tool) => Some(tool), +// Err(_) => None +// }); + +// match (tool, tool_from_integrations) { +// (Some(_), Some(_)) => { +// return Err(format!("Tool {tool_name} exists in both {tool_name}.yaml and integrations.yaml. Consider removing one of them.")); +// }, +// (Some(tool), None) | (None, Some(tool)) => { +// integrations.insert(tool_name.clone(), Arc::new(AMutex::new(Box::new(tool) as Box))); +// }, +// _ => {} +// } + +// Ok(()) +// } pub const INTEGRATIONS_DEFAULT_YAML: &str = r#"# This file is used to configure integrations in Refact Agent. # If there is a syntax error in this file, no integrations will work. @@ -275,7 +256,7 @@ commands_deny: # valuable_top_or_bottom: "top" # the useful infomation more likely to be at the top or bottom? 
(default "top") # grep: "(?i)error|warning" # in contrast to regular grep this doesn't remove other lines from output, just prefers matching when approaching limit_lines or limit_chars (default "(?i)error") # grep_context_lines: 5 # leave that many lines around a grep match (default 5) -# remove_from_output: "process didn't exit" # some lines and very long and unwanted, this is also a regular expression (default "") +# remove_from_output: "process didn't exit" # some lines are very long and unwanted, this is also a regular expression (default "") #cmdline_services: # manage_py_runserver: diff --git a/src/integrations/running_integrations.rs b/src/integrations/running_integrations.rs new file mode 100644 index 000000000..d684717b7 --- /dev/null +++ b/src/integrations/running_integrations.rs @@ -0,0 +1,57 @@ +use std::path::PathBuf; +use std::sync::Arc; +use indexmap::IndexMap; +use tokio::sync::RwLock as ARwLock; +use tokio::sync::Mutex as AMutex; + +use crate::tools::tools_description::Tool; +use crate::global_context::GlobalContext; + + +pub async fn load_integration_tools( + gcx: Arc>, + _current_project: String, + allow_experimental: bool, +) -> IndexMap>>> { + let (global_dir, _workspace_folders_arc) = { + let gcx_locked = gcx.read().await; + (gcx_locked.config_dir.clone(), gcx_locked.documents_state.workspace_folders.clone()) + }; + let mut config_folders: Vec = Vec::new(); + // XXX filter _workspace_folders_arc that fit _current_project + config_folders.push(global_dir); + + let mut error_log: Vec = Vec::new(); + let lst: Vec<&str> = crate::integrations::integrations_list(); + let records = crate::integrations::setting_up_integrations::read_integrations_d(&config_folders, &lst, &mut error_log); + + let mut tools = IndexMap::new(); + for rec in records { + if !rec.on_your_laptop { + continue; + } + if !rec.integr_config_exists { + continue; + } + let mut integr = match crate::integrations::integration_from_name(&rec.integr_name) { + Ok(x) => x, + Err(e) => { + tracing::error!("Failed to load integration {}: {}", rec.integr_name, e); + continue; + } + }; + integr.integr_settings_apply(&rec.config_unparsed); + tools.insert(rec.integr_name.clone(), Arc::new(AMutex::new(integr.integr_upgrade_to_tool()))); + } + + for e in error_log { + tracing::error!( + "{}:{} {:?}", + crate::nicer_logs::last_n_chars(&&e.integr_config_path, 30), + e.error_line, + e.error_msg, + ); + } + + tools +} diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs new file mode 100644 index 000000000..e0f543855 --- /dev/null +++ b/src/integrations/setting_up_integrations.rs @@ -0,0 +1,300 @@ +use std::fs; +use std::path::PathBuf; +use std::sync::Arc; +use regex::Regex; +use serde::Serialize; +use tokio::sync::RwLock as ARwLock; +use tokio::fs as async_fs; +use tokio::io::AsyncWriteExt; + +use crate::global_context::GlobalContext; +// use crate::tools::tools_description::Tool; +// use crate::yaml_configs::create_configs::{integrations_enabled_cfg, read_yaml_into_value}; + + +#[derive(Serialize, Default)] +pub struct YamlError { + pub integr_config_path: String, + pub error_line: usize, // starts with 1, zero if invalid + pub error_msg: String, +} + +#[derive(Serialize, Default)] +pub struct IntegrationRecord { + pub project_path: String, + pub integr_name: String, + pub integr_config_path: String, + pub integr_config_exists: bool, + pub on_your_laptop: bool, + pub when_isolated: bool, + #[serde(skip_serializing)] + pub config_unparsed: serde_json::Value, +} + 
+#[derive(Serialize, Default)] +pub struct IntegrationWithIconResult { + pub integrations: Vec, + pub error_log: Vec, +} + +pub fn read_integrations_d( + config_folders: &Vec, + lst: &[&str], + error_log: &mut Vec, +) -> Vec { + let mut integrations = Vec::new(); + for config_dir in config_folders { + for integr_name in lst.iter() { + let path_str = join_config_path(config_dir, integr_name); + let path = PathBuf::from(path_str.clone()); + let mut rec: IntegrationRecord = Default::default(); + let (_integr_name, project_path) = match split_path_into_project_and_integration(&path) { + Ok(x) => x, + Err(e) => { + tracing::error!("error deriving project path: {}", e); + continue; + } + }; + let short_pp = crate::nicer_logs::last_n_chars(&project_path, 10); + rec.project_path = project_path.clone(); + rec.integr_name = integr_name.to_string(); + rec.integr_config_path = path_str.clone(); + rec.integr_config_exists = path.exists(); + if rec.integr_config_exists { + match fs::read_to_string(&path) { + Ok(file_content) => match serde_yaml::from_str::(&file_content) { + Ok(yaml_value) => { + rec.config_unparsed = serde_json::to_value(yaml_value.clone()).unwrap(); + if let Some(available) = yaml_value.get("available").and_then(|v| v.as_mapping()) { + rec.on_your_laptop = available.get("on_your_laptop").and_then(|v| v.as_bool()).unwrap_or(false); + rec.when_isolated = available.get("when_isolated").and_then(|v| v.as_bool()).unwrap_or(false); + } else { + tracing::info!("{} no 'available' mapping in `{}`", short_pp, integr_name); + } + } + Err(e) => { + let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); + error_log.push(YamlError { + integr_config_path: path_str.to_string(), + error_line: e.location().map(|loc| loc.line()).unwrap_or(0), + error_msg: e.to_string(), + }); + tracing::warn!("failed to parse {}{}: {}", path_str, location, e.to_string()); + } + }, + Err(e) => { + error_log.push(YamlError { + integr_config_path: path_str.to_string(), + error_line: 0, + error_msg: e.to_string(), + }); + tracing::warn!("failed to read {}: {}", path_str, e.to_string()); + } + } + } else { + tracing::info!("{} no config file `{}`", short_pp, integr_name); + } + integrations.push(rec); + } + } + integrations +} + +pub fn join_config_path(config_dir: &PathBuf, integr_name: &str) -> String { + config_dir.join("integrations.d").join(format!("{}.yaml", integr_name)).to_string_lossy().into_owned() +} + +pub async fn config_dirs( + gcx: Arc>, +) -> Vec { + let (global_dir, workspace_folders_arc) = { + let gcx_locked = gcx.read().await; + (gcx_locked.config_dir.clone(), gcx_locked.documents_state.workspace_folders.clone()) + }; + let mut config_folders = workspace_folders_arc.lock().unwrap().clone(); + config_folders = config_folders.iter().map(|folder| folder.join(".refact")).collect(); + config_folders.push(global_dir); + config_folders +} + +pub fn split_path_into_project_and_integration(cfg_path: &PathBuf) -> Result<(String, String), String> { + let path_str = cfg_path.to_string_lossy(); + let re_per_project = Regex::new(r"^(.*)[\\/]\.refact[\\/](integrations\.d)[\\/](.+)\.yaml$").unwrap(); + let re_global = Regex::new(r"^(.*)[\\/]\.config[\\/](refact[\\/](integrations\.d)[\\/](.+)\.yaml$)").unwrap(); + + if let Some(caps) = re_per_project.captures(&path_str) { + let project_path = caps.get(1).map_or(String::new(), |m| m.as_str().to_string()); + let integr_name = caps.get(3).map_or(String::new(), |m| m.as_str().to_string()); + Ok((integr_name, 
project_path)) + } else if let Some(caps) = re_global.captures(&path_str) { + let integr_name = caps.get(4).map_or(String::new(), |m| m.as_str().to_string()); + Ok((integr_name, String::new())) + } else { + Err(format!("invalid path: {}", cfg_path.display())) + } +} + +pub async fn integrations_all_with_icons( + gcx: Arc>, +) -> IntegrationWithIconResult { + let config_folders = config_dirs(gcx).await; + let lst: Vec<&str> = crate::integrations::integrations_list(); + let mut error_log: Vec = Vec::new(); + let integrations = read_integrations_d(&config_folders, &lst, &mut error_log); + // rec.integr_icon = crate::integrations::icon_from_name(integr_name); + IntegrationWithIconResult { + integrations, + error_log, + } +} + +#[derive(Serialize, Default)] +pub struct IntegrationGetResult { + pub project_path: String, + pub integr_name: String, + pub integr_config_path: String, + pub integr_schema: serde_json::Value, + pub integr_values: serde_json::Value, + pub error_log: Vec, +} + +pub async fn integration_config_get( + integr_config_path: String, +) -> Result { + let sanitized_path = crate::files_correction::canonical_path(&integr_config_path); + let integr_name = sanitized_path.file_stem().and_then(|s| s.to_str()).unwrap_or_default().to_string(); + if integr_name.is_empty() { + return Err(format!("can't derive integration name from file name")); + } + + let (integr_name, project_path) = split_path_into_project_and_integration(&sanitized_path)?; + let mut result = IntegrationGetResult { + project_path: project_path.clone(), + integr_name: integr_name.clone(), + integr_config_path: integr_config_path.clone(), + integr_schema: serde_json::Value::Null, + integr_values: serde_json::Value::Null, + error_log: Vec::new(), + }; + + let mut integration_box = crate::integrations::integration_from_name(integr_name.as_str())?; + result.integr_schema = { + let y: serde_yaml::Value = serde_yaml::from_str(integration_box.integr_schema()).unwrap(); + let j = serde_json::to_value(y).unwrap(); + j + }; + + let mut available = serde_json::json!({ + "on_your_laptop": false, + "when_isolated": false + }); + if sanitized_path.exists() { + match fs::read_to_string(&sanitized_path) { + Ok(content) => { + match serde_yaml::from_str::(&content) { + Ok(y) => { + let j = serde_json::to_value(y).unwrap(); + available["on_your_laptop"] = j.get("available").and_then(|v| v.get("on_your_laptop")).and_then(|v| v.as_bool()).unwrap_or(false).into(); + available["when_isolated"] = j.get("available").and_then(|v| v.get("when_isolated")).and_then(|v| v.as_bool()).unwrap_or(false).into(); + let did_it_work = integration_box.integr_settings_apply(&j); + if let Err(e) = did_it_work { + tracing::error!("oops: {}", e); + } + } + Err(e) => { + return Err(format!("failed to parse: {}", e.to_string())); + } + }; + } + Err(e) => { + return Err(format!("failed to read configuration file: {}", e.to_string())); + } + }; + } + + result.integr_values = integration_box.integr_settings_as_json(); + result.integr_values["available"] = available; + Ok(result) +} + +pub async fn integration_config_save( + integr_config_path: &String, + integr_values: &serde_json::Value, +) -> Result<(), String> { + let config_path = crate::files_correction::canonical_path(integr_config_path); + let (integr_name, _project_path) = crate::integrations::setting_up_integrations::split_path_into_project_and_integration(&config_path) + .map_err(|e| format!("Failed to split path: {}", e))?; + let mut integration_box = 
crate::integrations::integration_from_name(integr_name.as_str()) + .map_err(|e| format!("Failed to load integrations: {}", e))?; + + integration_box.integr_settings_apply(integr_values)?; // this will produce "no field XXX" errors + + let mut sanitized_json: serde_json::Value = integration_box.integr_settings_as_json(); + tracing::info!("posted values:\n{}", serde_json::to_string_pretty(integr_values).unwrap()); + if !sanitized_json.as_object_mut().unwrap().contains_key("available") { + sanitized_json["available"] = serde_json::Value::Object(serde_json::Map::new()); + } + sanitized_json["available"]["on_your_laptop"] = integr_values.pointer("/available/on_your_laptop").cloned().unwrap_or(serde_json::Value::Bool(false)); + sanitized_json["available"]["when_isolated"] = integr_values.pointer("/available/when_isolated").cloned().unwrap_or(serde_json::Value::Bool(false)); + tracing::info!("writing to {}:\n{}", config_path.display(), serde_json::to_string_pretty(&sanitized_json).unwrap()); + let sanitized_yaml = serde_yaml::to_value(sanitized_json).unwrap(); + + let config_dir = config_path.parent().ok_or_else(|| { + "Failed to get parent directory".to_string() + })?; + async_fs::create_dir_all(config_dir).await.map_err(|e| { + format!("Failed to create {}: {}", config_dir.display(), e) + })?; + + let mut file = async_fs::File::create(&config_path).await.map_err(|e| { + format!("Failed to create {}: {}", config_path.display(), e) + })?; + let sanitized_yaml_string = serde_yaml::to_string(&sanitized_yaml).unwrap(); + file.write_all(sanitized_yaml_string.as_bytes()).await.map_err(|e| { + format!("Failed to write to {}: {}", config_path.display(), e) + })?; + + Ok(()) +} + +#[cfg(test)] +mod tests { + // use super::*; + use crate::integrations::integr_abstract::IntegrationTrait; + use crate::integrations::yaml_schema::ISchema; + use serde_yaml; + use indexmap::IndexMap; + use std::fs::File; + use std::io::Write; + + #[tokio::test] + async fn test_integration_schemas() { + let integrations = crate::integrations::integrations_list(); + for name in integrations { + let mut integration_box = crate::integrations::integration_from_name(name).unwrap(); + let schema_json = { + let y: serde_yaml::Value = serde_yaml::from_str(integration_box.integr_schema()).unwrap(); + let j = serde_json::to_value(y).unwrap(); + j + }; + let schema_yaml: serde_yaml::Value = serde_json::from_value(schema_json.clone()).unwrap(); + let compare_me1 = serde_yaml::to_string(&schema_yaml).unwrap(); + let schema_struct: ISchema = serde_json::from_value(schema_json).unwrap(); + let schema_struct_yaml = serde_json::to_value(&schema_struct).unwrap(); + let compare_me2 = serde_yaml::to_string(&schema_struct_yaml).unwrap(); + if compare_me1 != compare_me2 { + eprintln!("schema mismatch for integration `{}`:\nOriginal:\n{}\nSerialized:\n{}", name, compare_me1, compare_me2); + let original_file_path = format!("/tmp/original_schema_{}.yaml", name); + let serialized_file_path = format!("/tmp/serialized_schema_{}.yaml", name); + let mut original_file = File::create(&original_file_path).unwrap(); + let mut serialized_file = File::create(&serialized_file_path).unwrap(); + original_file.write_all(compare_me1.as_bytes()).unwrap(); + serialized_file.write_all(compare_me2.as_bytes()).unwrap(); + eprintln!("cat {}", original_file_path); + eprintln!("cat {}", serialized_file_path); + eprintln!("diff {} {}", original_file_path, serialized_file_path); + panic!("oops"); + } + } + } +} diff --git a/src/integrations/yaml_schema.rs 
b/src/integrations/yaml_schema.rs new file mode 100644 index 000000000..eaaeed2db --- /dev/null +++ b/src/integrations/yaml_schema.rs @@ -0,0 +1,61 @@ +use serde::{Deserialize, Serialize}; +use indexmap::IndexMap; +use crate::call_validation::ChatMessage; + + +#[derive(Serialize, Deserialize, Debug, Default)] +pub struct DockerService { + pub image: String, + #[serde(default)] + pub environment: IndexMap, +} + +#[derive(Serialize, Deserialize, Debug, Default)] +pub struct ISchemaField { + pub f_type: String, + #[serde(default, skip_serializing_if="is_default")] + pub f_desc: String, + #[serde(default, skip_serializing_if="is_default")] + pub f_default: String, + #[serde(default, skip_serializing_if="is_default")] + pub f_placeholder: String, + #[serde(default, skip_serializing_if="is_empty")] + pub smartlinks: Vec, +} + +#[derive(Serialize, Deserialize, Debug, Default)] +pub struct ISmartLink { + pub sl_label: String, + #[serde(default, skip_serializing_if="is_empty")] + pub sl_chat: Vec, + #[serde(default, skip_serializing_if="is_default")] + pub sl_goto: String, +} + +#[derive(Serialize, Deserialize, Debug, Default)] +pub struct ISchemaAvailable { + pub on_your_laptop_possible: bool, + pub when_isolated_possible: bool, +} + +#[derive(Serialize, Deserialize, Debug, Default)] +pub struct ISchemaDocker { + pub new_container_default: DockerService, + pub smartlinks: Vec, +} + +#[derive(Serialize, Deserialize, Debug, Default)] +pub struct ISchema { + pub fields: IndexMap, + pub available: ISchemaAvailable, + pub smartlinks: Vec, + pub docker: ISchemaDocker, +} + +fn is_default(t: &T) -> bool { + t == &T::default() +} + +fn is_empty(t: &Vec) -> bool { + t.is_empty() +} diff --git a/src/main.rs b/src/main.rs index 4e89dbd16..0b0d443fd 100644 --- a/src/main.rs +++ b/src/main.rs @@ -71,7 +71,8 @@ async fn main() { rayon::ThreadPoolBuilder::new().num_threads(cpu_num / 2).build_global().unwrap(); let home_dir = home::home_dir().ok_or(()).expect("failed to find home dir"); let cache_dir = home_dir.join(".cache/refact"); - let (gcx, ask_shutdown_receiver, shutdown_flag, cmdline) = global_context::create_global_context(cache_dir.clone()).await; + let config_dir = home_dir.join(".config/refact"); + let (gcx, ask_shutdown_receiver, shutdown_flag, cmdline) = global_context::create_global_context(cache_dir.clone(), config_dir.clone()).await; let mut writer_is_stderr = false; let (logs_writer, _guard) = if cmdline.logs_stderr { writer_is_stderr = true; diff --git a/src/subchat.rs b/src/subchat.rs index 4daf5c182..168465c93 100644 --- a/src/subchat.rs +++ b/src/subchat.rs @@ -60,8 +60,7 @@ async fn create_chat_post_and_scratchpad( only_deterministic_messages, subchat_tool_parameters: tconfig.subchat_tool_parameters.clone(), postprocess_parameters: PostprocessSettings::new(), - chat_id: "".to_string(), - style: None, + ..Default::default() }; let (model_name, scratchpad_name, scratchpad_patch, n_ctx, supports_tools, _supports_multimodality, supports_clicks) = lookup_chat_scratchpad( diff --git a/src/tools/tool_patch_aux/tickets_parsing.rs b/src/tools/tool_patch_aux/tickets_parsing.rs index ac3271901..bdf596fcf 100644 --- a/src/tools/tool_patch_aux/tickets_parsing.rs +++ b/src/tools/tool_patch_aux/tickets_parsing.rs @@ -71,7 +71,7 @@ pub struct TicketToApply { } pub fn good_error_text(reason: &str, tickets: &Vec, resolution: Option) -> (String, Option) { - let mut text = format!("Couldn't create patch for tickets: '{}'.\nReason: {reason}", tickets.join(", ")); + let text = format!("Couldn't create patch for 
tickets: '{}'.\nReason: {reason}", tickets.join(", ")); if let Some(resolution) = resolution { let cd_format = format!("šŸ’æ {resolution}"); return (text, Some(cd_format)) diff --git a/src/tools/tools_description.rs b/src/tools/tools_description.rs index 57c01551d..544a4d4e2 100644 --- a/src/tools/tools_description.rs +++ b/src/tools/tools_description.rs @@ -1,20 +1,19 @@ -use indexmap::IndexMap; +use std::path::PathBuf; use std::collections::HashMap; use std::sync::Arc; +use indexmap::IndexMap; use serde_json::{Value, json}; use serde::{Deserialize, Serialize}; use async_trait::async_trait; use tokio::sync::RwLock as ARwLock; use tokio::sync::Mutex as AMutex; -use tracing::error; + use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ChatUsage, ContextEnum}; use crate::global_context::GlobalContext; - -use crate::integrations::load_integration_tools; use crate::yaml_configs::create_configs::read_yaml_into_value; +// use crate::integrations::docker::integr_docker::ToolDocker; -use crate::integrations::docker::integr_docker::ToolDocker; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct CommandsRequireConfirmationConfig { @@ -69,7 +68,7 @@ pub async fn read_integrations_yaml(cache_dir: &PathBuf) -> Result>, - supports_clicks: bool, + _supports_clicks: bool, // XXX ) -> Result>>>, String> { let (ast_on, vecdb_on, allow_experimental, cache_dir) = { let gcx_locked = gcx.read().await; @@ -112,33 +111,33 @@ pub async fn tools_merged_and_filtered( if allow_experimental { // The approach here: if it exists, it shouldn't have syntax errors, note the "?" - if let Some(gh_config) = integrations_value.get("github") { - tools_all.insert("github".to_string(), Arc::new(AMutex::new(Box::new(ToolGithub::new_from_yaml(gh_config)?) as Box))); - } - if let Some(gl_config) = integrations_value.get("gitlab") { - tools_all.insert("gitlab".to_string(), Arc::new(AMutex::new(Box::new(ToolGitlab::new_from_yaml(gl_config)?) as Box))); - } - if let Some(pdb_config) = integrations_value.get("pdb") { - tools_all.insert("pdb".to_string(), Arc::new(AMutex::new(Box::new(ToolPdb::new_from_yaml(pdb_config)?) as Box))); - } - if let Some(chrome_config) = integrations_value.get("chrome") { - tools_all.insert("chrome".to_string(), Arc::new(AMutex::new(Box::new(ToolChrome::new_from_yaml(chrome_config, supports_clicks)?) as Box))); - } - if let Some(postgres_config) = integrations_value.get("postgres") { - tools_all.insert("postgres".to_string(), Arc::new(AMutex::new(Box::new(ToolPostgres::new_from_yaml(postgres_config)?) as Box))); - } - if let Some(docker_config) = integrations_value.get("docker") { - tools_all.insert("docker".to_string(), Arc::new(AMutex::new(Box::new(ToolDocker::new_from_yaml(docker_config)?) as Box))); - } - if let Ok(caps) = crate::global_context::try_load_caps_quickly_if_not_present(gcx.clone(), 0).await { - let have_thinking_model = { - let caps_locked = caps.read().unwrap(); - caps_locked.running_models.contains(&"o1-mini".to_string()) - }; - if have_thinking_model { - tools_all.insert("deep_thinking".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_deep_thinking::ToolDeepThinking{}) as Box))); - } - } + // if let Some(gh_config) = integrations_value.get("github") { + // tools_all.insert("github".to_string(), Arc::new(AMutex::new(Box::new(ToolGithub::new_from_yaml(gh_config)?) 
as Box))); + // } + // if let Some(gl_config) = integrations_value.get("gitlab") { + // tools_all.insert("gitlab".to_string(), Arc::new(AMutex::new(Box::new(ToolGitlab::new_from_yaml(gl_config)?) as Box))); + // } + // if let Some(pdb_config) = integrations_value.get("pdb") { + // tools_all.insert("pdb".to_string(), Arc::new(AMutex::new(Box::new(ToolPdb::new_from_yaml(pdb_config)?) as Box))); + // } + // if let Some(chrome_config) = integrations_value.get("chrome") { + // tools_all.insert("chrome".to_string(), Arc::new(AMutex::new(Box::new(ToolChrome::new_from_yaml(chrome_config, supports_clicks)?) as Box))); + // } + // if let Some(postgres_config) = integrations_value.get("postgres") { + // tools_all.insert("postgres".to_string(), Arc::new(AMutex::new(Box::new(ToolPostgres::new_from_yaml(postgres_config)?) as Box))); + // } + // if let Some(docker_config) = integrations_value.get("docker") { + // tools_all.insert("docker".to_string(), Arc::new(AMutex::new(Box::new(ToolDocker::new_from_yaml(docker_config)?) as Box))); + // } + // if let Ok(caps) = crate::global_context::try_load_caps_quickly_if_not_present(gcx.clone(), 0).await { + // let have_thinking_model = { + // let caps_locked = caps.read().unwrap(); + // caps_locked.running_models.contains(&"o1-mini".to_string()) + // }; + // if have_thinking_model { + // tools_all.insert("deep_thinking".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_deep_thinking::ToolDeepThinking{}) as Box))); + // } + // } // #[cfg(feature="vecdb")] // tools_all.insert("knowledge".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_knowledge::ToolGetKnowledge{}) as Box))); // match load_integration_tools(gcx.clone()).await { @@ -161,11 +160,12 @@ pub async fn tools_merged_and_filtered( tools_all.extend(cmdline_tools); } - // let integrations = load_integration_tools(gcx.clone()).await; - // tools_all.extend(integrations); - // #[cfg(feature="vecdb")] - // tools_all.insert("knowledge".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_knowledge::ToolGetKnowledge{}) as Box))); - // } + let integrations = crate::integrations::running_integrations::load_integration_tools( + gcx.clone(), + "".to_string(), + allow_experimental, + ).await; + tools_all.extend(integrations); let mut filtered_tools = IndexMap::new(); for (tool_name, tool_arc) in tools_all { @@ -185,6 +185,8 @@ pub async fn tools_merged_and_filtered( pub async fn commands_require_confirmation_rules_from_integrations_yaml(gcx: Arc>) -> Result { + // XXX + // let integrations_value = read_integrations_yaml(gcx.clone()).await?; let cache_dir = gcx.read().await.cache_dir.clone(); let integrations_value = read_integrations_yaml(&cache_dir).await?; diff --git a/src/vecdb/vdb_highlev.rs b/src/vecdb/vdb_highlev.rs index 4eb34cd5c..1db424c41 100644 --- a/src/vecdb/vdb_highlev.rs +++ b/src/vecdb/vdb_highlev.rs @@ -1,8 +1,5 @@ -use std::collections::HashMap; use std::path::PathBuf; use std::sync::Arc; -use std::sync::Mutex as StdMutex; -use indexmap::IndexMap; use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; use tokio::task::JoinHandle; use async_trait::async_trait; @@ -16,7 +13,7 @@ use crate::global_context::{CommandLine, GlobalContext}; use crate::knowledge::{lance_search, MemoriesDatabase}; use crate::vecdb::vdb_cache::VecDBCache; use crate::vecdb::vdb_lance::VecDBHandler; -use crate::vecdb::vdb_structs::{MemoRecord, MemoSearchResult, OngoingWork, SearchResult, VecDbStatus, VecdbConstants, VecdbSearch}; +use crate::vecdb::vdb_structs::{MemoRecord, MemoSearchResult, 
SearchResult, VecDbStatus, VecdbConstants, VecdbSearch}; use crate::vecdb::vdb_thread::{vecdb_start_background_tasks, vectorizer_enqueue_dirty_memory, vectorizer_enqueue_files, FileVectorizerService}; @@ -36,7 +33,6 @@ pub struct VecDb { pub vectorizer_service: Arc>, cmdline: CommandLine, // TODO: take from command line what's needed, don't store a copy constants: VecdbConstants, - pub mem_ongoing: Arc>>, } async fn vecdb_test_request( @@ -251,7 +247,6 @@ impl VecDb { vectorizer_service, cmdline: cmdline.clone(), constants: constants.clone(), - mem_ongoing: Arc::new(StdMutex::new(HashMap::::new())), }) } @@ -471,37 +466,6 @@ pub async fn memories_search( Ok(MemoSearchResult { query_text: query.clone(), results }) } -pub async fn ongoing_update_or_create( - vec_db: Arc>>, - goal: String, - ongoing_progress: IndexMap, - ongoing_action_new_sequence: IndexMap, - ongoing_output: IndexMap>, -) -> Result<(), String> { - let ongoing_map_arc = { - let vec_db_guard = vec_db.lock().await; - let vec_db = vec_db_guard.as_ref().ok_or("VecDb is not initialized")?; - vec_db.mem_ongoing.clone() - }; - let mut ongoing_map = ongoing_map_arc.lock().unwrap(); - if let Some(ongoing) = ongoing_map.get_mut(&goal) { - ongoing.ongoing_progress = ongoing_progress; - ongoing.ongoing_action_sequences.push(ongoing_action_new_sequence); - ongoing.ongoing_output.extend(ongoing_output); - ongoing.ongoing_attempt_n += 1; - } else { - let new_ongoing = OngoingWork { - ongoing_goal: goal.clone(), - ongoing_attempt_n: 1, - ongoing_progress, - ongoing_action_sequences: vec![ongoing_action_new_sequence], - ongoing_output, - }; - ongoing_map.insert(goal, new_ongoing); - } - Ok(()) -} - // pub async fn ongoing_find( // vec_db: Arc>>, // goal: String, @@ -519,46 +483,46 @@ pub async fn ongoing_update_or_create( // } // } -pub async fn ongoing_dump( - vec_db: Arc>>, -) -> Result { - let ongoing_map_arc = { - let vec_db_guard = vec_db.lock().await; - let vec_db = vec_db_guard.as_ref().ok_or("VecDb is not initialized")?; - vec_db.mem_ongoing.clone() - }; - let ongoing_map = ongoing_map_arc.lock().unwrap(); - - let mut output = String::new(); - for (_, ongoing) in ongoing_map.iter() { - let mut ordered_map = IndexMap::new(); - ordered_map.insert("PROGRESS".to_string(), serde_json::Value::Object(ongoing.ongoing_progress.clone().into_iter().collect())); - let action_sequences: Vec = ongoing.ongoing_action_sequences - .iter() - .map(|map| serde_json::Value::Object(map.clone().into_iter().collect())) - .collect(); - ordered_map.insert("TRIED_ACTION_SEQUENCES".to_string(), serde_json::Value::Array(action_sequences)); - let output_value: serde_json::Value = serde_json::Value::Object( - ongoing.ongoing_output - .clone() - .into_iter() - .map(|(k, v)| (k, serde_json::Value::Object(v.into_iter().collect()))) - .collect() - ); - ordered_map.insert("OUTPUT".to_string(), output_value); - output.push_str(&format!( - "šŸ’æ Ongoing session with goal: {}\nAttempt number: {}\nSummary of progress:\n\n{}\n\n", - ongoing.ongoing_goal, - ongoing.ongoing_attempt_n, - serde_json::to_string_pretty(&ordered_map).unwrap() - )); - } - if output.is_empty() { - output = "No ongoing work found.\n".to_string(); - } +// pub async fn ongoing_dump( +// vec_db: Arc>>, +// ) -> Result { +// let ongoing_map_arc = { +// let vec_db_guard = vec_db.lock().await; +// let vec_db = vec_db_guard.as_ref().ok_or("VecDb is not initialized")?; +// vec_db.mem_ongoing.clone() +// }; +// let ongoing_map = ongoing_map_arc.lock().unwrap(); - Ok(output) -} +// let mut output = 
String::new(); +// for (_, ongoing) in ongoing_map.iter() { +// let mut ordered_map = IndexMap::new(); +// ordered_map.insert("PROGRESS".to_string(), serde_json::Value::Object(ongoing.ongoing_progress.clone().into_iter().collect())); +// let action_sequences: Vec = ongoing.ongoing_action_sequences +// .iter() +// .map(|map| serde_json::Value::Object(map.clone().into_iter().collect())) +// .collect(); +// ordered_map.insert("TRIED_ACTION_SEQUENCES".to_string(), serde_json::Value::Array(action_sequences)); +// let output_value: serde_json::Value = serde_json::Value::Object( +// ongoing.ongoing_output +// .clone() +// .into_iter() +// .map(|(k, v)| (k, serde_json::Value::Object(v.into_iter().collect()))) +// .collect() +// ); +// ordered_map.insert("OUTPUT".to_string(), output_value); +// output.push_str(&format!( +// "šŸ’æ Ongoing session with goal: {}\nAttempt number: {}\nSummary of progress:\n\n{}\n\n", +// ongoing.ongoing_goal, +// ongoing.ongoing_attempt_n, +// serde_json::to_string_pretty(&ordered_map).unwrap() +// )); +// } +// if output.is_empty() { +// output = "No ongoing work found.\n".to_string(); +// } + +// Ok(output) +// } #[async_trait] impl VecdbSearch for VecDb { diff --git a/src/yaml_configs/create_configs.rs b/src/yaml_configs/create_configs.rs index b2bae4a34..a6a9eb486 100644 --- a/src/yaml_configs/create_configs.rs +++ b/src/yaml_configs/create_configs.rs @@ -6,9 +6,8 @@ use tokio::io::AsyncWriteExt; use sha2::{Sha256, Digest}; use serde_yaml; use std::path::{Path, PathBuf}; -use tracing::{error, warn}; use crate::global_context::GlobalContext; -use crate::integrations::{get_empty_integrations, get_integration_path}; +// use crate::integrations::{get_empty_integrations, get_integration_path}; const DEFAULT_CHECKSUM_FILE: &str = "default-checksums.yaml"; @@ -28,7 +27,7 @@ pub async fn yaml_configs_try_create_all(gcx: Arc>) -> St for (file_name, content) in files { let file_path = cache_dir.join(file_name); if let Err(e) = _yaml_file_exists_or_create(gcx.clone(), &file_path, content).await { - warn!("{}", e); + tracing::warn!("{}", e); results.push(format!("Error processing {:?}: {}", file_path, e)); } else { results.push(file_path.to_string_lossy().to_string()); @@ -37,38 +36,39 @@ pub async fn yaml_configs_try_create_all(gcx: Arc>) -> St let integrations_d = cache_dir.join("integrations.d"); if let Err(e) = tokio::fs::create_dir_all(&integrations_d).await { - warn!("Failed to create directory {:?}: {}", integrations_d, e); + tracing::warn!("Failed to create directory {:?}: {}", integrations_d, e); results.push(format!("Error creating directory {:?}: {}", integrations_d, e)); } - let integrations_enabled = cache_dir.join("integrations-enabled.yaml"); - let integrations = get_empty_integrations(); - - for (file_name, content) in integrations.iter().map(|(k, v)| (k.clone(), v.default_value())) { - let file_path = get_integration_path(&cache_dir, &file_name); - if let Err(e) = _yaml_file_exists_or_create(gcx.clone(), &file_path, &content).await { - warn!("{}", e); - results.push(format!("Error processing {:?}: {}", file_path, e)); - } else { - results.push(file_path.to_string_lossy().to_string()); - } - let integr_name = file_path.file_stem().unwrap().to_string_lossy().to_string(); - let mut enabled_cfg = integrations_enabled_cfg(&integrations_enabled).await; - if let None = enabled_cfg.get(&integr_name) { - if let serde_yaml::Value::Mapping(ref mut map) = enabled_cfg { - map.insert(serde_yaml::Value::String(integr_name), serde_yaml::Value::Bool(false)); - } - if let Err(e) 
= write_yaml_value(&integrations_enabled, &enabled_cfg).await { - error!("Failed to write {}: {}", integrations_enabled.display(), e); - panic!("{}", e); - } - } - } + + // let integrations_enabled = cache_dir.join("integrations-enabled.yaml"); + // let integrations = get_empty_integrations(); + + // for (file_name, content) in integrations.iter().map(|(k, v)| (k.clone(), v.integr_settings_default())) { + // let file_path = get_integration_path(&cache_dir, &file_name); + // if let Err(e) = _yaml_file_exists_or_create(gcx.clone(), &file_path, &content).await { + // tracing::warn!("{}", e); + // results.push(format!("Error processing {:?}: {}", file_path, e)); + // } else { + // results.push(file_path.to_string_lossy().to_string()); + // } + // let integr_name = file_path.file_stem().unwrap().to_string_lossy().to_string(); + // let mut enabled_cfg = integrations_enabled_cfg(&integrations_enabled).await; + // if let None = enabled_cfg.get(&integr_name) { + // if let serde_yaml::Value::Mapping(ref mut map) = enabled_cfg { + // map.insert(serde_yaml::Value::String(integr_name), serde_yaml::Value::Bool(false)); + // } + // if let Err(e) = write_yaml_value(&integrations_enabled, &enabled_cfg).await { + // error!("Failed to write {}: {}", integrations_enabled.display(), e); + // panic!("{}", e); + // } + // } + // } results.get(0).cloned().unwrap_or_default() } async fn _yaml_file_exists_or_create( - gcx: Arc>, + gcx: Arc>, config_path: &PathBuf, the_default: &str ) -> Result @@ -139,11 +139,11 @@ async fn update_checksum(cache_dir: &Path, config_name: String, checksum: &str) Ok(()) } -pub async fn integrations_enabled_cfg( - integrations_enabled_path: &PathBuf, -) -> serde_yaml::Value { - read_yaml_into_value(integrations_enabled_path).await.unwrap_or_else(|_| serde_yaml::Value::Mapping(Default::default())) -} +// pub async fn integrations_enabled_cfg( +// integrations_enabled_path: &PathBuf, +// ) -> serde_yaml::Value { +// read_yaml_into_value(integrations_enabled_path).await.unwrap_or_else(|_| serde_yaml::Value::Mapping(Default::default())) +// } pub async fn read_yaml_into_value(yaml_path: &PathBuf) -> Result { let file = std::fs::File::open(&yaml_path).map_err( @@ -159,17 +159,17 @@ pub async fn read_yaml_into_value(yaml_path: &PathBuf) -> Result Result<(), String> { - let content = serde_yaml::to_string(value).map_err(|e| format!("Failed to serialize YAML: {}", e))?; +// pub async fn write_yaml_value(path: &Path, value: &serde_yaml::Value) -> Result<(), String> { +// let content = serde_yaml::to_string(value).map_err(|e| format!("Failed to serialize YAML: {}", e))?; - let mut file = tokio::fs::OpenOptions::new() - .write(true) - .truncate(true) - .create(true) - .open(path) - .await - .map_err(|e| format!("Failed to open file {}: {}", path.display(), e))?; +// let mut file = tokio::fs::OpenOptions::new() +// .write(true) +// .truncate(true) +// .create(true) +// .open(path) +// .await +// .map_err(|e| format!("Failed to open file {}: {}", path.display(), e))?; - AsyncWriteExt::write_all(&mut file, content.as_bytes()).await - .map_err(|e| format!("Failed to write to file {}: {}", path.display(), e)) -} +// AsyncWriteExt::write_all(&mut file, content.as_bytes()).await +// .map_err(|e| format!("Failed to write to file {}: {}", path.display(), e)) +// } diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index 0098758be..899b80ebc 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs 
@@ -54,7 +54,7 @@ PROMPT_PINS: |
   When using šŸ“PARTIAL_EDIT, include some of the original code above and to help understand where those changes must be placed.
   If the user gives you a function to rewrite, prefer šŸ“REWRITE_ONE_SYMBOL over šŸ“PARTIAL_EDIT because it can be applied faster.
-  If a file is big, šŸ“PARTIAL_EDIT is better than šŸ“REWRITE_WHOLE_FILE. Generate several šŸ“-tickets for all the changes necessary.
+  If the file is big, šŸ“PARTIAL_EDIT is better than šŸ“REWRITE_WHOLE_FILE. Generate several šŸ“-tickets for all the changes necessary.
   Don't use šŸ“REWRITE_ONE_SYMBOL if you are changing many symbols at once.
 
@@ -146,6 +146,51 @@ PROMPT_AGENTIC_EXPERIMENTAL_KNOWLEDGE: |
   IT IS FORBIDDEN TO JUST CALL TOOLS WITHOUT EXPLAINING. EXPLAIN FIRST!
   SERIOUSLY ABOUT CALLING knowledge(). IF IT'S ANYTHING ABOUT THE PROJECT, CALL knowledge() FIRST.
 
+PROMPT_CONFIGURATOR: |
+  You are Refact Agent, a coding assistant. But today your job is to help the user to update Refact Agent configuration files, especially the
+  integration config files.
+
+  %PROMPT_PINS%
+  %WORKSPACE_INFO%
+
+  The integration config format is the following YAML:
+  ```
+  integration_name:
+    field1: "value1"
+    field2: "value2"
+    available:
+      on_your_laptop:
+        - project_pattern: "*my_workspace/my_project1"
+          enable: true
+        - project_pattern: "*my_project2"
+          enable: true
+      when_isolated:
+        - image_pattern: "docker_image_for_my_project1_*"
+          enable: true
+    docker:
+      new_container_default:
+        image: "name_like_on_docker_hub:latest"
+        environment:
+          VARIABLE1: "VALUE1"
+      existing_containers:
+        my_container1:
+          image: "my_image1:latest"
+          environment:
+            VARIABLE2: "VALUE2"
+  ```
+  The first user message will have all the existing configs, docker images and containers.
+
+  The next user message will start with šŸ”§ and it will specify your exact mission for this chat.
+
+  Your approximate plan:
+  - look at the current project by calling tree()
+  - using cat() look inside files like Cargo.toml package.json that might help you with your mission
+  - derive as much information as possible from the project itself
+  - write a markdown table that has 2 columns, key parameters on lhs, and values you were able to derive from the project (or just reasonable defaults) on rhs
+  - write 1 paragraph explanation of what you are about to do
+  - ask the user if they want to change anything
+  - write updated configs using šŸ“REWRITE_WHOLE_FILE
+
 system_prompts:
   default:
@@ -156,6 +201,9 @@ system_prompts:
   agentic_tools:
     text: "%PROMPT_AGENTIC_TOOLS%"
     show: never
+  configurator:
+    text: "%PROMPT_CONFIGURATOR%"
+    show: experimental
   agentic_experimental_knowledge:
     text: "%PROMPT_AGENTIC_EXPERIMENTAL_KNOWLEDGE%"
     show: experimental
@@ -348,3 +396,4 @@ pub const COMPILED_IN_INITIAL_USER_YAML : &str = r#"# You can find the compiled-
 # Replace all variables with animal names, such that they lose any original meaning.
"#; + diff --git a/src/yaml_configs/customization_loader.rs b/src/yaml_configs/customization_loader.rs index 4c9557700..1fb083070 100644 --- a/src/yaml_configs/customization_loader.rs +++ b/src/yaml_configs/customization_loader.rs @@ -118,7 +118,7 @@ fn _replace_variables_in_system_prompts(config: &mut CustomizationYaml, variable } } -fn load_and_mix_with_users_config( +pub fn load_and_mix_with_users_config( user_yaml: &str, caps_yaml: &str, caps_default_system_prompt: &str, @@ -250,5 +250,6 @@ mod tests { assert_eq!(config.system_prompts.get("exploration_tools").is_some(), true); assert_eq!(config.system_prompts.get("agentic_tools").is_some(), true); assert_eq!(config.system_prompts.get("agentic_experimental_knowledge").is_some(), true); + assert_eq!(config.system_prompts.get("configurator").is_some(), true); } } From 96275d210dba6355ea513664e8492e1e77127436 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Tue, 26 Nov 2024 09:02:21 +0100 Subject: [PATCH 003/185] string_short, string_long, string_multiline --- src/integrations/integr_postgres.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index c75a3d11f..22a851b31 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -175,27 +175,27 @@ impl Tool for ToolPostgres { pub const POSTGRES_INTEGRATION_SCHEMA: &str = r#" fields: host: - f_type: string + f_type: string_long f_desc: "Connect to this host, for example 127.0.0.1 or docker container name." f_placeholder: marketing_db_container port: - f_type: int + f_type: string_short f_desc: "Which port to use." f_default: "5432" user: - f_type: string + f_type: string_short f_placeholder: john_doe password: - f_type: string + f_type: string_short f_default: "$POSTGRES_PASSWORD" smartlinks: - sl_label: "Open passwords.yaml" sl_goto: "EDITOR:passwords.yaml" database: - f_type: string + f_type: string_short f_placeholder: marketing_db psql_binary_path: - f_type: string + f_type: string_long f_desc: "If it can't find a path to `psql` you can provide it here, leave blank if not sure." 
f_placeholder: "psql" available: From c00cb99999f317234f99a96ca9a3ef5e7da2c4ea Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Tue, 26 Nov 2024 17:01:41 +0100 Subject: [PATCH 004/185] postgres port is now string --- src/integrations/integr_postgres.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index 22a851b31..c05542abd 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -16,7 +16,7 @@ use crate::integrations::integr_abstract::IntegrationTrait; pub struct SettingsPostgres { pub psql_binary_path: String, pub host: String, - pub port: usize, + pub port: String, pub user: String, pub password: String, pub database: String, @@ -67,7 +67,7 @@ impl ToolPostgres { .env("PGPASSWORD", &self.settings_postgres.password) .env("PGHOST", &self.settings_postgres.host) .env("PGUSER", &self.settings_postgres.user) - .env("PGPORT", &format!("{}", self.settings_postgres.port)) + .env("PGPORT", &self.settings_postgres.port) .env("PGDATABASE", &self.settings_postgres.database) .arg("-v") .arg("ON_ERROR_STOP=1") From a05cf9af5edb3aadfc26d07b0003e9ecb5dd84b0 Mon Sep 17 00:00:00 2001 From: JegernOUTT Date: Wed, 27 Nov 2024 20:58:04 +1030 Subject: [PATCH 005/185] Add 'patch-apply-all' route and handler - Introduce the 'handle_v1_patch_apply_all' function to handle requests for applying all patches. - Add a new API route '/patch-apply-all' for the above handler. - Update ticket parsing to include message index tracking. - Include additional structs 'PatchApplyAllPost' and 'PatchApplyAllResponse' for request and response payloads. --- src/http/routers/v1.rs | 4 +- src/http/routers/v1/patch.rs | 90 ++++++++++++++++++++- src/tools/tool_patch_aux/tickets_parsing.rs | 14 ++-- 3 files changed, 99 insertions(+), 9 deletions(-) diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index bc9a6d749..202cac228 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -29,7 +29,7 @@ use crate::http::routers::v1::status::handle_v1_rag_status; use crate::http::routers::v1::customization::handle_v1_customization; use crate::http::routers::v1::customization::handle_v1_config_path; use crate::http::routers::v1::gui_help_handlers::handle_v1_fullpath; -use crate::http::routers::v1::patch::handle_v1_patch_single_file_from_ticket; +use crate::http::routers::v1::patch::{handle_v1_patch_apply_all, handle_v1_patch_single_file_from_ticket}; use crate::http::routers::v1::subchat::{handle_v1_subchat, handle_v1_subchat_single}; use crate::http::routers::v1::sync_files::handle_v1_sync_files_extract_tar; use crate::http::routers::v1::system_prompt::handle_v1_system_prompt; @@ -122,7 +122,7 @@ pub fn make_v1_router() -> Router { .route("/docker-container-action", telemetry_post!(handle_v1_docker_container_action)) .route("/patch-single-file-from-ticket", telemetry_post!(handle_v1_patch_single_file_from_ticket)) - // .route("/patch-apply-all", telemetry_post!(handle_v1_patch_single_file_from_ticket)) + .route("/patch-apply-all", telemetry_post!(handle_v1_patch_apply_all)) // experimental .route("/get-dashboard-plots", telemetry_get!(get_dashboard_plots)) diff --git a/src/http/routers/v1/patch.rs b/src/http/routers/v1/patch.rs index 056d280a2..4a1287a17 100644 --- a/src/http/routers/v1/patch.rs +++ b/src/http/routers/v1/patch.rs @@ -4,6 +4,7 @@ use axum::Extension; use axum::http::{Response, StatusCode}; use hashbrown::HashMap; use hyper::Body; +use itertools::Itertools; use 
serde::{Deserialize, Serialize}; use tokio::sync::{RwLock as ARwLock, Mutex as AMutex}; use crate::at_commands::at_commands::AtCommandsContext; @@ -12,8 +13,9 @@ use crate::custom_error::ScratchError; use crate::diffs::{ApplyDiffResult, correct_and_validate_chunks, read_files_n_apply_diff_chunks, unwrap_diff_apply_outputs, ApplyDiffOutput, ApplyDiffUnwrapped}; use crate::global_context::GlobalContext; use crate::http::routers::v1::chat::deserialize_messages_from_post; -use crate::tools::tool_patch_aux::tickets_parsing::{get_and_correct_active_tickets, get_tickets_from_messages}; +use crate::tools::tool_patch_aux::tickets_parsing::{correct_and_validate_active_ticket, get_and_correct_active_tickets, get_tickets_from_messages, TicketToApply}; use crate::tools::tool_patch::process_tickets; +use crate::tools::tool_patch_aux::diff_apply::diff_apply; use crate::tools::tool_patch_aux::postprocessing_utils::fill_out_already_applied_status; use crate::tools::tools_execute::unwrap_subchat_params; @@ -24,6 +26,11 @@ pub struct PatchPost { pub ticket_ids: Vec, } +#[derive(Deserialize)] +pub struct PatchApplyAllPost { + pub messages: Vec, +} + #[derive(Serialize)] pub struct PatchResponse { state: Vec, @@ -31,6 +38,11 @@ pub struct PatchResponse { chunks: Vec, } +#[derive(Serialize)] +pub struct PatchApplyAllResponse { + chunks: Vec, +} + pub fn resolve_diff_apply_outputs( outputs: HashMap, diff_chunks: &Vec, @@ -133,3 +145,79 @@ pub async fn handle_v1_patch_single_file_from_ticket( }).unwrap())) .unwrap()) } + +pub async fn handle_v1_patch_apply_all( + Extension(global_context): Extension>>, + body_bytes: hyper::body::Bytes, +) -> axum::response::Result, ScratchError> { + let post = serde_json::from_slice::(&body_bytes) + .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; + let messages = deserialize_messages_from_post(&post.messages)?; + + let ccx = Arc::new(AMutex::new(AtCommandsContext::new( + global_context.clone(), + 8096, + 10, + false, + messages, + "".to_string(), + false, + ).await)); + let params = unwrap_subchat_params(ccx.clone(), "patch").await.map_err(|e| { + ScratchError::new(StatusCode::BAD_REQUEST, format!("Failed to unwrap subchat params: {}", e)) + })?; + { + let mut ccx_lock = ccx.lock().await; + ccx_lock.n_ctx = params.subchat_n_ctx; + } + + // leave only the latest ticket for each file + let all_tickets = get_tickets_from_messages(ccx.clone()).await; + let mut filename_by_ticket: HashMap = HashMap::new(); + for ticket in all_tickets.values() { + if let Some(el) = filename_by_ticket.get(&ticket.filename_before) { + if ticket.message_idx <= el.message_idx { + continue + } else { + filename_by_ticket.remove(&ticket.filename_before); + } + } + let mut ticket = ticket.clone(); + correct_and_validate_active_ticket(global_context.clone(), &mut ticket).await.map_err(|e| + ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("Invalid ticket: {e}")) + )?; + filename_by_ticket.insert(ticket.filename_before.clone(), ticket); + } + let mut active_tickets = filename_by_ticket.values().cloned().collect::>(); + let active_indices = active_tickets.iter().map(|ticket| ticket.id.clone()).collect::>(); + + let mut usage = ChatUsage { ..Default::default() }; + let diff_chunks_maybe = process_tickets( + ccx.clone(), + &mut active_tickets, + active_indices, + ¶ms, + &"patch_123".to_string(), + &mut usage, + ).await; + if !active_tickets.is_empty() { + let bad_ticket_ids = active_tickets.iter().map(|ticket| ticket.id.clone()).join(", "); + return 
Err(ScratchError::new( + StatusCode::UNPROCESSABLE_ENTITY, format!("Couldn't process some of the tickets: {bad_ticket_ids}" + ))) + } + let mut diff_chunks = diff_chunks_maybe.map_err(|(e, _)| + ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, e) + )?; + diff_apply(global_context.clone(), &mut diff_chunks).await.map_err(|err| ScratchError::new( + StatusCode::UNPROCESSABLE_ENTITY, format!("Couldn't apply the diff: {err}")) + )?; + + Ok(Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "application/json") + .body(Body::from(serde_json::to_string_pretty(&PatchApplyAllResponse { + chunks: diff_chunks + }).unwrap())) + .unwrap()) +} diff --git a/src/tools/tool_patch_aux/tickets_parsing.rs b/src/tools/tool_patch_aux/tickets_parsing.rs index bdf596fcf..defd09038 100644 --- a/src/tools/tool_patch_aux/tickets_parsing.rs +++ b/src/tools/tool_patch_aux/tickets_parsing.rs @@ -58,6 +58,7 @@ pub struct TicketToApply { pub orig_action: PatchAction, #[serde(default)] pub fallback_action: Option, + pub message_idx: usize, pub id: String, pub filename_before: String, #[serde(default)] @@ -79,7 +80,7 @@ pub fn good_error_text(reason: &str, tickets: &Vec, resolution: Option>, ticket: &mut TicketToApply) -> Result<(), String> { +pub async fn correct_and_validate_active_ticket(gcx: Arc>, ticket: &mut TicketToApply) -> Result<(), String> { fn _error_text(reason: &str, ticket: &TicketToApply) -> String { format!("Failed to validate TICKET '{}': {}", ticket.id, reason) } @@ -173,8 +174,8 @@ fn split_preserving_quotes(s: &str) -> Vec { result } -async fn parse_tickets(gcx: Arc>, content: &str) -> Vec { - async fn process_ticket(gcx: Arc>, lines: &[&str], line_num: usize) -> Result<(usize, TicketToApply), String> { +async fn parse_tickets(gcx: Arc>, content: &str, message_idx: usize) -> Vec { + async fn process_ticket(gcx: Arc>, lines: &[&str], line_num: usize, message_idx: usize) -> Result<(usize, TicketToApply), String> { let mut ticket = TicketToApply::default(); let header = if let Some(idx) = lines[line_num].find("šŸ“") { split_preserving_quotes(&lines[line_num][idx..].trim()) @@ -182,6 +183,7 @@ async fn parse_tickets(gcx: Arc>, content: &str) -> Vec { match PatchAction::from_string(action) { @@ -247,7 +249,7 @@ async fn parse_tickets(gcx: Arc>, content: &str) -> Vec { // if there is something to put to the extra context if let Some(l) = line_num_before_first_block { @@ -280,8 +282,8 @@ pub async fn get_tickets_from_messages( (ccx_lock.global_context.clone(), ccx_lock.messages.clone()) }; let mut tickets: HashMap = HashMap::new(); - for message in messages.iter().filter(|x| x.role == "assistant") { - for ticket in parse_tickets(gcx.clone(), &message.content.content_text_only()).await.into_iter() { + for (idx, message) in messages.iter().enumerate().filter(|(_, x)| x.role == "assistant") { + for ticket in parse_tickets(gcx.clone(), &message.content.content_text_only(), idx).await.into_iter() { tickets.insert(ticket.id.clone(), ticket); } } From 62bdb8e7f165456fede5f1e507d280fa90e357fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 26 Nov 2024 10:19:27 +0100 Subject: [PATCH 006/185] feat: add as_any to tools, to allow downcasting --- src/integrations/docker/integr_docker.rs | 2 ++ src/integrations/integr_chrome.rs | 2 ++ src/integrations/integr_github.rs | 2 ++ src/integrations/integr_gitlab.rs | 2 ++ src/integrations/integr_pdb.rs | 2 ++ src/integrations/integr_postgres.rs | 2 ++ src/tools/tool_ast_definition.rs | 2 ++ 
src/tools/tool_ast_reference.rs | 2 ++ src/tools/tool_cat.rs | 2 ++ src/tools/tool_cmdline.rs | 2 ++ src/tools/tool_deep_thinking.rs | 2 ++ src/tools/tool_knowledge.rs | 2 ++ src/tools/tool_locate_search.rs | 2 ++ src/tools/tool_patch.rs | 2 ++ src/tools/tool_relevant_files.rs | 2 ++ src/tools/tool_search.rs | 2 ++ src/tools/tool_tree.rs | 2 ++ src/tools/tool_web.rs | 2 ++ src/tools/tools_description.rs | 3 ++- 19 files changed, 38 insertions(+), 1 deletion(-) diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index df44d9064..575522109 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -100,6 +100,8 @@ impl ToolDocker { #[async_trait] impl Tool for ToolDocker { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 71e819218..0be23f41b 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -101,6 +101,8 @@ impl Integration for ToolChrome { #[async_trait] impl Tool for ToolChrome { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/integrations/integr_github.rs b/src/integrations/integr_github.rs index d05ca8789..fd23f2585 100644 --- a/src/integrations/integr_github.rs +++ b/src/integrations/integr_github.rs @@ -56,6 +56,8 @@ impl Integration for ToolGithub { #[async_trait] impl Tool for ToolGithub { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, _ccx: Arc>, diff --git a/src/integrations/integr_gitlab.rs b/src/integrations/integr_gitlab.rs index eb6e844e3..c76d2b3ef 100644 --- a/src/integrations/integr_gitlab.rs +++ b/src/integrations/integr_gitlab.rs @@ -55,6 +55,8 @@ impl Integration for ToolGitlab{ #[async_trait] impl Tool for ToolGitlab { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, _ccx: Arc>, diff --git a/src/integrations/integr_pdb.rs b/src/integrations/integr_pdb.rs index 39a300b1b..5b52fa3f2 100644 --- a/src/integrations/integr_pdb.rs +++ b/src/integrations/integr_pdb.rs @@ -91,6 +91,8 @@ impl Integration for ToolPdb { #[async_trait] impl Tool for ToolPdb { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index c05542abd..34250047a 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -98,6 +98,8 @@ impl ToolPostgres { #[async_trait] impl Tool for ToolPostgres { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, _ccx: Arc>, diff --git a/src/tools/tool_ast_definition.rs b/src/tools/tool_ast_definition.rs index 83e8e11e9..7d878a9dd 100644 --- a/src/tools/tool_ast_definition.rs +++ b/src/tools/tool_ast_definition.rs @@ -15,6 +15,8 @@ pub struct ToolAstDefinition; #[async_trait] impl Tool for ToolAstDefinition { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/tools/tool_ast_reference.rs b/src/tools/tool_ast_reference.rs index d0891e3d5..1231fdfeb 100644 --- a/src/tools/tool_ast_reference.rs +++ b/src/tools/tool_ast_reference.rs @@ -15,6 +15,8 @@ pub struct ToolAstReference; #[async_trait] impl Tool for ToolAstReference { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: 
Arc>, diff --git a/src/tools/tool_cat.rs b/src/tools/tool_cat.rs index 608830815..eab123933 100644 --- a/src/tools/tool_cat.rs +++ b/src/tools/tool_cat.rs @@ -23,6 +23,8 @@ pub struct ToolCat; #[async_trait] impl Tool for ToolCat { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/tools/tool_cmdline.rs b/src/tools/tool_cmdline.rs index 8a9f9c089..96821e6ec 100644 --- a/src/tools/tool_cmdline.rs +++ b/src/tools/tool_cmdline.rs @@ -349,6 +349,8 @@ async fn execute_background_command( #[async_trait] impl Tool for ToolCmdline { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/tools/tool_deep_thinking.rs b/src/tools/tool_deep_thinking.rs index 36ea57983..af7c28915 100644 --- a/src/tools/tool_deep_thinking.rs +++ b/src/tools/tool_deep_thinking.rs @@ -15,6 +15,8 @@ pub struct ToolDeepThinking; #[async_trait] impl Tool for ToolDeepThinking { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/tools/tool_knowledge.rs b/src/tools/tool_knowledge.rs index f1b3f9632..1121fdaed 100644 --- a/src/tools/tool_knowledge.rs +++ b/src/tools/tool_knowledge.rs @@ -18,6 +18,8 @@ pub struct ToolGetKnowledge; #[async_trait] impl Tool for ToolGetKnowledge { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/tools/tool_locate_search.rs b/src/tools/tool_locate_search.rs index 71fef8de9..c300ca560 100644 --- a/src/tools/tool_locate_search.rs +++ b/src/tools/tool_locate_search.rs @@ -95,6 +95,8 @@ Don't write backquotes, json format only. #[async_trait] impl Tool for ToolLocateSearch { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/tools/tool_patch.rs b/src/tools/tool_patch.rs index 9b4a7129d..8cab7ea0a 100644 --- a/src/tools/tool_patch.rs +++ b/src/tools/tool_patch.rs @@ -125,6 +125,8 @@ fn return_cd_instruction_or_error( #[async_trait] impl Tool for ToolPatch { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/tools/tool_relevant_files.rs b/src/tools/tool_relevant_files.rs index 41da04047..e5d0595d2 100644 --- a/src/tools/tool_relevant_files.rs +++ b/src/tools/tool_relevant_files.rs @@ -61,6 +61,8 @@ pub struct ToolRelevantFiles; #[async_trait] impl Tool for ToolRelevantFiles { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/tools/tool_search.rs b/src/tools/tool_search.rs index 20daee653..30a681bd9 100644 --- a/src/tools/tool_search.rs +++ b/src/tools/tool_search.rs @@ -58,6 +58,8 @@ async fn execute_att_search( #[async_trait] impl Tool for ToolSearch { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/tools/tool_tree.rs b/src/tools/tool_tree.rs index 50ca3e01a..871414969 100644 --- a/src/tools/tool_tree.rs +++ b/src/tools/tool_tree.rs @@ -23,6 +23,8 @@ fn preformat_path(path: &String) -> String { #[async_trait] impl Tool for ToolTree { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/tools/tool_web.rs b/src/tools/tool_web.rs index 2a8188c80..e8b969af5 100644 --- a/src/tools/tool_web.rs +++ b/src/tools/tool_web.rs @@ -14,6 +14,8 @@ pub struct ToolWeb; #[async_trait] impl Tool for ToolWeb { + fn as_any(&self) -> &dyn std::any::Any { self } + async fn 
tool_execute( &mut self, _ccx: Arc>, diff --git a/src/tools/tools_description.rs b/src/tools/tools_description.rs index 544a4d4e2..b73ce2e44 100644 --- a/src/tools/tools_description.rs +++ b/src/tools/tools_description.rs @@ -11,7 +11,6 @@ use tokio::sync::Mutex as AMutex; use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ChatUsage, ContextEnum}; use crate::global_context::GlobalContext; -use crate::yaml_configs::create_configs::read_yaml_into_value; // use crate::integrations::docker::integr_docker::ToolDocker; @@ -23,6 +22,8 @@ pub struct CommandsRequireConfirmationConfig { #[async_trait] pub trait Tool: Send + Sync { + fn as_any(&self) -> &dyn std::any::Any; + async fn tool_execute( &mut self, ccx: Arc>, From 905156276d287cd06636b36f26d53b6acc33dbe7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 26 Nov 2024 12:21:17 +0100 Subject: [PATCH 007/185] feat: make docker work with the new way of config --- src/http/routers/v1/chat.rs | 2 +- .../docker/docker_container_manager.rs | 24 +-- .../docker/docker_ssh_tunnel_utils.rs | 4 - src/integrations/docker/integr_docker.rs | 156 +++++++++++++----- src/integrations/mod.rs | 4 +- src/integrations/yaml_schema.rs | 9 +- 6 files changed, 141 insertions(+), 58 deletions(-) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 3337cc1d6..de6308fd2 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -175,7 +175,7 @@ async fn _chat( let docker_tool_maybe = docker_tool_load(gcx.clone()).await .map_err(|e| info!("No docker tool available: {e}")).ok().map(Arc::new); let run_chat_threads_inside_container = docker_tool_maybe.clone() - .map(|docker_tool| docker_tool.integration_docker.run_chat_threads_inside_container) + .map(|docker_tool| docker_tool.settings_docker.run_chat_threads_inside_container) .unwrap_or(false); let should_execute_remotely = run_chat_threads_inside_container && !gcx.read().await.cmdline.inside_container; diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index a71737369..c8f5afdb6 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -107,7 +107,7 @@ pub async fn docker_container_check_status_or_start( Ok(()) => {} Err(e) => { warn!("SSH tunnel error: {}, restarting tunnel..", e); - let ssh_config = docker.integration_docker.ssh_config.clone().ok_or_else(|| "No ssh config for docker container".to_string())?; + let ssh_config = docker.settings_docker.ssh_config.clone().ok_or_else(|| "No ssh config for docker container".to_string())?; docker_container_session.connection = DockerContainerConnectionEnum::SshTunnel( ssh_tunnel_open(&mut ssh_tunnel.forwarded_ports, &ssh_config).await? 
); @@ -121,14 +121,14 @@ pub async fn docker_container_check_status_or_start( Ok(()) } None => { - let ssh_config_maybe = docker.integration_docker.ssh_config.clone(); + let ssh_config_maybe = docker.settings_docker.ssh_config.clone(); const LSP_PORT: &str = "8001"; let mut ports_to_forward = if ssh_config_maybe.is_some() { - docker.integration_docker.ports.iter() + docker.settings_docker.ports.iter() .map(|p| Port {external: "0".to_string(), internal: p.internal.clone()}).collect::>() } else { - docker.integration_docker.ports.clone() + docker.settings_docker.ports.clone() }; ports_to_forward.insert(0, Port {external: "0".to_string(), internal: LSP_PORT.to_string()}); @@ -139,13 +139,13 @@ pub async fn docker_container_check_status_or_start( let host_lsp_port = exposed_ports.iter().find(|p| p.internal == LSP_PORT) .ok_or_else(|| "No LSP port exposed".to_string())?.external.clone(); - let keep_containers_alive_for_x_minutes = docker.integration_docker.keep_containers_alive_for_x_minutes; + let keep_containers_alive_for_x_minutes = docker.settings_docker.keep_containers_alive_for_x_minutes; let connection = match ssh_config_maybe { Some(ssh_config) => { let mut ports_to_forward_through_ssh = exposed_ports.into_iter() .map(|exposed_port| { - let matched_external_port = docker.integration_docker.ports.iter() + let matched_external_port = docker.settings_docker.ports.iter() .find(|configured_port| configured_port.internal == exposed_port.internal) .map_or_else(|| "0".to_string(), |forwarded_port| forwarded_port.external.clone()); Port { @@ -169,8 +169,8 @@ pub async fn docker_container_check_status_or_start( }; docker_container_sync_workspace(gcx.clone(), &docker, &container_id, &lsp_port_to_connect).await?; - if !docker.integration_docker.command.is_empty() { - let cmd_to_execute = format!("exec --detach {} {}", container_id, docker.integration_docker.command); + if !docker.settings_docker.command.is_empty() { + let cmd_to_execute = format!("exec --detach {} {}", container_id, docker.settings_docker.command); match docker.command_execute(&cmd_to_execute, gcx.clone(), false).await { Ok((cmd_stdout, cmd_stderr)) => { info!("Command executed: {cmd_stdout}\n{cmd_stderr}") }, Err(e) => { error!("Command execution failed: {}", e) }, @@ -232,12 +232,12 @@ async fn docker_container_create( lsp_port: &str, gcx: Arc>, ) -> Result { - let docker_image_id = docker.integration_docker.docker_image_id.clone(); + let docker_image_id = docker.settings_docker.docker_image_id.clone(); if docker_image_id.is_empty() { return Err("No image ID to run container from, please specify one.".to_string()); } - let workspace_folder = docker.integration_docker.container_workspace_folder.clone(); - let host_lsp_path = docker.integration_docker.host_lsp_path.clone(); + let workspace_folder = docker.settings_docker.container_workspace_folder.clone(); + let host_lsp_path = docker.settings_docker.host_lsp_path.clone(); let (address_url, api_key) = { let gcx_locked = gcx.read().await; @@ -351,7 +351,7 @@ async fn docker_container_sync_workspace( .into_iter() .next() .ok_or_else(|| "No workspace folders found".to_string())?; - let container_workspace_folder = PathBuf::from(&docker.integration_docker.container_workspace_folder); + let container_workspace_folder = PathBuf::from(&docker.settings_docker.container_workspace_folder); let temp_tar_file = TempfileBuilder::new().suffix(".tar").tempfile() .map_err(|e| format!("Error creating temporary tar file: {}", e))?.into_temp_path(); diff --git 
a/src/integrations/docker/docker_ssh_tunnel_utils.rs b/src/integrations/docker/docker_ssh_tunnel_utils.rs index d5b5b20d8..1040bba98 100644 --- a/src/integrations/docker/docker_ssh_tunnel_utils.rs +++ b/src/integrations/docker/docker_ssh_tunnel_utils.rs @@ -10,14 +10,10 @@ use crate::integrations::docker::docker_container_manager::Port; #[derive(Clone, Serialize, Deserialize, Debug)] pub struct SshConfig { pub host: String, - #[serde(default = "default_user")] pub user: String, - #[serde(default = "default_port")] pub port: u16, pub identity_file: Option, } -fn default_user() -> String { "root".to_string() } -fn default_port() -> u16 { 22 } pub struct SshTunnel { pub forwarded_ports: Vec, diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index 575522109..285da4ec0 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -9,55 +9,64 @@ use serde_json::Value; use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ChatContent, ChatMessage, ContextEnum}; use crate::global_context::GlobalContext; -use crate::tools::tools_description::{read_integrations_yaml, Tool}; +use crate::integrations::integr_abstract::IntegrationTrait; +use crate::integrations::running_integrations::load_integration_tools; +use crate::tools::tools_description::Tool; use crate::integrations::docker::docker_ssh_tunnel_utils::{SshConfig, forward_remote_docker_if_needed}; use crate::integrations::docker::docker_container_manager::Port; -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct IntegrationDocker { - #[serde(default = "default_connect_to_daemon_at")] +#[derive(Clone, Serialize, Deserialize, Default, Debug)] +pub struct SettingsDocker { pub connect_to_daemon_at: String, - #[serde(default = "default_docker_cli_path")] pub docker_cli_path: String, pub ssh_config: Option, - #[serde(default = "default_container_workspace_folder")] pub container_workspace_folder: String, - #[serde(default)] pub docker_image_id: String, - #[serde(default = "default_host_lsp_path")] pub host_lsp_path: String, - #[serde(default)] pub run_chat_threads_inside_container: bool, - #[serde(default = "default_label")] pub label: String, - #[serde(default)] pub command: String, - #[serde(default = "default_keep_containers_alive_for_x_minutes")] pub keep_containers_alive_for_x_minutes: u64, - #[serde(default)] pub ports: Vec, } -fn default_connect_to_daemon_at() -> String { "unix:///var/run/docker.sock".to_string() } -fn default_docker_cli_path() -> String { "docker".to_string() } -fn default_container_workspace_folder() -> String { "/app".to_string() } -fn default_host_lsp_path() -> String { "/opt/refact/bin/refact-lsp".to_string() } -fn default_label() -> String { "refact".to_string() } -fn default_keep_containers_alive_for_x_minutes() -> u64 { 60 } +#[derive(Clone, Default, Debug)] pub struct ToolDocker { - pub integration_docker: IntegrationDocker, + pub settings_docker: SettingsDocker, } -impl ToolDocker { - pub fn new_from_yaml(docker_config: &serde_yaml::Value) -> Result { - let integration_docker = serde_yaml::from_value::(docker_config.clone()) - .map_err(|e| { - let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); - format!("{}{}", e.to_string(), location) - })?; - Ok(Self { integration_docker }) +impl IntegrationTrait for ToolDocker { + fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { + match 
serde_json::from_value::(value.clone()) { + Ok(settings_docker) => { + tracing::info!("Docker settings applied: {:?}", settings_docker); + self.settings_docker = settings_docker + }, + Err(e) => { + tracing::error!("Failed to apply settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } + Ok(()) + } + + fn integr_settings_as_json(&self) -> Value { + serde_json::to_value(&self.settings_docker).unwrap() + } + + fn integr_upgrade_to_tool(&self) -> Box { + Box::new(ToolDocker { + settings_docker: self.settings_docker.clone() + }) as Box } + fn integr_schema(&self) -> &str + { + DOCKER_INTEGRATION_SCHEMA + } +} + +impl ToolDocker { pub async fn command_execute(&self, command: &str, gcx: Arc>, fail_if_stderr_is_not_empty: bool) -> Result<(String, String), String> { let mut command_args = split_command(&command)?; @@ -66,10 +75,10 @@ impl ToolDocker { return Err("Docker commands that are interactive or blocking are not supported".to_string()); } - command_append_label_if_creates_resource(&mut command_args, &self.integration_docker.label); + command_append_label_if_creates_resource(&mut command_args, &self.settings_docker.label); let docker_host = self.get_docker_host(gcx.clone()).await?; - let output = Command::new(&self.integration_docker.docker_cli_path) + let output = Command::new(&self.settings_docker.docker_cli_path) .arg("-H") .arg(&docker_host) .args(&command_args) @@ -88,12 +97,12 @@ impl ToolDocker { pub async fn get_docker_host(&self, gcx: Arc>) -> Result { - match &self.integration_docker.ssh_config { + match &self.settings_docker.ssh_config { Some(ssh_config) => { - let local_port = forward_remote_docker_if_needed(&self.integration_docker.connect_to_daemon_at, ssh_config, gcx.clone()).await?; + let local_port = forward_remote_docker_if_needed(&self.settings_docker.connect_to_daemon_at, ssh_config, gcx.clone()).await?; Ok(format!("127.0.0.1:{}", local_port)) }, - None => Ok(self.integration_docker.connect_to_daemon_at.clone()), + None => Ok(self.settings_docker.connect_to_daemon_at.clone()), } } } @@ -141,11 +150,10 @@ impl Tool for ToolDocker { } pub async fn docker_tool_load(gcx: Arc>) -> Result { - let cache_dir = gcx.read().await.cache_dir.clone(); - let integrations_yaml = read_integrations_yaml(&cache_dir).await?; - let docker_config = integrations_yaml.get("docker") - .ok_or_else(|| "No docker integration found in integrations.yaml".to_string())?; - Ok(ToolDocker::new_from_yaml(docker_config)?) + let tools = load_integration_tools(gcx.clone(), "".to_string(), true).await; + let docker_tool = tools.get("docker").cloned().ok_or("Docker integration not found")? + .lock().await.as_any().downcast_ref::().cloned().unwrap(); + Ok(docker_tool) } fn parse_command(args: &HashMap) -> Result{ @@ -234,4 +242,74 @@ fn command_append_label_if_creates_resource(command_args: &mut Vec, labe break; } } -} \ No newline at end of file +} + +pub const DOCKER_INTEGRATION_SCHEMA: &str = r#" +fields: + connect_to_daemon_at: + f_type: string + f_desc: "The address to connect to the Docker daemon." + f_default: "unix:///var/run/docker.sock" + docker_cli_path: + f_type: string + f_desc: "Path to the Docker CLI executable." + f_default: "docker" + ssh_config: + f_type: object + f_desc: "SSH configuration for connecting to remote Docker daemons." + f_fields: + host: + f_type: string + f_desc: "The SSH host." + user: + f_type: string + f_desc: "The SSH user." + f_default: "root" + port: + f_type: int + f_desc: "The SSH port." 
+ f_default: "22" + identity_file: + f_type: string + f_desc: "Path to the SSH identity file." + container_workspace_folder: + f_type: string + f_desc: "The workspace folder inside the container." + f_default: "/app" + docker_image_id: + f_type: string + f_desc: "The Docker image ID to use." + host_lsp_path: + f_type: string + f_desc: "Path to the LSP on the host." + f_default: "/opt/refact/bin/refact-lsp" + run_chat_threads_inside_container: + f_type: bool + f_desc: "Whether to run chat threads inside the container." + f_default: "false" + label: + f_type: string + f_desc: "Label for the Docker container." + f_default: "refact" + command: + f_type: string + f_desc: "Command to run inside the Docker container." + keep_containers_alive_for_x_minutes: + f_type: int + f_desc: "How long to keep containers alive in minutes." + f_default: "60" + ports: + f_type: array + f_desc: "Ports to expose." +available: + on_your_laptop_possible: true + when_isolated_possible: false +smartlinks: + - sl_label: "Test" + sl_chat: + - role: "user" + content: | + šŸ”§ The docker tool should be visible now. To test the tool, list the running containers, briefly describe the containers and express + satisfaction and relief if it works, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. + The current config file is %CURRENT_CONFIG%. +"#; \ No newline at end of file diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index 3fce4b461..7afd6efb8 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -34,6 +34,7 @@ pub fn integration_from_name(n: &str) -> Result Ok(Box::new(ToolPdb { ..Default::default() }) as Box), "postgres" => Ok(Box::new(integr_postgres::ToolPostgres { ..Default::default() }) as Box), // "chrome" => Ok(Box::new(ToolChrome { ..Default::default() }) as Box), + "docker" => Ok(Box::new(docker::integr_docker::ToolDocker {..Default::default() }) as Box), _ => Err(format!("Unknown integration name: {}", n)), } } @@ -57,7 +58,8 @@ pub fn integrations_list() -> Vec<&'static str> { // "gitlab", // "pdb", "postgres", - // "chrome" + // "chrome", + "docker" ] } diff --git a/src/integrations/yaml_schema.rs b/src/integrations/yaml_schema.rs index eaaeed2db..2657047e0 100644 --- a/src/integrations/yaml_schema.rs +++ b/src/integrations/yaml_schema.rs @@ -21,6 +21,8 @@ pub struct ISchemaField { pub f_placeholder: String, #[serde(default, skip_serializing_if="is_empty")] pub smartlinks: Vec, + #[serde(default, skip_serializing_if="is_indexmap_empty")] + pub f_fields: IndexMap, } #[derive(Serialize, Deserialize, Debug, Default)] @@ -49,7 +51,8 @@ pub struct ISchema { pub fields: IndexMap, pub available: ISchemaAvailable, pub smartlinks: Vec, - pub docker: ISchemaDocker, + #[serde(skip_serializing_if = "Option::is_none")] + pub docker: Option, } fn is_default(t: &T) -> bool { @@ -59,3 +62,7 @@ fn is_default(t: &T) -> bool { fn is_empty(t: &Vec) -> bool { t.is_empty() } + +fn is_indexmap_empty(t: &IndexMap) -> bool { + t.is_empty() +} From 053ab9745f8b50c9ae5944b17638972206c58c73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 26 Nov 2024 12:28:51 +0100 Subject: [PATCH 008/185] fix: string long and short also for docker --- src/integrations/docker/integr_docker.rs | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index 285da4ec0..3bb81718c 100644 --- 
a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -247,11 +247,11 @@ fn command_append_label_if_creates_resource(command_args: &mut Vec, labe pub const DOCKER_INTEGRATION_SCHEMA: &str = r#" fields: connect_to_daemon_at: - f_type: string + f_type: string_long f_desc: "The address to connect to the Docker daemon." f_default: "unix:///var/run/docker.sock" docker_cli_path: - f_type: string + f_type: string_long f_desc: "Path to the Docker CLI executable." f_default: "docker" ssh_config: @@ -259,28 +259,28 @@ fields: f_desc: "SSH configuration for connecting to remote Docker daemons." f_fields: host: - f_type: string + f_type: string_long f_desc: "The SSH host." user: - f_type: string + f_type: string_short f_desc: "The SSH user." f_default: "root" port: - f_type: int + f_type: string_short f_desc: "The SSH port." f_default: "22" identity_file: - f_type: string + f_type: string_short f_desc: "Path to the SSH identity file." container_workspace_folder: - f_type: string + f_type: string_long f_desc: "The workspace folder inside the container." f_default: "/app" docker_image_id: - f_type: string + f_type: string_long f_desc: "The Docker image ID to use." host_lsp_path: - f_type: string + f_type: string_long f_desc: "Path to the LSP on the host." f_default: "/opt/refact/bin/refact-lsp" run_chat_threads_inside_container: @@ -288,14 +288,14 @@ fields: f_desc: "Whether to run chat threads inside the container." f_default: "false" label: - f_type: string + f_type: string_short f_desc: "Label for the Docker container." f_default: "refact" command: - f_type: string + f_type: string_long f_desc: "Command to run inside the Docker container." keep_containers_alive_for_x_minutes: - f_type: int + f_type: string_short f_desc: "How long to keep containers alive in minutes." f_default: "60" ports: From 404ed9254382bd591a44a79b15f71864be5c3f64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 26 Nov 2024 19:34:04 +0100 Subject: [PATCH 009/185] feat: add filter label and filter image fields to docker schema part of postgres integration --- src/integrations/integr_postgres.rs | 2 ++ src/integrations/yaml_schema.rs | 2 ++ 2 files changed, 4 insertions(+) diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index 34250047a..b33a5cccc 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -212,6 +212,8 @@ smartlinks: satisfaction and relief if it works, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. The current config file is %CURRENT_CONFIG%. 
docker: + filter_label: "" + filter_image: "postgres" new_container_default: image: "postgres:13" environment: diff --git a/src/integrations/yaml_schema.rs b/src/integrations/yaml_schema.rs index 2657047e0..c8837aa58 100644 --- a/src/integrations/yaml_schema.rs +++ b/src/integrations/yaml_schema.rs @@ -42,6 +42,8 @@ pub struct ISchemaAvailable { #[derive(Serialize, Deserialize, Debug, Default)] pub struct ISchemaDocker { + pub filter_label: String, + pub filter_image: String, pub new_container_default: DockerService, pub smartlinks: Vec, } From 3b9fe6a5dc5915dd039465391cdb502a8b694435 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Wed, 20 Nov 2024 14:31:13 +0100 Subject: [PATCH 010/185] fix: copy folders with all content to the container --- .../docker/docker_container_manager.rs | 54 +++++++++++-------- 1 file changed, 32 insertions(+), 22 deletions(-) diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index c8f5afdb6..fb8ff1c3a 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -1,14 +1,13 @@ use std::path::PathBuf; use std::{sync::Arc, sync::Weak, time::SystemTime}; -use async_tar::Builder; use serde::{Deserialize, Deserializer, Serialize}; use serde_json::Value; -use tempfile::Builder as TempfileBuilder; use tokio::fs::File; use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; use tokio::time::Duration; -use tokio_util::compat::TokioAsyncWriteCompatExt; +use tokio_util::compat::{Compat, TokioAsyncWriteCompatExt}; use tracing::{error, info, warn}; +use walkdir::WalkDir; use crate::files_correction::get_project_dirs; use crate::files_in_workspace::retrieve_files_in_workspace_folders; @@ -277,7 +276,7 @@ async fn docker_container_sync_yaml_configs( let container_home_dir = docker_container_get_home_dir(&docker, &container_id, gcx.clone()).await?; // Creating intermediate folders one by one, as docker cp does not support --parents - let temp_dir = TempfileBuilder::new().tempdir() + let temp_dir = tempfile::Builder::new().tempdir() .map_err(|e| format!("Error creating temporary directory: {}", e))?; let temp_dir_path = temp_dir.path().to_string_lossy().to_string(); docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.cache/"), gcx.clone(), true).await?; @@ -353,13 +352,13 @@ async fn docker_container_sync_workspace( .ok_or_else(|| "No workspace folders found".to_string())?; let container_workspace_folder = PathBuf::from(&docker.settings_docker.container_workspace_folder); - let temp_tar_file = TempfileBuilder::new().suffix(".tar").tempfile() + let temp_tar_file = tempfile::Builder::new().suffix(".tar").tempfile() .map_err(|e| format!("Error creating temporary tar file: {}", e))?.into_temp_path(); let tar_file_name = temp_tar_file.file_name().unwrap_or_default().to_string_lossy().to_string(); let tar_async_file = File::create(&temp_tar_file).await .map_err(|e| format!("Error opening temporary tar file: {}", e))?; - let mut tar_builder = Builder::new(tar_async_file.compat_write()); + let mut tar_builder = async_tar::Builder::new(tar_async_file.compat_write()); tar_builder.follow_symlinks(true); tar_builder.mode(async_tar::HeaderMode::Complete); @@ -372,22 +371,10 @@ async fn docker_container_sync_workspace( tar_builder.append_path_with_name(file, relative_path).await .map_err(|e| format!("Error adding file to tar archive: {}", e))?; } - - if 
workspace_folder.join(".git").exists() { - let git_folder = workspace_folder.join(".git").to_path_buf(); - tar_builder.append_path_with_name(git_folder, ".git").await - .map_err(|e| format!("Error adding .git to tar archive: {}", e))?; - } - if workspace_folder.join(".hg").exists() { - let hg_folder = workspace_folder.join(".hg").to_path_buf(); - tar_builder.append_path_with_name(hg_folder, ".hg").await - .map_err(|e| format!("Error adding .hg to tar archive: {}", e))?; - } - if workspace_folder.join(".svn").exists() { - let svn_folder = workspace_folder.join(".svn").to_path_buf(); - tar_builder.append_path_with_name(svn_folder, ".svn").await - .map_err(|e| format!("Error adding .svn to tar archive: {}", e))?; - } + + append_folder_if_exists(&mut tar_builder, &workspace_folder, ".git").await?; + append_folder_if_exists(&mut tar_builder, &workspace_folder, ".hg").await?; + append_folder_if_exists(&mut tar_builder, &workspace_folder, ".svn").await?; tar_builder.finish().await.map_err(|e| format!("Error finishing tar archive: {}", e))?; @@ -407,6 +394,29 @@ async fn docker_container_sync_workspace( Ok(()) } +async fn append_folder_if_exists( + tar_builder: &mut async_tar::Builder>, + workspace_folder: &PathBuf, + folder_name: &str +) -> Result<(), String> { + let folder_path = workspace_folder.join(folder_name); + let mut num_files = 0; + if folder_path.exists() { + for entry in WalkDir::new(&folder_path) { + let entry = entry.map_err(|e| format!("Error walking directory: {}", e))?; + let relative_path = entry.path().strip_prefix(&workspace_folder) + .map_err(|e| format!("Error stripping prefix: {}", e))?; + tar_builder.append_path_with_name(entry.path(), relative_path).await + .map_err(|e| format!("Error adding file to tar archive: {}", e))?; + num_files += 1; + } + info!("Added folder {folder_name}, with {num_files} files."); + } else { + info!("Folder {folder_name} does not exist."); + } + Ok(()) +} + async fn docker_container_get_exposed_ports( docker: &ToolDocker, container_id: &str, From f07fd96e76eed272f9889ff42703db1f254aa107 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Thu, 21 Nov 2024 18:53:39 +0100 Subject: [PATCH 011/185] fix: start lsp without workspace folder and add it once workspace is sync --- src/http/routers/v1/lsp_like_handlers.rs | 2 +- .../docker/docker_container_manager.rs | 18 +++++++++++++----- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/src/http/routers/v1/lsp_like_handlers.rs b/src/http/routers/v1/lsp_like_handlers.rs index cce7a08bf..7dc92a4cf 100644 --- a/src/http/routers/v1/lsp_like_handlers.rs +++ b/src/http/routers/v1/lsp_like_handlers.rs @@ -12,7 +12,7 @@ use crate::global_context::SharedGlobalContext; use crate::files_in_workspace; #[derive(Serialize, Deserialize, Clone)] -struct LspLikeInit { +pub struct LspLikeInit { pub project_roots: Vec, } diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index fb8ff1c3a..941f0d971 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -7,12 +7,14 @@ use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; use tokio::time::Duration; use tokio_util::compat::{Compat, TokioAsyncWriteCompatExt}; use tracing::{error, info, warn}; +use url::Url; use walkdir::WalkDir; use crate::files_correction::get_project_dirs; use crate::files_in_workspace::retrieve_files_in_workspace_folders; use crate::global_context::GlobalContext; use 
crate::http::http_post; +use crate::http::routers::v1::lsp_like_handlers::LspLikeInit; use crate::http::routers::v1::sync_files::SyncFilesExtractTarPost; use crate::integrations::sessions::get_session_hashmap_key; use crate::integrations::sessions::IntegrationSession; @@ -244,9 +246,8 @@ async fn docker_container_create( }; let lsp_command = format!( - "{DEFAULT_CONTAINER_LSP_PATH} --http-port {lsp_port} --logs-stderr \ - --address-url {address_url} --api-key {api_key} --vecdb --reset-memory --ast --experimental \ - --inside-container --workspace-folder {workspace_folder}", + "{DEFAULT_CONTAINER_LSP_PATH} --http-port {lsp_port} --logs-stderr --inside-container \ + --address-url {address_url} --api-key {api_key} --vecdb --reset-memory --ast --experimental", ); let ports_to_forward_as_arg_list = ports_to_forward.iter() @@ -381,16 +382,23 @@ async fn docker_container_sync_workspace( let cp_command = format!("container cp {} {}:{}", temp_tar_file.to_string_lossy(), container_id, container_workspace_folder.to_string_lossy()); docker.command_execute(&cp_command, gcx.clone(), true).await?; - let post = SyncFilesExtractTarPost { + let sync_files_post = SyncFilesExtractTarPost { tar_path: container_workspace_folder.join(&tar_file_name).to_string_lossy().to_string(), extract_to: container_workspace_folder.to_string_lossy().to_string(), }; - http_post(&format!("http://localhost:{lsp_port_to_connect}/v1/sync-files-extract-tar"), &post).await?; + http_post(&format!("http://localhost:{lsp_port_to_connect}/v1/sync-files-extract-tar"), &sync_files_post).await?; tokio::fs::remove_file(&temp_tar_file).await .map_err(|e| format!("Error removing temporary archive: {}", e))?; info!("Workspace synced successfully."); + + let initialize_post = LspLikeInit { + project_roots: vec![Url::parse(&format!("file://{}", container_workspace_folder.to_string_lossy())).unwrap()], + }; + http_post(&format!("http://localhost:{lsp_port_to_connect}/v1/lsp-initialize"), &initialize_post).await?; + info!("LSP initialized for workspace."); + Ok(()) } From dce5dfa97beb92ae9c876a8039b484d2a83a048f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Fri, 22 Nov 2024 10:22:48 +0100 Subject: [PATCH 012/185] feat: add git stage and commit handler --- Cargo.toml | 5 ++- src/git.rs | 82 ++++++++++++++++++++++++++++++++++++++ src/http/routers/v1.rs | 4 ++ src/http/routers/v1/git.rs | 64 +++++++++++++++++++++++++++++ src/main.rs | 1 + 5 files changed, 154 insertions(+), 2 deletions(-) create mode 100644 src/git.rs create mode 100644 src/http/routers/v1/git.rs diff --git a/Cargo.toml b/Cargo.toml index 639856b7a..755ee8813 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,12 +16,12 @@ default = ["vecdb"] vecdb = ["arrow", "arrow-array", "arrow-schema", "lance", "vectordb"] [build-dependencies] -shadow-rs = "0.25.0" +shadow-rs = "0.36.0" [dependencies] sled = { version = "0.34", default-features = false, features = [] } # all features = ["compression", "docs", "event_log", "failpoints", "io_uring", "lock_free_delays", "measure_allocs", "miri_optimizations", "mutex", "no_inline", "no_logs", "pretty_backtrace", "testing"] -shadow-rs = { version = "0.25.0", features = [], default-features = false } +shadow-rs = { version = "0.36.0", features = [], default-features = false } hyper = { version = "0.14", features = ["server", "stream"] } reqwest = { version = "0.12", default-features = false, features = ["json", "stream", "rustls-tls-webpki-roots", "charset", "http2"] } tokio = { version = "1", features = ["fs", "io-std", 
"io-util", "macros", "rt-multi-thread", "signal", "process"] } @@ -108,3 +108,4 @@ headless_chrome = "1.0.15" nix = { version = "0.29.0", features = ["signal"] } resvg = "0.44.0" async-tar = "0.5.0" +git2 = "0.19.0" \ No newline at end of file diff --git a/src/git.rs b/src/git.rs new file mode 100644 index 000000000..c9b95eafa --- /dev/null +++ b/src/git.rs @@ -0,0 +1,82 @@ +use git2::{Branch, BranchType, IndexAddOption, Oid, Repository, Signature, Status}; + +/// Similar to git checkout -b +pub fn create_or_checkout_to_branch<'repo>(repository: &'repo Repository, branch_name: &str) -> Result, String> { + let branch = match repository.find_branch(branch_name, BranchType::Local) { + Ok(branch) => branch, + Err(_) => { + let head_commit = repository.head() + .and_then(|h| h.peel_to_commit()) + .map_err(|e| format!("Failed to get HEAD commit: {}", e))?; + repository.branch(branch_name, &head_commit, false) + .map_err(|e| format!("Failed to create branch: {}", e))? + } + }; + + // Checkout to the branch + let object = repository.revparse_single(&("refs/heads/".to_owned() + branch_name)) + .map_err(|e| format!("Failed to revparse single: {}", e))?; + repository.checkout_tree(&object, None) + .map_err(|e| format!("Failed to checkout tree: {}", e))?; + repository.set_head(&format!("refs/heads/{}", branch_name)) + .map_err(|e| format!("Failed to set head: {}", e))?; + + Ok(branch) +} + +/// Similar to git add . +pub fn stage_all_changes(repository: &Repository) -> Result<(), String> { + let mut index = repository.index() + .map_err(|e| format!("Failed to get index: {}", e))?; + index.add_all(["*"].iter(), IndexAddOption::DEFAULT, None) + .map_err(|e| format!("Failed to add files to index: {}", e))?; + index.write() + .map_err(|e| format!("Failed to write index: {}", e))?; + Ok(()) +} + +/// Returns: +/// +/// A tuple containing the number of new files, modified files, and deleted files. +pub fn count_file_changes(repository: &Repository) -> Result<(usize, usize, usize), String> { + let (mut new_files, mut modified_files, mut deleted_files) = (0, 0, 0); + + let statuses = repository.statuses(None) + .map_err(|e| format!("Failed to get statuses: {}", e))?; + for entry in statuses.iter() { + let status = entry.status(); + if status.contains(Status::INDEX_NEW) { new_files += 1; } + if status.contains(Status::INDEX_MODIFIED) { modified_files += 1;} + if status.contains(Status::INDEX_DELETED) { deleted_files += 1; } + } + + Ok((new_files, modified_files, deleted_files)) +} + +pub fn commit(repository: &Repository, branch: &Branch, message: &str, author_name: &str, author_email: &str) -> Result { + + let mut index = repository.index() + .map_err(|e| format!("Failed to get index: {}", e))?; + let tree_id = index.write_tree() + .map_err(|e| format!("Failed to write tree: {}", e))?; + let tree = repository.find_tree(tree_id) + .map_err(|e| format!("Failed to find tree: {}", e))?; + + let signature = Signature::now(author_name, author_email) + .map_err(|e| format!("Failed to create signature: {}", e))?; + + let branch_ref_name = branch.get().name() + .ok_or_else(|| "Invalid branch name".to_string())?; + + let parent_commit = if let Some(target) = branch.get().target() { + repository.find_commit(target) + .map_err(|e| format!("Failed to find branch commit: {}", e))? 
+ } else { + return Err("No parent commits found (initial commit is not supported)".to_string()); + }; + + repository.commit( + Some(branch_ref_name), &signature, &signature, message, &tree, &[&parent_commit] + ).map_err(|e| format!("Failed to create commit: {}", e)) +} + diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index 202cac228..8846072ac 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -21,6 +21,7 @@ use crate::http::routers::v1::caps::handle_v1_ping; use crate::http::routers::v1::chat::{handle_v1_chat, handle_v1_chat_completions, handle_v1_chat_configuration}; use crate::http::routers::v1::dashboard::get_dashboard_plots; use crate::http::routers::v1::docker::{handle_v1_docker_container_action, handle_v1_docker_container_list}; +use crate::http::routers::v1::git::handle_v1_git_stage_and_commit; use crate::http::routers::v1::graceful_shutdown::handle_v1_graceful_shutdown; use crate::http::routers::v1::snippet_accepted::handle_v1_snippet_accepted; use crate::http::routers::v1::telemetry_network::handle_v1_telemetry_network; @@ -48,6 +49,7 @@ pub mod telemetry_network; pub mod snippet_accepted; pub mod caps; mod docker; +mod git; pub mod graceful_shutdown; mod dashboard; pub mod lsp_like_handlers; @@ -107,6 +109,8 @@ pub fn make_v1_router() -> Router { .route("/sync-files-extract-tar", telemetry_post!(handle_v1_sync_files_extract_tar)) + .route("/git-stage-and-commit", telemetry_post!(handle_v1_git_stage_and_commit)) + .route("/system-prompt", telemetry_post!(handle_v1_system_prompt)) // because it works remotely .route("/at-command-completion", telemetry_post!(handle_v1_command_completion)) diff --git a/src/http/routers/v1/git.rs b/src/http/routers/v1/git.rs new file mode 100644 index 000000000..7e78ebfca --- /dev/null +++ b/src/http/routers/v1/git.rs @@ -0,0 +1,64 @@ +use std::sync::Arc; +use axum::Extension; +use axum::http::{Response, StatusCode}; +use git2::Repository; +use hyper::Body; +use serde::{Deserialize, Serialize}; +use tokio::sync::RwLock as ARwLock; +use url::Url; + +use crate::custom_error::ScratchError; +use crate::git::{commit, count_file_changes, create_or_checkout_to_branch, stage_all_changes}; +use crate::global_context::GlobalContext; + +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct GitStageAndCommitPost { + chat_id: String, + repository_path: Url, +} + +pub async fn handle_v1_git_stage_and_commit( + Extension(_gcx): Extension>>, + body_bytes: hyper::body::Bytes, +) -> Result, ScratchError> { + let post = serde_json::from_slice::(&body_bytes) + .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; + + let repo_path = crate::files_correction::canonical_path( + &post.repository_path.to_file_path().unwrap_or_default().to_string_lossy().to_string()); + let repository = Repository::open(&repo_path) + .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Could not open repository: {}", e)))?; + + let branch_name = format!("refact-{}", post.chat_id); + let branch = create_or_checkout_to_branch(&repository, &branch_name) + .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; + + stage_all_changes(&repository) + .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; + + let (new_files, modified_files, deleted_files) = count_file_changes(&repository) + .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; + + let commit_oid = if new_files + modified_files + deleted_files != 0 { + Some(commit( + &repository, 
+ &branch, + &format!("Refact agent commit in chat {} at {}", post.chat_id, chrono::Utc::now().format("%Y-%m-%d %H:%M:%S")), + "Refact Agent", + "agent@refact.ai", + ).map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?) + } else { + None + }; + + Ok(Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "application/json") + .body(Body::from(serde_json::json!({ + "commit_oid": commit_oid.map(|x| x.to_string()), + "new_files": new_files, + "modified_files": modified_files, + "deleted_files": deleted_files, + }).to_string())) + .unwrap()) +} \ No newline at end of file diff --git a/src/main.rs b/src/main.rs index 0b0d443fd..9b53acd31 100644 --- a/src/main.rs +++ b/src/main.rs @@ -64,6 +64,7 @@ mod http; mod integrations; mod privacy; mod privacy_compiled_in; +mod git; #[tokio::main] async fn main() { From b3d641814d08132f1f69757d2aa3b6ca62107b77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Fri, 22 Nov 2024 11:39:06 +0100 Subject: [PATCH 013/185] feat: ls files using git2 library instead of cli --- src/files_in_workspace.rs | 6 +++--- src/git.rs | 29 ++++++++++++++++++++++++++++- 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/src/files_in_workspace.rs b/src/files_in_workspace.rs index 94e49118d..50e1ddd68 100644 --- a/src/files_in_workspace.rs +++ b/src/files_in_workspace.rs @@ -12,6 +12,7 @@ use walkdir::WalkDir; use which::which; use tracing::info; +use crate::git::git_ls_files; use crate::global_context::GlobalContext; use crate::telemetry; use crate::file_filter::{is_this_inside_blacklisted_dir, is_valid_file, BLACKLISTED_DIRS, SOURCE_FILE_EXTENSIONS}; @@ -211,9 +212,8 @@ async fn _run_command(cmd: &str, args: &[&str], path: &PathBuf, filter_out_statu } async fn ls_files_under_version_control(path: &PathBuf) -> Option> { - if path.join(".git").exists() && which("git").is_ok() { - // Git repository - _run_command("git", &["ls-files", "--cached", "--modified", "--others", "--exclude-standard"], path, false).await + if path.join(".git").exists() { + git_ls_files(path) } else if path.join(".hg").exists() && which("hg").is_ok() { // Mercurial repository _run_command("hg", &["status", "--added", "--modified", "--clean", "--unknown", "--no-status"], path, false).await diff --git a/src/git.rs b/src/git.rs index c9b95eafa..64b62db13 100644 --- a/src/git.rs +++ b/src/git.rs @@ -1,4 +1,31 @@ -use git2::{Branch, BranchType, IndexAddOption, Oid, Repository, Signature, Status}; +use std::path::PathBuf; +use tracing::error; +use git2::{Branch, BranchType, IndexAddOption, Oid, Repository, Signature, Status, StatusOptions}; + +pub fn git_ls_files(repository_path: &PathBuf) -> Option> { + let repository = Repository::open(repository_path) + .map_err(|e| error!("Failed to open repository: {}", e)).ok()?; + + let mut status_options = StatusOptions::new(); + status_options + .include_untracked(true) + .recurse_untracked_dirs(true) + .include_unmodified(true) + .exclude_submodules(false) + .include_ignored(false) + .recurse_ignored_dirs(false); + + let statuses = repository.statuses(Some(&mut status_options)) + .map_err(|e| error!("Failed to get statuses: {}", e)).ok()?; + + let mut files = Vec::new(); + for entry in statuses.iter() { + if let Some(path) = entry.path() { + files.push(repository_path.join(path)); + } + } + if !files.is_empty() { Some(files) } else { None } +} /// Similar to git checkout -b pub fn create_or_checkout_to_branch<'repo>(repository: &'repo Repository, branch_name: &str) -> Result, String> { From 
2fb6863b8f82a6319121a8c69b9837c972f88b87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Wed, 27 Nov 2024 18:08:11 +0100 Subject: [PATCH 014/185] feat: add name to list endpoint --- src/http/routers/v1/docker.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/http/routers/v1/docker.rs b/src/http/routers/v1/docker.rs index 229b1c9d2..94f66ad2b 100644 --- a/src/http/routers/v1/docker.rs +++ b/src/http/routers/v1/docker.rs @@ -35,6 +35,7 @@ pub struct DockerContainerListPost { pub struct DockerContainerListOutput { id: String, status: String, + name: String, created: Option, user: Option, #[serde(default)] @@ -120,9 +121,13 @@ pub async fn handle_v1_docker_container_list( let response_body: Vec = inspect_output.into_iter() .map(|container| { + let mut container_name = extract_string_field(&container, &["Name"], "Missing container name")?; + if container_name.starts_with('/') { container_name = container_name[1..].to_string() }; + Ok(DockerContainerListOutput { id: extract_string_field(&container, &["Id"], "Missing container ID")? .get(0..12).unwrap_or("").to_string(), + name: container_name, status: extract_string_field(&container, &["State", "Status"], "Missing container status")?, created: container["Created"].as_str().map(ToString::to_string), user: container["Config"]["User"].as_str().map(ToString::to_string), From 66cf2eca699bb95a25883381f4d507c8741f6273 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Thu, 28 Nov 2024 13:01:11 +0100 Subject: [PATCH 015/185] feat: make ports behave like docker compose - "published:target" --- .../docker/docker_container_manager.rs | 85 +++++++++---------- .../docker/docker_ssh_tunnel_utils.rs | 30 +++---- 2 files changed, 57 insertions(+), 58 deletions(-) diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index 941f0d971..6c99b2d6c 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -1,7 +1,6 @@ use std::path::PathBuf; use std::{sync::Arc, sync::Weak, time::SystemTime}; -use serde::{Deserialize, Deserializer, Serialize}; -use serde_json::Value; +use serde::{Deserialize, Serialize}; use tokio::fs::File; use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; use tokio::time::Duration; @@ -23,7 +22,29 @@ use crate::integrations::docker::integr_docker::{docker_tool_load, ToolDocker}; use super::docker_ssh_tunnel_utils::ssh_tunnel_check_status; -const DEFAULT_CONTAINER_LSP_PATH: &str = "/usr/local/bin/refact-lsp"; +pub const DEFAULT_CONTAINER_LSP_PATH: &str = "/usr/local/bin/refact-lsp"; +pub const TARGET_LSP_PORT: &str = "8001"; + +#[derive(Clone, Debug)] +pub struct Port { + pub published: String, + pub target: String, +} + +impl<'de> Deserialize<'de> for Port { + fn deserialize>(deserializer: D) -> Result { + let s = String::deserialize(deserializer)?; + let (published, target) = s.split_once(':') + .ok_or_else(|| serde::de::Error::custom("expected format '8080:3000'"))?; + Ok(Port { published: published.to_string(), target: target.to_string() }) + } +} + +impl Serialize for Port { + fn serialize(&self, serializer: S) -> Result { + serializer.serialize_str(&format!("{}:{}", self.published, self.target)) + } +} pub struct DockerContainerSession { container_id: String, @@ -33,27 +54,7 @@ pub struct DockerContainerSession { weak_gcx: Weak>, } -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct Port { - #[serde(rename = "local_port", 
deserialize_with = "string_or_number")] - pub external: String, - #[serde(rename = "container_port", deserialize_with = "string_or_number")] - pub internal: String, -} - -fn string_or_number<'de, D>(deserializer: D) -> Result -where - D: Deserializer<'de>, -{ - let value = serde::Deserialize::deserialize(deserializer)?; - Ok(match value { - serde_json::Value::String(s) => s, - serde_json::Value::Number(n) => n.to_string(), - _ => return Err(serde::de::Error::custom("expected a string or an integer")), - }) -} - -enum DockerContainerConnectionEnum { +pub enum DockerContainerConnectionEnum { SshTunnel(SshTunnel), LocalPort(String), } @@ -74,9 +75,7 @@ impl Drop for DockerContainerSession { } impl IntegrationSession for DockerContainerSession { - fn as_any_mut(&mut self) -> &mut dyn std::any::Any { - self - } + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { self } fn is_expired(&self) -> bool { let current_time = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs(); @@ -127,18 +126,18 @@ pub async fn docker_container_check_status_or_start( const LSP_PORT: &str = "8001"; let mut ports_to_forward = if ssh_config_maybe.is_some() { docker.settings_docker.ports.iter() - .map(|p| Port {external: "0".to_string(), internal: p.internal.clone()}).collect::>() + .map(|p| Port {published: "0".to_string(), target: p.target.clone()}).collect::>() } else { docker.settings_docker.ports.clone() }; - ports_to_forward.insert(0, Port {external: "0".to_string(), internal: LSP_PORT.to_string()}); + ports_to_forward.insert(0, Port {published: "0".to_string(), target: LSP_PORT.to_string()}); let container_id = docker_container_create(&docker, &chat_id, &ports_to_forward, LSP_PORT, gcx.clone()).await?; docker_container_sync_yaml_configs(&docker, &container_id, gcx.clone()).await?; docker_container_start(gcx.clone(), &docker, &container_id).await?; let exposed_ports = docker_container_get_exposed_ports(&docker, &container_id, &ports_to_forward, gcx.clone()).await?; - let host_lsp_port = exposed_ports.iter().find(|p| p.internal == LSP_PORT) - .ok_or_else(|| "No LSP port exposed".to_string())?.external.clone(); + let host_lsp_port = exposed_ports.iter().find(|p| p.target == LSP_PORT) + .ok_or_else(|| "No LSP port exposed".to_string())?.published.clone(); let keep_containers_alive_for_x_minutes = docker.settings_docker.keep_containers_alive_for_x_minutes; @@ -147,11 +146,11 @@ pub async fn docker_container_check_status_or_start( let mut ports_to_forward_through_ssh = exposed_ports.into_iter() .map(|exposed_port| { let matched_external_port = docker.settings_docker.ports.iter() - .find(|configured_port| configured_port.internal == exposed_port.internal) - .map_or_else(|| "0".to_string(), |forwarded_port| forwarded_port.external.clone()); + .find(|configured_port| configured_port.target == exposed_port.target) + .map_or_else(|| "0".to_string(), |forwarded_port| forwarded_port.published.clone()); Port { - external: matched_external_port, - internal: exposed_port.external, + published: matched_external_port, + target: exposed_port.published, } }).collect::>(); let ssh_tunnel = ssh_tunnel_open(&mut ports_to_forward_through_ssh, &ssh_config).await?; @@ -162,7 +161,7 @@ pub async fn docker_container_check_status_or_start( let lsp_port_to_connect = match &connection { DockerContainerConnectionEnum::SshTunnel(ssh_tunnel) => { - ssh_tunnel.get_first_external_port()? + ssh_tunnel.get_first_published_port()? 
}, DockerContainerConnectionEnum::LocalPort(internal_port) => { internal_port.to_string() @@ -213,7 +212,7 @@ pub async fn docker_container_get_host_lsp_port_to_connect( return match &docker_container_session.connection { DockerContainerConnectionEnum::SshTunnel(ssh_tunnel) => { - ssh_tunnel.get_first_external_port() + ssh_tunnel.get_first_published_port() }, DockerContainerConnectionEnum::LocalPort(internal_port) => { Ok(internal_port.to_string()) @@ -251,7 +250,7 @@ async fn docker_container_create( ); let ports_to_forward_as_arg_list = ports_to_forward.iter() - .map(|p| format!("--publish={}:{}", p.external, p.internal)).collect::>().join(" "); + .map(|p| format!("--publish={}:{}", p.published, p.target)).collect::>().join(" "); let run_command = format!( "container create --name=refact-{chat_id} --volume={host_lsp_path}:{DEFAULT_CONTAINER_LSP_PATH} \ {ports_to_forward_as_arg_list} --entrypoint sh {docker_image_id} -c '{lsp_command}'", @@ -309,7 +308,7 @@ async fn docker_container_get_home_dir( let inspect_config_command = "container inspect --format '{{json .Config}}' ".to_string() + &container_id; let (inspect_config_output, _) = docker.command_execute(&inspect_config_command, gcx.clone(), true).await?; - let config_json: Value = serde_json::from_str(&inspect_config_output) + let config_json: serde_json::Value = serde_json::from_str(&inspect_config_output) .map_err(|e| format!("Error parsing docker config: {}", e))?; if let Some(home_env) = config_json.get("Env").and_then(|env| env.as_array()) @@ -317,7 +316,7 @@ async fn docker_container_get_home_dir( return Ok(home_env.to_string()); } - let user = config_json.get("User").and_then(Value::as_str).unwrap_or(""); + let user = config_json.get("User").and_then(serde_json::Value::as_str).unwrap_or(""); Ok(if user.is_empty() || user == "root" { "root".to_string() } else { format!("/home/{user}") }) } @@ -435,15 +434,15 @@ async fn docker_container_get_exposed_ports( let (inspect_output, _) = docker.command_execute(&inspect_command, gcx.clone(), true).await?; tracing::info!("{}:\n{}", inspect_command, inspect_output); - let inspect_data: Value = serde_json::from_str(&inspect_output) + let inspect_data: serde_json::Value = serde_json::from_str(&inspect_output) .map_err(|e| format!("Error parsing JSON output from docker inspect: {}", e))?; let mut exposed_ports = Vec::new(); for port in ports_to_forward { - let host_port = inspect_data[&format!("{}/tcp", port.internal)][0]["HostPort"] + let host_port = inspect_data[&format!("{}/tcp", port.target)][0]["HostPort"] .as_str() .ok_or_else(|| "Error getting host port from docker inspect output.".to_string())?; - exposed_ports.push(Port { external: host_port.to_string(), internal: port.internal.to_string() }); + exposed_ports.push(Port { published: host_port.to_string(), target: port.target.to_string() }); } Ok(exposed_ports) } diff --git a/src/integrations/docker/docker_ssh_tunnel_utils.rs b/src/integrations/docker/docker_ssh_tunnel_utils.rs index 1040bba98..755f5353c 100644 --- a/src/integrations/docker/docker_ssh_tunnel_utils.rs +++ b/src/integrations/docker/docker_ssh_tunnel_utils.rs @@ -23,9 +23,9 @@ pub struct SshTunnel { } impl SshTunnel { - pub fn get_first_external_port(&self) -> Result { + pub fn get_first_published_port(&self) -> Result { self.forwarded_ports.iter().next() - .map(|port| port.external.clone()) + .map(|port| port.published.clone()) .ok_or_else(|| "Internal error: No forwarded ports found.".to_string()) } } @@ -40,7 +40,7 @@ pub async fn 
forward_remote_docker_if_needed(connect_to_daemon_at: &str, ssh_con if let Some(ssh_tunnel) = ssh_tunnel_locked.deref_mut() { match ssh_tunnel_check_status(ssh_tunnel).await { - Ok(()) => return ssh_tunnel.get_first_external_port(), + Ok(()) => return ssh_tunnel.get_first_published_port(), Err(e) => { warn!("{}, restarting...", e); *ssh_tunnel_locked = None; @@ -54,8 +54,8 @@ pub async fn forward_remote_docker_if_needed(connect_to_daemon_at: &str, ssh_con connect_to_daemon_at.split(":").last().unwrap_or_default().to_string() }; - let ssh_tunnel = ssh_tunnel_open(&mut vec![Port { external: "0".to_string(), internal: remote_port_or_socket }], ssh_config).await?; - let port = ssh_tunnel.get_first_external_port()?; + let ssh_tunnel = ssh_tunnel_open(&mut vec![Port { published: "0".to_string(), target: remote_port_or_socket }], ssh_config).await?; + let port = ssh_tunnel.get_first_published_port()?; *ssh_tunnel_locked = Some(ssh_tunnel); info!("Forwarding remote docker to local port {port}"); Ok(port) @@ -89,17 +89,17 @@ pub async fn ssh_tunnel_open(ports_to_forward: &mut Vec, ssh_config: &SshC command.stderr(Stdio::piped()); for port in ports_to_forward.iter_mut() { - if port.external == "0" { + if port.published == "0" { // Bind to port 0, so the OS will assign a free port. let listener = TcpListener::bind("127.0.0.1:0").await.map_err(|e| format!("Failed to bind to address: {}", e))?; let local_addr = listener.local_addr().map_err(|e| format!("Failed to get local address: {}", e))?; - port.external = local_addr.port().to_string(); + port.published = local_addr.port().to_string(); } - let local_addr = format!("127.0.0.1:{}", port.external); - let remote_addr = if port.internal.parse::().is_ok() { - format!("127.0.0.1:{}", port.internal) + let local_addr = format!("127.0.0.1:{}", port.published); + let remote_addr = if port.target.parse::().is_ok() { + format!("127.0.0.1:{}", port.target) } else { - port.internal.clone() + port.target.clone() }; command.arg("-L").arg(format!("{local_addr}:{remote_addr}")); } @@ -115,9 +115,9 @@ pub async fn ssh_tunnel_open(ports_to_forward: &mut Vec, ssh_config: &SshC let port_to_test_connection = ports_to_forward.iter().next().ok_or_else(|| "Failed to get port to test connection".to_string())?; for attempt in 0..10 { - match TcpStream::connect(format!("127.0.0.1:{}", &port_to_test_connection.external)).await { + match TcpStream::connect(format!("127.0.0.1:{}", &port_to_test_connection.published)).await { Ok(_) => { - info!("huzzah, it worked: connect to 127.0.0.1:{}", port_to_test_connection.external); + info!("huzzah, it worked: connect to 127.0.0.1:{}", port_to_test_connection.published); return Ok(SshTunnel { forwarded_ports: ports_to_forward.clone(), process, @@ -126,7 +126,7 @@ pub async fn ssh_tunnel_open(ports_to_forward: &mut Vec, ssh_config: &SshC }); } Err(e) => { - info!("this should eventually work: connect to 127.0.0.1:{} attempt {}: {}", port_to_test_connection.external, attempt + 1, e); + info!("this should eventually work: connect to 127.0.0.1:{} attempt {}: {}", port_to_test_connection.published, attempt + 1, e); let (_, stderr_output, _) = blocking_read_until_token_or_timeout(&mut stdout, &mut stderr, 300, "").await?; if !stderr_output.is_empty() { return Err(format!("Failed to open ssh tunnel: {}", stderr_output)); @@ -135,5 +135,5 @@ pub async fn ssh_tunnel_open(ports_to_forward: &mut Vec, ssh_config: &SshC } } - return Err(format!("Failed to connect to 127.0.0.1:{}, max attempts reached", &port_to_test_connection.external)); + return 
Err(format!("Failed to connect to 127.0.0.1:{}, max attempts reached", &port_to_test_connection.published)); } From ad4538ef749d377238872d34b11dffe0e13d0a73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Thu, 28 Nov 2024 18:50:13 +0100 Subject: [PATCH 016/185] fix: add static ssh to dockerfile to fix docker build --- docker/lsp-debug.Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/lsp-debug.Dockerfile b/docker/lsp-debug.Dockerfile index af8a2a1f2..904263bc9 100644 --- a/docker/lsp-debug.Dockerfile +++ b/docker/lsp-debug.Dockerfile @@ -8,6 +8,7 @@ RUN apk add --no-cache \ curl \ git \ openssl-dev \ + openssl-libs-static \ pkgconfig \ protobuf-dev \ zlib-static From 8b4f90b6e743e124488e8c9eded8add6ccf22583 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Thu, 28 Nov 2024 23:12:30 +0100 Subject: [PATCH 017/185] fix: copy config folder to the container --- src/integrations/docker/docker_container_manager.rs | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index 6c99b2d6c..e664a7a20 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -272,7 +272,10 @@ async fn docker_container_sync_yaml_configs( container_id: &str, gcx: Arc>, ) -> Result<(), String> { - let cache_dir = gcx.read().await.cache_dir.clone(); + let (cache_dir, config_dir) = { + let gcx_locked = gcx.read().await; + (gcx_locked.cache_dir.clone(), gcx_locked.config_dir.clone()) + }; let container_home_dir = docker_container_get_home_dir(&docker, &container_id, gcx.clone()).await?; // Creating intermediate folders one by one, as docker cp does not support --parents @@ -281,7 +284,7 @@ async fn docker_container_sync_yaml_configs( let temp_dir_path = temp_dir.path().to_string_lossy().to_string(); docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.cache/"), gcx.clone(), true).await?; docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.cache/refact"), gcx.clone(), true).await?; - + let config_files_to_sync = ["privacy.yaml", "integrations.yaml", "bring-your-own-key.yaml", "competency.yaml"]; let (remote_integrations_path, competency_path) = { let gcx_locked = gcx.read().await; @@ -297,6 +300,11 @@ async fn docker_container_sync_yaml_configs( docker.command_execute(&format!("container cp {local_path} {container_path}"), gcx.clone(), true).await?; } + // Copying config folder + let config_dir_string = config_dir.to_string_lossy().to_string(); + docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.config/"), gcx.clone(), true).await?; + docker.command_execute(&format!("container cp {config_dir_string} {container_id}:{container_home_dir}/.config/refact"), gcx.clone(), true).await?; + Ok(()) } From 5474a89a8699313ee913b140773416fed0894a86 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Thu, 28 Nov 2024 23:12:59 +0100 Subject: [PATCH 018/185] fix: increase waiting time for ssh tunnel to open, as it may take a while --- src/integrations/docker/docker_ssh_tunnel_utils.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/integrations/docker/docker_ssh_tunnel_utils.rs b/src/integrations/docker/docker_ssh_tunnel_utils.rs index 755f5353c..5c08baff7 100644 --- 
a/src/integrations/docker/docker_ssh_tunnel_utils.rs +++ b/src/integrations/docker/docker_ssh_tunnel_utils.rs @@ -114,7 +114,7 @@ pub async fn ssh_tunnel_open(ports_to_forward: &mut Vec, ssh_config: &SshC } let port_to_test_connection = ports_to_forward.iter().next().ok_or_else(|| "Failed to get port to test connection".to_string())?; - for attempt in 0..10 { + for attempt in 0..25 { match TcpStream::connect(format!("127.0.0.1:{}", &port_to_test_connection.published)).await { Ok(_) => { info!("huzzah, it worked: connect to 127.0.0.1:{}", port_to_test_connection.published); @@ -127,7 +127,7 @@ pub async fn ssh_tunnel_open(ports_to_forward: &mut Vec, ssh_config: &SshC } Err(e) => { info!("this should eventually work: connect to 127.0.0.1:{} attempt {}: {}", port_to_test_connection.published, attempt + 1, e); - let (_, stderr_output, _) = blocking_read_until_token_or_timeout(&mut stdout, &mut stderr, 300, "").await?; + let (_, stderr_output, _) = blocking_read_until_token_or_timeout(&mut stdout, &mut stderr, 400, "").await?; if !stderr_output.is_empty() { return Err(format!("Failed to open ssh tunnel: {}", stderr_output)); } From bdc5aa0aaef5e6b334390c64bc1a01adf86a4f21 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Fri, 29 Nov 2024 16:51:15 +0100 Subject: [PATCH 019/185] feat: add f label for minor configuration of special upper-lower case --- src/integrations/integr_postgres.rs | 1 + src/integrations/yaml_schema.rs | 2 ++ 2 files changed, 3 insertions(+) diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index b33a5cccc..179c905fb 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -200,6 +200,7 @@ fields: f_type: string_long f_desc: "If it can't find a path to `psql` you can provide it here, leave blank if not sure." 
f_placeholder: "psql" + f_label: "PSQL Binary Path" available: on_your_laptop_possible: true when_isolated_possible: true diff --git a/src/integrations/yaml_schema.rs b/src/integrations/yaml_schema.rs index c8837aa58..3ef44af90 100644 --- a/src/integrations/yaml_schema.rs +++ b/src/integrations/yaml_schema.rs @@ -19,6 +19,8 @@ pub struct ISchemaField { pub f_default: String, #[serde(default, skip_serializing_if="is_default")] pub f_placeholder: String, + #[serde(default, skip_serializing_if="is_default")] + pub f_label: String, #[serde(default, skip_serializing_if="is_empty")] pub smartlinks: Vec, #[serde(default, skip_serializing_if="is_indexmap_empty")] From cd313d2623165b95e1f18a4aa7e8e1a6966e7b9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Fri, 29 Nov 2024 16:52:06 +0100 Subject: [PATCH 020/185] refactor: get rid of nested configs for docker --- .../docker/docker_container_manager.rs | 4 +- .../docker/docker_ssh_tunnel_utils.rs | 8 +- src/integrations/docker/integr_docker.rs | 83 +++++++++++++------ src/integrations/mod.rs | 2 +- src/integrations/yaml_schema.rs | 2 - 5 files changed, 66 insertions(+), 33 deletions(-) diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index e664a7a20..bc0d9c422 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -107,7 +107,7 @@ pub async fn docker_container_check_status_or_start( Ok(()) => {} Err(e) => { warn!("SSH tunnel error: {}, restarting tunnel..", e); - let ssh_config = docker.settings_docker.ssh_config.clone().ok_or_else(|| "No ssh config for docker container".to_string())?; + let ssh_config = docker.settings_docker.get_ssh_config().ok_or_else(|| "No ssh config for docker container".to_string())?; docker_container_session.connection = DockerContainerConnectionEnum::SshTunnel( ssh_tunnel_open(&mut ssh_tunnel.forwarded_ports, &ssh_config).await? 
); @@ -121,7 +121,7 @@ pub async fn docker_container_check_status_or_start( Ok(()) } None => { - let ssh_config_maybe = docker.settings_docker.ssh_config.clone(); + let ssh_config_maybe = docker.settings_docker.get_ssh_config(); const LSP_PORT: &str = "8001"; let mut ports_to_forward = if ssh_config_maybe.is_some() { diff --git a/src/integrations/docker/docker_ssh_tunnel_utils.rs b/src/integrations/docker/docker_ssh_tunnel_utils.rs index 5c08baff7..3fc0384e0 100644 --- a/src/integrations/docker/docker_ssh_tunnel_utils.rs +++ b/src/integrations/docker/docker_ssh_tunnel_utils.rs @@ -30,7 +30,7 @@ impl SshTunnel { } } -pub async fn forward_remote_docker_if_needed(connect_to_daemon_at: &str, ssh_config: &SshConfig, gcx: Arc>) -> Result +pub async fn forward_remote_docker_if_needed(docker_daemon_address: &str, ssh_config: &SshConfig, gcx: Arc>) -> Result { let ssh_tunnel_arc = { let gcx_locked = gcx.read().await; @@ -48,10 +48,10 @@ pub async fn forward_remote_docker_if_needed(connect_to_daemon_at: &str, ssh_con } } - let remote_port_or_socket = if connect_to_daemon_at.starts_with("unix://") || connect_to_daemon_at.starts_with("npipe://") { - connect_to_daemon_at.split("://").nth(1).unwrap_or_default().to_string() + let remote_port_or_socket = if docker_daemon_address.starts_with("unix://") || docker_daemon_address.starts_with("npipe://") { + docker_daemon_address.split("://").nth(1).unwrap_or_default().to_string() } else { - connect_to_daemon_at.split(":").last().unwrap_or_default().to_string() + docker_daemon_address.split(":").last().unwrap_or_default().to_string() }; let ssh_tunnel = ssh_tunnel_open(&mut vec![Port { published: "0".to_string(), target: remote_port_or_socket }], ssh_config).await?; diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index 3bb81718c..f6fcc30bf 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -17,19 +17,51 @@ use crate::integrations::docker::docker_container_manager::Port; #[derive(Clone, Serialize, Deserialize, Default, Debug)] pub struct SettingsDocker { - pub connect_to_daemon_at: String, + pub docker_daemon_address: String, pub docker_cli_path: String, - pub ssh_config: Option, + pub remote_docker: bool, + pub ssh_host: String, + pub ssh_user: String, + #[serde(serialize_with = "serialize_num_to_str", deserialize_with = "deserialize_str_to_num")] + pub ssh_port: u16, + pub ssh_identity_file: String, pub container_workspace_folder: String, pub docker_image_id: String, pub host_lsp_path: String, pub run_chat_threads_inside_container: bool, pub label: String, pub command: String, + #[serde(serialize_with = "serialize_num_to_str", deserialize_with = "deserialize_str_to_num")] pub keep_containers_alive_for_x_minutes: u64, pub ports: Vec, } +fn serialize_num_to_str(num: &T, serializer: S) -> Result { + serializer.serialize_str(&num.to_string()) +} +fn deserialize_str_to_num<'de, T, D>(deserializer: D) -> Result +where + T: std::str::FromStr, T::Err: std::fmt::Display, D: serde::Deserializer<'de>, +{ + String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom) +} + +impl SettingsDocker { + pub fn get_ssh_config(&self) -> Option { + if self.remote_docker { + Some(SshConfig { + host: self.ssh_host.clone(), + user: self.ssh_user.clone(), + port: self.ssh_port.clone(), + identity_file: if !self.ssh_identity_file.is_empty() + { Some(self.ssh_identity_file.clone()) } else { None }, + }) + } else { + None + } + } +} + #[derive(Clone, Default, Debug)] pub 
struct ToolDocker { pub settings_docker: SettingsDocker, @@ -97,12 +129,12 @@ impl ToolDocker { pub async fn get_docker_host(&self, gcx: Arc>) -> Result { - match &self.settings_docker.ssh_config { + match &self.settings_docker.get_ssh_config() { Some(ssh_config) => { - let local_port = forward_remote_docker_if_needed(&self.settings_docker.connect_to_daemon_at, ssh_config, gcx.clone()).await?; + let local_port = forward_remote_docker_if_needed(&self.settings_docker.docker_daemon_address, ssh_config, gcx.clone()).await?; Ok(format!("127.0.0.1:{}", local_port)) }, - None => Ok(self.settings_docker.connect_to_daemon_at.clone()), + None => Ok(self.settings_docker.docker_daemon_address.clone()), } } } @@ -246,7 +278,7 @@ fn command_append_label_if_creates_resource(command_args: &mut Vec, labe pub const DOCKER_INTEGRATION_SCHEMA: &str = r#" fields: - connect_to_daemon_at: + docker_daemon_address: f_type: string_long f_desc: "The address to connect to the Docker daemon." f_default: "unix:///var/run/docker.sock" @@ -254,24 +286,27 @@ fields: f_type: string_long f_desc: "Path to the Docker CLI executable." f_default: "docker" - ssh_config: - f_type: object - f_desc: "SSH configuration for connecting to remote Docker daemons." - f_fields: - host: - f_type: string_long - f_desc: "The SSH host." - user: - f_type: string_short - f_desc: "The SSH user." - f_default: "root" - port: - f_type: string_short - f_desc: "The SSH port." - f_default: "22" - identity_file: - f_type: string_short - f_desc: "Path to the SSH identity file." + remote_docker: + f_type: bool + f_desc: "Use SSH to connect to remote Docker." + ssh_host: + f_type: string_long + f_desc: "SSH host to connect to remote Docker." + f_label: "SSH Host" + ssh_user: + f_type: string_short + f_desc: "SSH user to connect to remote Docker." + f_default: "root" + f_label: "SSH User" + ssh_port: + f_type: string_short + f_desc: "The SSH port to connect to remote Docker." + f_default: "22" + f_label: "SSH Port" + ssh_identity_file: + f_type: string_long + f_desc: "Path to the SSH identity file to connect to remote Docker." + f_label: "SSH Identity File" container_workspace_folder: f_type: string_long f_desc: "The workspace folder inside the container." diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index 7afd6efb8..6e186fc64 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -274,7 +274,7 @@ commands_deny: # --- Docker integration --- docker: - connect_to_daemon_at: "unix:///var/run/docker.sock" # Path to the Docker daemon. For remote Docker, the path to the daemon on the remote server. + docker_daemon_address: "unix:///var/run/docker.sock" # Path to the Docker daemon. For remote Docker, the path to the daemon on the remote server. 
# docker_cli_path: "/usr/local/bin/docker" # Uncomment to set a custom path for the docker cli, defaults to "docker" # Uncomment the following to connect to a remote Docker daemon diff --git a/src/integrations/yaml_schema.rs b/src/integrations/yaml_schema.rs index 3ef44af90..c95859ce1 100644 --- a/src/integrations/yaml_schema.rs +++ b/src/integrations/yaml_schema.rs @@ -23,8 +23,6 @@ pub struct ISchemaField { pub f_label: String, #[serde(default, skip_serializing_if="is_empty")] pub smartlinks: Vec, - #[serde(default, skip_serializing_if="is_indexmap_empty")] - pub f_fields: IndexMap, } #[derive(Serialize, Deserialize, Debug, Default)] From bfc3819423b9de1a090096788c2c35803125ebe8 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sun, 1 Dec 2024 07:31:59 +0100 Subject: [PATCH 021/185] python_binding: better errors --- .../refact/chat_client.py | 37 +++++++++---------- src/tools/tool_cmdline.rs | 2 +- 2 files changed, 19 insertions(+), 20 deletions(-) diff --git a/python_binding_and_cmdline/refact/chat_client.py b/python_binding_and_cmdline/refact/chat_client.py index aea27ae83..c14ab4e71 100644 --- a/python_binding_and_cmdline/refact/chat_client.py +++ b/python_binding_and_cmdline/refact/chat_client.py @@ -391,7 +391,7 @@ async def diff_apply( async with session.post(base_url + "/diff-apply", json=post_me) as response: if response.status != 200: raise Exception(f"unexpected response status {response.status}, response: {await response.text()}") - return await response.json(content_type=None) + return await _better_response_json(response) async def mem_add(base_url: str, mem_type: str, goal: str, project: str, payload: str) -> Dict[str, Any]: @@ -404,7 +404,7 @@ async def mem_add(base_url: str, mem_type: str, goal: str, project: str, payload } async with aiohttp.ClientSession() as session: async with session.post(url, json=data) as response: - return await response.json() + return await _better_response_json(response) async def mem_block_until_vectorized(base_url: str) -> Tuple[Dict[str, Any], float]: @@ -412,7 +412,7 @@ async def mem_block_until_vectorized(base_url: str) -> Tuple[Dict[str, Any], flo t0 = time.time() async with aiohttp.ClientSession() as session: async with session.get(url) as response: - return (await response.json(), time.time() - t0) + return (await _better_response_json(response), time.time() - t0) async def mem_update_used(base_url: str, memid: str, correct: float, relevant: float) -> Dict[str, Any]: @@ -424,7 +424,7 @@ async def mem_update_used(base_url: str, memid: str, correct: float, relevant: f } async with aiohttp.ClientSession() as session: async with session.post(url, json=data) as response: - return await response.json() + return await _better_response_json(response) async def mem_erase(base_url: str, memid: str) -> Dict[str, Any]: @@ -434,7 +434,7 @@ async def mem_erase(base_url: str, memid: str) -> Dict[str, Any]: } async with aiohttp.ClientSession() as session: async with session.post(url, json=data) as response: - return await response.json() + return await _better_response_json(response) async def mem_query(base_url: str, goal: str, project: str, top_n: Optional[int] = 5) -> Tuple[int, Dict[str, Any]]: @@ -446,20 +446,19 @@ async def mem_query(base_url: str, goal: str, project: str, top_n: Optional[int] } async with aiohttp.ClientSession() as session: async with session.post(url, json=data) as response: - return response.status, await response.json() - - -async def ongoing_update(base_url: str, goal: str, progress: Dict[str, Any], actseq: Dict[str, Any], 
output: Dict[str, Any]): - url = f"{base_url}/ongoing-update" - data = { - "goal": goal, - "ongoing_progress": progress, - "ongoing_action_new_sequence": actseq, - "ongoing_output": output, - } - async with aiohttp.ClientSession() as session: - async with session.post(url, json=data) as response: - return await response.json() + return response.status, await _better_response_json(response) + + +async def _better_response_json(response): + if response.status == 200: + return await response.json() + txt = await response.text() + if txt.startswith("{"): + j = json.loads(txt) + if "detail" in j: + raise ValueError(j['detail']) + return j + raise ValueError("Unexpected response: %r" % txt) def gen_function_call_id(): diff --git a/src/tools/tool_cmdline.rs b/src/tools/tool_cmdline.rs index 96821e6ec..2af4e6108 100644 --- a/src/tools/tool_cmdline.rs +++ b/src/tools/tool_cmdline.rs @@ -350,7 +350,7 @@ async fn execute_background_command( #[async_trait] impl Tool for ToolCmdline { fn as_any(&self) -> &dyn std::any::Any { self } - + async fn tool_execute( &mut self, ccx: Arc>, From 92f09227435f37f9c4845b192b7ca01c77ebf20e Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sun, 1 Dec 2024 12:05:04 +0100 Subject: [PATCH 022/185] remove competency.yaml, add back knowledge() call --- src/global_context.rs | 6 +-- .../docker/docker_container_manager.rs | 18 +++---- src/tools/mod.rs | 4 +- src/tools/tools_description.rs | 47 ++++++++-------- src/yaml_configs/customization_compiled_in.rs | 54 +++---------------- src/yaml_configs/customization_loader.rs | 45 ++++++++-------- 6 files changed, 67 insertions(+), 107 deletions(-) diff --git a/src/global_context.rs b/src/global_context.rs index eeeb8c22b..c2de69bfa 100644 --- a/src/global_context.rs +++ b/src/global_context.rs @@ -37,7 +37,7 @@ pub struct CommandLine { pub address_url: String, #[structopt(long, short="k", default_value="", help="The API key to authenticate your requests, will appear in HTTP requests this binary makes.")] pub api_key: String, - #[structopt(long, help="Trust self-signed SSL certificates")] + #[structopt(long, help="Trust self-signed SSL certificates, when connecting to an inference server.")] pub insecure: bool, #[structopt(long, short="p", default_value="0", help="Bind 127.0.0.1: to listen for HTTP requests, such as /v1/code-completion, /v1/chat, /v1/caps.")] @@ -88,12 +88,10 @@ pub struct CommandLine { #[structopt(long, help="Enable experimental features, such as new integrations.")] pub experimental: bool, - #[structopt(long, help="Pass true to tell this binary it can run more tools without confirmation.")] + #[structopt(long, help="A way to tell this binary it can run more tools without confirmation.")] pub inside_container: bool, #[structopt(long, default_value="", help="Specify a different configuration for integrations to be used inside remote containers.")] pub remote_integrations: String, - #[structopt(long, short="s", default_value="", help="Read a competency.yaml file that turns on specialization for a particular area, such as creating websites.")] - pub competency: String, } impl CommandLine { diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index bc0d9c422..5de18ad26 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -122,7 +122,7 @@ pub async fn docker_container_check_status_or_start( } None => { let ssh_config_maybe = docker.settings_docker.get_ssh_config(); - + const 
LSP_PORT: &str = "8001"; let mut ports_to_forward = if ssh_config_maybe.is_some() { docker.settings_docker.ports.iter() @@ -197,7 +197,7 @@ pub async fn docker_container_check_status_or_start( pub async fn docker_container_get_host_lsp_port_to_connect( gcx: Arc>, chat_id: &str, -) -> Result +) -> Result { let docker_container_session_maybe = { let gcx_locked = gcx.read().await; @@ -248,7 +248,7 @@ async fn docker_container_create( "{DEFAULT_CONTAINER_LSP_PATH} --http-port {lsp_port} --logs-stderr --inside-container \ --address-url {address_url} --api-key {api_key} --vecdb --reset-memory --ast --experimental", ); - + let ports_to_forward_as_arg_list = ports_to_forward.iter() .map(|p| format!("--publish={}:{}", p.published, p.target)).collect::>().join(" "); let run_command = format!( @@ -284,16 +284,16 @@ async fn docker_container_sync_yaml_configs( let temp_dir_path = temp_dir.path().to_string_lossy().to_string(); docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.cache/"), gcx.clone(), true).await?; docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.cache/refact"), gcx.clone(), true).await?; - + let config_files_to_sync = ["privacy.yaml", "integrations.yaml", "bring-your-own-key.yaml", "competency.yaml"]; - let (remote_integrations_path, competency_path) = { + let remote_integrations_path = { let gcx_locked = gcx.read().await; - (gcx_locked.cmdline.remote_integrations.clone(), gcx_locked.cmdline.competency.clone()) + gcx_locked.cmdline.remote_integrations.clone() }; for file in &config_files_to_sync { let local_path = match *file { "integrations.yaml" if !remote_integrations_path.is_empty() => remote_integrations_path.clone(), - "competency.yaml" if !competency_path.is_empty() => competency_path.clone(), + // "competency.yaml" if !competency_path.is_empty() => competency_path.clone(), _ => cache_dir.join(file).to_string_lossy().to_string(), }; let container_path = format!("{container_id}:{container_home_dir}/.cache/refact/{file}"); @@ -410,8 +410,8 @@ async fn docker_container_sync_workspace( } async fn append_folder_if_exists( - tar_builder: &mut async_tar::Builder>, - workspace_folder: &PathBuf, + tar_builder: &mut async_tar::Builder>, + workspace_folder: &PathBuf, folder_name: &str ) -> Result<(), String> { let folder_path = workspace_folder.join(folder_name); diff --git a/src/tools/mod.rs b/src/tools/mod.rs index 9b0badff5..dce7f70ea 100644 --- a/src/tools/mod.rs +++ b/src/tools/mod.rs @@ -13,8 +13,8 @@ mod tool_deep_thinking; #[cfg(feature="vecdb")] mod tool_search; -// #[cfg(feature="vecdb")] -// mod tool_knowledge; +#[cfg(feature="vecdb")] +mod tool_knowledge; #[cfg(feature="vecdb")] mod tool_locate_search; pub mod tool_patch; diff --git a/src/tools/tools_description.rs b/src/tools/tools_description.rs index b73ce2e44..2d02475af 100644 --- a/src/tools/tools_description.rs +++ b/src/tools/tools_description.rs @@ -110,6 +110,9 @@ pub async fn tools_merged_and_filtered( ("locate".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_locate_search::ToolLocateSearch{}) as Box))), ]); + #[cfg(feature="vecdb")] + tools_all.insert("knowledge".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_knowledge::ToolGetKnowledge{}) as Box))); + if allow_experimental { // The approach here: if it exists, it shouldn't have syntax errors, note the "?" 
// if let Some(gh_config) = integrations_value.get("github") { @@ -139,16 +142,6 @@ pub async fn tools_merged_and_filtered( // tools_all.insert("deep_thinking".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_deep_thinking::ToolDeepThinking{}) as Box))); // } // } - // #[cfg(feature="vecdb")] - // tools_all.insert("knowledge".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_knowledge::ToolGetKnowledge{}) as Box))); - // match load_integration_tools(gcx.clone()).await { - // Ok(integrations) => { - // tools_all.extend(integrations); - // } - // Err(e) => error!("Failed to load integrations: {}", e), - // } - // #[cfg(feature="vecdb")] - // tools_all.insert("knowledge".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_knowledge::ToolGetKnowledge{}) as Box))); } if let Some(cmdline) = integrations_value.get("cmdline") { @@ -367,22 +360,28 @@ tools: parameters_required: - "project_dir" - "command" -"####; + - name: "knowledge" + agentic: true + description: "Fetches successful trajectories to help you accomplish your task. Call each time you have a new task to increase your chances of success." + experimental: true + parameters: + - name: "im_going_to_use_tools" + type: "string" + description: "Which tools are you about to use? Comma-separated list, examples: hg, git, github, gitlab, rust debugger, patch" + - name: "im_going_to_apply_to" + type: "string" + description: "What your actions will be applied to? List all you can identify, starting with the project name. Comma-separated list, examples: project1, file1.cpp, MyClass, PRs, issues" + - name: "language_slash_framework" + type: "string" + description: "What programming language and framework is the current project using? Use lowercase, dashes and dots. Examples: python/django, typescript/node.js, rust/tokio, ruby/rails, php/laravel, c++/boost-asio" + parameters_required: + - "im_going_to_use_tools" + - "im_going_to_apply_to" + - "language_slash_framework" + +"####; -// - name: "knowledge" -// description: "What kind of knowledge you will need to accomplish this task? Call each time you have a new task or topic." -// experimental: true -// parameters: -// - name: "im_going_to_use_tools" -// type: "string" -// description: "Which tools are you about to use? Comma-separated list, examples: hg, git, github, gitlab, rust debugger, patch" -// - name: "im_going_to_apply_to" -// type: "string" -// description: "What your future actions will be applied to? List all you can identify, starting from the project name. Comma-separated list, examples: project1, file1.cpp, MyClass, PRs, issues" -// parameters_required: -// - "im_going_to_use_tools" -// - "im_going_to_apply_to" #[allow(dead_code)] diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index 899b80ebc..5481886fc 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -77,12 +77,13 @@ PROMPT_EXPLORATION_TOOLS: | Explain your plan briefly before calling the tools in parallel. - IT IS FORBIDDEN TO JUST CALL TOOLS WITHOUT EXPLAINING. EXPLAIN FIRST! USE EXPLORATION TOOLS IN PARALLEL! USE šŸ“ BEFORE ANY CODE BLOCK! + USE EXPLORATION TOOLS IN PARALLEL! USE šŸ“ BEFORE ANY CODE BLOCK! PROMPT_AGENTIC_TOOLS: | - [mode3] You are Refact Chat, a coding assistant. + [mode3] You are Refact Agent, an autonomous bot for coding tasks. 
+ %CD_INSTRUCTIONS% %PROMPT_PINS% %WORKSPACE_INFO% @@ -91,60 +92,24 @@ PROMPT_AGENTIC_TOOLS: | copy a lot, just copy word-for-word. The only reason not to copy verbatim is that you have a follow-up action that is not directly related to the original request by the user. - Thinking strategy for the answers: + Thinking strategy: * Question unrelated to the project => just answer immediately. * Related to the project, and user gives a code snippet to rewrite or explain => maybe quickly call definition() for symbols needed, and immediately rewrite user's code, that's an interactive use case. - * Related to the project, user describes an issue that appears to be local => call locate() to find where exactly in the code that is. + * Related to the project, user describes an issue that appears to be somewhere in the code => call locate() to find where exactly in the code that is. - * Related to the project, user want a major change => call tree() to see what files the project has, use cat("file2,file1", skeleton=True) with - comma-separated paths to relevant files, you can get images this way, too. The skeleton flag that helps to take a quick look - inside many files. You might need to cat() a file you want to change in full later. + * User's request likely involves several steps, function calls, agentic tools like browser, database, debugger => then you need to call knowledge() first + to get access to the latest and best trajectories accomplishing a similar thing. - If user wants changes, write the changes yourself using šŸ“-notation, then call patch() in parallel for each file to change, + If the task requires changes, write the changes yourself using šŸ“-notation, then call patch() in parallel for each file to change, and put all tickets you want to apply to a file in a comma-separated list. - %CD_INSTRUCTIONS% - %SPECIALIZATION% - WHEN USING EXPLORATION TOOLS, USE SEVERAL IN PARALLEL! USE šŸ“ BEFORE ANY CODE BLOCK! -PROMPT_AGENTIC_EXPERIMENTAL_KNOWLEDGE: | - [mode3exp] You are Refact Agent, a coding assistant. Use triple backquotes for code blocks. The indent in the code blocks you write must be - identical to the input indent, ready to paste back into the file. - - %WORKSPACE_INFO% - - You are entrusted the agentic tools, locate() and patch(). They think for a long time, but produce reliable results and hide - complexity, as to not waste tokens here in this chat. Avoid them unless user wants to fix a bug without giving any specifics. - - When user asks something new, always call knowledge() to recall your previous attempts on the topic. - - Thinking strategy for the answers: - - * Question unrelated to the project => just answer immediately. A question about python the programming language is a good example -- just answer it, - there's no context you need. - - * Related to the project, and user gives a code snippet to rewrite or explain => call knowledge() because it's cheap, maybe quickly call definition() - for symbols needed, and immediately rewrite user's code, that's an interactive use case. - - * Related to the project, user doesn't give specific pointer to a code => call knowledge(), look if you had enough past experience with similar - questions, if yes call cat("file1, file2", "symbol1, symbol2") with the recalled files and symbols. If it's not enough information coming - from knowledge(), only then call locate() for a reliable files list, and continue with cat(). Don't call anything after cat(), it's still an - interative use case, should be fast. 
- - * Related to the project, user asks for actions that have to do with integrations, like version control, github, gitlab, review board etc => call knowledge() - and pay close attention to which past trajectories the user liked and didn't like before. Then try to execute what the user wants in a - manner that the user will like. - - %CD_INSTRUCTIONS% - - IT IS FORBIDDEN TO JUST CALL TOOLS WITHOUT EXPLAINING. EXPLAIN FIRST! SERIOUSLY ABOUT CALLING knowledge(). IF IT'S ANYTHING ABOUT THE PROJECT, CALL knowledge() FIRST. - PROMPT_CONFIGURATOR: | You are Refact Agent, a coding assistant. But today your job is to help the user to update Refact Agent configuration files, especially the @@ -204,9 +169,6 @@ system_prompts: configurator: text: "%PROMPT_CONFIGURATOR%" show: experimental - agentic_experimental_knowledge: - text: "%PROMPT_AGENTIC_EXPERIMENTAL_KNOWLEDGE%" - show: experimental subchat_tool_parameters: diff --git a/src/yaml_configs/customization_loader.rs b/src/yaml_configs/customization_loader.rs index 1fb083070..1972abfaa 100644 --- a/src/yaml_configs/customization_loader.rs +++ b/src/yaml_configs/customization_loader.rs @@ -194,34 +194,35 @@ pub async fn load_customization( let caps_locked = caps.read().unwrap(); (caps_locked.customization.clone(), caps_locked.code_chat_default_system_prompt.clone()) }; - let competency_path = gcx.read().await.cmdline.competency.clone(); + // let competency_path = gcx.read().await.cmdline.competency.clone(); let cache_dir = gcx.read().await.cache_dir.clone(); let customization_yaml_path = cache_dir.join("customization.yaml"); let user_config_text = std::fs::read_to_string(&customization_yaml_path).map_err(|e| format!("Failed to read file: {}", e))?; - let competency_yaml = if !competency_path.is_empty() { - std::fs::read_to_string(&competency_path).map_err(|e| format!("Failed to read file: {}", e))? - } else { - let global_competency_path = cache_dir.join("competency.yaml"); - if let Ok(content) = std::fs::read_to_string(&global_competency_path) { - content - } else { - tracing::info!("there is no competency.yaml supplied in the command line, and couldn't read {} either", global_competency_path.display()); - String::new() - } - }; - - let system_prompt_vars = if competency_yaml.is_empty() { - let mut map = HashMap::new(); - map.insert("SPECIALIZATION".to_string(), "".to_string()); - map - } else { - let competency: Competency = serde_yaml::from_str(&competency_yaml) - .map_err(|e| format!("Error parsing competency YAML: {}\n{}", e, competency_yaml))?; - competency.system_prompt_vars - }; + // let competency_yaml = if !competency_path.is_empty() { + // std::fs::read_to_string(&competency_path).map_err(|e| format!("Failed to read file: {}", e))? 
+ // } else { + // let global_competency_path = cache_dir.join("competency.yaml"); + // if let Ok(content) = std::fs::read_to_string(&global_competency_path) { + // content + // } else { + // tracing::info!("there is no competency.yaml supplied in the command line, and couldn't read {} either", global_competency_path.display()); + // String::new() + // } + // }; + let mut system_prompt_vars = HashMap::new(); + + // let system_prompt_vars = if competency_yaml.is_empty() { + // let mut map = HashMap::new(); + // map.insert("SPECIALIZATION".to_string(), "".to_string()); + // map + // } else { + // let competency: Competency = serde_yaml::from_str(&competency_yaml) + // .map_err(|e| format!("Error parsing competency YAML: {}\n{}", e, competency_yaml))?; + // competency.system_prompt_vars + // }; load_and_mix_with_users_config(&user_config_text, &caps_config_text, &caps_default_system_prompt, skip_visibility_filtering, allow_experimental, &system_prompt_vars).map_err(|e| e.to_string()) } From e41d49723f3218714ca04edfe495e451369e022a Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sun, 1 Dec 2024 15:22:06 +0100 Subject: [PATCH 023/185] can use --integrations-yaml for a custom integrations file --- src/global_context.rs | 6 +- src/http/routers/v1/v1_integrations.rs | 2 +- .../docker/docker_container_manager.rs | 2 +- src/integrations/integr_chrome.rs | 2 +- src/integrations/integr_postgres.rs | 3 +- src/integrations/running_integrations.rs | 14 ++-- src/integrations/setting_up_integrations.rs | 84 ++++++++++++++++--- src/integrations/yaml_schema.rs | 4 - 8 files changed, 86 insertions(+), 31 deletions(-) diff --git a/src/global_context.rs b/src/global_context.rs index c2de69bfa..bac4a37cc 100644 --- a/src/global_context.rs +++ b/src/global_context.rs @@ -88,10 +88,12 @@ pub struct CommandLine { #[structopt(long, help="Enable experimental features, such as new integrations.")] pub experimental: bool, + #[structopt(long, help="A way to tell this binary it can run more tools without confirmation.")] pub inside_container: bool, - #[structopt(long, default_value="", help="Specify a different configuration for integrations to be used inside remote containers.")] - pub remote_integrations: String, + + #[structopt(long, default_value="", help="Specify an alternative integrations.yaml, this also disables the global integrations.d")] + pub integrations_yaml: String, } impl CommandLine { diff --git a/src/http/routers/v1/v1_integrations.rs b/src/http/routers/v1/v1_integrations.rs index 7d561b495..87eb5a9d2 100644 --- a/src/http/routers/v1/v1_integrations.rs +++ b/src/http/routers/v1/v1_integrations.rs @@ -65,7 +65,7 @@ struct IntegrationSavePost { } pub async fn handle_v1_integration_save( - Extension(gcx): Extension>>, + Extension(_gcx): Extension>>, body_bytes: hyper::body::Bytes, ) -> axum::response::Result, ScratchError> { let post = serde_json::from_slice::(&body_bytes) diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index 5de18ad26..2b1ece356 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -288,7 +288,7 @@ async fn docker_container_sync_yaml_configs( let config_files_to_sync = ["privacy.yaml", "integrations.yaml", "bring-your-own-key.yaml", "competency.yaml"]; let remote_integrations_path = { let gcx_locked = gcx.read().await; - gcx_locked.cmdline.remote_integrations.clone() + gcx_locked.cmdline.integrations_yaml.clone() }; for file in 
&config_files_to_sync { let local_path = match *file { diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 0be23f41b..c3f7fc359 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -102,7 +102,7 @@ impl Integration for ToolChrome { #[async_trait] impl Tool for ToolChrome { fn as_any(&self) -> &dyn std::any::Any { self } - + async fn tool_execute( &mut self, ccx: Arc>, diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index 179c905fb..bd5aa629c 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -14,6 +14,7 @@ use crate::integrations::integr_abstract::IntegrationTrait; #[derive(Clone, Serialize, Deserialize, Debug, Default)] pub struct SettingsPostgres { + #[serde(default)] pub psql_binary_path: String, pub host: String, pub port: String, @@ -99,7 +100,7 @@ impl ToolPostgres { #[async_trait] impl Tool for ToolPostgres { fn as_any(&self) -> &dyn std::any::Any { self } - + async fn tool_execute( &mut self, _ccx: Arc>, diff --git a/src/integrations/running_integrations.rs b/src/integrations/running_integrations.rs index d684717b7..9829e7a78 100644 --- a/src/integrations/running_integrations.rs +++ b/src/integrations/running_integrations.rs @@ -1,4 +1,4 @@ -use std::path::PathBuf; +// use std::path::PathBuf; use std::sync::Arc; use indexmap::IndexMap; use tokio::sync::RwLock as ARwLock; @@ -11,19 +11,15 @@ use crate::global_context::GlobalContext; pub async fn load_integration_tools( gcx: Arc>, _current_project: String, - allow_experimental: bool, + _allow_experimental: bool, ) -> IndexMap>>> { - let (global_dir, _workspace_folders_arc) = { - let gcx_locked = gcx.read().await; - (gcx_locked.config_dir.clone(), gcx_locked.documents_state.workspace_folders.clone()) - }; - let mut config_folders: Vec = Vec::new(); // XXX filter _workspace_folders_arc that fit _current_project - config_folders.push(global_dir); + let config_folders= crate::integrations::setting_up_integrations::config_dirs(gcx.clone()).await; + let integrations_yaml_path = crate::integrations::setting_up_integrations::get_integrations_yaml_path(gcx.clone()).await; let mut error_log: Vec = Vec::new(); let lst: Vec<&str> = crate::integrations::integrations_list(); - let records = crate::integrations::setting_up_integrations::read_integrations_d(&config_folders, &lst, &mut error_log); + let records = crate::integrations::setting_up_integrations::read_integrations_d(&config_folders, &integrations_yaml_path, &lst, &mut error_log); let mut tools = IndexMap::new(); for rec in records { diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index e0f543855..c9a73a7eb 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -39,6 +39,7 @@ pub struct IntegrationWithIconResult { pub fn read_integrations_d( config_folders: &Vec, + integrations_yaml_path: &String, lst: &[&str], error_log: &mut Vec, ) -> Vec { @@ -55,7 +56,7 @@ pub fn read_integrations_d( continue; } }; - let short_pp = crate::nicer_logs::last_n_chars(&project_path, 10); + let short_pp = if project_path.is_empty() { format!("global") } else { crate::nicer_logs::last_n_chars(&project_path, 15) }; rec.project_path = project_path.clone(); rec.integr_name = integr_name.to_string(); rec.integr_config_path = path_str.clone(); @@ -65,12 +66,6 @@ pub fn read_integrations_d( Ok(file_content) => match serde_yaml::from_str::(&file_content) 
{ Ok(yaml_value) => { rec.config_unparsed = serde_json::to_value(yaml_value.clone()).unwrap(); - if let Some(available) = yaml_value.get("available").and_then(|v| v.as_mapping()) { - rec.on_your_laptop = available.get("on_your_laptop").and_then(|v| v.as_bool()).unwrap_or(false); - rec.when_isolated = available.get("when_isolated").and_then(|v| v.as_bool()).unwrap_or(false); - } else { - tracing::info!("{} no 'available' mapping in `{}`", short_pp, integr_name); - } } Err(e) => { let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); @@ -97,9 +92,71 @@ pub fn read_integrations_d( integrations.push(rec); } } + + let short_yaml = crate::nicer_logs::last_n_chars(integrations_yaml_path, 15); + match fs::read_to_string(integrations_yaml_path) { + Ok(content) => match serde_yaml::from_str::(&content) { + Ok(y) => { + for integr_name in lst.iter() { + if let Some(config) = y.get(integr_name) { + let mut rec: IntegrationRecord = Default::default(); + rec.integr_config_path = integrations_yaml_path.clone(); + rec.integr_name = integr_name.to_string(); + rec.integr_config_exists = true; + rec.config_unparsed = serde_json::to_value(config.clone()).unwrap(); + integrations.push(rec); + tracing::info!("{} has `{}`", short_yaml, integr_name); + } else { + tracing::info!("{} no section `{}`", short_yaml, integr_name); + } + } + } + Err(e) => { + error_log.push(YamlError { + integr_config_path: integrations_yaml_path.clone(), + error_line: e.location().map(|loc| loc.line()).unwrap_or(0), + error_msg: e.to_string(), + }); + tracing::warn!("failed to parse {}: {}", integrations_yaml_path, e); + } + }, + Err(e) => { + error_log.push(YamlError { + integr_config_path: integrations_yaml_path.clone(), + error_line: 0, + error_msg: e.to_string(), + }); + tracing::warn!("failed to read {}: {}", integrations_yaml_path, e); + } + }; + + for rec in &mut integrations { + if !rec.integr_config_exists { + continue; + } + if let Some(available) = rec.config_unparsed.get("available").and_then(|v| v.as_object()) { + rec.on_your_laptop = available.get("on_your_laptop").and_then(|v| v.as_bool()).unwrap_or(false); + rec.when_isolated = available.get("when_isolated").and_then(|v| v.as_bool()).unwrap_or(false); + } else { + let short_pp = if rec.project_path.is_empty() { format!("global") } else { crate::nicer_logs::last_n_chars(&rec.project_path, 15) }; + tracing::info!("{} no 'available' mapping in `{}`", short_pp, rec.integr_name); + } + } + integrations } + +pub async fn get_integrations_yaml_path(gcx: Arc>) -> String { + let gcx_locked = gcx.read().await; + let r = gcx_locked.cmdline.integrations_yaml.clone(); + if r.is_empty() { + let config_dir = gcx_locked.config_dir.join("integrations.yaml"); + return config_dir.to_string_lossy().to_string(); + } + r +} + pub fn join_config_path(config_dir: &PathBuf, integr_name: &str) -> String { config_dir.join("integrations.d").join(format!("{}.yaml", integr_name)).to_string_lossy().into_owned() } @@ -107,13 +164,15 @@ pub fn join_config_path(config_dir: &PathBuf, integr_name: &str) -> String { pub async fn config_dirs( gcx: Arc>, ) -> Vec { - let (global_dir, workspace_folders_arc) = { + let (global_dir, workspace_folders_arc, integrations_yaml) = { let gcx_locked = gcx.read().await; - (gcx_locked.config_dir.clone(), gcx_locked.documents_state.workspace_folders.clone()) + (gcx_locked.config_dir.clone(), gcx_locked.documents_state.workspace_folders.clone(), gcx_locked.cmdline.integrations_yaml.clone()) }; let mut 
config_folders = workspace_folders_arc.lock().unwrap().clone(); config_folders = config_folders.iter().map(|folder| folder.join(".refact")).collect(); - config_folders.push(global_dir); + if integrations_yaml.is_empty() { + config_folders.push(global_dir); + } config_folders } @@ -137,10 +196,11 @@ pub fn split_path_into_project_and_integration(cfg_path: &PathBuf) -> Result<(St pub async fn integrations_all_with_icons( gcx: Arc>, ) -> IntegrationWithIconResult { - let config_folders = config_dirs(gcx).await; + let config_folders = config_dirs(gcx.clone()).await; let lst: Vec<&str> = crate::integrations::integrations_list(); let mut error_log: Vec = Vec::new(); - let integrations = read_integrations_d(&config_folders, &lst, &mut error_log); + let integrations_yaml_path = get_integrations_yaml_path(gcx.clone()).await; + let integrations = read_integrations_d(&config_folders, &integrations_yaml_path, &lst, &mut error_log); // rec.integr_icon = crate::integrations::icon_from_name(integr_name); IntegrationWithIconResult { integrations, diff --git a/src/integrations/yaml_schema.rs b/src/integrations/yaml_schema.rs index c95859ce1..faacf12ef 100644 --- a/src/integrations/yaml_schema.rs +++ b/src/integrations/yaml_schema.rs @@ -64,7 +64,3 @@ fn is_default(t: &T) -> bool { fn is_empty(t: &Vec) -> bool { t.is_empty() } - -fn is_indexmap_empty(t: &IndexMap) -> bool { - t.is_empty() -} From f2f1f0170856947566600782b52510ba5dad7c1c Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sun, 1 Dec 2024 16:48:24 +0100 Subject: [PATCH 024/185] read secrets.yaml variables.yaml --- src/integrations/running_integrations.rs | 3 +- src/integrations/setting_up_integrations.rs | 42 ++++++++++++++++++++- 2 files changed, 43 insertions(+), 2 deletions(-) diff --git a/src/integrations/running_integrations.rs b/src/integrations/running_integrations.rs index 9829e7a78..fafe6b6d7 100644 --- a/src/integrations/running_integrations.rs +++ b/src/integrations/running_integrations.rs @@ -19,7 +19,8 @@ pub async fn load_integration_tools( let mut error_log: Vec = Vec::new(); let lst: Vec<&str> = crate::integrations::integrations_list(); - let records = crate::integrations::setting_up_integrations::read_integrations_d(&config_folders, &integrations_yaml_path, &lst, &mut error_log); + let vars_for_replacements = crate::integrations::setting_up_integrations::get_vars_for_replacements(gcx.clone()).await; + let records = crate::integrations::setting_up_integrations::read_integrations_d(&config_folders, &integrations_yaml_path, &vars_for_replacements, &lst, &mut error_log); let mut tools = IndexMap::new(); for rec in records { diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index c9a73a7eb..51bb7b3aa 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -1,6 +1,7 @@ use std::fs; use std::path::PathBuf; use std::sync::Arc; +use std::collections::HashMap; use regex::Regex; use serde::Serialize; use tokio::sync::RwLock as ARwLock; @@ -40,6 +41,7 @@ pub struct IntegrationWithIconResult { pub fn read_integrations_d( config_folders: &Vec, integrations_yaml_path: &String, + vars_for_replacements: &HashMap, lst: &[&str], error_log: &mut Vec, ) -> Vec { @@ -134,6 +136,16 @@ pub fn read_integrations_d( if !rec.integr_config_exists { continue; } + if let serde_json::Value::Object(map) = &mut rec.config_unparsed { + for (key, value) in map.iter_mut() { + if let Some(str_value) = value.as_str() { + let replaced_value = 
vars_for_replacements.iter().fold(str_value.to_string(), |acc, (var, replacement)| { + acc.replace(&format!("${}", var), replacement) + }); + *value = serde_json::Value::String(replaced_value); + } + } + } if let Some(available) = rec.config_unparsed.get("available").and_then(|v| v.as_object()) { rec.on_your_laptop = available.get("on_your_laptop").and_then(|v| v.as_bool()).unwrap_or(false); rec.when_isolated = available.get("when_isolated").and_then(|v| v.as_bool()).unwrap_or(false); @@ -157,6 +169,32 @@ pub async fn get_integrations_yaml_path(gcx: Arc>) -> Str r } +pub async fn get_vars_for_replacements(gcx: Arc>) -> HashMap { + let gcx_locked = gcx.read().await; + let secrets_yaml_path = gcx_locked.config_dir.join("secrets.yaml"); + let variables_yaml_path = gcx_locked.config_dir.join("variables.yaml"); + let mut variables = HashMap::new(); + if let Ok(secrets_content) = fs::read_to_string(&secrets_yaml_path) { + if let Ok(secrets_yaml) = serde_yaml::from_str::>(&secrets_content) { + variables.extend(secrets_yaml); + } else { + tracing::warn!("cannot parse secrets.yaml"); + } + } else { + tracing::info!("cannot read secrets.yaml"); + } + if let Ok(variables_content) = fs::read_to_string(&variables_yaml_path) { + if let Ok(variables_yaml) = serde_yaml::from_str::>(&variables_content) { + variables.extend(variables_yaml); + } else { + tracing::warn!("cannot parse variables.yaml"); + } + } else { + tracing::info!("cannot read variables.yaml"); + } + variables +} + pub fn join_config_path(config_dir: &PathBuf, integr_name: &str) -> String { config_dir.join("integrations.d").join(format!("{}.yaml", integr_name)).to_string_lossy().into_owned() } @@ -200,7 +238,9 @@ pub async fn integrations_all_with_icons( let lst: Vec<&str> = crate::integrations::integrations_list(); let mut error_log: Vec = Vec::new(); let integrations_yaml_path = get_integrations_yaml_path(gcx.clone()).await; - let integrations = read_integrations_d(&config_folders, &integrations_yaml_path, &lst, &mut error_log); + let vars_for_replacements = get_vars_for_replacements(gcx.clone()).await; + let integrations = read_integrations_d(&config_folders, &integrations_yaml_path, &vars_for_replacements, &lst, &mut error_log); + // rec.integr_icon = crate::integrations::icon_from_name(integr_name); IntegrationWithIconResult { integrations, From d95ff16a10abfa5bf431f2b71dccd713698bb591 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Mon, 2 Dec 2024 07:53:01 +0100 Subject: [PATCH 025/185] make cmdline_ and service_ work as sections and config files in .d --- src/integrations/mod.rs | 10 + src/integrations/running_integrations.rs | 6 +- src/integrations/setting_up_integrations.rs | 212 ++++++++++++-------- 3 files changed, 146 insertions(+), 82 deletions(-) diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index 6e186fc64..2ea3ce9e0 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -35,6 +35,16 @@ pub fn integration_from_name(n: &str) -> Result Ok(Box::new(integr_postgres::ToolPostgres { ..Default::default() }) as Box), // "chrome" => Ok(Box::new(ToolChrome { ..Default::default() }) as Box), "docker" => Ok(Box::new(docker::integr_docker::ToolDocker {..Default::default() }) as Box), + cmdline if cmdline.starts_with("cmdline_") => { + let tool_name = cmdline.strip_prefix("cmdline_").unwrap(); + tracing::info!("todo finish me tool_name={}", tool_name); + Err("todo finish me".to_string()) + }, + service if service.starts_with("service_") => { + let tool_name = service.strip_prefix("service_").unwrap(); + 
tracing::info!("todo finish me service tool_name={}", tool_name); + Err("todo finish me".to_string()) + }, _ => Err(format!("Unknown integration name: {}", n)), } } diff --git a/src/integrations/running_integrations.rs b/src/integrations/running_integrations.rs index fafe6b6d7..87d2e81f5 100644 --- a/src/integrations/running_integrations.rs +++ b/src/integrations/running_integrations.rs @@ -14,13 +14,13 @@ pub async fn load_integration_tools( _allow_experimental: bool, ) -> IndexMap>>> { // XXX filter _workspace_folders_arc that fit _current_project - let config_folders= crate::integrations::setting_up_integrations::config_dirs(gcx.clone()).await; + let (config_dirs, global_config_dir) = crate::integrations::setting_up_integrations::get_config_dirs(gcx.clone()).await; let integrations_yaml_path = crate::integrations::setting_up_integrations::get_integrations_yaml_path(gcx.clone()).await; let mut error_log: Vec = Vec::new(); let lst: Vec<&str> = crate::integrations::integrations_list(); let vars_for_replacements = crate::integrations::setting_up_integrations::get_vars_for_replacements(gcx.clone()).await; - let records = crate::integrations::setting_up_integrations::read_integrations_d(&config_folders, &integrations_yaml_path, &vars_for_replacements, &lst, &mut error_log); + let records = crate::integrations::setting_up_integrations::read_integrations_d(&config_dirs, &global_config_dir, &integrations_yaml_path, &vars_for_replacements, &lst, &mut error_log); let mut tools = IndexMap::new(); for rec in records { @@ -33,7 +33,7 @@ pub async fn load_integration_tools( let mut integr = match crate::integrations::integration_from_name(&rec.integr_name) { Ok(x) => x, Err(e) => { - tracing::error!("Failed to load integration {}: {}", rec.integr_name, e); + tracing::error!("don't have integration {}: {}", rec.integr_name, e); continue; } }; diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 51bb7b3aa..012c645b1 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -39,18 +39,27 @@ pub struct IntegrationWithIconResult { } pub fn read_integrations_d( - config_folders: &Vec, + config_dirs: &Vec, + global_config_dir: &PathBuf, integrations_yaml_path: &String, vars_for_replacements: &HashMap, lst: &[&str], error_log: &mut Vec, ) -> Vec { - let mut integrations = Vec::new(); - for config_dir in config_folders { + let mut result = Vec::new(); + + // 1. 
Read each of config_dirs + let mut files_to_read = Vec::new(); + let mut project_config_dirs = config_dirs.iter().map(|dir| dir.to_string_lossy().to_string()).collect::>(); + if integrations_yaml_path.is_empty() { + project_config_dirs.push("".to_string()); // global + } + for project_config_dir in project_config_dirs { + // Read config_folder/integr_name.yaml and make a record, even if the file doesn't exist + let dir_path = if project_config_dir == "" { global_config_dir.clone() } else { PathBuf::from(project_config_dir.clone()) }; for integr_name in lst.iter() { - let path_str = join_config_path(config_dir, integr_name); + let path_str = join_config_path(&dir_path, integr_name); let path = PathBuf::from(path_str.clone()); - let mut rec: IntegrationRecord = Default::default(); let (_integr_name, project_path) = match split_path_into_project_and_integration(&path) { Ok(x) => x, Err(e) => { @@ -58,84 +67,123 @@ pub fn read_integrations_d( continue; } }; - let short_pp = if project_path.is_empty() { format!("global") } else { crate::nicer_logs::last_n_chars(&project_path, 15) }; - rec.project_path = project_path.clone(); - rec.integr_name = integr_name.to_string(); - rec.integr_config_path = path_str.clone(); - rec.integr_config_exists = path.exists(); - if rec.integr_config_exists { - match fs::read_to_string(&path) { - Ok(file_content) => match serde_yaml::from_str::(&file_content) { - Ok(yaml_value) => { - rec.config_unparsed = serde_json::to_value(yaml_value.clone()).unwrap(); - } - Err(e) => { - let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); - error_log.push(YamlError { - integr_config_path: path_str.to_string(), - error_line: e.location().map(|loc| loc.line()).unwrap_or(0), - error_msg: e.to_string(), - }); - tracing::warn!("failed to parse {}{}: {}", path_str, location, e.to_string()); - } - }, + files_to_read.push((path_str, integr_name.to_string(), project_path)); + } + // Find special files that start with cmdline_* and service_* + if let Ok(entries) = fs::read_dir(dir_path.join("integrations.d")) { + let mut entries: Vec<_> = entries.filter_map(Result::ok).collect(); + entries.sort_by_key(|entry| entry.file_name()); + for entry in entries { + let file_name = entry.file_name(); + let file_name_str = file_name.to_string_lossy(); + if !file_name_str.ends_with(".yaml") { + continue; + } + let file_name_str_no_yaml = file_name_str.trim_end_matches(".yaml").to_string(); + if file_name_str.starts_with("cmdline_") || file_name_str.starts_with("service_") { + files_to_read.push((entry.path().to_string_lossy().to_string(), file_name_str_no_yaml.to_string(), project_config_dir.clone())); + } + } + } + } + + for (path_str, integr_name, project_path) in files_to_read { + let path = PathBuf::from(&path_str); + let short_pp = if project_path.is_empty() { format!("global") } else { crate::nicer_logs::last_n_chars(&project_path, 15) }; + let mut rec: IntegrationRecord = Default::default(); + rec.project_path = project_path.clone(); + rec.integr_name = integr_name.clone(); + rec.integr_config_path = path_str.clone(); + rec.integr_config_exists = path.exists(); + if rec.integr_config_exists { + match fs::read_to_string(&path) { + Ok(file_content) => match serde_yaml::from_str::(&file_content) { + Ok(yaml_value) => { + tracing::info!("{} has {}", short_pp, integr_name); + rec.config_unparsed = serde_json::to_value(yaml_value.clone()).unwrap(); + } Err(e) => { + let location = e.location().map(|loc| format!(" at line {}, column 
{}", loc.line(), loc.column())).unwrap_or_default(); error_log.push(YamlError { integr_config_path: path_str.to_string(), - error_line: 0, + error_line: e.location().map(|loc| loc.line()).unwrap_or(0), error_msg: e.to_string(), }); - tracing::warn!("failed to read {}: {}", path_str, e.to_string()); + tracing::warn!("failed to parse {}{}: {}", path_str, location, e.to_string()); } + }, + Err(e) => { + error_log.push(YamlError { + integr_config_path: path_str.to_string(), + error_line: 0, + error_msg: e.to_string(), + }); + tracing::warn!("failed to read {}: {}", path_str, e.to_string()); } - } else { - tracing::info!("{} no config file `{}`", short_pp, integr_name); } - integrations.push(rec); + } else { + tracing::info!("{} no config file for {}", short_pp, integr_name); } + result.push(rec); } - let short_yaml = crate::nicer_logs::last_n_chars(integrations_yaml_path, 15); - match fs::read_to_string(integrations_yaml_path) { - Ok(content) => match serde_yaml::from_str::(&content) { - Ok(y) => { - for integr_name in lst.iter() { - if let Some(config) = y.get(integr_name) { - let mut rec: IntegrationRecord = Default::default(); - rec.integr_config_path = integrations_yaml_path.clone(); - rec.integr_name = integr_name.to_string(); - rec.integr_config_exists = true; - rec.config_unparsed = serde_json::to_value(config.clone()).unwrap(); - integrations.push(rec); - tracing::info!("{} has `{}`", short_yaml, integr_name); + // 2. Read single file integrations_yaml_path, sections in yaml become integrations + // The --integrations-yaml flag disables the global config folder in (1) + if !integrations_yaml_path.is_empty() { + let short_yaml = crate::nicer_logs::last_n_chars(integrations_yaml_path, 15); + match fs::read_to_string(integrations_yaml_path) { + Ok(content) => match serde_yaml::from_str::(&content) { + Ok(y) => { + if let Some(mapping) = y.as_mapping() { + for (key, value) in mapping { + if let Some(key_str) = key.as_str() { + if key_str.starts_with("cmdline_") || key_str.starts_with("service_") { + let mut rec: IntegrationRecord = Default::default(); + rec.integr_config_path = integrations_yaml_path.clone(); + rec.integr_name = key_str.to_string(); + rec.integr_config_exists = true; + rec.config_unparsed = serde_json::to_value(value.clone()).unwrap(); + result.push(rec); + tracing::info!("{} detected prefix `{}`", short_yaml, key_str); + } else if lst.contains(&key_str) { + let mut rec: IntegrationRecord = Default::default(); + rec.integr_config_path = integrations_yaml_path.clone(); + rec.integr_name = key_str.to_string(); + rec.integr_config_exists = true; + rec.config_unparsed = serde_json::to_value(value.clone()).unwrap(); + result.push(rec); + tracing::info!("{} has `{}`", short_yaml, key_str); + } else { + tracing::warn!("{} unrecognized section `{}`", short_yaml, key_str); + } + } + } } else { - tracing::info!("{} no section `{}`", short_yaml, integr_name); + tracing::warn!("{} is not a mapping", short_yaml); } } - } + Err(e) => { + error_log.push(YamlError { + integr_config_path: integrations_yaml_path.clone(), + error_line: e.location().map(|loc| loc.line()).unwrap_or(0), + error_msg: e.to_string(), + }); + tracing::warn!("failed to parse {}: {}", integrations_yaml_path, e); + } + }, Err(e) => { error_log.push(YamlError { integr_config_path: integrations_yaml_path.clone(), - error_line: e.location().map(|loc| loc.line()).unwrap_or(0), + error_line: 0, error_msg: e.to_string(), }); - tracing::warn!("failed to parse {}: {}", integrations_yaml_path, e); + tracing::warn!("failed to 
read {}: {}", integrations_yaml_path, e); } - }, - Err(e) => { - error_log.push(YamlError { - integr_config_path: integrations_yaml_path.clone(), - error_line: 0, - error_msg: e.to_string(), - }); - tracing::warn!("failed to read {}: {}", integrations_yaml_path, e); - } - }; + }; + } - for rec in &mut integrations { - if !rec.integr_config_exists { - continue; - } + // 3. Replace vars in config_unparsed + for rec in &mut result { if let serde_json::Value::Object(map) = &mut rec.config_unparsed { for (key, value) in map.iter_mut() { if let Some(str_value) = value.as_str() { @@ -146,26 +194,35 @@ pub fn read_integrations_d( } } } + } + + // 4. Fill on_your_laptop/when_isolated in each record + for rec in &mut result { + if !rec.integr_config_exists { + continue; + } if let Some(available) = rec.config_unparsed.get("available").and_then(|v| v.as_object()) { rec.on_your_laptop = available.get("on_your_laptop").and_then(|v| v.as_bool()).unwrap_or(false); rec.when_isolated = available.get("when_isolated").and_then(|v| v.as_bool()).unwrap_or(false); } else { let short_pp = if rec.project_path.is_empty() { format!("global") } else { crate::nicer_logs::last_n_chars(&rec.project_path, 15) }; - tracing::info!("{} no 'available' mapping in `{}`", short_pp, rec.integr_name); + tracing::info!("{} no 'available' mapping in `{}` will default to true", short_pp, rec.integr_name); + rec.on_your_laptop = true; + rec.when_isolated = true; } } - integrations + result } pub async fn get_integrations_yaml_path(gcx: Arc>) -> String { let gcx_locked = gcx.read().await; let r = gcx_locked.cmdline.integrations_yaml.clone(); - if r.is_empty() { - let config_dir = gcx_locked.config_dir.join("integrations.yaml"); - return config_dir.to_string_lossy().to_string(); - } + // if r.is_empty() { + // let config_dir = gcx_locked.config_dir.join("integrations.yaml"); + // return config_dir.to_string_lossy().to_string(); + // } r } @@ -199,19 +256,16 @@ pub fn join_config_path(config_dir: &PathBuf, integr_name: &str) -> String { config_dir.join("integrations.d").join(format!("{}.yaml", integr_name)).to_string_lossy().into_owned() } -pub async fn config_dirs( +pub async fn get_config_dirs( gcx: Arc>, -) -> Vec { - let (global_dir, workspace_folders_arc, integrations_yaml) = { +) -> (Vec, PathBuf) { + let (global_config_dir, workspace_folders_arc, integrations_yaml) = { let gcx_locked = gcx.read().await; (gcx_locked.config_dir.clone(), gcx_locked.documents_state.workspace_folders.clone(), gcx_locked.cmdline.integrations_yaml.clone()) }; - let mut config_folders = workspace_folders_arc.lock().unwrap().clone(); - config_folders = config_folders.iter().map(|folder| folder.join(".refact")).collect(); - if integrations_yaml.is_empty() { - config_folders.push(global_dir); - } - config_folders + let mut config_dirs = workspace_folders_arc.lock().unwrap().clone(); + config_dirs = config_dirs.iter().map(|dir| dir.join(".refact")).collect(); + (config_dirs, global_config_dir) } pub fn split_path_into_project_and_integration(cfg_path: &PathBuf) -> Result<(String, String), String> { @@ -234,12 +288,12 @@ pub fn split_path_into_project_and_integration(cfg_path: &PathBuf) -> Result<(St pub async fn integrations_all_with_icons( gcx: Arc>, ) -> IntegrationWithIconResult { - let config_folders = config_dirs(gcx.clone()).await; + let (config_dirs, global_config_dir) = get_config_dirs(gcx.clone()).await; let lst: Vec<&str> = crate::integrations::integrations_list(); let mut error_log: Vec = Vec::new(); let integrations_yaml_path = 
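(Editorial sketch, not part of the patch: step 3 above substitutes secrets.yaml/variables.yaml values into every string value of config_unparsed. Pulled out as a plain helper with made-up variable contents, the fold looks like this.)

```rust
use std::collections::HashMap;

// Sketch of the $VAR substitution applied to string values in config_unparsed.
fn replace_vars(value: &str, vars: &HashMap<String, String>) -> String {
    vars.iter().fold(value.to_string(), |acc, (var, replacement)| {
        acc.replace(&format!("${}", var), replacement)
    })
}

fn main() {
    let mut vars = HashMap::new();
    vars.insert("POSTGRES_PASSWORD".to_string(), "s3cr3t".to_string());
    assert_eq!(replace_vars("$POSTGRES_PASSWORD", &vars), "s3cr3t");
    assert_eq!(replace_vars("no variables here", &vars), "no variables here");
}
```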
get_integrations_yaml_path(gcx.clone()).await; let vars_for_replacements = get_vars_for_replacements(gcx.clone()).await; - let integrations = read_integrations_d(&config_folders, &integrations_yaml_path, &vars_for_replacements, &lst, &mut error_log); + let integrations = read_integrations_d(&config_dirs, &global_config_dir, &integrations_yaml_path, &vars_for_replacements, &lst, &mut error_log); // rec.integr_icon = crate::integrations::icon_from_name(integr_name); IntegrationWithIconResult { From 0141517f45716e373fb17ff8c300e6dcdf8aab0b Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Mon, 2 Dec 2024 12:21:12 +0100 Subject: [PATCH 026/185] f_extra --- src/integrations/integr_postgres.rs | 1 + src/integrations/yaml_schema.rs | 2 ++ 2 files changed, 3 insertions(+) diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index bd5aa629c..a98ef33a1 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -202,6 +202,7 @@ fields: f_desc: "If it can't find a path to `psql` you can provide it here, leave blank if not sure." f_placeholder: "psql" f_label: "PSQL Binary Path" + f_extra: true available: on_your_laptop_possible: true when_isolated_possible: true diff --git a/src/integrations/yaml_schema.rs b/src/integrations/yaml_schema.rs index faacf12ef..23d4fd6a0 100644 --- a/src/integrations/yaml_schema.rs +++ b/src/integrations/yaml_schema.rs @@ -23,6 +23,8 @@ pub struct ISchemaField { pub f_label: String, #[serde(default, skip_serializing_if="is_empty")] pub smartlinks: Vec, + #[serde(default, skip_serializing_if="is_default")] + pub f_extra: bool, } #[derive(Serialize, Deserialize, Debug, Default)] From c1535f767e3b4af5ed3f9a5aa547c6d9368957e8 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Mon, 2 Dec 2024 12:30:54 +0100 Subject: [PATCH 027/185] postgres description --- src/integrations/integr_postgres.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index a98ef33a1..6eebcc524 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -203,6 +203,11 @@ fields: f_placeholder: "psql" f_label: "PSQL Binary Path" f_extra: true +description: | + The Postgres tool is for the AI model to call, when it wants to look at data inside your database, or make any changes. + On this page you can also see Docker containers with Postgres servers. + You can ask model to create a new container with a new database for you, + or ask model to configure the tool to use an existing container with existing database. available: on_your_laptop_possible: true when_isolated_possible: true From 70c6b6160330fa05788b719b872d8dff07c5a5f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 3 Dec 2024 10:13:20 +0100 Subject: [PATCH 028/185] feat: add smartlinks for each container --- src/integrations/integr_postgres.rs | 8 +++++++- src/integrations/yaml_schema.rs | 1 + 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index 6eebcc524..409a2c04f 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -233,7 +233,13 @@ docker: sl_chat: - role: "user" content: | - šŸ”§ Your job is to create a new section under "docker" that will define a new postgres container, inside the current config file %CURRENT_CONFIG%. Follow the system prompt. 
+ šŸ”§ Your job is to create a postgres container, using the image and environment from new_container_default section in the current config file: %CURRENT_CONFIG%. Follow the system prompt. + smartlinks_for_each_container: + - sl_label: "Use for integration" + sl_chat: + - role: "user" + content: | + šŸ”§ Your job is to modify postgres connection config in the current file to match the variables from the container, use docker tool to inspect the container if needed. Current config file: %CURRENT_CONFIG%. "#; diff --git a/src/integrations/yaml_schema.rs b/src/integrations/yaml_schema.rs index 23d4fd6a0..0e439c1dd 100644 --- a/src/integrations/yaml_schema.rs +++ b/src/integrations/yaml_schema.rs @@ -48,6 +48,7 @@ pub struct ISchemaDocker { pub filter_image: String, pub new_container_default: DockerService, pub smartlinks: Vec, + pub smartlinks_for_each_container: Vec, } #[derive(Serialize, Deserialize, Debug, Default)] From 68d7e28ffeb69ea7d2ddd3964bd5d1c87ee39ec6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 3 Dec 2024 10:34:34 +0100 Subject: [PATCH 029/185] fix: serialize list of ports as comma separated published:target notation --- .../docker/docker_container_manager.rs | 15 -------------- src/integrations/docker/integr_docker.rs | 20 +++++++++++++++++-- 2 files changed, 18 insertions(+), 17 deletions(-) diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index 2b1ece356..5559c5c88 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -31,21 +31,6 @@ pub struct Port { pub target: String, } -impl<'de> Deserialize<'de> for Port { - fn deserialize>(deserializer: D) -> Result { - let s = String::deserialize(deserializer)?; - let (published, target) = s.split_once(':') - .ok_or_else(|| serde::de::Error::custom("expected format '8080:3000'"))?; - Ok(Port { published: published.to_string(), target: target.to_string() }) - } -} - -impl Serialize for Port { - fn serialize(&self, serializer: S) -> Result { - serializer.serialize_str(&format!("{}:{}", self.published, self.target)) - } -} - pub struct DockerContainerSession { container_id: String, connection: DockerContainerConnectionEnum, diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index f6fcc30bf..80d30665d 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -33,6 +33,7 @@ pub struct SettingsDocker { pub command: String, #[serde(serialize_with = "serialize_num_to_str", deserialize_with = "deserialize_str_to_num")] pub keep_containers_alive_for_x_minutes: u64, + #[serde(serialize_with = "serialize_ports", deserialize_with = "deserialize_ports")] pub ports: Vec, } @@ -46,6 +47,21 @@ where String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom) } +fn serialize_ports(ports: &Vec, serializer: S) -> Result { + let ports_str = ports.iter().map(|port| format!("{}:{}", port.published, port.target)) + .collect::>().join(","); + serializer.serialize_str(&ports_str) +} + +fn deserialize_ports<'de, D: serde::Deserializer<'de>>(deserializer: D) -> Result, D::Error> { + let ports_str = String::deserialize(deserializer)?; + ports_str.split(',').filter(|s| !s.is_empty()).map(|port_str| { + let (published, target) = port_str.split_once(':') + .ok_or_else(|| serde::de::Error::custom("expected format 'published:target'"))?; + Ok(Port { published: published.to_string(), 
target: target.to_string() }) + }).collect() +} + impl SettingsDocker { pub fn get_ssh_config(&self) -> Option { if self.remote_docker { @@ -334,8 +350,8 @@ fields: f_desc: "How long to keep containers alive in minutes." f_default: "60" ports: - f_type: array - f_desc: "Ports to expose." + f_type: string_long + f_desc: "Comma separated published:target notation for ports to publish, example '8080:3000,5000:5432'" available: on_your_laptop_possible: true when_isolated_possible: false From bf96fc493df2751e8aa07f7efc9b612c31b96891 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 3 Dec 2024 11:46:29 +0100 Subject: [PATCH 030/185] fix: shorten docker error for integration use and docker handlers --- src/http/routers/v1/docker.rs | 6 ++-- .../docker/docker_container_manager.rs | 30 +++++++++---------- src/integrations/docker/integr_docker.rs | 11 +++++-- 3 files changed, 26 insertions(+), 21 deletions(-) diff --git a/src/http/routers/v1/docker.rs b/src/http/routers/v1/docker.rs index 94f66ad2b..6ad626bd7 100644 --- a/src/http/routers/v1/docker.rs +++ b/src/http/routers/v1/docker.rs @@ -64,7 +64,7 @@ pub async fn handle_v1_docker_container_action( DockerAction::Remove => format!("container remove --volumes {}", post.container), DockerAction::Stop => format!("container stop {}", post.container), }; - let (output, _) = docker.command_execute(&docker_command, gcx.clone(), true).await + let (output, _) = docker.command_execute(&docker_command, gcx.clone(), true, false).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Command {} failed: {}", docker_command, e)))?; Ok(Response::builder().status(StatusCode::OK).body(Body::from( @@ -87,7 +87,7 @@ pub async fn handle_v1_docker_container_list( None => "container list --all --no-trunc --format json".to_string(), }; - let (unparsed_output, _) = docker.command_execute(&docker_command, gcx.clone(), true).await + let (unparsed_output, _) = docker.command_execute(&docker_command, gcx.clone(), true, false).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Command {} failed: {}", docker_command, e)))?; let mut output: Vec = unparsed_output.lines().map(|line| serde_json::from_str(line)).collect::, _>>() @@ -113,7 +113,7 @@ pub async fn handle_v1_docker_container_list( } let inspect_command = format!("container inspect --format json {}", container_ids.join(" ")); - let (inspect_unparsed_output, _) = docker.command_execute(&inspect_command, gcx.clone(), true).await + let (inspect_unparsed_output, _) = docker.command_execute(&inspect_command, gcx.clone(), true, false).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Command {} failed: {}", inspect_command, e)))?; let inspect_output = serde_json::from_str::>(&inspect_unparsed_output) diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index 5559c5c88..c77208d0b 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -156,7 +156,7 @@ pub async fn docker_container_check_status_or_start( if !docker.settings_docker.command.is_empty() { let cmd_to_execute = format!("exec --detach {} {}", container_id, docker.settings_docker.command); - match docker.command_execute(&cmd_to_execute, gcx.clone(), false).await { + match docker.command_execute(&cmd_to_execute, gcx.clone(), false, true).await { Ok((cmd_stdout, cmd_stderr)) => { info!("Command executed: 
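(Editorial sketch, not part of the patch: the comma-separated published:target notation described above can be exercised in isolation. The real code goes through serde; the error text and PortSketch type here are assumptions.)

```rust
#[derive(Debug, PartialEq)]
struct PortSketch { published: String, target: String }

// Sketch of the 'published:target,published:target' parsing used by deserialize_ports.
fn parse_ports(s: &str) -> Result<Vec<PortSketch>, String> {
    s.split(',')
        .filter(|part| !part.is_empty())
        .map(|part| {
            let (published, target) = part.split_once(':')
                .ok_or_else(|| format!("expected 'published:target', got '{}'", part))?;
            Ok(PortSketch { published: published.to_string(), target: target.to_string() })
        })
        .collect()
}

fn main() {
    let ports = parse_ports("8080:3000,5000:5432").unwrap();
    assert_eq!(ports[1], PortSketch { published: "5000".into(), target: "5432".into() });
    assert!(parse_ports("8080").is_err());        // no ':' -> rejected
    assert!(parse_ports("").unwrap().is_empty()); // empty string -> no ports
}
```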
{cmd_stdout}\n{cmd_stderr}") }, Err(e) => { error!("Command execution failed: {}", e) }, }; @@ -242,7 +242,7 @@ async fn docker_container_create( ); info!("Executing docker command: {}", &run_command); - let (run_output, _) = docker.command_execute(&run_command, gcx.clone(), true).await?; + let (run_output, _) = docker.command_execute(&run_command, gcx.clone(), true, true).await?; let container_id = run_output.trim(); if container_id.len() < 12 { @@ -267,8 +267,8 @@ async fn docker_container_sync_yaml_configs( let temp_dir = tempfile::Builder::new().tempdir() .map_err(|e| format!("Error creating temporary directory: {}", e))?; let temp_dir_path = temp_dir.path().to_string_lossy().to_string(); - docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.cache/"), gcx.clone(), true).await?; - docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.cache/refact"), gcx.clone(), true).await?; + docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.cache/"), gcx.clone(), true, true).await?; + docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.cache/refact"), gcx.clone(), true, true).await?; let config_files_to_sync = ["privacy.yaml", "integrations.yaml", "bring-your-own-key.yaml", "competency.yaml"]; let remote_integrations_path = { @@ -282,13 +282,13 @@ async fn docker_container_sync_yaml_configs( _ => cache_dir.join(file).to_string_lossy().to_string(), }; let container_path = format!("{container_id}:{container_home_dir}/.cache/refact/{file}"); - docker.command_execute(&format!("container cp {local_path} {container_path}"), gcx.clone(), true).await?; + docker.command_execute(&format!("container cp {local_path} {container_path}"), gcx.clone(), true, true).await?; } // Copying config folder let config_dir_string = config_dir.to_string_lossy().to_string(); - docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.config/"), gcx.clone(), true).await?; - docker.command_execute(&format!("container cp {config_dir_string} {container_id}:{container_home_dir}/.config/refact"), gcx.clone(), true).await?; + docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.config/"), gcx.clone(), true, true).await?; + docker.command_execute(&format!("container cp {config_dir_string} {container_id}:{container_home_dir}/.config/refact"), gcx.clone(), true, true).await?; Ok(()) } @@ -299,7 +299,7 @@ async fn docker_container_get_home_dir( gcx: Arc>, ) -> Result { let inspect_config_command = "container inspect --format '{{json .Config}}' ".to_string() + &container_id; - let (inspect_config_output, _) = docker.command_execute(&inspect_config_command, gcx.clone(), true).await?; + let (inspect_config_output, _) = docker.command_execute(&inspect_config_command, gcx.clone(), true, true).await?; let config_json: serde_json::Value = serde_json::from_str(&inspect_config_output) .map_err(|e| format!("Error parsing docker config: {}", e))?; @@ -319,13 +319,13 @@ async fn docker_container_start( container_id: &str, ) -> Result<(), String> { let start_command = "container start ".to_string() + &container_id; - docker.command_execute(&start_command, gcx.clone(), true).await?; + docker.command_execute(&start_command, gcx.clone(), true, true).await?; // If docker container is not running, print last lines of logs. 
let inspect_command = "container inspect --format '{{json .State.Running}}' ".to_string() + &container_id; - let (inspect_output, _) = docker.command_execute(&inspect_command, gcx.clone(), true).await?; + let (inspect_output, _) = docker.command_execute(&inspect_command, gcx.clone(), true, true).await?; if inspect_output.trim() != "true" { - let (logs_output, _) = docker.command_execute(&format!("container logs --tail 10 {container_id}"), gcx.clone(), true).await?; + let (logs_output, _) = docker.command_execute(&format!("container logs --tail 10 {container_id}"), gcx.clone(), true, true).await?; return Err(format!("Docker container is not running: \n{logs_output}")); } @@ -372,7 +372,7 @@ async fn docker_container_sync_workspace( tar_builder.finish().await.map_err(|e| format!("Error finishing tar archive: {}", e))?; let cp_command = format!("container cp {} {}:{}", temp_tar_file.to_string_lossy(), container_id, container_workspace_folder.to_string_lossy()); - docker.command_execute(&cp_command, gcx.clone(), true).await?; + docker.command_execute(&cp_command, gcx.clone(), true, true).await?; let sync_files_post = SyncFilesExtractTarPost { tar_path: container_workspace_folder.join(&tar_file_name).to_string_lossy().to_string(), @@ -424,7 +424,7 @@ async fn docker_container_get_exposed_ports( gcx: Arc>, ) -> Result, String> { let inspect_command = "inspect --format '{{json .NetworkSettings.Ports}}' ".to_string() + &container_id; - let (inspect_output, _) = docker.command_execute(&inspect_command, gcx.clone(), true).await?; + let (inspect_output, _) = docker.command_execute(&inspect_command, gcx.clone(), true, true).await?; tracing::info!("{}:\n{}", inspect_command, inspect_output); let inspect_data: serde_json::Value = serde_json::from_str(&inspect_output) @@ -446,9 +446,9 @@ async fn docker_container_kill( ) -> Result<(), String> { let docker = docker_tool_load(gcx.clone()).await?; - docker.command_execute(&format!("container stop {container_id}"), gcx.clone(), true).await?; + docker.command_execute(&format!("container stop {container_id}"), gcx.clone(), true, true).await?; info!("Stopped docker container {container_id}."); - docker.command_execute(&format!("container remove {container_id}"), gcx.clone(), true).await?; + docker.command_execute(&format!("container remove {container_id}"), gcx.clone(), true, true).await?; info!("Removed docker container {container_id}."); Ok(()) } \ No newline at end of file diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index 80d30665d..42b6ed980 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -115,7 +115,7 @@ impl IntegrationTrait for ToolDocker { } impl ToolDocker { - pub async fn command_execute(&self, command: &str, gcx: Arc>, fail_if_stderr_is_not_empty: bool) -> Result<(String, String), String> + pub async fn command_execute(&self, command: &str, gcx: Arc>, fail_if_stderr_is_not_empty: bool, verbose_error: bool) -> Result<(String, String), String> { let mut command_args = split_command(&command)?; @@ -137,7 +137,12 @@ impl ToolDocker { let stderr = String::from_utf8_lossy(&output.stderr).to_string(); if fail_if_stderr_is_not_empty && !stderr.is_empty() { - return Err(format!("Error executing command {command}: \n{stderr}")); + let error_message = if verbose_error { + format!("Command `{}` failed: {}", command, stderr) + } else { + stderr + }; + return Err(error_message); } Ok((stdout, stderr)) @@ -173,7 +178,7 @@ impl Tool for ToolDocker { 
ccx_locked.global_context.clone() }; - let (stdout, _) = self.command_execute(&command, gcx.clone(), true).await?; + let (stdout, _) = self.command_execute(&command, gcx.clone(), true, false).await?; Ok((false, vec![ ContextEnum::ChatMessage(ChatMessage { From 33b39faefaa10649ae33692a752f40ec6ff33ebc Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 4 Dec 2024 07:03:20 +0100 Subject: [PATCH 031/185] merge fixes --- src/http/routers/v1/chat.rs | 2 +- src/integrations/docker/docker_container_manager.rs | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index f88f0d3ac..ebb1f47f9 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -189,7 +189,7 @@ async fn _chat( gcx.clone(), caps, model_name.clone(), - &mut chat_post, + chat_post, &messages, &scratchpad_name, &scratchpad_patch, diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index 3e782c3ae..9fe4751bb 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -1,5 +1,6 @@ use std::path::PathBuf; use std::{sync::Arc, sync::Weak, time::SystemTime}; +use std::future::Future; use serde::{Deserialize, Serialize}; use tokio::fs::File; use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; @@ -25,6 +26,7 @@ use super::docker_ssh_tunnel_utils::ssh_tunnel_check_status; pub const DEFAULT_CONTAINER_LSP_PATH: &str = "/usr/local/bin/refact-lsp"; pub const TARGET_LSP_PORT: &str = "8001"; + #[derive(Clone, Debug)] pub struct Port { pub published: String, From 0eb2e68bc7f697532347bc45eed751b674805f7b Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 4 Dec 2024 07:43:08 +0100 Subject: [PATCH 032/185] fix tests --- src/integrations/yaml_schema.rs | 2 ++ src/yaml_configs/customization_loader.rs | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/integrations/yaml_schema.rs b/src/integrations/yaml_schema.rs index 0e439c1dd..0efd06838 100644 --- a/src/integrations/yaml_schema.rs +++ b/src/integrations/yaml_schema.rs @@ -54,6 +54,8 @@ pub struct ISchemaDocker { #[derive(Serialize, Deserialize, Debug, Default)] pub struct ISchema { pub fields: IndexMap, + #[serde(default, skip_serializing_if="is_default")] + pub description: String, pub available: ISchemaAvailable, pub smartlinks: Vec, #[serde(skip_serializing_if = "Option::is_none")] diff --git a/src/yaml_configs/customization_loader.rs b/src/yaml_configs/customization_loader.rs index 1972abfaa..d68b014fc 100644 --- a/src/yaml_configs/customization_loader.rs +++ b/src/yaml_configs/customization_loader.rs @@ -250,7 +250,6 @@ mod tests { assert_eq!(config.system_prompts.get("default").is_some(), true); assert_eq!(config.system_prompts.get("exploration_tools").is_some(), true); assert_eq!(config.system_prompts.get("agentic_tools").is_some(), true); - assert_eq!(config.system_prompts.get("agentic_experimental_knowledge").is_some(), true); assert_eq!(config.system_prompts.get("configurator").is_some(), true); } } From 0c62912581d8830daea426cc128595c83fcd2f6b Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 4 Dec 2024 09:16:25 +0100 Subject: [PATCH 033/185] move cmdline to integrations --- .../integr_cmdline.rs} | 78 +++++++++++++++++-- src/integrations/mod.rs | 1 + src/postprocessing/pp_command_output.rs | 3 +- src/tools/mod.rs | 1 - src/tools/tools_description.rs | 18 ++--- 5 files changed, 84 insertions(+), 17 deletions(-) rename 
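(Editorial sketch, not part of the patch: the verbose_error flag threaded through command_execute above only changes how stderr is wrapped before it is returned. Shown as a free function under that assumption.)

```rust
// Sketch of the error shaping controlled by verbose_error in command_execute.
fn shape_docker_error(command: &str, stderr: &str, verbose_error: bool) -> String {
    if verbose_error {
        format!("Command `{}` failed: {}", command, stderr)
    } else {
        stderr.to_string()
    }
}

fn main() {
    let verbose = shape_docker_error("container start abc123", "no such container", true);
    let short = shape_docker_error("container start abc123", "no such container", false);
    assert!(verbose.contains("failed"));
    assert_eq!(short, "no such container"); // model-facing calls keep the error short
}
```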
src/{tools/tool_cmdline.rs => integrations/integr_cmdline.rs} (87%) diff --git a/src/tools/tool_cmdline.rs b/src/integrations/integr_cmdline.rs similarity index 87% rename from src/tools/tool_cmdline.rs rename to src/integrations/integr_cmdline.rs index 84bb8b731..0cf5883a5 100644 --- a/src/tools/tool_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -7,6 +7,7 @@ use indexmap::IndexMap; use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; use tokio::io::BufReader; use serde::Deserialize; +use serde::Serialize; use async_trait::async_trait; use tokio::process::Command; use tracing::info; @@ -19,18 +20,20 @@ use crate::global_context::GlobalContext; use crate::integrations::process_io_utils::{blocking_read_until_token_or_timeout, is_someone_listening_on_that_tcp_port}; use crate::integrations::sessions::IntegrationSession; use crate::postprocessing::pp_command_output::{CmdlineOutputFilter, output_mini_postprocessing}; +use crate::integrations::integr_abstract::IntegrationTrait; const REALLY_HORRIBLE_ROUNDTRIP: u64 = 3000; // 3000 should be a really bad ping via internet, just in rare case it's a remote port -#[derive(Deserialize)] +#[derive(Deserialize, Serialize, Clone)] struct CmdlineToolConfig { + command: String, + command_workdir: String, + description: String, parameters: Vec, parameters_required: Option>, - command: String, - command_workdir: String, // blocking #[serde(default = "_default_timeout")] @@ -56,11 +59,41 @@ fn _default_startup_wait() -> u64 { } pub struct ToolCmdline { - a_service: bool, + is_service: bool, name: String, cfg: CmdlineToolConfig, } +impl IntegrationTrait for ToolCmdline { + fn integr_settings_apply(&mut self, value: &serde_json::Value) -> Result<(), String> { + match serde_json::from_value::(value.clone()) { + Ok(x) => self.cfg = x, + Err(e) => { + tracing::error!("Failed to apply settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } + Ok(()) + } + + fn integr_settings_as_json(&self) -> serde_json::Value { + serde_json::to_value(&self.cfg).unwrap() + } + + fn integr_upgrade_to_tool(&self) -> Box { + Box::new(ToolCmdline { + is_service: self.is_service, + name: self.name.clone(), + cfg: self.cfg.clone(), + }) as Box + } + + fn integr_schema(&self) -> &str + { + CMDLINE_INTEGRATION_SCHEMA + } +} + pub fn cmdline_tool_from_yaml_value( cfg_cmdline_value: &serde_yaml::Value, background: bool, @@ -83,7 +116,7 @@ pub fn cmdline_tool_from_yaml_value( } let tool = Arc::new(AMutex::new(Box::new( ToolCmdline { - a_service: background, + is_service: background, name: c_name.clone(), cfg: c_cmd_tool, } @@ -394,7 +427,7 @@ impl Tool for ToolCmdline { let command = _replace_args(self.cfg.command.as_str(), &args_str); let workdir = _replace_args(self.cfg.command_workdir.as_str(), &args_str); - let tool_ouput = if self.a_service { + let tool_ouput = if self.is_service { let action = args_str.get("action").cloned().unwrap_or("start".to_string()); if !["start", "restart", "stop", "status"].contains(&action.as_str()) { return Err("Tool call is invalid. Param 'action' must be one of 'start', 'restart', 'stop', 'status'. Try again".to_string()); @@ -436,3 +469,36 @@ impl Tool for ToolCmdline { } } } + +pub const CMDLINE_INTEGRATION_SCHEMA: &str = r#" +fields: + command: + f_type: string_long + f_desc: "The command to execute." + f_placeholder: "echo Hello World" + command_workdir: + f_type: string_long + f_desc: "The working directory for the command." 
+ f_placeholder: "/path/to/workdir" + description: + f_type: string_long + f_desc: "The model will see this description, why the model should call this?" + f_placeholder: "" + parameters: + f_type: "tool_parameters" + f_desc: "The model will fill in those parameters." + timeout: + f_type: integer + f_desc: "The command must immediately return the results, it can't be interactive. If the command runs for too long, it will be terminated and stderr/stdout collected will be presented to the model." + f_default: 10 + output_filter: + f_type: "output_filter" + f_desc: "The output from the command can be long or even quasi-infinite. This section allows to set limits, prioritize top or bottom, or use regexp to show the model the relevant part." + f_placeholder: "filter" +description: | + There you can adapt any command line tool for use by AI model. You can give the model instructions why to call it, which parameters to provide, + set a timeout and restrict the output. If you want a tool that runs in the background such as a web server, use service_* instead. +available: + on_your_laptop_possible: true + when_isolated_possible: true +"#; diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index 2ea3ce9e0..dc98685e4 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -14,6 +14,7 @@ pub mod integr_abstract; // pub mod integr_pdb; // pub mod integr_chrome; pub mod integr_postgres; +pub mod integr_cmdline; pub mod process_io_utils; pub mod docker; diff --git a/src/postprocessing/pp_command_output.rs b/src/postprocessing/pp_command_output.rs index bd88da32c..24e52df72 100644 --- a/src/postprocessing/pp_command_output.rs +++ b/src/postprocessing/pp_command_output.rs @@ -1,8 +1,9 @@ +use serde::Serialize; use serde::Deserialize; use regex::Regex; -#[derive(Deserialize)] +#[derive(Deserialize, Serialize, Clone)] pub struct CmdlineOutputFilter { #[serde(default = "default_limit_lines")] pub limit_lines: usize, diff --git a/src/tools/mod.rs b/src/tools/mod.rs index dce7f70ea..a667522cb 100644 --- a/src/tools/mod.rs +++ b/src/tools/mod.rs @@ -18,4 +18,3 @@ mod tool_knowledge; #[cfg(feature="vecdb")] mod tool_locate_search; pub mod tool_patch; -mod tool_cmdline; diff --git a/src/tools/tools_description.rs b/src/tools/tools_description.rs index 2d02475af..2c43b6833 100644 --- a/src/tools/tools_description.rs +++ b/src/tools/tools_description.rs @@ -144,15 +144,15 @@ pub async fn tools_merged_and_filtered( // } } - if let Some(cmdline) = integrations_value.get("cmdline") { - let cmdline_tools = crate::tools::tool_cmdline::cmdline_tool_from_yaml_value(cmdline, false)?; - tools_all.extend(cmdline_tools); - } - - if let Some(cmdline) = integrations_value.get("cmdline_services") { - let cmdline_tools = crate::tools::tool_cmdline::cmdline_tool_from_yaml_value(cmdline, true)?; - tools_all.extend(cmdline_tools); - } + // if let Some(cmdline) = integrations_value.get("cmdline") { + // let cmdline_tools = crate::tools::tool_cmdline::cmdline_tool_from_yaml_value(cmdline, false)?; + // tools_all.extend(cmdline_tools); + // } + + // if let Some(cmdline) = integrations_value.get("cmdline_services") { + // let cmdline_tools = crate::tools::tool_cmdline::cmdline_tool_from_yaml_value(cmdline, true)?; + // tools_all.extend(cmdline_tools); + // } let integrations = crate::integrations::running_integrations::load_integration_tools( gcx.clone(), From f962a5b399973aaafca3a45b439c1a5a3753ad36 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Tue, 3 Dec 2024 12:21:25 +0100 Subject: [PATCH 034/185] 
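(Editorial sketch, not part of the patch: for the service_ variant of ToolCmdline above, only start/restart/stop/status are accepted as the 'action' parameter. A minimal standalone version of that check; the error wording is an approximation.)

```rust
// Sketch of the action validation performed when is_service is true.
fn validate_service_action(action: &str) -> Result<(), String> {
    const ALLOWED: [&str; 4] = ["start", "restart", "stop", "status"];
    if ALLOWED.contains(&action) {
        Ok(())
    } else {
        Err(format!("Param 'action' must be one of {:?}, got '{}'", ALLOWED, action))
    }
}

fn main() {
    assert!(validate_service_action("status").is_ok());
    assert!(validate_service_action("reboot").is_err());
}
```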
oleg_stream_system_prompt --- src/http/routers/v1/chat.rs | 37 ++++++++++++++++++- src/scratchpads/chat_passthrough.rs | 9 ++++- src/yaml_configs/customization_compiled_in.rs | 5 +-- 3 files changed, 46 insertions(+), 5 deletions(-) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index ebb1f47f9..8483d07bf 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -174,16 +174,51 @@ async fn _chat( let docker_tool_maybe = docker_tool_load(gcx.clone()).await .map_err(|e| info!("No docker tool available: {e}")).ok().map(Arc::new); + // XXX change this for post.isolation, not docker settings let run_chat_threads_inside_container = docker_tool_maybe.clone() .map(|docker_tool| docker_tool.settings_docker.run_chat_threads_inside_container) .unwrap_or(false); let should_execute_remotely = run_chat_threads_inside_container && !gcx.read().await.cmdline.inside_container; - if should_execute_remotely { docker_container_check_status_or_start(gcx.clone(), docker_tool_maybe.clone(), &chat_post.chat_id).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; } + let have_system = !messages.is_empty() && messages[0].role == "system"; + if !have_system { + // XXX: make it explicit instead of auto-detector + let mut exploration_tools: bool = false; + let mut agentic_tools: bool = false; + if chat_post.tools.is_some() { + for t in chat_post.tools.as_ref().unwrap() { + let tobj = t.as_object().unwrap(); + if let Some(function) = tobj.get("function") { + if let Some(name) = function.get("name") { + if name.as_str() == Some("web") { // anything that will still be on without ast and vecdb + exploration_tools = true; + } + if name.as_str() == Some("patch") { + agentic_tools = true; + } + } + } + } + } + use crate::scratchpads::chat_utils_prompts::{get_default_system_prompt, get_default_system_prompt_from_remote, system_prompt_add_workspace_info}; + let system_message_content = if should_execute_remotely { + get_default_system_prompt_from_remote(gcx.clone(), exploration_tools, agentic_tools, &chat_post.chat_id).await.map_err(|e| + ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e) + )? 
+ } else { + get_default_system_prompt(gcx.clone(), exploration_tools, agentic_tools).await + }; + messages.insert(0, ChatMessage { + role: "tool".to_string(), + content: ChatContent::SimpleText(system_message_content), + ..Default::default() + }) + } + // chat_post.stream = Some(false); // for debugging 400 errors that are hard to debug with streaming (because "data: " is not present and the error message is ignored by the library) let mut scratchpad = scratchpads::create_chat_scratchpad( gcx.clone(), diff --git a/src/scratchpads/chat_passthrough.rs b/src/scratchpads/chat_passthrough.rs index 29f1bb4f2..e3300ca16 100644 --- a/src/scratchpads/chat_passthrough.rs +++ b/src/scratchpads/chat_passthrough.rs @@ -236,7 +236,14 @@ impl ScratchpadAbstract for ChatPassthrough { } fn response_spontaneous(&mut self) -> Result, String> { - self.has_rag_results.response_streaming() + let mut deterministic: Vec = vec![]; + let have_system_prompt_in_post = !self.post.messages.is_empty() && self.post.messages[0].get("role") == Some(&serde_json::Value::String("system".to_string())); + let have_system_prompt_in_messages = !self.messages.is_empty() && self.messages[0].role == "system"; + if !have_system_prompt_in_post && have_system_prompt_in_messages { + self.has_rag_results.in_json.insert(0, json!(self.messages[0])); + } + deterministic.extend(self.has_rag_results.response_streaming()?); + Ok(deterministic) } fn streaming_finished(&mut self, finish_reason: FinishReason) -> Result { diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index 5481886fc..4c25662b6 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -110,10 +110,9 @@ PROMPT_AGENTIC_TOOLS: | WHEN USING EXPLORATION TOOLS, USE SEVERAL IN PARALLEL! USE šŸ“ BEFORE ANY CODE BLOCK! - PROMPT_CONFIGURATOR: | - You are Refact Agent, a coding assistant. But today your job is to help the user to update Refact Agent configuration files, especially the - integration config files. + [mode3config] You are Refact Agent, a coding assistant. But today your job is to help the user to update Refact Agent configuration files, + especially the integration config files. 
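(Editorial sketch, not part of the patch: the have_system guard above injects a default prompt only when the conversation does not already start with one. Msg is a stand-in for ChatMessage; the sketch inserts the conventional "system" role, while the hunk above still uses "tool" at this point in the series.)

```rust
// Sketch: insert a default system prompt only if the first message isn't already one.
#[derive(Clone, Debug)]
struct Msg { role: String, content: String }

fn ensure_leading_prompt(messages: &mut Vec<Msg>, default_prompt: &str) {
    let have_system = messages.first().map_or(false, |m| m.role == "system");
    if !have_system {
        messages.insert(0, Msg { role: "system".to_string(), content: default_prompt.to_string() });
    }
}

fn main() {
    let mut msgs = vec![Msg { role: "user".to_string(), content: "hi".to_string() }];
    ensure_leading_prompt(&mut msgs, "You are Refact Agent.");
    assert_eq!(msgs[0].role, "system");
    ensure_leading_prompt(&mut msgs, "You are Refact Agent.");
    assert_eq!(msgs.len(), 2); // second call is a no-op
}
```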
%PROMPT_PINS% %WORKSPACE_INFO% From 371baba31941dae8a807cf8b74a78d9c67a7435f Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 4 Dec 2024 06:35:42 +0100 Subject: [PATCH 035/185] streaming the system message actually works --- .../refact/cli_streaming.py | 9 +++++---- src/http/routers/v1/chat.rs | 6 ++++-- src/scratchpads/chat_generic.rs | 13 +----------- src/scratchpads/chat_llama2.rs | 14 +++---------- src/scratchpads/chat_passthrough.rs | 20 +------------------ src/scratchpads/chat_utils_limit_history.rs | 12 +---------- 6 files changed, 15 insertions(+), 59 deletions(-) diff --git a/python_binding_and_cmdline/refact/cli_streaming.py b/python_binding_and_cmdline/refact/cli_streaming.py index 5314c085e..046e9f029 100644 --- a/python_binding_and_cmdline/refact/cli_streaming.py +++ b/python_binding_and_cmdline/refact/cli_streaming.py @@ -133,7 +133,7 @@ def process_streaming_data(data: Dict[str, Any], deltas_collector: Optional[chat msg = chat_client.Message.model_validate(data) replace_last_user = False - if msg.role == "user": + if msg.role == "user" or msg.role == "system'": if len(streaming_messages) > 0: if streaming_messages[-1].role == "user": replace_last_user = True @@ -157,10 +157,11 @@ def process_streaming_data(data: Dict[str, Any], deltas_collector: Optional[chat print_response("\n") flush_response() - elif msg.role in ["plain_text", "cd_instruction", "user"]: + elif msg.role in ["plain_text", "cd_instruction", "user", "system"]: if replace_last_user: return - print_response("\n") + role_text = FormattedText([("fg:ansiblue", f"\n{msg.role}")]) + print_formatted_text(role_text) if isinstance(msg.content, str): print_response(msg.content.strip()) elif isinstance(msg.content, list): @@ -174,7 +175,7 @@ def process_streaming_data(data: Dict[str, Any], deltas_collector: Optional[chat print_response("\n".join(collected_responses)) else: print_response("content is None, not normal\n") - print_response("\n") + print_response("\n\n") elif msg.role in ["assistant"]: if msg.content is not None: diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 8483d07bf..3526e5c9c 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -212,9 +212,11 @@ async fn _chat( } else { get_default_system_prompt(gcx.clone(), exploration_tools, agentic_tools).await }; + messages.insert(0, ChatMessage { - role: "tool".to_string(), - content: ChatContent::SimpleText(system_message_content), + role: "system".to_string(), + // XXX: need remote %WORKSPACE_INFO% as well + content: ChatContent::SimpleText(system_prompt_add_workspace_info(gcx.clone(), &system_message_content).await), ..Default::default() }) } diff --git a/src/scratchpads/chat_generic.rs b/src/scratchpads/chat_generic.rs index 8c19eb025..ee6bc272b 100644 --- a/src/scratchpads/chat_generic.rs +++ b/src/scratchpads/chat_generic.rs @@ -16,7 +16,6 @@ use crate::scratchpad_abstract::{FinishReason, HasTokenizerAndEot, ScratchpadAbs use crate::scratchpads::chat_utils_deltadelta::DeltaDeltaChatStreamer; use crate::scratchpads::chat_utils_limit_history::limit_messages_history; use crate::scratchpads::scratchpad_utils::HasRagResults; -use crate::scratchpads::chat_utils_prompts::{get_default_system_prompt, system_prompt_add_workspace_info}; const DEBUG: bool = true; @@ -34,7 +33,6 @@ pub struct GenericChatScratchpad { // "SYSTEM:" keyword means it's not one token pub keyword_user: String, pub keyword_asst: String, - pub default_system_message: String, pub has_rag_results: HasRagResults, pub global_context: Arc>, pub 
allow_at: bool, @@ -58,7 +56,6 @@ impl GenericChatScratchpad { keyword_syst: "".to_string(), keyword_user: "".to_string(), keyword_asst: "".to_string(), - default_system_message: "".to_string(), has_rag_results: HasRagResults::new(), global_context, allow_at, @@ -80,7 +77,6 @@ impl ScratchpadAbstract for GenericChatScratchpad { self.keyword_syst = patch.get("keyword_system").and_then(|x| x.as_str()).unwrap_or("SYSTEM:").to_string(); self.keyword_user = patch.get("keyword_user").and_then(|x| x.as_str()).unwrap_or("USER:").to_string(); self.keyword_asst = patch.get("keyword_assistant").and_then(|x| x.as_str()).unwrap_or("ASSISTANT:").to_string(); - self.default_system_message = get_default_system_prompt(self.global_context.clone(), exploration_tools, agentic_tools).await; self.t.eot = patch.get("eot").and_then(|x| x.as_str()).unwrap_or("<|endoftext|>").to_string(); @@ -115,16 +111,9 @@ impl ScratchpadAbstract for GenericChatScratchpad { } else { (self.messages.clone(), self.messages.len(), false) }; - let mut limited_msgs: Vec = limit_messages_history(&self.t, &messages, undroppable_msg_n, self.post.parameters.max_new_tokens, n_ctx, &self.default_system_message)?; + let mut limited_msgs: Vec = limit_messages_history(&self.t, &messages, undroppable_msg_n, self.post.parameters.max_new_tokens, n_ctx)?; // if self.supports_tools { // }; - if let Some(first_msg) = limited_msgs.first_mut() { - if first_msg.role == "system" { - first_msg.content = ChatContent::SimpleText( - system_prompt_add_workspace_info(gcx.clone(), &first_msg.content.content_text_only()).await - ); - } - } sampling_parameters_to_patch.stop = self.dd.stop_list.clone(); // adapted from https://huggingface.co/spaces/huggingface-projects/llama-2-13b-chat/blob/main/model.py#L24 let mut prompt = self.token_bos.to_string(); diff --git a/src/scratchpads/chat_llama2.rs b/src/scratchpads/chat_llama2.rs index e7ff0258a..9d1f96310 100644 --- a/src/scratchpads/chat_llama2.rs +++ b/src/scratchpads/chat_llama2.rs @@ -15,7 +15,6 @@ use crate::scratchpad_abstract::{FinishReason, HasTokenizerAndEot, ScratchpadAbs use crate::scratchpads::chat_utils_deltadelta::DeltaDeltaChatStreamer; use crate::scratchpads::chat_utils_limit_history::limit_messages_history; use crate::scratchpads::scratchpad_utils::HasRagResults; -use crate::scratchpads::chat_utils_prompts::{get_default_system_prompt, system_prompt_add_workspace_info}; const DEBUG: bool = true; @@ -30,7 +29,6 @@ pub struct ChatLlama2 { pub messages: Vec, pub keyword_s: String, // "SYSTEM:" keyword means it's not one token pub keyword_slash_s: String, - pub default_system_message: String, pub has_rag_results: HasRagResults, pub global_context: Arc>, pub allow_at: bool, @@ -52,7 +50,7 @@ impl ChatLlama2 { messages: messages.clone(), keyword_s: "".to_string(), keyword_slash_s: "".to_string(), - default_system_message: "".to_string(), + // default_system_message: "".to_string(), has_rag_results: HasRagResults::new(), global_context, allow_at, @@ -71,7 +69,6 @@ impl ScratchpadAbstract for ChatLlama2 { ) -> Result<(), String> { self.keyword_s = patch.get("s").and_then(|x| x.as_str()).unwrap_or("").to_string(); self.keyword_slash_s = patch.get("slash_s").and_then(|x| x.as_str()).unwrap_or("").to_string(); - self.default_system_message = get_default_system_prompt(self.global_context.clone(), exploration_tools, agentic_tools).await; self.t.eot = self.keyword_s.clone(); info!("llama2 chat model adaptation patch applied {:?}", self.keyword_s); self.t.assert_one_token(&self.t.eot.as_str())?; @@ -95,12 +92,7 
@@ impl ScratchpadAbstract for ChatLlama2 { } else { (self.messages.clone(), self.messages.len(), false) }; - let mut limited_msgs: Vec = limit_messages_history(&self.t, &messages, undroppable_msg_n, sampling_parameters_to_patch.max_new_tokens, n_ctx, &self.default_system_message)?; - if let Some(first_msg) = limited_msgs.first_mut() { - if first_msg.role == "system" { - first_msg.content = ChatContent::SimpleText(system_prompt_add_workspace_info(gcx.clone(), &first_msg.content.content_text_only()).await); - } - } + let mut limited_msgs: Vec = limit_messages_history(&self.t, &messages, undroppable_msg_n, sampling_parameters_to_patch.max_new_tokens, n_ctx)?; sampling_parameters_to_patch.stop = self.dd.stop_list.clone(); // loosely adapted from https://huggingface.co/spaces/huggingface-projects/llama-2-13b-chat/blob/main/model.py#L24 let mut prompt = "".to_string(); @@ -112,7 +104,7 @@ impl ScratchpadAbstract for ChatLlama2 { if msg.role == "system" { if !do_strip { prompt.push_str("<>\n"); - prompt.push_str(self.default_system_message.as_str()); + prompt.push_str(msg_content.as_str()); prompt.push_str("\n<>\n"); } } else { diff --git a/src/scratchpads/chat_passthrough.rs b/src/scratchpads/chat_passthrough.rs index e3300ca16..32c949157 100644 --- a/src/scratchpads/chat_passthrough.rs +++ b/src/scratchpads/chat_passthrough.rs @@ -56,7 +56,6 @@ pub struct ChatPassthrough { pub t: HasTokenizerAndEot, pub post: ChatPost, pub messages: Vec, - pub default_system_message: String, pub has_rag_results: HasRagResults, pub delta_sender: DeltaSender, pub global_context: Arc>, @@ -79,7 +78,6 @@ impl ChatPassthrough { t: HasTokenizerAndEot::new(tokenizer), post: post.clone(), messages: messages.clone(), - default_system_message: "".to_string(), has_rag_results: HasRagResults::new(), delta_sender: DeltaSender::new(), global_context, @@ -99,11 +97,6 @@ impl ScratchpadAbstract for ChatPassthrough { agentic_tools: bool, should_execute_remotely: bool, ) -> Result<(), String> { - self.default_system_message = if should_execute_remotely { - get_default_system_prompt_from_remote(self.global_context.clone(), exploration_tools, agentic_tools, &self.post.chat_id).await? - } else { - get_default_system_prompt(self.global_context.clone(), exploration_tools, agentic_tools).await - }; Ok(()) } @@ -132,21 +125,10 @@ impl ScratchpadAbstract for ChatPassthrough { run_tools_locally(ccx.clone(), at_tools.clone(), self.t.tokenizer.clone(), sampling_parameters_to_patch.max_new_tokens, &messages, &mut self.has_rag_results, &style).await? 
} }; - let mut limited_msgs = limit_messages_history(&self.t, &messages, undroppable_msg_n, sampling_parameters_to_patch.max_new_tokens, n_ctx, &self.default_system_message).unwrap_or_else(|e| { + let mut limited_msgs = limit_messages_history(&self.t, &messages, undroppable_msg_n, sampling_parameters_to_patch.max_new_tokens, n_ctx).unwrap_or_else(|e| { error!("error limiting messages: {}", e); vec![] }); - if let Some(first_msg) = limited_msgs.first_mut() { - if first_msg.role == "system" { - first_msg.content = ChatContent::SimpleText(system_prompt_add_workspace_info(gcx.clone(), &first_msg.content.content_text_only()).await); - } - if self.post.model == "o1-mini" && first_msg.role == "system" { - limited_msgs.remove(0); - } - } - if DEBUG { - info!("chat passthrough {} messages -> {} messages after applying at-commands and limits, possibly adding the default system message", messages.len(), limited_msgs.len()); - } let converted_messages = convert_messages_to_openai_format(limited_msgs, &style); diff --git a/src/scratchpads/chat_utils_limit_history.rs b/src/scratchpads/chat_utils_limit_history.rs index 09ed06dfd..4cd4d458c 100644 --- a/src/scratchpads/chat_utils_limit_history.rs +++ b/src/scratchpads/chat_utils_limit_history.rs @@ -9,7 +9,6 @@ pub fn limit_messages_history( last_user_msg_starts: usize, max_new_tokens: usize, context_size: usize, - default_system_message: &String, ) -> Result, String> { let tokens_limit: i32 = context_size as i32 - max_new_tokens as i32; @@ -31,11 +30,6 @@ pub fn limit_messages_history( tokens_used += tcnt; } } - let need_default_system_msg = !have_system && default_system_message.len() > 0; - if need_default_system_msg { - let tcnt = t.count_tokens(default_system_message.as_str())? as i32; - tokens_used += tcnt; - } let mut log_buffer = Vec::new(); let mut dropped = false; @@ -83,10 +77,6 @@ pub fn limit_messages_history( } } - let mut messages_out: Vec = messages.iter().enumerate().filter(|(i, _)| message_take[*i]).map(|(_, x)| x.clone()).collect(); - if need_default_system_msg { - messages_out.insert(0, ChatMessage::new("system".to_string(), default_system_message.clone())); - } - // info!("messages_out: {:?}", messages_out); + let messages_out: Vec = messages.iter().enumerate().filter(|(i, _)| message_take[*i]).map(|(_, x)| x.clone()).collect(); Ok(messages_out) } From 574c99cd7b74af66f032125a45c58a3cb6a208c7 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 4 Dec 2024 12:29:44 +0100 Subject: [PATCH 036/185] 'meta' in /v1/chat --- src/call_validation.rs | 31 ++++++++++++++++++++++++++--- src/http/routers/v1/chat.rs | 8 ++++---- src/scratchpads/chat_generic.rs | 2 +- src/scratchpads/chat_passthrough.rs | 3 +-- src/tools/tool_knowledge.rs | 2 +- 5 files changed, 35 insertions(+), 11 deletions(-) diff --git a/src/call_validation.rs b/src/call_validation.rs index 6f37ef3c0..0700617ac 100644 --- a/src/call_validation.rs +++ b/src/call_validation.rs @@ -190,13 +190,38 @@ pub struct ChatPost { pub subchat_tool_parameters: IndexMap, // tool_name: {model, allowed_context, temperature} #[serde(default="PostprocessSettings::new")] pub postprocess_parameters: PostprocessSettings, - // #[allow(dead_code)] + #[serde(default)] + pub meta: ChatMeta, + #[serde(default)] + pub style: Option, +} + +#[derive(Debug, Deserialize, Clone, Default)] +pub struct ChatMeta { #[serde(default)] pub chat_id: String, #[serde(default)] - pub current_config_file: String, + pub chat_remote: bool, #[serde(default)] - pub style: Option, + pub chat_mode: ChatMode, + #[serde(default)] 
+ pub current_config_file: String, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(rename_all = "UPPERCASE")] +pub enum ChatMode { + NoTools, + Explore, + Agent, + Configure, + ProjectSummary, +} + +impl Default for ChatMode { + fn default() -> Self { + ChatMode::NoTools + } } fn default_true() -> bool { diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 3526e5c9c..f9c14933f 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -68,7 +68,7 @@ pub async fn handle_v1_chat_configuration( ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) })?; let mut messages = deserialize_messages_from_post(&chat_post.messages)?; - crate::integrations::config_chat::mix_config_messages(gcx.clone(), &mut messages, &chat_post.current_config_file).await; + crate::integrations::config_chat::mix_config_messages(gcx.clone(), &mut messages, &chat_post.meta.current_config_file).await; _chat(gcx, &mut chat_post, &mut messages, true).await } @@ -180,7 +180,7 @@ async fn _chat( .unwrap_or(false); let should_execute_remotely = run_chat_threads_inside_container && !gcx.read().await.cmdline.inside_container; if should_execute_remotely { - docker_container_check_status_or_start(gcx.clone(), docker_tool_maybe.clone(), &chat_post.chat_id).await + docker_container_check_status_or_start(gcx.clone(), docker_tool_maybe.clone(), &chat_post.meta.chat_id).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; } @@ -206,7 +206,7 @@ async fn _chat( } use crate::scratchpads::chat_utils_prompts::{get_default_system_prompt, get_default_system_prompt_from_remote, system_prompt_add_workspace_info}; let system_message_content = if should_execute_remotely { - get_default_system_prompt_from_remote(gcx.clone(), exploration_tools, agentic_tools, &chat_post.chat_id).await.map_err(|e| + get_default_system_prompt_from_remote(gcx.clone(), exploration_tools, agentic_tools, &chat_post.meta.chat_id).await.map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e) )? 
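(Editorial sketch, not part of the patch: the new meta block and ChatMode enum above rely on serde defaults plus UPPERCASE variant names, so "AGENT" maps to the Agent variant and a missing field falls back to the default. MetaSketch/ModeSketch below are simplified stand-ins, not the real types.)

```rust
use serde::Deserialize;

// Simplified stand-ins for ChatMeta / ChatMode to show how `meta` parses.
#[derive(Deserialize, Debug, Default)]
struct MetaSketch {
    #[serde(default)]
    chat_id: String,
    #[serde(default)]
    chat_mode: ModeSketch,
}

#[derive(Deserialize, Debug, Default, PartialEq)]
#[serde(rename_all = "UPPERCASE")]
enum ModeSketch {
    #[default]
    NoTools,
    Explore,
    Agent,
}

fn main() {
    let meta: MetaSketch = serde_json::from_str(r#"{"chat_id": "abc", "chat_mode": "AGENT"}"#).unwrap();
    assert_eq!(meta.chat_mode, ModeSketch::Agent);
    let empty: MetaSketch = serde_json::from_str("{}").unwrap();
    assert_eq!(empty.chat_mode, ModeSketch::NoTools); // missing field -> default
}
```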
} else { @@ -256,7 +256,7 @@ async fn _chat( CHAT_TOP_N, false, messages.clone(), - chat_post.chat_id.clone(), + chat_post.meta.chat_id.clone(), should_execute_remotely, ).await; ccx.subchat_tool_parameters = chat_post.subchat_tool_parameters.clone(); diff --git a/src/scratchpads/chat_generic.rs b/src/scratchpads/chat_generic.rs index ee6bc272b..4755e289e 100644 --- a/src/scratchpads/chat_generic.rs +++ b/src/scratchpads/chat_generic.rs @@ -10,7 +10,7 @@ use tracing::{info, error}; use crate::at_commands::execute_at::run_at_commands; use crate::at_commands::at_commands::AtCommandsContext; -use crate::call_validation::{ChatContent, ChatMessage, ChatPost, ContextFile, SamplingParameters}; +use crate::call_validation::{ChatMessage, ChatPost, ContextFile, SamplingParameters}; use crate::global_context::GlobalContext; use crate::scratchpad_abstract::{FinishReason, HasTokenizerAndEot, ScratchpadAbstract}; use crate::scratchpads::chat_utils_deltadelta::DeltaDeltaChatStreamer; diff --git a/src/scratchpads/chat_passthrough.rs b/src/scratchpads/chat_passthrough.rs index 32c949157..6637c045c 100644 --- a/src/scratchpads/chat_passthrough.rs +++ b/src/scratchpads/chat_passthrough.rs @@ -9,12 +9,11 @@ use tokio::sync::Mutex as AMutex; use tracing::{error, info}; use crate::at_commands::execute_at::run_at_commands; use crate::at_commands::at_commands::AtCommandsContext; -use crate::call_validation::{ChatContent, ChatMessage, ChatPost, SamplingParameters}; +use crate::call_validation::{ChatMessage, ChatPost, SamplingParameters}; use crate::global_context::GlobalContext; use crate::scratchpad_abstract::{FinishReason, HasTokenizerAndEot, ScratchpadAbstract}; use crate::scratchpads::chat_utils_limit_history::limit_messages_history; use crate::scratchpads::scratchpad_utils::HasRagResults; -use crate::scratchpads::chat_utils_prompts::{get_default_system_prompt, get_default_system_prompt_from_remote, system_prompt_add_workspace_info}; use crate::scratchpads::passthrough_convert_messages::convert_messages_to_openai_format; use crate::tools::tools_description::{tool_description_list_from_yaml, tools_merged_and_filtered}; use crate::tools::tools_execute::{run_tools_locally, run_tools_remotely}; diff --git a/src/tools/tool_knowledge.rs b/src/tools/tool_knowledge.rs index 1121fdaed..e71f7e7c3 100644 --- a/src/tools/tool_knowledge.rs +++ b/src/tools/tool_knowledge.rs @@ -19,7 +19,7 @@ pub struct ToolGetKnowledge; #[async_trait] impl Tool for ToolGetKnowledge { fn as_any(&self) -> &dyn std::any::Any { self } - + async fn tool_execute( &mut self, ccx: Arc>, From c48696871195740f8093bff1ba5cf467437033e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Mon, 2 Dec 2024 08:57:39 +0100 Subject: [PATCH 037/185] feat: add links detection handler with project summarization detection --- src/http/routers/v1.rs | 4 ++ src/http/routers/v1/links.rs | 72 ++++++++++++++++++++++++++++++++++++ 2 files changed, 76 insertions(+) create mode 100644 src/http/routers/v1/links.rs diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index 7ff983c9c..72a9a2014 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -26,6 +26,7 @@ use crate::http::routers::v1::git::handle_v1_git_stage_and_commit; use crate::http::routers::v1::graceful_shutdown::handle_v1_graceful_shutdown; use crate::http::routers::v1::snippet_accepted::handle_v1_snippet_accepted; use crate::http::routers::v1::telemetry_network::handle_v1_telemetry_network; +use crate::http::routers::v1::links::handle_v1_links; use 
crate::http::routers::v1::lsp_like_handlers::{handle_v1_lsp_did_change, handle_v1_lsp_add_folder, handle_v1_lsp_initialize, handle_v1_lsp_remove_folder, handle_v1_set_active_document}; use crate::http::routers::v1::status::handle_v1_rag_status; use crate::http::routers::v1::customization::handle_v1_customization; @@ -53,6 +54,7 @@ mod docker; mod git; pub mod graceful_shutdown; mod dashboard; +pub mod links; pub mod lsp_like_handlers; pub mod customization; mod at_commands; @@ -138,6 +140,8 @@ pub fn make_v1_router() -> Router { .route("/subchat", telemetry_post!(handle_v1_subchat)) .route("/subchat-single", telemetry_post!(handle_v1_subchat_single)) + + .route("/links", telemetry_post!(handle_v1_links)) ; #[cfg(feature="vecdb")] diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs new file mode 100644 index 000000000..f6496ecfa --- /dev/null +++ b/src/http/routers/v1/links.rs @@ -0,0 +1,72 @@ +use std::sync::Arc; +use axum::Extension; +use axum::http::{Response, StatusCode}; +use hyper::Body; +use serde::{Deserialize, Serialize}; +use tokio::sync::RwLock as ARwLock; + +use crate::call_validation::ChatMessage; +use crate::custom_error::ScratchError; +use crate::global_context::GlobalContext; + +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct LinksPost { + messages: Vec, + chat_id: String, +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "kebab-case")] +enum LinkAction { + PatchAll, + FollowUp, + Commit, + Goto, + SummarizeProject, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct Link { + action: LinkAction, + text: String, + #[serde(skip_serializing_if = "Option::is_none")] + goto: Option, +} + +pub async fn handle_v1_links( + Extension(gcx): Extension>>, + body_bytes: hyper::body::Bytes, +) -> Result, ScratchError> { + let _post = serde_json::from_slice::(&body_bytes) + .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; + + let mut links = Vec::new(); + + if project_summarization_is_missing(gcx.clone()).await { + links.push(Link { + action: LinkAction::SummarizeProject, + text: "Investigate Project".to_string(), + goto: None, + }); + } + + Ok(Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "application/json") + .body(Body::from(serde_json::json!({"links": links}).to_string())) + .unwrap()) +} + +async fn project_summarization_is_missing(gcx: Arc>) -> bool { + let active_file = gcx.read().await.documents_state.active_file_path.clone(); + let workspace_folders = crate::files_correction::get_project_dirs(gcx.clone()).await; + if workspace_folders.is_empty() { + tracing::info!("No projects found, project summarization is not relevant."); + return false; + } + + let (active_project_path, _) = crate::files_in_workspace::detect_vcs_for_a_file_path(&active_file.unwrap_or_default()) + .await.unwrap_or_else(|| (workspace_folders.first().unwrap().clone(), "")); + + !active_project_path.join(".refact").join("project_summary.yaml").exists() +} \ No newline at end of file From f26b713d8ff0ac4bdcfd938892ae64d836dff22b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Mon, 2 Dec 2024 13:11:03 +0100 Subject: [PATCH 038/185] feat: add tool failure link detection, works in postgres only --- src/http/routers/v1/links.rs | 31 ++++++++++++++++++++++++++++- src/integrations/integr_postgres.rs | 5 +++-- src/integrations/mod.rs | 4 ++++ 3 files changed, 37 insertions(+), 3 deletions(-) diff --git a/src/http/routers/v1/links.rs 
b/src/http/routers/v1/links.rs index f6496ecfa..0d9a6585e 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -8,6 +8,7 @@ use tokio::sync::RwLock as ARwLock; use crate::call_validation::ChatMessage; use crate::custom_error::ScratchError; use crate::global_context::GlobalContext; +use crate::integrations::go_to_configuration_message; #[derive(Serialize, Deserialize, Clone, Debug)] pub struct LinksPost { @@ -37,7 +38,7 @@ pub async fn handle_v1_links( Extension(gcx): Extension>>, body_bytes: hyper::body::Bytes, ) -> Result, ScratchError> { - let _post = serde_json::from_slice::(&body_bytes) + let post = serde_json::from_slice::(&body_bytes) .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; let mut links = Vec::new(); @@ -49,6 +50,14 @@ pub async fn handle_v1_links( goto: None, }); } + + for failed_tool_name in failed_tool_names_after_last_user_message(&post.messages) { + links.push(Link { + action: LinkAction::Goto, + text: format!("Configure {failed_tool_name}"), + goto: Some(format!("SETTINGS:{failed_tool_name}")), + }) + } Ok(Response::builder() .status(StatusCode::OK) @@ -69,4 +78,24 @@ async fn project_summarization_is_missing(gcx: Arc>) -> b .await.unwrap_or_else(|| (workspace_folders.first().unwrap().clone(), "")); !active_project_path.join(".refact").join("project_summary.yaml").exists() +} + +fn failed_tool_names_after_last_user_message(messages: &Vec) -> Vec { + let last_user_msg_index = messages.iter().rposition(|m| m.role == "user").unwrap_or(0); + let tool_calls = messages[last_user_msg_index..].iter().filter(|m| m.role == "assistant") + .filter_map(|m| m.tool_calls.as_ref()).flatten().collect::>(); + + let mut result = Vec::new(); + for tool_call in tool_calls { + if let Some(answer_text) = messages.iter() + .find(|m| m.role == "tool" && m.tool_call_id == tool_call.id) + .map(|m| m.content.content_text_only()) { + if answer_text.contains(&go_to_configuration_message(&tool_call.function.name)) { + result.push(tool_call.function.name.clone()); + } + } + } + result.sort(); + result.dedup(); + result } \ No newline at end of file diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index 409a2c04f..928a4ba6c 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -1,6 +1,7 @@ use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::ContextEnum; use crate::call_validation::{ChatContent, ChatMessage, ChatUsage}; +use crate::integrations::go_to_configuration_message; use crate::tools::tools_description::Tool; use async_trait::async_trait; use serde::{Deserialize, Serialize}; @@ -79,7 +80,7 @@ impl ToolPostgres { if output.is_err() { let err_text = format!("{}", output.unwrap_err()); tracing::error!("psql didn't work:\n{}\n{}", query, err_text); - return Err(format!("psql failed:\n{}", err_text)); + return Err(format!("{}, psql failed:\n{}", go_to_configuration_message("postgres"), err_text)); } let output = output.unwrap(); if output.status.success() { @@ -88,7 +89,7 @@ impl ToolPostgres { // XXX: limit stderr, can be infinite let stderr_string = String::from_utf8_lossy(&output.stderr); tracing::error!("psql didn't work:\n{}\n{}", query, stderr_string); - Err(format!("psql failed:\n{}", stderr_string)) + Err(format!("{}, psql failed:\n{}", go_to_configuration_message("postgres"), stderr_string)) } } else { tracing::error!("psql timed out:\n{}", query); diff --git a/src/integrations/mod.rs 
b/src/integrations/mod.rs index dc98685e4..b4272f9b0 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -74,6 +74,10 @@ pub fn integrations_list() -> Vec<&'static str> { ] } +pub fn go_to_configuration_message(integration_name: &str) -> String { + format!("šŸ§© for configuration go to SETTINGS:{integration_name}") +} + // pub fn get_integration_path(cache_dir: &PathBuf, name: &str) -> PathBuf { From 06433b57cf01b9182e6666586b3a6c19a4b66a59 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 3 Dec 2024 09:23:34 +0100 Subject: [PATCH 039/185] feat: adding follow up logic to link detection --- src/http/routers/v1/links.rs | 58 ++++++++++++++++++++++++++++++++++-- 1 file changed, 56 insertions(+), 2 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 0d9a6585e..9b81d703d 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -3,16 +3,19 @@ use axum::Extension; use axum::http::{Response, StatusCode}; use hyper::Body; use serde::{Deserialize, Serialize}; -use tokio::sync::RwLock as ARwLock; +use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; +use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::ChatMessage; use crate::custom_error::ScratchError; use crate::global_context::GlobalContext; use crate::integrations::go_to_configuration_message; +use crate::subchat::subchat_single; #[derive(Serialize, Deserialize, Clone, Debug)] pub struct LinksPost { messages: Vec, + model_name: String, chat_id: String, } @@ -43,7 +46,7 @@ pub async fn handle_v1_links( let mut links = Vec::new(); - if project_summarization_is_missing(gcx.clone()).await { + if project_summarization_is_missing(gcx.clone()).await && post.messages.is_empty() { links.push(Link { action: LinkAction::SummarizeProject, text: "Investigate Project".to_string(), @@ -58,6 +61,17 @@ pub async fn handle_v1_links( goto: Some(format!("SETTINGS:{failed_tool_name}")), }) } + + // TODO: Only do this for "Explore", "Agent" or configuration chats. + if links.is_empty() { + let follow_up_message = generate_follow_up_message( + post.messages.clone(), gcx.clone(), &post.model_name, &post.chat_id).await?; + links.push(Link { + action: LinkAction::FollowUp, + text: follow_up_message, + goto: None, + }); + } Ok(Response::builder() .status(StatusCode::OK) @@ -98,4 +112,44 @@ fn failed_tool_names_after_last_user_message(messages: &Vec) -> Vec result.sort(); result.dedup(); result +} + +async fn generate_follow_up_message( + mut messages: Vec, + gcx: Arc>, + model_name: &str, + chat_id: &str, +) -> Result { + if messages.first().map(|m| m.role == "system").unwrap_or(false) { + messages.remove(0); + } + messages.insert(0, ChatMessage::new( + "system".to_string(), + "Generate a 2-3 word user response, like 'Can you fix it?' 
for errors or 'Proceed' for plan validation".to_string(), + )); + let ccx = Arc::new(AMutex::new(AtCommandsContext::new( + gcx.clone(), + 1024, + 1, + false, + messages.clone(), + chat_id.to_string(), + false, + ).await)); + let new_messages = subchat_single( + ccx.clone(), + model_name, + messages, + vec![], + None, + false, + Some(0.5), + None, + 1, + None, + None, + None, + ).await?; + new_messages.into_iter().next().map(|x| x.into_iter().last().map(|last_m| { + last_m.content.content_text_only() })).flatten().ok_or("No commit message found".to_string()) } \ No newline at end of file From 88f42f4e4cefa617f37b36d0be53997af513df24 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 3 Dec 2024 09:42:14 +0100 Subject: [PATCH 040/185] feat: add link request examples --- examples/links.sh | 82 ++++++++++++++++++++++++++++++++++++ src/http/routers/v1/links.rs | 4 +- 2 files changed, 84 insertions(+), 2 deletions(-) create mode 100644 examples/links.sh diff --git a/examples/links.sh b/examples/links.sh new file mode 100644 index 000000000..5e97dfff5 --- /dev/null +++ b/examples/links.sh @@ -0,0 +1,82 @@ +# Follow up example +curl -X POST http://localhost:8001/v1/links \ + -H "Content-Type: application/json" \ + -d '{ + "chat_id": "chat-example", + "model_name": "gpt-4o-mini", + "messages": [ + { + "role": "user", + "content": "List all containers", + "tool_calls": null, + "finish_reason": "", + "tool_call_id": "", + "usage": null, + "subchats": null + }, + { + "role": "assistant", + "content": "Docker tool not found, you can use cli and run docker ps", + "tool_calls": null, + "finish_reason": "", + "tool_call_id": "", + "usage": null, + "subchats": null + } + ] + }' + +# Configuration goto example +curl -X POST http://localhost:8001/v1/links \ + -H "Content-Type: application/json" \ + -d '{ + "chat_id": "chat-example", + "model_name": "gpt-4o-mini", + "messages": [ + { + "role": "user", + "content": "use psql tool to list all tables", + "tool_calls": null, + "finish_reason": "", + "tool_call_id": "", + "usage": null, + "subchats": null + }, + { + "role": "assistant", + "content": null, + "tool_calls": [ + { + "id": "call_BGLjpFFQJbD00euir9rQ8V7w", + "function": { + "arguments": "{\"query\":\"\\\\dt;\"}", + "name": "postgres" + }, + "type": "function" + } + ], + "finish_reason": "tool_calls", + "tool_call_id": "", + "usage": null, + "subchats": null + }, + { + "role": "tool", + "content": "šŸ§© for configuration go to SETTINGS:postgres, psql failed:\nNo such file or directory (os error 2)", + "tool_calls": null, + "finish_reason": "", + "tool_call_id": "call_BGLjpFFQJbD00euir9rQ8V7w", + "usage": null, + "subchats": null + } + ] + }' + +# Summarize project example +curl -X POST http://localhost:8001/v1/links \ + -H "Content-Type: application/json" \ + -d '{ + "chat_id": "chat-example", + "model_name": "gpt-4o-mini", + "messages": [] + }' \ No newline at end of file diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 9b81d703d..36bc0d2d8 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -64,8 +64,8 @@ pub async fn handle_v1_links( // TODO: Only do this for "Explore", "Agent" or configuration chats. 
if links.is_empty() { - let follow_up_message = generate_follow_up_message( - post.messages.clone(), gcx.clone(), &post.model_name, &post.chat_id).await?; + let follow_up_message = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.chat_id).await + .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error generating follow-up message: {}", e)))?; links.push(Link { action: LinkAction::FollowUp, text: follow_up_message, From b14d121d597a2e470324eec734db2ab5ccac11a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 3 Dec 2024 15:41:02 +0100 Subject: [PATCH 041/185] refactor: ask only gcx and messages to parse tickets --- src/http/routers/v1/patch.rs | 8 ++++---- src/tools/tool_patch.rs | 7 +++++-- src/tools/tool_patch_aux/tickets_parsing.rs | 11 ++++------- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/http/routers/v1/patch.rs b/src/http/routers/v1/patch.rs index 4a1287a17..c3510307c 100644 --- a/src/http/routers/v1/patch.rs +++ b/src/http/routers/v1/patch.rs @@ -82,7 +82,7 @@ pub async fn handle_v1_patch_single_file_from_ticket( 8096, 10, false, - messages, + messages.clone(), "".to_string(), false, ).await)); @@ -94,7 +94,7 @@ pub async fn handle_v1_patch_single_file_from_ticket( ccx_lock.n_ctx = params.subchat_n_ctx; } - let all_tickets_from_above = get_tickets_from_messages(ccx.clone()).await; + let all_tickets_from_above = get_tickets_from_messages(global_context.clone(), &messages).await; let mut active_tickets = get_and_correct_active_tickets( global_context.clone(), post.ticket_ids.clone(), all_tickets_from_above.clone(), ).await.map_err(|(e, _)| { @@ -159,7 +159,7 @@ pub async fn handle_v1_patch_apply_all( 8096, 10, false, - messages, + messages.clone(), "".to_string(), false, ).await)); @@ -172,7 +172,7 @@ pub async fn handle_v1_patch_apply_all( } // leave only the latest ticket for each file - let all_tickets = get_tickets_from_messages(ccx.clone()).await; + let all_tickets = get_tickets_from_messages(global_context.clone(), &messages).await; let mut filename_by_ticket: HashMap = HashMap::new(); for ticket in all_tickets.values() { if let Some(el) = filename_by_ticket.get(&ticket.filename_before) { diff --git a/src/tools/tool_patch.rs b/src/tools/tool_patch.rs index 8cab7ea0a..401869de1 100644 --- a/src/tools/tool_patch.rs +++ b/src/tools/tool_patch.rs @@ -162,8 +162,11 @@ impl Tool for ToolPatch { ).await)) }; - let gcx = ccx_subchat.lock().await.global_context.clone(); - let all_tickets_from_above = get_tickets_from_messages(ccx.clone()).await; + let (gcx, messages) = { + let ccx_lock = ccx_subchat.lock().await; + (ccx_lock.global_context.clone(), ccx_lock.messages.clone()) + }; + let all_tickets_from_above = get_tickets_from_messages(gcx.clone(), &messages).await; let mut active_tickets = match get_and_correct_active_tickets( gcx.clone(), tickets.clone(), diff --git a/src/tools/tool_patch_aux/tickets_parsing.rs b/src/tools/tool_patch_aux/tickets_parsing.rs index defd09038..b3c913498 100644 --- a/src/tools/tool_patch_aux/tickets_parsing.rs +++ b/src/tools/tool_patch_aux/tickets_parsing.rs @@ -3,12 +3,12 @@ use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::path::PathBuf; use std::sync::Arc; -use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; +use tokio::sync::RwLock as ARwLock; use tracing::warn; use crate::ast::ast_structs::AstDefinition; -use crate::at_commands::at_commands::AtCommandsContext; use 
crate::at_commands::at_file::{file_repair_candidates, return_one_candidate_or_a_good_error}; +use crate::call_validation::ChatMessage; use crate::files_correction::get_project_dirs; use crate::global_context::GlobalContext; use crate::tools::tool_patch_aux::postprocessing_utils::does_doc_have_symbol; @@ -275,12 +275,9 @@ async fn parse_tickets(gcx: Arc>, content: &str, message_ } pub async fn get_tickets_from_messages( - ccx: Arc>, + gcx: Arc>, + messages: &Vec, ) -> HashMap { - let (gcx, messages) = { - let ccx_lock = ccx.lock().await; - (ccx_lock.global_context.clone(), ccx_lock.messages.clone()) - }; let mut tickets: HashMap = HashMap::new(); for (idx, message) in messages.iter().enumerate().filter(|(_, x)| x.role == "assistant") { for ticket in parse_tickets(gcx.clone(), &message.content.content_text_only(), idx).await.into_iter() { From b9c9ead54fb0906b6288d1e4e9a31e8bf972f725 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 3 Dec 2024 15:41:30 +0100 Subject: [PATCH 042/185] feat: add patch all functionality to the links handler --- examples/links.sh | 37 ++++++++++++++++++++++++++++++++++++ src/http/routers/v1/links.rs | 15 +++++++++++++-- 2 files changed, 50 insertions(+), 2 deletions(-) diff --git a/examples/links.sh b/examples/links.sh index 5e97dfff5..1cc1f3bfc 100644 --- a/examples/links.sh +++ b/examples/links.sh @@ -79,4 +79,41 @@ curl -X POST http://localhost:8001/v1/links \ "chat_id": "chat-example", "model_name": "gpt-4o-mini", "messages": [] + }' + +# Example of Save and return in configuration chat +curl -X POST http://localhost:8001/v1/links \ + -H "Content-Type: application/json" \ + -d '{ + "chat_id": "chat-example", + "model_name": "gpt-4o-mini", + "messages": [ + { + "role": "system", + "content": "[mode3config] You are a refact coding agent working on improving configuration files", + "tool_calls": null, + "finish_reason": "", + "tool_call_id": "", + "usage": null, + "subchats": null + }, + { + "role": "user", + "content": "create a new file called \"hello_world.py\"", + "tool_calls": null, + "finish_reason": "", + "tool_call_id": "", + "usage": null, + "subchats": null + }, + { + "role": "assistant", + "content": "šŸ“REWRITE_WHOLE_FILE 000 \"/app/hello_world.py\"\n```python\nprint(\"Hello, World!\")\n```", + "tool_calls": null, + "finish_reason": "", + "tool_call_id": "", + "usage": null, + "subchats": null + } + ] }' \ No newline at end of file diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 36bc0d2d8..41cc2e1b3 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -11,6 +11,7 @@ use crate::custom_error::ScratchError; use crate::global_context::GlobalContext; use crate::integrations::go_to_configuration_message; use crate::subchat::subchat_single; +use crate::tools::tool_patch_aux::tickets_parsing::get_tickets_from_messages; #[derive(Serialize, Deserialize, Clone, Debug)] pub struct LinksPost { @@ -46,7 +47,7 @@ pub async fn handle_v1_links( let mut links = Vec::new(); - if project_summarization_is_missing(gcx.clone()).await && post.messages.is_empty() { + if post.messages.is_empty() && project_summarization_is_missing(gcx.clone()).await { links.push(Link { action: LinkAction::SummarizeProject, text: "Investigate Project".to_string(), @@ -54,6 +55,16 @@ pub async fn handle_v1_links( }); } + // TODO: Only do this for configuration chats, detect it in system prompt.
+ if !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { + links.push(Link { + action: LinkAction::PatchAll, + text: "Save and return".to_string(), + goto: Some("SETTINGS:DEFAULT".to_string()), + }); + } + + // TODO: This probably should not appear in configuration chats, unless we can know that this is not the main one being configured for failed_tool_name in failed_tool_names_after_last_user_message(&post.messages) { links.push(Link { action: LinkAction::Goto, @@ -62,7 +73,7 @@ pub async fn handle_v1_links( }) } - // TODO: Only do this for "Explore", "Agent" or configuration chats. + // TODO: Only do this for "Explore", "Agent" or configuration chats, detect it in system prompt. if links.is_empty() { let follow_up_message = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.chat_id).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error generating follow-up message: {}", e)))?; From 134136048c631bd6f5000d43e1fbbf090386e981 Mon Sep 17 00:00:00 2001 From: JegernOUTT Date: Wed, 4 Dec 2024 16:44:48 +1030 Subject: [PATCH 043/185] - move generating commit message functionality to the other module - add generate_commit_message_for_projects --- src/agentic/generate_commit_message.rs | 297 +++++++++++++++++++++ src/agentic/mod.rs | 1 + src/http/routers/v1/chat_based_handlers.rs | 282 ++----------------- src/main.rs | 2 + 4 files changed, 322 insertions(+), 260 deletions(-) create mode 100644 src/agentic/generate_commit_message.rs create mode 100644 src/agentic/mod.rs diff --git a/src/agentic/generate_commit_message.rs b/src/agentic/generate_commit_message.rs new file mode 100644 index 000000000..74a041561 --- /dev/null +++ b/src/agentic/generate_commit_message.rs @@ -0,0 +1,297 @@ +use std::path::PathBuf; +use crate::at_commands::at_commands::AtCommandsContext; +use crate::call_validation::{ChatContent, ChatMessage}; +use crate::global_context::{try_load_caps_quickly_if_not_present, GlobalContext}; +use crate::subchat::subchat_single; +use std::sync::Arc; +use hashbrown::HashMap; +use tokio::sync::Mutex as AMutex; +use tokio::sync::RwLock as ARwLock; +use tracing::warn; +use crate::files_in_workspace::detect_vcs_for_a_file_path; + +const DIFF_ONLY_PROMPT: &str = r#"Analyze the given diff and generate a clear and descriptive commit message that explains the purpose of the changes. Your commit message should convey *why* the changes were made, *how* they improve the code, or what features or fixes are implemented, rather than just restating *what* the changes are. Aim for an informative, concise summary that would be easy for others to understand when reviewing the commit history. + +# Steps +1. Analyze the code diff to understand the changes made. +2. Determine the functionality added or removed, and the reason for these adjustments. +3. Summarize the details of the change in an accurate and informative, yet concise way. +4. Structure the message in a way that starts with a short summary line, followed by optional details if the change is complex. + +# Output Format + +The output should be a single commit message in the following format: +- A **first line summarizing** the purpose of the change. This line should be concise. +- Optionally, include a **second paragraph** with *additional context* if the change is complex or otherwise needs further clarification. + (e.g., if there's a bug fix, mention what problem was fixed and why the change works.) 
+ +# Examples + +**Input (diff)**: +```diff +- public class UserManager { +- private final UserDAO userDAO; + ++ public class UserManager { ++ private final UserService userService; ++ private final NotificationService notificationService; + + public UserManager(UserDAO userDAO) { +- this.userDAO = userDAO; ++ this.userService = new UserService(); ++ this.notificationService = new NotificationService(); + } +``` + +**Output (commit message)**: +``` +Refactor `UserManager` to use `UserService` and `NotificationService` + +Replaced `UserDAO` with `UserService` and introduced `NotificationService` to improve separation of concerns and make user management logic reusable and extendable. +``` + +**Input (diff)**: +```diff +- if (age > 17) { +- accessAllowed = true; +- } else { +- accessAllowed = false; +- } ++ accessAllowed = age > 17; +``` + +**Output (commit message)**: +``` +Simplify age check logic for accessing permissions by using a single expression +``` + +# Notes +- Make sure the commit messages are descriptive enough to convey why the change is being made without being too verbose. +- If applicable, add `Fixes #` or other references to link the commit to specific tickets. +- Avoid wording: "Updated", "Modified", or "Changed" without explicitly stating *why*ā€”focus on *intent*."#; + +const DIFF_WITH_USERS_TEXT_PROMPT: &str = r#"Generate a commit message using the diff and the provided initial commit message as a template for context. + +[Additional details as needed.] + +# Steps + +1. Analyze the code diff to understand the changes made. +2. Review the user's initial commit message to understand the intent and use it as a contextual starting point. +3. Determine the functionality added or removed, and the reason for these adjustments. +4. Combine insights from the diff and user's initial commit message to generate a more descriptive and complete commit message. +5. Summarize the details of the change in an accurate and informative, yet concise way. +6. Structure the message in a way that starts with a short summary line, followed by optional details if the change is complex. + +# Output Format + +The output should be a single commit message in the following format: +- A **first line summarizing** the purpose of the change. This line should be concise. +- Optionally, include a **second paragraph** with *additional context* if the change is complex or otherwise needs further clarification. + (e.g., if there's a bug fix, mention what problem was fixed and why the change works.) + +# Examples + +**Input (initial commit message)**: +``` +Refactor UserManager to use services instead of DAOs +``` + +**Input (diff)**: +```diff +- public class UserManager { +- private final UserDAO userDAO; + ++ public class UserManager { ++ private final UserService userService; ++ private final NotificationService notificationService; + + public UserManager(UserDAO userDAO) { +- this.userDAO = userDAO; ++ this.userService = new UserService(); ++ this.notificationService = new NotificationService(); + } +``` + +**Output (commit message)**: +``` +Refactor `UserManager` to use `UserService` and `NotificationService` + +Replaced `UserDAO` with `UserService` and introduced `NotificationService` to improve separation of concerns and make user management logic reusable and extendable. 
+``` + +**Input (initial commit message)**: +``` +Simplify age check logic +``` + +**Input (diff)**: +```diff +- if (age > 17) { +- accessAllowed = true; +- } else { +- accessAllowed = false; +- } ++ accessAllowed = age > 17; +``` + +**Output (commit message)**: +``` +Simplify age check logic for accessing permissions by using a single expression +``` + +# Notes +- Make sure the commit messages are descriptive enough to convey why the change is being made without being too verbose. +- If applicable, add `Fixes #` or other references to link the commit to specific tickets. +- Avoid wording: "Updated", "Modified", or "Changed" without explicitly stating *why*ā€”focus on *intent*."#; +const N_CTX: usize = 32000; +const TEMPERATURE: f32 = 0.5; + +fn remove_fencing(message: &String) -> String { + let trimmed_message = message.trim(); + let without_leading_fence = if trimmed_message.starts_with("```") { + let mut lines = trimmed_message.lines(); + lines.next(); + lines.collect::>().join("\n") + } else { + trimmed_message.to_string() + }; + let without_trailing_fence = if without_leading_fence.ends_with("```") { + let mut lines = without_leading_fence.lines().collect::>(); + lines.pop(); + lines.join("\n") + } else { + without_leading_fence + }; + without_trailing_fence.trim().to_string() +} + +pub async fn generate_commit_message_by_diff( + gcx: Arc>, + diff: &String, + commit_message_prompt: &Option, +) -> Result { + if diff.is_empty() { + return Err("The provided diff is empty".to_string()); + } + let messages = if let Some(text) = commit_message_prompt { + vec![ + ChatMessage { + role: "system".to_string(), + content: ChatContent::SimpleText(DIFF_WITH_USERS_TEXT_PROMPT.to_string()), + ..Default::default() + }, + ChatMessage { + role: "user".to_string(), + content: ChatContent::SimpleText(format!( + "Commit message:\n```\n{}\n```\nDiff:\n```\n{}\n```\n", + text, diff + )), + ..Default::default() + }, + ] + } else { + vec![ + ChatMessage { + role: "system".to_string(), + content: ChatContent::SimpleText(DIFF_ONLY_PROMPT.to_string()), + ..Default::default() + }, + ChatMessage { + role: "user".to_string(), + content: ChatContent::SimpleText(format!("Diff:\n```\n{}\n```\n", diff)), + ..Default::default() + }, + ] + }; + let model_name = match try_load_caps_quickly_if_not_present(gcx.clone(), 0).await { + Ok(caps) => caps + .read() + .map(|x| Ok(x.code_chat_default_model.clone())) + .map_err(|_| "Caps are not available".to_string())?, + Err(_) => Err("No caps available".to_string()), + }?; + let ccx: Arc> = Arc::new(AMutex::new( + AtCommandsContext::new( + gcx.clone(), + N_CTX, + 1, + false, + messages.clone(), + "".to_string(), + false, + ) + .await, + )); + let new_messages = subchat_single( + ccx.clone(), + model_name.as_str(), + messages, + vec![], + None, + false, + Some(TEMPERATURE), + None, + 1, + None, + None, + None, + ) + .await + .map_err(|e| format!("Error: {}", e))?; + + let commit_message = new_messages + .into_iter() + .next() + .map(|x| { + x.into_iter().last().map(|last_m| match last_m.content { + ChatContent::SimpleText(text) => Some(text), + ChatContent::Multimodal(_) => None, + }) + }) + .flatten() + .flatten() + .ok_or("No commit message was generated".to_string())?; + Ok(remove_fencing(&commit_message)) +} + +pub async fn _generate_commit_message_for_projects( + gcx: Arc>, +) -> Result, String> { + let project_folders = gcx.read().await.documents_state.workspace_folders.lock().unwrap().clone(); + let mut commit_messages = HashMap::new(); + + for folder in project_folders { + 
let command = if let Some((_, vcs_type)) = detect_vcs_for_a_file_path(&folder).await { + match vcs_type { + "git" => "git diff", + "svn" => "svn diff", + "hg" => "hg diff", + other => { + warn!("Unrecognizable version control detected for the folder {folder:?}: {other}"); + continue; + } + } + } else { + warn!("There's no recognizable version control detected for the folder {folder:?}"); + continue; + }; + + let output = tokio::process::Command::new(command) + .current_dir(&folder) + .output() + .await + .map_err(|e| format!("Failed to execute command for folder {folder:?}: {e}"))?; + + if !output.status.success() { + warn!("Command failed for folder {folder:?}: {}", String::from_utf8_lossy(&output.stderr)); + continue; + } + + let diff_output = String::from_utf8_lossy(&output.stdout).to_string(); + let commit_message = generate_commit_message_by_diff(gcx.clone(), &diff_output, &None).await?; + commit_messages.insert(folder, commit_message); + } + + Ok(commit_messages) +} \ No newline at end of file diff --git a/src/agentic/mod.rs b/src/agentic/mod.rs new file mode 100644 index 000000000..483f4493a --- /dev/null +++ b/src/agentic/mod.rs @@ -0,0 +1 @@ +pub mod generate_commit_message; \ No newline at end of file diff --git a/src/http/routers/v1/chat_based_handlers.rs b/src/http/routers/v1/chat_based_handlers.rs index 47fc40e22..af562ca77 100644 --- a/src/http/routers/v1/chat_based_handlers.rs +++ b/src/http/routers/v1/chat_based_handlers.rs @@ -1,276 +1,38 @@ -use std::sync::Arc; -use tokio::sync::Mutex as AMutex; -use axum::Extension; +use crate::custom_error::ScratchError; +use crate::global_context::GlobalContext; use axum::http::{Response, StatusCode}; +use axum::Extension; use hyper::Body; use serde::Deserialize; +use std::sync::Arc; use tokio::sync::RwLock as ARwLock; -use crate::subchat::subchat_single; -use crate::at_commands::at_commands::AtCommandsContext; -use crate::call_validation::{ChatContent, ChatMessage}; -use crate::custom_error::ScratchError; -use crate::global_context::{try_load_caps_quickly_if_not_present, GlobalContext}; - - -const DIFF_ONLY_PROMPT: &str = r#"Analyze the given diff and generate a clear and descriptive commit message that explains the purpose of the changes. Your commit message should convey *why* the changes were made, *how* they improve the code, or what features or fixes are implemented, rather than just restating *what* the changes are. Aim for an informative, concise summary that would be easy for others to understand when reviewing the commit history. - -# Steps -1. Analyze the code diff to understand the changes made. -2. Determine the functionality added or removed, and the reason for these adjustments. -3. Summarize the details of the change in an accurate and informative, yet concise way. -4. Structure the message in a way that starts with a short summary line, followed by optional details if the change is complex. - -# Output Format - -The output should be a single commit message in the following format: -- A **first line summarizing** the purpose of the change. This line should be concise. -- Optionally, include a **second paragraph** with *additional context* if the change is complex or otherwise needs further clarification. - (e.g., if there's a bug fix, mention what problem was fixed and why the change works.) 
- -# Examples - -**Input (diff)**: -```diff -- public class UserManager { -- private final UserDAO userDAO; - -+ public class UserManager { -+ private final UserService userService; -+ private final NotificationService notificationService; - - public UserManager(UserDAO userDAO) { -- this.userDAO = userDAO; -+ this.userService = new UserService(); -+ this.notificationService = new NotificationService(); - } -``` - -**Output (commit message)**: -``` -Refactor `UserManager` to use `UserService` and `NotificationService` - -Replaced `UserDAO` with `UserService` and introduced `NotificationService` to improve separation of concerns and make user management logic reusable and extendable. -``` - -**Input (diff)**: -```diff -- if (age > 17) { -- accessAllowed = true; -- } else { -- accessAllowed = false; -- } -+ accessAllowed = age > 17; -``` - -**Output (commit message)**: -``` -Simplify age check logic for accessing permissions by using a single expression -``` - -# Notes -- Make sure the commit messages are descriptive enough to convey why the change is being made without being too verbose. -- If applicable, add `Fixes #` or other references to link the commit to specific tickets. -- Avoid wording: "Updated", "Modified", or "Changed" without explicitly stating *why*ā€”focus on *intent*."#; - - - -const DIFF_WITH_USERS_TEXT_PROMPT: &str = r#"Generate a commit message using the diff and the provided initial commit message as a template for context. - -[Additional details as needed.] - -# Steps - -1. Analyze the code diff to understand the changes made. -2. Review the user's initial commit message to understand the intent and use it as a contextual starting point. -3. Determine the functionality added or removed, and the reason for these adjustments. -4. Combine insights from the diff and user's initial commit message to generate a more descriptive and complete commit message. -5. Summarize the details of the change in an accurate and informative, yet concise way. -6. Structure the message in a way that starts with a short summary line, followed by optional details if the change is complex. - -# Output Format - -The output should be a single commit message in the following format: -- A **first line summarizing** the purpose of the change. This line should be concise. -- Optionally, include a **second paragraph** with *additional context* if the change is complex or otherwise needs further clarification. - (e.g., if there's a bug fix, mention what problem was fixed and why the change works.) - -# Examples - -**Input (initial commit message)**: -``` -Refactor UserManager to use services instead of DAOs -``` - -**Input (diff)**: -```diff -- public class UserManager { -- private final UserDAO userDAO; - -+ public class UserManager { -+ private final UserService userService; -+ private final NotificationService notificationService; - - public UserManager(UserDAO userDAO) { -- this.userDAO = userDAO; -+ this.userService = new UserService(); -+ this.notificationService = new NotificationService(); - } -``` - -**Output (commit message)**: -``` -Refactor `UserManager` to use `UserService` and `NotificationService` - -Replaced `UserDAO` with `UserService` and introduced `NotificationService` to improve separation of concerns and make user management logic reusable and extendable. 
-``` - -**Input (initial commit message)**: -``` -Simplify age check logic -``` - -**Input (diff)**: -```diff -- if (age > 17) { -- accessAllowed = true; -- } else { -- accessAllowed = false; -- } -+ accessAllowed = age > 17; -``` - -**Output (commit message)**: -``` -Simplify age check logic for accessing permissions by using a single expression -``` - -# Notes -- Make sure the commit messages are descriptive enough to convey why the change is being made without being too verbose. -- If applicable, add `Fixes #` or other references to link the commit to specific tickets. -- Avoid wording: "Updated", "Modified", or "Changed" without explicitly stating *why*ā€”focus on *intent*."#; -const N_CTX: usize = 32000; -const TEMPERATURE: f32 = 0.5; - - -fn remove_fencing(message: &String) -> String { - let trimmed_message = message.trim(); - let without_leading_fence = if trimmed_message.starts_with("```") { - let mut lines = trimmed_message.lines(); - lines.next(); - lines.collect::>().join("\n") - } else { - trimmed_message.to_string() - }; - let without_trailing_fence = if without_leading_fence.ends_with("```") { - let mut lines = without_leading_fence.lines().collect::>(); - lines.pop(); - lines.join("\n") - } else { - without_leading_fence - }; - without_trailing_fence.trim().to_string() -} - +use crate::agentic::generate_commit_message::generate_commit_message_by_diff; #[derive(Deserialize)] struct CommitMessageFromDiffPost { diff: String, #[serde(default)] - text: Option // a prompt for the commit message + text: Option, // a prompt for the commit message } pub async fn handle_v1_commit_message_from_diff( Extension(global_context): Extension>>, body_bytes: hyper::body::Bytes, ) -> axum::response::Result, ScratchError> { - let post = serde_json::from_slice::(&body_bytes) - .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; - if post.diff.is_empty() { - return Err(ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, "The provided diff is empty".to_string())) - } - - let messages = if let Some(text) = &post.text { - vec![ - ChatMessage { - role: "system".to_string(), - content: ChatContent::SimpleText(DIFF_WITH_USERS_TEXT_PROMPT.to_string()), - ..Default::default() - }, - ChatMessage { - role: "user".to_string(), - content: ChatContent::SimpleText(format!("Commit message:\n```\n{}\n```\nDiff:\n```\n{}\n```\n", text, post.diff)), - ..Default::default() - }, - ] - } else { - vec![ - ChatMessage { - role: "system".to_string(), - content: ChatContent::SimpleText(DIFF_ONLY_PROMPT.to_string()), - ..Default::default() - }, - ChatMessage { - role: "user".to_string(), - content: ChatContent::SimpleText(format!("Diff:\n```\n{}\n```\n", post.diff)), - ..Default::default() - }, - ] - }; - let model_name = match try_load_caps_quickly_if_not_present(global_context.clone(), 0).await { - Ok(caps) => { - caps.read() - .map(|x| Ok(x.code_chat_default_model.clone())) - .map_err(|_| - ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, "Caps are not available".to_string()) - )? 
- }, - Err(_) => Err(ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, "No caps available".to_string())) - }?; - - let ccx: Arc> = Arc::new(AMutex::new( - AtCommandsContext::new( - global_context.clone(), - N_CTX, - 1, - false, - messages.clone(), - "".to_string(), - false - ).await) - ); - - let new_messages = subchat_single( - ccx.clone(), - model_name.as_str(), - messages, - vec![], - None, - false, - Some(TEMPERATURE), - None, - 1, - None, - None, - None, - ).await.map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error: {}", e)))?; - - let commit_message = new_messages - .into_iter() - .next() - .map(|x| x.into_iter().last().map(|last_m| { - match last_m.content { - ChatContent::SimpleText(text) => Some(text), - ChatContent::Multimodal(_) => { None } - } - })) - .flatten() - .flatten() - .ok_or(ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, "No commit message was generated".to_string()))?; - Ok( - Response::builder() - .status(StatusCode::OK) - .header("Content-Type", "application/json") - .body(Body::from(remove_fencing(&commit_message))) - .unwrap() - ) + let post = serde_json::from_slice::(&body_bytes).map_err(|e| { + ScratchError::new( + StatusCode::UNPROCESSABLE_ENTITY, + format!("JSON problem: {}", e), + ) + })?; + + let commit_message = generate_commit_message_by_diff(global_context.clone(), &post.diff, &post.text) + .await + .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, e))?; + + Ok(Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "application/json") + .body(Body::from(commit_message)) + .unwrap()) } diff --git a/src/main.rs b/src/main.rs index 67243ab5e..b30a74b90 100644 --- a/src/main.rs +++ b/src/main.rs @@ -65,6 +65,8 @@ mod integrations; mod privacy; mod privacy_compiled_in; mod git; +mod agentic; + #[tokio::main] async fn main() { From e15cc289457c2dd894ec2747dcc552411382e321 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Wed, 4 Dec 2024 09:43:58 +0100 Subject: [PATCH 044/185] feat: add commit msg link --- src/git.rs | 30 +++++++++++++++++++++++- src/http/routers/v1/links.rs | 44 +++++++++++++++++++++++++++++------- 2 files changed, 65 insertions(+), 9 deletions(-) diff --git a/src/git.rs b/src/git.rs index 64b62db13..0fb62bff3 100644 --- a/src/git.rs +++ b/src/git.rs @@ -1,6 +1,6 @@ use std::path::PathBuf; use tracing::error; -use git2::{Branch, BranchType, IndexAddOption, Oid, Repository, Signature, Status, StatusOptions}; +use git2::{Branch, BranchType, DiffOptions, IndexAddOption, Oid, Repository, Signature, Status, StatusOptions}; pub fn git_ls_files(repository_path: &PathBuf) -> Option> { let repository = Repository::open(repository_path) @@ -107,3 +107,31 @@ pub fn commit(repository: &Repository, branch: &Branch, message: &str, author_na ).map_err(|e| format!("Failed to create commit: {}", e)) } +/// Similar to `git diff`, but including untracked files. 
+pub fn git_diff_from_all_changes(repository: &Repository) -> Result { + let mut diff_options = DiffOptions::new(); + diff_options.include_untracked(true); + diff_options.recurse_untracked_dirs(true); + + // Create a new temporary tree, with all changes staged + let mut index = repository.index().map_err(|e| format!("Failed to get repository index: {}", e))?; + index.add_all(["*"].iter(), IndexAddOption::DEFAULT, None) + .map_err(|e| format!("Failed to add files to index: {}", e))?; + let oid = index.write_tree().map_err(|e| format!("Failed to write tree: {}", e))?; + let new_tree = repository.find_tree(oid).map_err(|e| format!("Failed to find tree: {}", e))?; + + let head = repository.head().and_then(|head_ref| head_ref.peel_to_tree()) + .map_err(|e| format!("Failed to get HEAD tree: {}", e))?; + + let diff = repository.diff_tree_to_tree(Some(&head), Some(&new_tree), Some(&mut diff_options)) + .map_err(|e| format!("Failed to generate diff: {}", e))?; + + let mut diff_str = String::new(); + diff.print(git2::DiffFormat::Patch, |_, _, line| { + diff_str.push(line.origin()); + diff_str.push_str(std::str::from_utf8(line.content()).unwrap_or("")); + true + }).map_err(|e| format!("Failed to print diff: {}", e))?; + + Ok(diff_str) +} diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 41cc2e1b3..100e13ea1 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -1,10 +1,13 @@ +use std::path::PathBuf; use std::sync::Arc; use axum::Extension; use axum::http::{Response, StatusCode}; use hyper::Body; use serde::{Deserialize, Serialize}; use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; +use tracing::error; +use crate::agentic::generate_commit_message::generate_commit_message_by_diff; use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::ChatMessage; use crate::custom_error::ScratchError; @@ -64,6 +67,17 @@ pub async fn handle_v1_links( }); } + // TODO: Only do this for "Agent" chat. 
+ if let Ok(diff) = get_diff_with_all_changes_in_current_project(gcx.clone()).await.map_err(|e| error!(e)) { + if let Ok(commit_msg) = generate_commit_message_by_diff(gcx.clone(), &diff, &None).await.map_err(|e| error!(e)) { + links.push(Link { + action: LinkAction::Commit, + text: format!("git commit -m \"{}\"", commit_msg), + goto: None, + }); + } + } + // TODO: This probably should not appear in configuration chats, unless we can know that this is not the main one being configured for failed_tool_name in failed_tool_names_after_last_user_message(&post.messages) { links.push(Link { @@ -91,18 +105,32 @@ pub async fn handle_v1_links( .unwrap()) } -async fn project_summarization_is_missing(gcx: Arc>) -> bool { +async fn get_diff_with_all_changes_in_current_project(gcx: Arc>) -> Result { + let active_project_path = get_active_project_path(gcx.clone()).await.ok_or("No active project found".to_string())?; + let repository = git2::Repository::open(&active_project_path).map_err(|e| e.to_string())?; + crate::git::git_diff_from_all_changes(&repository) +} + +async fn get_active_project_path(gcx: Arc>) -> Option { let active_file = gcx.read().await.documents_state.active_file_path.clone(); let workspace_folders = crate::files_correction::get_project_dirs(gcx.clone()).await; - if workspace_folders.is_empty() { - tracing::info!("No projects found, project summarization is not relevant."); - return false; - } + if workspace_folders.is_empty() { return None; } - let (active_project_path, _) = crate::files_in_workspace::detect_vcs_for_a_file_path(&active_file.unwrap_or_default()) - .await.unwrap_or_else(|| (workspace_folders.first().unwrap().clone(), "")); + Some(crate::files_in_workspace::detect_vcs_for_a_file_path( + &active_file.unwrap_or_else(|| workspace_folders[0].clone()) + ).await.map(|(path, _)| path).unwrap_or_else(|| workspace_folders[0].clone())) +} - !active_project_path.join(".refact").join("project_summary.yaml").exists() +async fn project_summarization_is_missing(gcx: Arc>) -> bool { + match get_active_project_path(gcx.clone()).await { + Some(active_project_path) => { + !active_project_path.join(".refact").join("project_summary.yaml").exists() + } + None => { + tracing::info!("No projects found, project summarization is not relevant."); + false + } + } } fn failed_tool_names_after_last_user_message(messages: &Vec) -> Vec { From 2644b1dda0b0a5e0687b7d812a892c96d277ab37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Wed, 4 Dec 2024 11:41:46 +0100 Subject: [PATCH 045/185] feat: only show links in appropiate chat modes based on system prompt + refactor some logic to different files --- examples/links.sh | 27 +++++ src/agentic/generate_follow_up_message.rs | 47 +++++++++ src/agentic/mod.rs | 3 +- src/files_correction.rs | 11 ++ src/http/routers/v1/links.rs | 121 +++++++++------------- 5 files changed, 135 insertions(+), 74 deletions(-) create mode 100644 src/agentic/generate_follow_up_message.rs diff --git a/examples/links.sh b/examples/links.sh index 1cc1f3bfc..b83d2ce4f 100644 --- a/examples/links.sh +++ b/examples/links.sh @@ -5,6 +5,15 @@ curl -X POST http://localhost:8001/v1/links \ "chat_id": "chat-example", "model_name": "gpt-4o-mini", "messages": [ + { + "role": "system", + "content": "[mode2] You are a refact exploration assistant", + "tool_calls": null, + "finish_reason": "", + "tool_call_id": "", + "usage": null, + "subchats": null + }, { "role": "user", "content": "List all containers", @@ -33,6 +42,15 @@ curl -X POST http://localhost:8001/v1/links 
\ "chat_id": "chat-example", "model_name": "gpt-4o-mini", "messages": [ + { + "role": "system", + "content": "[mode3] You are a refact agentic assistant", + "tool_calls": null, + "finish_reason": "", + "tool_call_id": "", + "usage": null, + "subchats": null + }, { "role": "user", "content": "use psql tool to list all tables", @@ -88,6 +106,15 @@ curl -X POST http://localhost:8001/v1/links \ "chat_id": "chat-example", "model_name": "gpt-4o-mini", "messages": [ + { + "role": "system", + "content": "[mode3config] You are a refact configuration assistant", + "tool_calls": null, + "finish_reason": "", + "tool_call_id": "", + "usage": null, + "subchats": null + }, { "role": "system", "content": "[mode3config] You are a refact coding agent working on improving configuration files", diff --git a/src/agentic/generate_follow_up_message.rs b/src/agentic/generate_follow_up_message.rs new file mode 100644 index 000000000..2ed5261d1 --- /dev/null +++ b/src/agentic/generate_follow_up_message.rs @@ -0,0 +1,47 @@ +use std::sync::Arc; +use tokio::sync::{RwLock as ARwLock, Mutex as AMutex}; + +use crate::global_context::GlobalContext; +use crate::at_commands::at_commands::AtCommandsContext; +use crate::subchat::subchat_single; +use crate::call_validation::ChatMessage; + +pub async fn generate_follow_up_message( + mut messages: Vec, + gcx: Arc>, + model_name: &str, + chat_id: &str, +) -> Result { + if messages.first().map(|m| m.role == "system").unwrap_or(false) { + messages.remove(0); + } + messages.insert(0, ChatMessage::new( + "system".to_string(), + "Generate a 2-3 word user response, like 'Can you fix it?' for errors or 'Proceed' for plan validation".to_string(), + )); + let ccx = Arc::new(AMutex::new(AtCommandsContext::new( + gcx.clone(), + 1024, + 1, + false, + messages.clone(), + chat_id.to_string(), + false, + ).await)); + let new_messages = subchat_single( + ccx.clone(), + model_name, + messages, + vec![], + None, + false, + Some(0.5), + None, + 1, + None, + None, + None, + ).await?; + new_messages.into_iter().next().map(|x| x.into_iter().last().map(|last_m| { + last_m.content.content_text_only() })).flatten().ok_or("No commit message found".to_string()) +} \ No newline at end of file diff --git a/src/agentic/mod.rs b/src/agentic/mod.rs index 483f4493a..90dabecf3 100644 --- a/src/agentic/mod.rs +++ b/src/agentic/mod.rs @@ -1 +1,2 @@ -pub mod generate_commit_message; \ No newline at end of file +pub mod generate_commit_message; +pub mod generate_follow_up_message; \ No newline at end of file diff --git a/src/files_correction.rs b/src/files_correction.rs index 608bad883..ddb4be2a6 100644 --- a/src/files_correction.rs +++ b/src/files_correction.rs @@ -4,6 +4,8 @@ use std::sync::Arc; use std::time::Instant; use tokio::sync::RwLock as ARwLock; use tracing::info; + +use crate::files_in_workspace::detect_vcs_for_a_file_path; use crate::global_context::GlobalContext; use crate::fuzzy_search::fuzzy_search; @@ -271,6 +273,15 @@ pub async fn get_project_dirs(gcx: Arc>) -> Vec workspace_folders.iter().cloned().collect::>() } +pub async fn get_active_project_path(gcx: Arc>) -> Option { + let active_file = gcx.read().await.documents_state.active_file_path.clone(); + let workspace_folders = get_project_dirs(gcx.clone()).await; + if workspace_folders.is_empty() { return None; } + + Some(detect_vcs_for_a_file_path(&active_file.unwrap_or_else(|| workspace_folders[0].clone())) + .await.map(|(path, _)| path).unwrap_or_else(|| workspace_folders[0].clone())) +} + pub async fn shortify_paths(gcx: Arc>, paths: &Vec) -> Vec 
{ let (_, indexed_paths) = files_cache_rebuild_as_needed(gcx.clone()).await; let workspace_folders = get_project_dirs(gcx.clone()).await diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 100e13ea1..4c813bc5d 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -1,20 +1,18 @@ -use std::path::PathBuf; use std::sync::Arc; use axum::Extension; use axum::http::{Response, StatusCode}; use hyper::Body; use serde::{Deserialize, Serialize}; -use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; +use tokio::sync::RwLock as ARwLock; use tracing::error; use crate::agentic::generate_commit_message::generate_commit_message_by_diff; -use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::ChatMessage; use crate::custom_error::ScratchError; use crate::global_context::GlobalContext; use crate::integrations::go_to_configuration_message; -use crate::subchat::subchat_single; use crate::tools::tool_patch_aux::tickets_parsing::get_tickets_from_messages; +use crate::agentic::generate_follow_up_message::generate_follow_up_message; #[derive(Serialize, Deserialize, Clone, Debug)] pub struct LinksPost { @@ -41,6 +39,29 @@ pub struct Link { goto: Option, } +// TODO: Move this to a more appropiate file +#[derive(PartialEq, Debug)] +pub enum ChatMode { + Quick, + Exploration, + Agentic, + Configuration, +} + +// TODO: Move this to a more appropiate file +pub async fn get_chat_mode(messages: &Vec) -> Result { + let system_prompt_content = messages.first().filter(|m| m.role == "system") + .ok_or("No system prompt found")?.content.content_text_only(); + + match system_prompt_content.as_str() { + content if content.contains("[mode1]") => Ok(ChatMode::Quick), + content if content.contains("[mode2]") => Ok(ChatMode::Exploration), + content if content.contains("[mode3]") => Ok(ChatMode::Agentic), + content if content.contains("[mode3config]") => Ok(ChatMode::Configuration), + _ => Err("No valid mode found in system prompt".to_string()), + } +} + pub async fn handle_v1_links( Extension(gcx): Extension>>, body_bytes: hyper::body::Bytes, @@ -48,6 +69,8 @@ pub async fn handle_v1_links( let post = serde_json::from_slice::(&body_bytes) .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; + let chat_mode = get_chat_mode(&post.messages).await.map_err(|e| error!(e)).ok().unwrap_or(ChatMode::Quick); + let mut links = Vec::new(); if post.messages.is_empty() && project_summarization_is_missing(gcx.clone()).await { @@ -58,8 +81,7 @@ pub async fn handle_v1_links( }); } - // TODO: Only do this for configuration chats, detect it in system prompt. - if !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { + if chat_mode == ChatMode::Configuration && !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { links.push(Link { action: LinkAction::PatchAll, text: "Save and return".to_string(), @@ -67,9 +89,9 @@ pub async fn handle_v1_links( }); } - // TODO: Only do this for "Agent" chat. 
- if let Ok(diff) = get_diff_with_all_changes_in_current_project(gcx.clone()).await.map_err(|e| error!(e)) { - if let Ok(commit_msg) = generate_commit_message_by_diff(gcx.clone(), &diff, &None).await.map_err(|e| error!(e)) { + if chat_mode == ChatMode::Agentic { + if let Ok(commit_msg) = generate_commit_messages_with_current_changes(gcx.clone()) + .await.map_err(|e| error!(e)) { links.push(Link { action: LinkAction::Commit, text: format!("git commit -m \"{}\"", commit_msg), @@ -78,17 +100,17 @@ pub async fn handle_v1_links( } } - // TODO: This probably should not appear in configuration chats, unless we can know that this is not the main one being configured - for failed_tool_name in failed_tool_names_after_last_user_message(&post.messages) { - links.push(Link { - action: LinkAction::Goto, - text: format!("Configure {failed_tool_name}"), - goto: Some(format!("SETTINGS:{failed_tool_name}")), - }) + if chat_mode != ChatMode::Configuration { + for failed_integr_name in failed_integration_names_after_last_user_message(&post.messages) { + links.push(Link { + action: LinkAction::Goto, + text: format!("Configure {failed_integr_name}"), + goto: Some(format!("SETTINGS:{failed_integr_name}")), + }) + } } - // TODO: Only do this for "Explore", "Agent" or configuration chats, detect it in system prompt. - if links.is_empty() { + if chat_mode != ChatMode::Quick && links.is_empty() { let follow_up_message = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.chat_id).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error generating follow-up message: {}", e)))?; links.push(Link { @@ -105,24 +127,17 @@ pub async fn handle_v1_links( .unwrap()) } -async fn get_diff_with_all_changes_in_current_project(gcx: Arc>) -> Result { - let active_project_path = get_active_project_path(gcx.clone()).await.ok_or("No active project found".to_string())?; +async fn generate_commit_messages_with_current_changes(gcx: Arc>) -> Result { + let active_project_path = crate::files_correction::get_active_project_path(gcx.clone()).await.ok_or("No active project found".to_string())?; let repository = git2::Repository::open(&active_project_path).map_err(|e| e.to_string())?; - crate::git::git_diff_from_all_changes(&repository) -} - -async fn get_active_project_path(gcx: Arc>) -> Option { - let active_file = gcx.read().await.documents_state.active_file_path.clone(); - let workspace_folders = crate::files_correction::get_project_dirs(gcx.clone()).await; - if workspace_folders.is_empty() { return None; } - - Some(crate::files_in_workspace::detect_vcs_for_a_file_path( - &active_file.unwrap_or_else(|| workspace_folders[0].clone()) - ).await.map(|(path, _)| path).unwrap_or_else(|| workspace_folders[0].clone())) + let diff = crate::git::git_diff_from_all_changes(&repository)?; + let commit_msg = generate_commit_message_by_diff(gcx.clone(), &diff, &None).await.map_err(|e| e.to_string())?; + Ok(commit_msg) } +// TODO: Move all logic below to more appropiate files async fn project_summarization_is_missing(gcx: Arc>) -> bool { - match get_active_project_path(gcx.clone()).await { + match crate::files_correction::get_active_project_path(gcx.clone()).await { Some(active_project_path) => { !active_project_path.join(".refact").join("project_summary.yaml").exists() } @@ -133,7 +148,7 @@ async fn project_summarization_is_missing(gcx: Arc>) -> b } } -fn failed_tool_names_after_last_user_message(messages: &Vec) -> Vec { +fn failed_integration_names_after_last_user_message(messages: &Vec) 
-> Vec { let last_user_msg_index = messages.iter().rposition(|m| m.role == "user").unwrap_or(0); let tool_calls = messages[last_user_msg_index..].iter().filter(|m| m.role == "assistant") .filter_map(|m| m.tool_calls.as_ref()).flatten().collect::>(); @@ -151,44 +166,4 @@ fn failed_tool_names_after_last_user_message(messages: &Vec) -> Vec result.sort(); result.dedup(); result -} - -async fn generate_follow_up_message( - mut messages: Vec, - gcx: Arc>, - model_name: &str, - chat_id: &str, -) -> Result { - if messages.first().map(|m| m.role == "system").unwrap_or(false) { - messages.remove(0); - } - messages.insert(0, ChatMessage::new( - "system".to_string(), - "Generate a 2-3 word user response, like 'Can you fix it?' for errors or 'Proceed' for plan validation".to_string(), - )); - let ccx = Arc::new(AMutex::new(AtCommandsContext::new( - gcx.clone(), - 1024, - 1, - false, - messages.clone(), - chat_id.to_string(), - false, - ).await)); - let new_messages = subchat_single( - ccx.clone(), - model_name, - messages, - vec![], - None, - false, - Some(0.5), - None, - 1, - None, - None, - None, - ).await?; - new_messages.into_iter().next().map(|x| x.into_iter().last().map(|last_m| { - last_m.content.content_text_only() })).flatten().ok_or("No commit message found".to_string()) } \ No newline at end of file From d723bc37b9842b279d34ccf908ecc6dcf2a8ce78 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Wed, 4 Dec 2024 13:52:23 +0100 Subject: [PATCH 046/185] fix: links endpoint works with meta --- examples/links.sh | 24 +++++++++++++++++---- src/call_validation.rs | 2 +- src/http/routers/v1/links.rs | 42 +++++++----------------------------- 3 files changed, 29 insertions(+), 39 deletions(-) diff --git a/examples/links.sh b/examples/links.sh index b83d2ce4f..a1bfb3f94 100644 --- a/examples/links.sh +++ b/examples/links.sh @@ -2,7 +2,11 @@ curl -X POST http://localhost:8001/v1/links \ -H "Content-Type: application/json" \ -d '{ - "chat_id": "chat-example", + "meta": { + "chat_id": "chat-example", + "chat_remote": false, + "chat_mode": "EXPLORE" + }, "model_name": "gpt-4o-mini", "messages": [ { @@ -39,7 +43,11 @@ curl -X POST http://localhost:8001/v1/links \ curl -X POST http://localhost:8001/v1/links \ -H "Content-Type: application/json" \ -d '{ - "chat_id": "chat-example", + "meta": { + "chat_id": "chat-example", + "chat_remote": false, + "chat_mode": "AGENT" + }, "model_name": "gpt-4o-mini", "messages": [ { @@ -94,7 +102,11 @@ curl -X POST http://localhost:8001/v1/links \ curl -X POST http://localhost:8001/v1/links \ -H "Content-Type: application/json" \ -d '{ - "chat_id": "chat-example", + "meta": { + "chat_id": "chat-example", + "chat_remote": false, + "chat_mode": "NOTOOLS" + }, "model_name": "gpt-4o-mini", "messages": [] }' @@ -103,7 +115,11 @@ curl -X POST http://localhost:8001/v1/links \ curl -X POST http://localhost:8001/v1/links \ -H "Content-Type: application/json" \ -d '{ - "chat_id": "chat-example", + "meta": { + "chat_id": "chat-example", + "chat_remote": false, + "chat_mode": "CONFIGURE" + }, "model_name": "gpt-4o-mini", "messages": [ { diff --git a/src/call_validation.rs b/src/call_validation.rs index 0700617ac..d1dece971 100644 --- a/src/call_validation.rs +++ b/src/call_validation.rs @@ -208,7 +208,7 @@ pub struct ChatMeta { pub current_config_file: String, } -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] #[serde(rename_all = "UPPERCASE")] pub enum ChatMode { NoTools, diff --git 
a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 4c813bc5d..1881778ab 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -7,18 +7,18 @@ use tokio::sync::RwLock as ARwLock; use tracing::error; use crate::agentic::generate_commit_message::generate_commit_message_by_diff; -use crate::call_validation::ChatMessage; +use crate::call_validation::{ChatMessage, ChatMeta, ChatMode}; use crate::custom_error::ScratchError; use crate::global_context::GlobalContext; use crate::integrations::go_to_configuration_message; use crate::tools::tool_patch_aux::tickets_parsing::get_tickets_from_messages; use crate::agentic::generate_follow_up_message::generate_follow_up_message; -#[derive(Serialize, Deserialize, Clone, Debug)] +#[derive(Deserialize, Clone, Debug)] pub struct LinksPost { messages: Vec, model_name: String, - chat_id: String, + meta: ChatMeta, } #[derive(Serialize, Deserialize, Debug)] @@ -39,38 +39,12 @@ pub struct Link { goto: Option, } -// TODO: Move this to a more appropiate file -#[derive(PartialEq, Debug)] -pub enum ChatMode { - Quick, - Exploration, - Agentic, - Configuration, -} - -// TODO: Move this to a more appropiate file -pub async fn get_chat_mode(messages: &Vec) -> Result { - let system_prompt_content = messages.first().filter(|m| m.role == "system") - .ok_or("No system prompt found")?.content.content_text_only(); - - match system_prompt_content.as_str() { - content if content.contains("[mode1]") => Ok(ChatMode::Quick), - content if content.contains("[mode2]") => Ok(ChatMode::Exploration), - content if content.contains("[mode3]") => Ok(ChatMode::Agentic), - content if content.contains("[mode3config]") => Ok(ChatMode::Configuration), - _ => Err("No valid mode found in system prompt".to_string()), - } -} - pub async fn handle_v1_links( Extension(gcx): Extension>>, body_bytes: hyper::body::Bytes, ) -> Result, ScratchError> { let post = serde_json::from_slice::(&body_bytes) .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; - - let chat_mode = get_chat_mode(&post.messages).await.map_err(|e| error!(e)).ok().unwrap_or(ChatMode::Quick); - let mut links = Vec::new(); if post.messages.is_empty() && project_summarization_is_missing(gcx.clone()).await { @@ -81,7 +55,7 @@ pub async fn handle_v1_links( }); } - if chat_mode == ChatMode::Configuration && !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { + if post.meta.chat_mode == ChatMode::Configure && !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { links.push(Link { action: LinkAction::PatchAll, text: "Save and return".to_string(), @@ -89,7 +63,7 @@ pub async fn handle_v1_links( }); } - if chat_mode == ChatMode::Agentic { + if post.meta.chat_mode == ChatMode::Agent { if let Ok(commit_msg) = generate_commit_messages_with_current_changes(gcx.clone()) .await.map_err(|e| error!(e)) { links.push(Link { @@ -100,7 +74,7 @@ pub async fn handle_v1_links( } } - if chat_mode != ChatMode::Configuration { + if post.meta.chat_mode != ChatMode::Configure { for failed_integr_name in failed_integration_names_after_last_user_message(&post.messages) { links.push(Link { action: LinkAction::Goto, @@ -110,8 +84,8 @@ pub async fn handle_v1_links( } } - if chat_mode != ChatMode::Quick && links.is_empty() { - let follow_up_message = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.chat_id).await + if post.meta.chat_mode != ChatMode::NoTools && links.is_empty() { + let follow_up_message = 
generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.meta.chat_id).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error generating follow-up message: {}", e)))?; links.push(Link { action: LinkAction::FollowUp, From 42dfb9d5a5aa1a62648b594289b1817e2c050bb1 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 4 Dec 2024 17:28:18 +0100 Subject: [PATCH 047/185] warnings and remove some code --- src/http/routers/v1/chat.rs | 1 - src/integrations/setting_up_integrations.rs | 2 +- src/scratchpad_abstract.rs | 7 +++--- src/scratchpads/chat_generic.rs | 4 ---- src/scratchpads/chat_llama2.rs | 1 - src/scratchpads/chat_passthrough.rs | 6 +---- src/scratchpads/chat_utils_limit_history.rs | 5 +--- src/scratchpads/code_completion_fim.rs | 5 ++-- src/scratchpads/code_completion_replace.rs | 26 ++++++++++----------- src/scratchpads/mod.rs | 11 ++++----- src/subchat.rs | 1 - src/yaml_configs/customization_loader.rs | 2 +- 12 files changed, 26 insertions(+), 45 deletions(-) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index f9c14933f..bd5ec0f46 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -233,7 +233,6 @@ async fn _chat( allow_at, supports_tools, supports_clicks, - should_execute_remotely, ).await.map_err(|e| ScratchError::new(StatusCode::BAD_REQUEST, e) )?; diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 012c645b1..8648911ac 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -185,7 +185,7 @@ pub fn read_integrations_d( // 3. Replace vars in config_unparsed for rec in &mut result { if let serde_json::Value::Object(map) = &mut rec.config_unparsed { - for (key, value) in map.iter_mut() { + for (_key, value) in map.iter_mut() { if let Some(str_value) = value.as_str() { let replaced_value = vars_for_replacements.iter().fold(str_value.to_string(), |acc, (var, replacement)| { acc.replace(&format!("${}", var), replacement) diff --git a/src/scratchpad_abstract.rs b/src/scratchpad_abstract.rs index de0c73c70..dd9e9fa99 100644 --- a/src/scratchpad_abstract.rs +++ b/src/scratchpad_abstract.rs @@ -32,7 +32,7 @@ impl FinishReason { } } } - + pub fn from_json_val(json: &Value) -> Result { if json.is_null() { return Ok(FinishReason::None); @@ -43,7 +43,7 @@ impl FinishReason { Err(format!("expected string, got {}", json)) } } - + pub fn to_string(&self) -> String { match self { FinishReason::None => "".to_string(), @@ -53,7 +53,7 @@ impl FinishReason { FinishReason::ScratchpadStop => "stop".to_string(), } } - + pub fn to_json_val(&self) -> Value { match self { FinishReason::None => Value::Null, @@ -73,7 +73,6 @@ pub trait ScratchpadAbstract: Send { patch: &Value, exploration_tools: bool, agentic_tools: bool, - should_execute_remotely: bool, ) -> Result<(), String>; async fn prompt( diff --git a/src/scratchpads/chat_generic.rs b/src/scratchpads/chat_generic.rs index 4755e289e..54b23b8e2 100644 --- a/src/scratchpads/chat_generic.rs +++ b/src/scratchpads/chat_generic.rs @@ -34,7 +34,6 @@ pub struct GenericChatScratchpad { pub keyword_user: String, pub keyword_asst: String, pub has_rag_results: HasRagResults, - pub global_context: Arc>, pub allow_at: bool, } @@ -43,7 +42,6 @@ impl GenericChatScratchpad { tokenizer: Arc>, post: &ChatPost, messages: &Vec, - global_context: Arc>, allow_at: bool, ) -> Self { GenericChatScratchpad { @@ -57,7 +55,6 @@ impl GenericChatScratchpad { 
keyword_user: "".to_string(), keyword_asst: "".to_string(), has_rag_results: HasRagResults::new(), - global_context, allow_at, } } @@ -70,7 +67,6 @@ impl ScratchpadAbstract for GenericChatScratchpad { patch: &Value, exploration_tools: bool, agentic_tools: bool, - _should_execute_remotely: bool, ) -> Result<(), String> { self.token_bos = patch.get("token_bos").and_then(|x| x.as_str()).unwrap_or("").to_string(); self.token_esc = patch.get("token_esc").and_then(|x| x.as_str()).unwrap_or("").to_string(); diff --git a/src/scratchpads/chat_llama2.rs b/src/scratchpads/chat_llama2.rs index 9d1f96310..9fe7dc7c7 100644 --- a/src/scratchpads/chat_llama2.rs +++ b/src/scratchpads/chat_llama2.rs @@ -65,7 +65,6 @@ impl ScratchpadAbstract for ChatLlama2 { patch: &Value, exploration_tools: bool, agentic_tools: bool, - _should_execute_remotely: bool, ) -> Result<(), String> { self.keyword_s = patch.get("s").and_then(|x| x.as_str()).unwrap_or("").to_string(); self.keyword_slash_s = patch.get("slash_s").and_then(|x| x.as_str()).unwrap_or("").to_string(); diff --git a/src/scratchpads/chat_passthrough.rs b/src/scratchpads/chat_passthrough.rs index 6637c045c..9faec7bc9 100644 --- a/src/scratchpads/chat_passthrough.rs +++ b/src/scratchpads/chat_passthrough.rs @@ -57,7 +57,6 @@ pub struct ChatPassthrough { pub messages: Vec, pub has_rag_results: HasRagResults, pub delta_sender: DeltaSender, - pub global_context: Arc>, pub allow_at: bool, pub supports_tools: bool, pub supports_clicks: bool, @@ -68,7 +67,6 @@ impl ChatPassthrough { tokenizer: Arc>, post: &ChatPost, messages: &Vec, - global_context: Arc>, allow_at: bool, supports_tools: bool, supports_clicks: bool, @@ -79,7 +77,6 @@ impl ChatPassthrough { messages: messages.clone(), has_rag_results: HasRagResults::new(), delta_sender: DeltaSender::new(), - global_context, allow_at, supports_tools, supports_clicks, @@ -94,7 +91,6 @@ impl ScratchpadAbstract for ChatPassthrough { _patch: &Value, exploration_tools: bool, agentic_tools: bool, - should_execute_remotely: bool, ) -> Result<(), String> { Ok(()) } @@ -124,7 +120,7 @@ impl ScratchpadAbstract for ChatPassthrough { run_tools_locally(ccx.clone(), at_tools.clone(), self.t.tokenizer.clone(), sampling_parameters_to_patch.max_new_tokens, &messages, &mut self.has_rag_results, &style).await? 
} }; - let mut limited_msgs = limit_messages_history(&self.t, &messages, undroppable_msg_n, sampling_parameters_to_patch.max_new_tokens, n_ctx).unwrap_or_else(|e| { + let limited_msgs = limit_messages_history(&self.t, &messages, undroppable_msg_n, sampling_parameters_to_patch.max_new_tokens, n_ctx).unwrap_or_else(|e| { error!("error limiting messages: {}", e); vec![] }); diff --git a/src/scratchpads/chat_utils_limit_history.rs b/src/scratchpads/chat_utils_limit_history.rs index 4cd4d458c..148f6d59a 100644 --- a/src/scratchpads/chat_utils_limit_history.rs +++ b/src/scratchpads/chat_utils_limit_history.rs @@ -16,16 +16,13 @@ pub fn limit_messages_history( let mut tokens_used: i32 = 0; let mut message_token_count: Vec = vec![0; messages.len()]; let mut message_take: Vec = vec![false; messages.len()]; - let mut have_system = false; for (i, msg) in messages.iter().enumerate() { let tcnt = 3 + msg.content.count_tokens(t.tokenizer.clone(), &None)?; message_token_count[i] = tcnt; if i==0 && msg.role == "system" { message_take[i] = true; tokens_used += tcnt; - have_system = true; - } - if i >= last_user_msg_starts { + } else if i >= last_user_msg_starts { message_take[i] = true; tokens_used += tcnt; } diff --git a/src/scratchpads/code_completion_fim.rs b/src/scratchpads/code_completion_fim.rs index f15b70d93..b85efdd09 100644 --- a/src/scratchpads/code_completion_fim.rs +++ b/src/scratchpads/code_completion_fim.rs @@ -81,7 +81,6 @@ impl ScratchpadAbstract for FillInTheMiddleScratchpad { patch: &Value, _exploration_tools: bool, _agentic_tools: bool, - _should_execute_remotely: bool, ) -> Result<(), String> { // That will work for some models (starcoder) without patching self.fim_prefix = patch.get("fim_prefix").and_then(|x| x.as_str()).unwrap_or("").to_string(); @@ -334,8 +333,8 @@ impl ScratchpadAbstract for FillInTheMiddleScratchpad { } fn response_message_n_choices( - &mut self, - _choices: Vec, + &mut self, + _choices: Vec, _finish_reasons: Vec ) -> Result { Err("not implemented".to_string()) diff --git a/src/scratchpads/code_completion_replace.rs b/src/scratchpads/code_completion_replace.rs index 2cb207eae..39405cb3b 100644 --- a/src/scratchpads/code_completion_replace.rs +++ b/src/scratchpads/code_completion_replace.rs @@ -27,8 +27,8 @@ use crate::ast::ast_structs::AstDefinition; use crate::scratchpads::completon_rag::retrieve_ast_based_extra_context; const DEBUG: bool = false; -const SYSTEM_PROMPT: &str = r#"You are given a code file, from that file and an extra context from other files. -An unfinished line in the is marked with the . +const SYSTEM_PROMPT: &str = r#"You are given a code file, from that file and an extra context from other files. +An unfinished line in the is marked with the . Your task is to complete the code after the by rewriting the using the provided context and make the . Ensure the introduces all necessary updates to the such as code completion, function definitions, or comments. Keep identation symbols unchanged. 
Do not output multiple blocks and make sure changes are made only after the "#; @@ -286,7 +286,7 @@ fn skip_similar_rows(pred_text: &Vec, text_to_remove: &Vec) -> V ]; simple_tokens.contains(&s.as_str()) } - + let mut pred_text_trimmed = pred_text.clone(); for to_remove_row in text_to_remove.iter() { if pred_text_trimmed.is_empty() { @@ -295,7 +295,7 @@ fn skip_similar_rows(pred_text: &Vec, text_to_remove: &Vec) -> V // if is_too_simple_to_compare(to_remove_row) { // continue // } - + for idx in 0..(if to_remove_row.trim().is_empty() {1} else {pred_text_trimmed.len()}) { if *to_remove_row == pred_text_trimmed[idx] { pred_text_trimmed = pred_text_trimmed[idx + 1..].to_vec(); @@ -378,7 +378,7 @@ fn unfence_the_last_code_block(text: &String) -> Option { if let Some(block) = current_block { blocks.push(block); } - + blocks.iter().last().cloned() } @@ -396,7 +396,7 @@ fn process_n_choices( let before_lines_str = subblock_ref.before_lines_str(); let cursor_line = subblock_ref.cursor_line.trim_end().to_string(); let cursor_line_is_empty = cursor_line.replace(" ", "").replace("\t", "").is_empty(); - + let json_choices = choices .iter() .enumerate() @@ -415,7 +415,7 @@ fn process_n_choices( let mut cc = x.clone(); if let Some(last_fenced_block) = unfence_the_last_code_block(&cc) { cc = last_fenced_block; - + // First, we're trying to locate cursor position and remove everything above it let pred_lines = cc.lines().map(|x| x.to_string()).collect::>(); let cursor_idx_mb = if !cursor_line_is_empty { @@ -427,7 +427,7 @@ fn process_n_choices( .collect::>(); if cursor_matches.len() != 1 { None } else { cursor_matches.get(0).copied() } } else { None }; - + if let Some(idx) = cursor_idx_mb { cc = pred_lines[idx..].join("\n") } else { @@ -600,7 +600,6 @@ impl ScratchpadAbstract for CodeCompletionReplaceScratchpad { patch: &Value, _exploration_tools: bool, _agentic_tools: bool, - _should_execute_remotely: bool, ) -> Result<(), String> { self.token_bos = patch .get("token_bos") @@ -806,7 +805,7 @@ impl ScratchpadAbstract for CodeCompletionReplaceScratchpad { } )) } - + fn response_streaming( &mut self, _delta: String, @@ -884,7 +883,6 @@ impl ScratchpadAbstract for CodeCompletionReplacePassthroughScratchpad { patch: &Value, _exploration_tools: bool, _agentic_tools: bool, - _should_execute_remotely: bool, ) -> Result<(), String> { self.t.context_format = patch .get("context_format") @@ -1023,13 +1021,13 @@ impl ScratchpadAbstract for CodeCompletionReplacePassthroughScratchpad { })) .unwrap(); let prompt = format!("PASSTHROUGH {json_messages}").to_string(); - + let completion_ms = completion_t0.elapsed().as_millis() as i32; self.context_used["fim_ms"] = Value::from(completion_ms); self.context_used["n_ctx".to_string()] = Value::from(n_ctx as i64); self.context_used["rag_tokens_limit".to_string()] = Value::from(rag_tokens_n as i64); info!(" -- /post completion {}ms-- ", completion_ms); - + if DEBUG { info!( "chat re-encode whole prompt again gives {} tokens", @@ -1038,7 +1036,7 @@ impl ScratchpadAbstract for CodeCompletionReplacePassthroughScratchpad { } Ok(prompt) } - + fn response_n_choices( &mut self, _choices: Vec, diff --git a/src/scratchpads/mod.rs b/src/scratchpads/mod.rs index aca0ae775..d04b5e9de 100644 --- a/src/scratchpads/mod.rs +++ b/src/scratchpads/mod.rs @@ -63,7 +63,7 @@ pub async fn create_code_completion_scratchpad( } else { return Err(format!("This rust binary doesn't have code completion scratchpad \"{}\" compiled in", scratchpad_name)); } - 
result.apply_model_adaptation_patch(scratchpad_patch, false, false, false).await?; + result.apply_model_adaptation_patch(scratchpad_patch, false, false).await?; verify_has_send(&result); Ok(result) } @@ -79,22 +79,21 @@ pub async fn create_chat_scratchpad( allow_at: bool, supports_tools: bool, supports_clicks: bool, - should_execute_remotely: bool, ) -> Result, String> { let mut result: Box; let tokenizer_arc = cached_tokenizers::cached_tokenizer(caps, global_context.clone(), model_name_for_tokenizer).await?; if scratchpad_name == "CHAT-GENERIC" { result = Box::new(chat_generic::GenericChatScratchpad::new( - tokenizer_arc.clone(), post, messages, global_context.clone(), allow_at + tokenizer_arc.clone(), post, messages, allow_at )); } else if scratchpad_name == "CHAT-LLAMA2" { result = Box::new(chat_llama2::ChatLlama2::new( - tokenizer_arc.clone(), post, messages, global_context.clone(), allow_at + tokenizer_arc.clone(), post, messages, allow_at )); } else if scratchpad_name == "PASSTHROUGH" { post.stream = Some(true); // this should be passed from the request result = Box::new(chat_passthrough::ChatPassthrough::new( - tokenizer_arc.clone(), post, messages, global_context.clone(), allow_at, supports_tools, supports_clicks + tokenizer_arc.clone(), post, messages, allow_at, supports_tools, supports_clicks )); } else { return Err(format!("This rust binary doesn't have chat scratchpad \"{}\" compiled in", scratchpad_name)); @@ -116,7 +115,7 @@ pub async fn create_chat_scratchpad( } } } - result.apply_model_adaptation_patch(scratchpad_patch, exploration_tools, agentic_tools, should_execute_remotely).await?; + result.apply_model_adaptation_patch(scratchpad_patch, exploration_tools, agentic_tools).await?; verify_has_send(&result); Ok(result) } diff --git a/src/subchat.rs b/src/subchat.rs index 87c8fcbb4..1525d9554 100644 --- a/src/subchat.rs +++ b/src/subchat.rs @@ -91,7 +91,6 @@ async fn create_chat_post_and_scratchpad( false, supports_tools, supports_clicks, - should_execute_remotely, ).await?; Ok((chat_post, scratchpad)) diff --git a/src/yaml_configs/customization_loader.rs b/src/yaml_configs/customization_loader.rs index d68b014fc..5fa7d11a1 100644 --- a/src/yaml_configs/customization_loader.rs +++ b/src/yaml_configs/customization_loader.rs @@ -212,7 +212,7 @@ pub async fn load_customization( // String::new() // } // }; - let mut system_prompt_vars = HashMap::new(); + let system_prompt_vars = HashMap::new(); // let system_prompt_vars = if competency_yaml.is_empty() { // let mut map = HashMap::new(); From 54bb8505598c3d1d7d9128da12e13e0f43ae2cc0 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 4 Dec 2024 17:37:55 +0100 Subject: [PATCH 048/185] more warnings and code removal --- .../docker/docker_container_manager.rs | 1 - src/scratchpads/chat_generic.rs | 6 ++---- src/scratchpads/chat_llama2.rs | 18 +++++------------- src/scratchpads/chat_passthrough.rs | 10 ++++------ src/subchat.rs | 2 +- 5 files changed, 12 insertions(+), 25 deletions(-) diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index 9fe4751bb..648a46503 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -1,7 +1,6 @@ use std::path::PathBuf; use std::{sync::Arc, sync::Weak, time::SystemTime}; use std::future::Future; -use serde::{Deserialize, Serialize}; use tokio::fs::File; use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; use tokio::time::Duration; diff --git 
a/src/scratchpads/chat_generic.rs b/src/scratchpads/chat_generic.rs index 54b23b8e2..b0a5bb027 100644 --- a/src/scratchpads/chat_generic.rs +++ b/src/scratchpads/chat_generic.rs @@ -4,14 +4,12 @@ use std::sync::RwLock; use async_trait::async_trait; use serde_json::Value; use tokenizers::Tokenizer; -use tokio::sync::RwLock as ARwLock; use tokio::sync::Mutex as AMutex; use tracing::{info, error}; use crate::at_commands::execute_at::run_at_commands; use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ChatMessage, ChatPost, ContextFile, SamplingParameters}; -use crate::global_context::GlobalContext; use crate::scratchpad_abstract::{FinishReason, HasTokenizerAndEot, ScratchpadAbstract}; use crate::scratchpads::chat_utils_deltadelta::DeltaDeltaChatStreamer; use crate::scratchpads::chat_utils_limit_history::limit_messages_history; @@ -65,8 +63,8 @@ impl ScratchpadAbstract for GenericChatScratchpad { async fn apply_model_adaptation_patch( &mut self, patch: &Value, - exploration_tools: bool, - agentic_tools: bool, + _exploration_tools: bool, + _agentic_tools: bool, ) -> Result<(), String> { self.token_bos = patch.get("token_bos").and_then(|x| x.as_str()).unwrap_or("").to_string(); self.token_esc = patch.get("token_esc").and_then(|x| x.as_str()).unwrap_or("").to_string(); diff --git a/src/scratchpads/chat_llama2.rs b/src/scratchpads/chat_llama2.rs index 9fe7dc7c7..ee5d1d611 100644 --- a/src/scratchpads/chat_llama2.rs +++ b/src/scratchpads/chat_llama2.rs @@ -3,14 +3,12 @@ use std::sync::RwLock as StdRwLock; use async_trait::async_trait; use serde_json::Value; use tokenizers::Tokenizer; -use tokio::sync::RwLock as ARwLock; use tokio::sync::Mutex as AMutex; use tracing::{info, error}; use crate::at_commands::execute_at::run_at_commands; use crate::at_commands::at_commands::AtCommandsContext; -use crate::call_validation::{ChatContent, ChatMessage, ChatPost, ContextFile, SamplingParameters}; -use crate::global_context::GlobalContext; +use crate::call_validation::{ChatMessage, ChatPost, ContextFile, SamplingParameters}; use crate::scratchpad_abstract::{FinishReason, HasTokenizerAndEot, ScratchpadAbstract}; use crate::scratchpads::chat_utils_deltadelta::DeltaDeltaChatStreamer; use crate::scratchpads::chat_utils_limit_history::limit_messages_history; @@ -30,7 +28,6 @@ pub struct ChatLlama2 { pub keyword_s: String, // "SYSTEM:" keyword means it's not one token pub keyword_slash_s: String, pub has_rag_results: HasRagResults, - pub global_context: Arc>, pub allow_at: bool, } @@ -40,7 +37,6 @@ impl ChatLlama2 { tokenizer: Arc>, post: &ChatPost, messages: &Vec, - global_context: Arc>, allow_at: bool, ) -> Self { ChatLlama2 { @@ -52,7 +48,6 @@ impl ChatLlama2 { keyword_slash_s: "".to_string(), // default_system_message: "".to_string(), has_rag_results: HasRagResults::new(), - global_context, allow_at, } } @@ -63,8 +58,8 @@ impl ScratchpadAbstract for ChatLlama2 { async fn apply_model_adaptation_patch( &mut self, patch: &Value, - exploration_tools: bool, - agentic_tools: bool, + _exploration_tools: bool, + _agentic_tools: bool, ) -> Result<(), String> { self.keyword_s = patch.get("s").and_then(|x| x.as_str()).unwrap_or("").to_string(); self.keyword_slash_s = patch.get("slash_s").and_then(|x| x.as_str()).unwrap_or("").to_string(); @@ -82,16 +77,13 @@ impl ScratchpadAbstract for ChatLlama2 { ccx: Arc>, sampling_parameters_to_patch: &mut SamplingParameters, ) -> Result { - let (n_ctx, gcx) = { - let ccx_locked = ccx.lock().await; - (ccx_locked.n_ctx, ccx_locked.global_context.clone()) 
- }; + let n_ctx = ccx.lock().await.n_ctx; let (messages, undroppable_msg_n, _any_context_produced) = if self.allow_at { run_at_commands(ccx.clone(), self.t.tokenizer.clone(), sampling_parameters_to_patch.max_new_tokens, &self.messages, &mut self.has_rag_results).await } else { (self.messages.clone(), self.messages.len(), false) }; - let mut limited_msgs: Vec = limit_messages_history(&self.t, &messages, undroppable_msg_n, sampling_parameters_to_patch.max_new_tokens, n_ctx)?; + let limited_msgs: Vec = limit_messages_history(&self.t, &messages, undroppable_msg_n, sampling_parameters_to_patch.max_new_tokens, n_ctx)?; sampling_parameters_to_patch.stop = self.dd.stop_list.clone(); // loosely adapted from https://huggingface.co/spaces/huggingface-projects/llama-2-13b-chat/blob/main/model.py#L24 let mut prompt = "".to_string(); diff --git a/src/scratchpads/chat_passthrough.rs b/src/scratchpads/chat_passthrough.rs index 9faec7bc9..02eed8b20 100644 --- a/src/scratchpads/chat_passthrough.rs +++ b/src/scratchpads/chat_passthrough.rs @@ -1,16 +1,14 @@ use std::sync::Arc; use std::sync::RwLock as StdRwLock; - -use async_trait::async_trait; use serde_json::{json, Value}; use tokenizers::Tokenizer; -use tokio::sync::RwLock as ARwLock; use tokio::sync::Mutex as AMutex; +use async_trait::async_trait; use tracing::{error, info}; + use crate::at_commands::execute_at::run_at_commands; use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ChatMessage, ChatPost, SamplingParameters}; -use crate::global_context::GlobalContext; use crate::scratchpad_abstract::{FinishReason, HasTokenizerAndEot, ScratchpadAbstract}; use crate::scratchpads::chat_utils_limit_history::limit_messages_history; use crate::scratchpads::scratchpad_utils::HasRagResults; @@ -89,8 +87,8 @@ impl ScratchpadAbstract for ChatPassthrough { async fn apply_model_adaptation_patch( &mut self, _patch: &Value, - exploration_tools: bool, - agentic_tools: bool, + _exploration_tools: bool, + _agentic_tools: bool, ) -> Result<(), String> { Ok(()) } diff --git a/src/subchat.rs b/src/subchat.rs index 1525d9554..2ca6b739e 100644 --- a/src/subchat.rs +++ b/src/subchat.rs @@ -30,7 +30,7 @@ async fn create_chat_post_and_scratchpad( tools: Option>, tool_choice: Option, only_deterministic_messages: bool, - should_execute_remotely: bool, + _should_execute_remotely: bool, ) -> Result<(ChatPost, Box), String> { let caps = try_load_caps_quickly_if_not_present( global_context.clone(), 0, From 29cde9aef14c4948e0ceb13236883ebd881b2bf9 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 4 Dec 2024 18:17:21 +0100 Subject: [PATCH 049/185] fix some unwrap()s --- src/restream.rs | 53 ++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 41 insertions(+), 12 deletions(-) diff --git a/src/restream.rs b/src/restream.rs index 97613e8c7..fe9164e91 100644 --- a/src/restream.rs +++ b/src/restream.rs @@ -7,7 +7,7 @@ use futures::StreamExt; use hyper::{Body, Response, StatusCode}; use reqwest_eventsource::Event; use serde_json::json; -use tracing::{error, info, warn}; +use tracing::info; use crate::call_validation::SamplingParameters; use crate::custom_error::ScratchError; @@ -149,15 +149,25 @@ pub async fn scratchpad_interaction_not_stream_json( model_says["choices"] = serde_json::Value::Array(vec![]); } scratchpad_result = Ok(model_says.clone()); + } else if let Some(hf_arr) = model_says.as_array() { - let choices = hf_arr.iter().map(|x| x.get("generated_text").unwrap().as_str().unwrap().to_string()).collect::>(); + let choices = 
hf_arr.iter().map(|x| { + x.get("generated_text") + .and_then(|val| val.as_str()) + .map(|s| s.to_string()) + .unwrap_or_else(|| { + tracing::error!("Failed to get generated_text or convert to str"); + "".to_string() + }) + }).collect::>(); let finish_reasons = vec![FinishReason::Length; choices.len()]; scratchpad_result = scratchpad.response_n_choices(choices, finish_reasons); + } else if let Some(oai_choices) = model_says.clone().get("choices") { let choice0 = oai_choices.as_array().unwrap().get(0).unwrap(); let finish_reasons = oai_choices.clone().as_array().unwrap().iter().map( |x| FinishReason::from_json_val(x.get("finish_reason").unwrap_or(&json!(""))).unwrap_or_else(|err| { - warn!("Couldn't parse finish_reason: {err}. Fallback to finish_reason=null"); + tracing::error!("Couldn't parse finish_reason: {err}. Fallback to finish_reason=null"); FinishReason::None }) ).collect::>(); @@ -165,14 +175,25 @@ pub async fn scratchpad_interaction_not_stream_json( if let Ok(det_msgs) = scratchpad.response_spontaneous() { model_says["deterministic_messages"] = json!(det_msgs); } - let choices = oai_choices.clone().as_array().unwrap().iter().map(|x| x.get("message").unwrap().get("content").unwrap().as_str().unwrap().to_string()).collect::>(); + let choices = oai_choices.clone().as_array().unwrap().iter().map(|x| { + match (x.get("message"), x.get("message").and_then(|msg| msg.get("content")), x.get("message").and_then(|msg| msg.get("content")).and_then(|content| content.as_str())) { + (Some(_), Some(_), Some(content)) => content.to_string(), + (msg, content, as_str) => { + tracing::error!( + "Failed to get message content: msg={:?}, content={:?}, as_str={:?}", + msg, content, as_str + ); + "".to_string() + } + } + }).collect::>(); scratchpad_result = match scratchpad.response_message_n_choices(choices, finish_reasons) { Ok(res) => Ok(res), Err(err) => { if err == "not implemented" { info!("scratchpad doesn't implement response_message_n_choices, passing the original message through"); Ok(model_says.clone()) - } else { + } else { Err(err) } } @@ -182,7 +203,15 @@ pub async fn scratchpad_interaction_not_stream_json( // for oai_choice in oai_choices.as_array().unwrap() { // let index = oai_choice.get("index").unwrap().as_u64().unwrap() as usize; // } - let choices = oai_choices.as_array().unwrap().iter().map(|x| x.get("text").unwrap().as_str().unwrap().to_string()).collect::>(); + let choices = oai_choices.as_array().unwrap().iter().map(|x| { + x.get("text") + .and_then(|val| val.as_str()) + .map(|s| s.to_string()) + .unwrap_or_else(|| { + tracing::error!("Failed to get text or convert to str"); + "".to_string() + }) + }).collect::>(); scratchpad_result = scratchpad.response_n_choices(choices, finish_reasons); } @@ -350,7 +379,7 @@ pub async fn scratchpad_interaction_stream( } } else { let err_str = value_maybe.unwrap_err(); - error!("response_spontaneous error: {}", err_str); + tracing::error!("response_spontaneous error: {}", err_str); let value_str = format!("data: {}\n\n", serde_json::to_string(&json!({"detail": err_str})).unwrap()); yield Result::<_, String>::Ok(value_str); } @@ -390,7 +419,7 @@ pub async fn scratchpad_interaction_stream( false, e_str.to_string(), )); - error!(e_str); + tracing::error!(e_str); let value_str = serde_json::to_string(&json!({"detail": e_str})).unwrap(); yield Result::<_, String>::Ok(value_str); break; @@ -425,7 +454,7 @@ pub async fn scratchpad_interaction_stream( yield Result::<_, String>::Ok(value_str); }, Err(err_str) => { - error!("unexpected error: {}", 
err_str); + tracing::error!("unexpected error: {}", err_str); let value_str = format!("data: {}\n\n", serde_json::to_string(&json!({"detail": err_str})).unwrap()); yield Result::<_, String>::Ok(value_str); // TODO: send telemetry @@ -439,7 +468,7 @@ pub async fn scratchpad_interaction_stream( // "restream error: Stream ended" break; } - error!("restream error: {}\n{:?}", err, err); + tracing::error!("restream error: {}\n{:?}", err, err); let problem_str = format!("restream error: {}", err); { tele_storage.write().unwrap().tele_net.push(telemetry_structs::TelemetryNetwork::new( @@ -455,7 +484,7 @@ pub async fn scratchpad_interaction_stream( }, } } - + let mut value = my_scratchpad.streaming_finished(last_finish_reason)?; value["created"] = json!(t1.duration_since(std::time::UNIX_EPOCH).unwrap().as_millis() as f64 / 1000.0); value["model"] = json!(model_name.clone()); @@ -538,7 +567,7 @@ fn _push_streaming_json_into_scratchpad( let choice0 = &choices[0]; let mut value: serde_json::Value; let mut finish_reason = FinishReason::from_json_val(choice0.get("finish_reason").unwrap_or(&json!(""))).unwrap_or_else(|err| { - warn!("Couldn't parse finish_reason: {err}. Fallback to finish_reason=null"); + tracing::error!("Couldn't parse finish_reason: {err}. Fallback to finish_reason=null"); FinishReason::None }); if let Some(_delta) = choice0.get("delta") { From 8453af5b2aacd5240cfe21f96eb09f0d9c47071a Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 4 Dec 2024 18:29:39 +0100 Subject: [PATCH 050/185] repair _cmdline --- src/integrations/integr_cmdline.rs | 282 ++--------- src/integrations/integr_cmdline_service.rs | 504 ++++++++++++++++++++ src/integrations/mod.rs | 2 +- src/integrations/setting_up_integrations.rs | 6 +- 4 files changed, 560 insertions(+), 234 deletions(-) create mode 100644 src/integrations/integr_cmdline_service.rs diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 0cf5883a5..8bf5414f4 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -1,32 +1,23 @@ -use std::any::Any; use std::collections::HashMap; -use std::future::Future; use std::sync::Arc; use std::process::Stdio; -use indexmap::IndexMap; -use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; +use tokio::sync::Mutex as AMutex; use tokio::io::BufReader; use serde::Deserialize; use serde::Serialize; use async_trait::async_trait; use tokio::process::Command; use tracing::info; -use process_wrap::tokio::*; use crate::at_commands::at_commands::AtCommandsContext; use crate::tools::tools_description::{ToolParam, Tool, ToolDesc}; use crate::call_validation::{ChatMessage, ChatContent, ContextEnum}; -use crate::global_context::GlobalContext; -use crate::integrations::process_io_utils::{blocking_read_until_token_or_timeout, is_someone_listening_on_that_tcp_port}; -use crate::integrations::sessions::IntegrationSession; +use crate::integrations::process_io_utils::blocking_read_until_token_or_timeout; use crate::postprocessing::pp_command_output::{CmdlineOutputFilter, output_mini_postprocessing}; use crate::integrations::integr_abstract::IntegrationTrait; -const REALLY_HORRIBLE_ROUNDTRIP: u64 = 3000; // 3000 should be a really bad ping via internet, just in rare case it's a remote port - - -#[derive(Deserialize, Serialize, Clone)] +#[derive(Deserialize, Serialize, Clone, Default)] struct CmdlineToolConfig { command: String, command_workdir: String, @@ -58,10 +49,11 @@ fn _default_startup_wait() -> u64 { 10 } +#[derive(Default)] pub struct ToolCmdline { - 
is_service: bool, - name: String, - cfg: CmdlineToolConfig, + // is_service: bool, + pub name: String, + pub cfg: CmdlineToolConfig, } impl IntegrationTrait for ToolCmdline { @@ -82,7 +74,7 @@ impl IntegrationTrait for ToolCmdline { fn integr_upgrade_to_tool(&self) -> Box { Box::new(ToolCmdline { - is_service: self.is_service, + // is_service: self.is_service, name: self.name.clone(), cfg: self.cfg.clone(), }) as Box @@ -94,70 +86,37 @@ impl IntegrationTrait for ToolCmdline { } } -pub fn cmdline_tool_from_yaml_value( - cfg_cmdline_value: &serde_yaml::Value, - background: bool, -) -> Result>>>, String> { - let mut result = IndexMap::new(); - let cfgmap = match serde_yaml::from_value::>(cfg_cmdline_value.clone()) { - Ok(cfgmap) => cfgmap, - Err(e) => { - let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); - return Err(format!("failed to parse cmdline section: {:?}{}", e, location)); - } - }; - for (c_name, mut c_cmd_tool) in cfgmap.into_iter() { - if background { - c_cmd_tool.parameters.push(ToolParam { - name: "action".to_string(), - param_type: "string".to_string(), - description: "start | stop | restart | status".to_string(), - }); - } - let tool = Arc::new(AMutex::new(Box::new( - ToolCmdline { - is_service: background, - name: c_name.clone(), - cfg: c_cmd_tool, - } - ) as Box)); - result.insert(c_name, tool); - } - Ok(result) -} - -pub struct CmdlineSession { - cmdline_string: String, - cmdline_workdir: String, - cmdline_process: Box, - #[allow(dead_code)] - cmdline_stdout: BufReader, - #[allow(dead_code)] - cmdline_stderr: BufReader, - service_name: String, -} - -impl IntegrationSession for CmdlineSession { - fn as_any_mut(&mut self) -> &mut dyn Any { - self - } - fn is_expired(&self) -> bool { false } - fn try_stop(&mut self) -> Box + Send + '_> { - Box::new(async { - info!("SERVICE STOP workdir {}:\n{:?}", self.cmdline_workdir, self.cmdline_string); - let t0 = tokio::time::Instant::now(); - match Box::into_pin(self.cmdline_process.kill()).await { - Ok(_) => { - format!("Success, it took {:.3}s to stop it.\n\n", t0.elapsed().as_secs_f64()) - }, - Err(e) => { - tracing::warn!("Failed to kill service '{}'. Error: {}. Assuming process died on its own.", self.service_name, e); - format!("Failed to kill service. 
Error: {}.\nAssuming process died on its own, let's continue.\n\n", e) - } - } - }) - } -} +// pub fn cmdline_tool_from_yaml_value( +// cfg_cmdline_value: &serde_yaml::Value, +// background: bool, +// ) -> Result>>>, String> { +// let mut result = IndexMap::new(); +// let cfgmap = match serde_yaml::from_value::>(cfg_cmdline_value.clone()) { +// Ok(cfgmap) => cfgmap, +// Err(e) => { +// let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); +// return Err(format!("failed to parse cmdline section: {:?}{}", e, location)); +// } +// }; +// for (c_name, mut c_cmd_tool) in cfgmap.into_iter() { +// // if background { +// // c_cmd_tool.parameters.push(ToolParam { +// // name: "action".to_string(), +// // param_type: "string".to_string(), +// // description: "start | stop | restart | status".to_string(), +// // }); +// // } +// let tool = Arc::new(AMutex::new(Box::new( +// ToolCmdline { +// // is_service: background, +// name: c_name.clone(), +// cfg: c_cmd_tool, +// } +// ) as Box)); +// result.insert(c_name, tool); +// } +// Ok(result) +// } fn _replace_args(x: &str, args_str: &HashMap) -> String { let mut result = x.to_string(); @@ -253,146 +212,6 @@ async fn get_stdout_and_stderr( Ok((stdout_out, stderr_out)) } -async fn execute_background_command( - gcx: Arc>, - service_name: &str, - command_str: &str, - cmdline_workdir: &String, - cfg: &CmdlineToolConfig, - action: &str, -) -> Result { - let session_key = format!("custom_service_{service_name}"); - let mut session_mb = gcx.read().await.integration_sessions.get(&session_key).cloned(); - let command_str = command_str.to_string(); - let mut actions_log = String::new(); - - if session_mb.is_some() { - let session_arc = session_mb.clone().unwrap(); - let mut session_locked = session_arc.lock().await; - let session = session_locked.as_any_mut().downcast_mut::().unwrap(); - actions_log.push_str(&format!("Currently have service running, workdir {}:\n{}\n", session.cmdline_workdir, session.cmdline_string)); - let (stdout_out, stderr_out) = get_stdout_and_stderr(100, &mut session.cmdline_stdout, &mut session.cmdline_stderr).await?; - let filtered_stdout = output_mini_postprocessing(&cfg.output_filter, &stdout_out); - let filtered_stderr = output_mini_postprocessing(&cfg.output_filter, &stderr_out); - actions_log.push_str(&format!("Here are stdin/stderr since the last checking out on the service:\n{}\n\n", format_output(&filtered_stdout, &filtered_stderr))); - } else { - actions_log.push_str(&format!("Service is currently not running\n")); - } - - if session_mb.is_some() && (action == "restart" || action == "stop") { - let session_arc = session_mb.clone().unwrap(); - { - let mut session_locked = session_arc.lock().await; - let session = session_locked.as_any_mut().downcast_mut::().unwrap(); - actions_log.push_str(&format!("Stopping it...\n")); - let stop_log = Box::into_pin(session.try_stop()).await; - actions_log.push_str(&stop_log); - } - gcx.write().await.integration_sessions.remove(&session_key); - session_mb = None; - } - - if session_mb.is_none() && (action == "restart" || action == "start") { - let mut port_already_open = false; - if let Some(wait_port) = cfg.startup_wait_port { - port_already_open = is_someone_listening_on_that_tcp_port(wait_port, tokio::time::Duration::from_millis(REALLY_HORRIBLE_ROUNDTRIP)).await; - if port_already_open { - actions_log.push_str(&format!( - "This service startup sequence requires to wait until a TCP port gets occupied, but this port {} is already 
busy even before the service start is attempted. Not good, but let's try to run it anyway.\n\n", - wait_port, - )); - } - } - info!("SERVICE START workdir {}:\n{:?}", cmdline_workdir, command_str); - actions_log.push_str(&format!("Starting service with the following command line:\n{}\n", command_str)); - - let mut command = create_command_from_string(&command_str, cmdline_workdir).await?; - command.stdout(Stdio::piped()); - command.stderr(Stdio::piped()); - let mut command_wrap = TokioCommandWrap::from(command); - #[cfg(unix)] - command_wrap.wrap(ProcessGroup::leader()); - #[cfg(windows)] - command_wrap.wrap(JobObject); - let mut process = command_wrap.spawn().map_err(|e| format!("failed to create process: {e}"))?; - - let mut stdout_reader = BufReader::new(process.stdout().take().ok_or("Failed to open stdout")?); - let mut stderr_reader = BufReader::new(process.stderr().take().ok_or("Failed to open stderr")?); - - let t0 = tokio::time::Instant::now(); - - let mut accumulated_stdout = String::new(); - let mut accumulated_stderr = String::new(); - let mut exit_code: i32 = -100000; - - loop { - if t0.elapsed() >= tokio::time::Duration::from_secs(cfg.startup_wait) { - actions_log.push_str(&format!("Timeout {:.2}s reached while waiting for the service to start.\n\n", t0.elapsed().as_secs_f64())); - break; - } - - let (stdout_out, stderr_out) = get_stdout_and_stderr(100, &mut stdout_reader, &mut stderr_reader).await?; - accumulated_stdout.push_str(&stdout_out); - accumulated_stderr.push_str(&stderr_out); - - // XXX rename keyword to phrase or something - if let Some(keyword) = &cfg.startup_wait_keyword { - if accumulated_stdout.contains(keyword) || accumulated_stderr.contains(keyword) { - actions_log.push_str(&format!("Startup keyword '{}' found in output, success!\n\n", keyword)); - break; - } - } - - let exit_status = process.try_wait().map_err(|e| e.to_string())?; - if let Some(status) = exit_status { - exit_code = status.code().unwrap_or(-1); - actions_log.push_str(&format!("Service process exited prematurely with exit code: {}\nService did not start.\n\n", exit_code)); - break; - } - - if let Some(wait_port) = cfg.startup_wait_port { - match is_someone_listening_on_that_tcp_port(wait_port, tokio::time::Duration::from_millis(REALLY_HORRIBLE_ROUNDTRIP)).await { - true => { - if !port_already_open { - actions_log.push_str(&format!("Port {} is now busy, success!\n", wait_port)); - break; - } - }, - false => { - if port_already_open { - port_already_open = false; - actions_log.push_str(&format!("Port {} is now free\n", wait_port)); - } - } - } - } - - tokio::time::sleep(tokio::time::Duration::from_millis(50)).await; - } - - let filtered_stdout = output_mini_postprocessing(&cfg.output_filter, &accumulated_stdout); - let filtered_stderr = output_mini_postprocessing(&cfg.output_filter, &accumulated_stderr); - let out = format_output(&filtered_stdout, &filtered_stderr); - actions_log.push_str(&out); - - if exit_code == -100000 { - let session: Box = Box::new(CmdlineSession { - cmdline_process: process, - cmdline_string: command_str, - cmdline_workdir: cmdline_workdir.clone(), - cmdline_stdout: stdout_reader, - cmdline_stderr: stderr_reader, - service_name: service_name.to_string(), - }); - gcx.write().await.integration_sessions.insert(session_key.to_string(), Arc::new(AMutex::new(session))); - } - - info!("SERVICE START LOG:\n{}", actions_log); - } - - Ok(actions_log) -} - #[async_trait] impl Tool for ToolCmdline { fn as_any(&self) -> &dyn std::any::Any { self } @@ -427,18 +246,19 @@ impl Tool for 
ToolCmdline { let command = _replace_args(self.cfg.command.as_str(), &args_str); let workdir = _replace_args(self.cfg.command_workdir.as_str(), &args_str); - let tool_ouput = if self.is_service { - let action = args_str.get("action").cloned().unwrap_or("start".to_string()); - if !["start", "restart", "stop", "status"].contains(&action.as_str()) { - return Err("Tool call is invalid. Param 'action' must be one of 'start', 'restart', 'stop', 'status'. Try again".to_string()); - } - execute_background_command( - gcx, &self.name, &command, &workdir, &self.cfg, action.as_str() - ).await? + // let tool_ouput = if self.is_service { + // let action = args_str.get("action").cloned().unwrap_or("start".to_string()); + // if !["start", "restart", "stop", "status"].contains(&action.as_str()) { + // return Err("Tool call is invalid. Param 'action' must be one of 'start', 'restart', 'stop', 'status'. Try again".to_string()); + // } + // execute_background_command( + // gcx, &self.name, &command, &workdir, &self.cfg, action.as_str() + // ).await? - } else { - execute_blocking_command(&command, &self.cfg, &workdir).await? - }; + // } else { + // }; + + let tool_ouput = execute_blocking_command(&command, &self.cfg, &workdir).await?; let result = vec![ContextEnum::ChatMessage(ChatMessage { role: "tool".to_string(), diff --git a/src/integrations/integr_cmdline_service.rs b/src/integrations/integr_cmdline_service.rs new file mode 100644 index 000000000..0cf5883a5 --- /dev/null +++ b/src/integrations/integr_cmdline_service.rs @@ -0,0 +1,504 @@ +use std::any::Any; +use std::collections::HashMap; +use std::future::Future; +use std::sync::Arc; +use std::process::Stdio; +use indexmap::IndexMap; +use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; +use tokio::io::BufReader; +use serde::Deserialize; +use serde::Serialize; +use async_trait::async_trait; +use tokio::process::Command; +use tracing::info; +use process_wrap::tokio::*; + +use crate::at_commands::at_commands::AtCommandsContext; +use crate::tools::tools_description::{ToolParam, Tool, ToolDesc}; +use crate::call_validation::{ChatMessage, ChatContent, ContextEnum}; +use crate::global_context::GlobalContext; +use crate::integrations::process_io_utils::{blocking_read_until_token_or_timeout, is_someone_listening_on_that_tcp_port}; +use crate::integrations::sessions::IntegrationSession; +use crate::postprocessing::pp_command_output::{CmdlineOutputFilter, output_mini_postprocessing}; +use crate::integrations::integr_abstract::IntegrationTrait; + + +const REALLY_HORRIBLE_ROUNDTRIP: u64 = 3000; // 3000 should be a really bad ping via internet, just in rare case it's a remote port + + +#[derive(Deserialize, Serialize, Clone)] +struct CmdlineToolConfig { + command: String, + command_workdir: String, + + description: String, + parameters: Vec, + parameters_required: Option>, + + // blocking + #[serde(default = "_default_timeout")] + timeout: u64, + #[serde(default)] + output_filter: CmdlineOutputFilter, + + // background + #[serde(default)] + startup_wait_port: Option, + #[serde(default = "_default_startup_wait")] + startup_wait: u64, + #[serde(default)] + startup_wait_keyword: Option, +} + +fn _default_timeout() -> u64 { + 120 +} + +fn _default_startup_wait() -> u64 { + 10 +} + +pub struct ToolCmdline { + is_service: bool, + name: String, + cfg: CmdlineToolConfig, +} + +impl IntegrationTrait for ToolCmdline { + fn integr_settings_apply(&mut self, value: &serde_json::Value) -> Result<(), String> { + match serde_json::from_value::(value.clone()) { + Ok(x) => self.cfg = 
x, + Err(e) => { + tracing::error!("Failed to apply settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } + Ok(()) + } + + fn integr_settings_as_json(&self) -> serde_json::Value { + serde_json::to_value(&self.cfg).unwrap() + } + + fn integr_upgrade_to_tool(&self) -> Box { + Box::new(ToolCmdline { + is_service: self.is_service, + name: self.name.clone(), + cfg: self.cfg.clone(), + }) as Box + } + + fn integr_schema(&self) -> &str + { + CMDLINE_INTEGRATION_SCHEMA + } +} + +pub fn cmdline_tool_from_yaml_value( + cfg_cmdline_value: &serde_yaml::Value, + background: bool, +) -> Result>>>, String> { + let mut result = IndexMap::new(); + let cfgmap = match serde_yaml::from_value::>(cfg_cmdline_value.clone()) { + Ok(cfgmap) => cfgmap, + Err(e) => { + let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); + return Err(format!("failed to parse cmdline section: {:?}{}", e, location)); + } + }; + for (c_name, mut c_cmd_tool) in cfgmap.into_iter() { + if background { + c_cmd_tool.parameters.push(ToolParam { + name: "action".to_string(), + param_type: "string".to_string(), + description: "start | stop | restart | status".to_string(), + }); + } + let tool = Arc::new(AMutex::new(Box::new( + ToolCmdline { + is_service: background, + name: c_name.clone(), + cfg: c_cmd_tool, + } + ) as Box)); + result.insert(c_name, tool); + } + Ok(result) +} + +pub struct CmdlineSession { + cmdline_string: String, + cmdline_workdir: String, + cmdline_process: Box, + #[allow(dead_code)] + cmdline_stdout: BufReader, + #[allow(dead_code)] + cmdline_stderr: BufReader, + service_name: String, +} + +impl IntegrationSession for CmdlineSession { + fn as_any_mut(&mut self) -> &mut dyn Any { + self + } + fn is_expired(&self) -> bool { false } + fn try_stop(&mut self) -> Box + Send + '_> { + Box::new(async { + info!("SERVICE STOP workdir {}:\n{:?}", self.cmdline_workdir, self.cmdline_string); + let t0 = tokio::time::Instant::now(); + match Box::into_pin(self.cmdline_process.kill()).await { + Ok(_) => { + format!("Success, it took {:.3}s to stop it.\n\n", t0.elapsed().as_secs_f64()) + }, + Err(e) => { + tracing::warn!("Failed to kill service '{}'. Error: {}. Assuming process died on its own.", self.service_name, e); + format!("Failed to kill service. 
Error: {}.\nAssuming process died on its own, let's continue.\n\n", e) + } + } + }) + } +} + +fn _replace_args(x: &str, args_str: &HashMap) -> String { + let mut result = x.to_string(); + for (key, value) in args_str { + result = result.replace(&format!("%{}%", key), value); + } + result +} + +fn format_output(stdout_out: &str, stderr_out: &str) -> String { + let mut out = String::new(); + if !stdout_out.is_empty() && stderr_out.is_empty() { + // special case: just clean output, nice + out.push_str(&format!("{}\n", stdout_out)); + } else { + if !stdout_out.is_empty() { + out.push_str(&format!("STDOUT\n```\n{}```\n\n", stdout_out)); + } + if !stderr_out.is_empty() { + out.push_str(&format!("STDERR\n```\n{}```\n\n", stderr_out)); + } + } + out +} + +async fn create_command_from_string( + cmd_string: &str, + command_workdir: &String, +) -> Result { + let command_args = shell_words::split(cmd_string) + .map_err(|e| format!("Failed to parse command: {}", e))?; + if command_args.is_empty() { + return Err("Command is empty after parsing".to_string()); + } + let mut cmd = Command::new(&command_args[0]); + if command_args.len() > 1 { + cmd.args(&command_args[1..]); + } + cmd.current_dir(command_workdir); + Ok(cmd) +} + +async fn execute_blocking_command( + command: &str, + cfg: &CmdlineToolConfig, + command_workdir: &String, +) -> Result { + info!("EXEC workdir {}:\n{:?}", command_workdir, command); + let command_future = async { + let mut cmd = create_command_from_string(command, command_workdir).await?; + let t0 = tokio::time::Instant::now(); + let result = cmd + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .output() + .await; + let duration = t0.elapsed(); + info!("EXEC: /finished in {:?}", duration); + + let output = match result { + Ok(output) => output, + Err(e) => { + let msg = format!("cannot run command: '{}'. workdir: '{}'. 
Error: {}", &command, command_workdir, e); + tracing::error!("{msg}"); + return Err(msg); + } + }; + + let stdout = output_mini_postprocessing(&cfg.output_filter, &String::from_utf8_lossy(&output.stdout).to_string()); + let stderr = output_mini_postprocessing(&cfg.output_filter, &String::from_utf8_lossy(&output.stderr).to_string()); + + let mut out = format_output(&stdout, &stderr); + let exit_code = output.status.code().unwrap_or_default(); + out.push_str(&format!("command was running {:.3}s, finished with exit code {exit_code}\n", duration.as_secs_f64())); + Ok(out) + }; + + let timeout_duration = tokio::time::Duration::from_secs(cfg.timeout); + let result = tokio::time::timeout(timeout_duration, command_future).await; + + match result { + Ok(res) => res, + Err(_) => Err(format!("command timed out after {:?}", timeout_duration)), + } +} + +async fn get_stdout_and_stderr( + timeout_ms: u64, + stdout: &mut BufReader, + stderr: &mut BufReader, +) -> Result<(String, String), String> { + let (stdout_out, stderr_out, _) = blocking_read_until_token_or_timeout(stdout, stderr, timeout_ms, "").await?; + Ok((stdout_out, stderr_out)) +} + +async fn execute_background_command( + gcx: Arc>, + service_name: &str, + command_str: &str, + cmdline_workdir: &String, + cfg: &CmdlineToolConfig, + action: &str, +) -> Result { + let session_key = format!("custom_service_{service_name}"); + let mut session_mb = gcx.read().await.integration_sessions.get(&session_key).cloned(); + let command_str = command_str.to_string(); + let mut actions_log = String::new(); + + if session_mb.is_some() { + let session_arc = session_mb.clone().unwrap(); + let mut session_locked = session_arc.lock().await; + let session = session_locked.as_any_mut().downcast_mut::().unwrap(); + actions_log.push_str(&format!("Currently have service running, workdir {}:\n{}\n", session.cmdline_workdir, session.cmdline_string)); + let (stdout_out, stderr_out) = get_stdout_and_stderr(100, &mut session.cmdline_stdout, &mut session.cmdline_stderr).await?; + let filtered_stdout = output_mini_postprocessing(&cfg.output_filter, &stdout_out); + let filtered_stderr = output_mini_postprocessing(&cfg.output_filter, &stderr_out); + actions_log.push_str(&format!("Here are stdin/stderr since the last checking out on the service:\n{}\n\n", format_output(&filtered_stdout, &filtered_stderr))); + } else { + actions_log.push_str(&format!("Service is currently not running\n")); + } + + if session_mb.is_some() && (action == "restart" || action == "stop") { + let session_arc = session_mb.clone().unwrap(); + { + let mut session_locked = session_arc.lock().await; + let session = session_locked.as_any_mut().downcast_mut::().unwrap(); + actions_log.push_str(&format!("Stopping it...\n")); + let stop_log = Box::into_pin(session.try_stop()).await; + actions_log.push_str(&stop_log); + } + gcx.write().await.integration_sessions.remove(&session_key); + session_mb = None; + } + + if session_mb.is_none() && (action == "restart" || action == "start") { + let mut port_already_open = false; + if let Some(wait_port) = cfg.startup_wait_port { + port_already_open = is_someone_listening_on_that_tcp_port(wait_port, tokio::time::Duration::from_millis(REALLY_HORRIBLE_ROUNDTRIP)).await; + if port_already_open { + actions_log.push_str(&format!( + "This service startup sequence requires to wait until a TCP port gets occupied, but this port {} is already busy even before the service start is attempted. 
Not good, but let's try to run it anyway.\n\n", + wait_port, + )); + } + } + info!("SERVICE START workdir {}:\n{:?}", cmdline_workdir, command_str); + actions_log.push_str(&format!("Starting service with the following command line:\n{}\n", command_str)); + + let mut command = create_command_from_string(&command_str, cmdline_workdir).await?; + command.stdout(Stdio::piped()); + command.stderr(Stdio::piped()); + let mut command_wrap = TokioCommandWrap::from(command); + #[cfg(unix)] + command_wrap.wrap(ProcessGroup::leader()); + #[cfg(windows)] + command_wrap.wrap(JobObject); + let mut process = command_wrap.spawn().map_err(|e| format!("failed to create process: {e}"))?; + + let mut stdout_reader = BufReader::new(process.stdout().take().ok_or("Failed to open stdout")?); + let mut stderr_reader = BufReader::new(process.stderr().take().ok_or("Failed to open stderr")?); + + let t0 = tokio::time::Instant::now(); + + let mut accumulated_stdout = String::new(); + let mut accumulated_stderr = String::new(); + let mut exit_code: i32 = -100000; + + loop { + if t0.elapsed() >= tokio::time::Duration::from_secs(cfg.startup_wait) { + actions_log.push_str(&format!("Timeout {:.2}s reached while waiting for the service to start.\n\n", t0.elapsed().as_secs_f64())); + break; + } + + let (stdout_out, stderr_out) = get_stdout_and_stderr(100, &mut stdout_reader, &mut stderr_reader).await?; + accumulated_stdout.push_str(&stdout_out); + accumulated_stderr.push_str(&stderr_out); + + // XXX rename keyword to phrase or something + if let Some(keyword) = &cfg.startup_wait_keyword { + if accumulated_stdout.contains(keyword) || accumulated_stderr.contains(keyword) { + actions_log.push_str(&format!("Startup keyword '{}' found in output, success!\n\n", keyword)); + break; + } + } + + let exit_status = process.try_wait().map_err(|e| e.to_string())?; + if let Some(status) = exit_status { + exit_code = status.code().unwrap_or(-1); + actions_log.push_str(&format!("Service process exited prematurely with exit code: {}\nService did not start.\n\n", exit_code)); + break; + } + + if let Some(wait_port) = cfg.startup_wait_port { + match is_someone_listening_on_that_tcp_port(wait_port, tokio::time::Duration::from_millis(REALLY_HORRIBLE_ROUNDTRIP)).await { + true => { + if !port_already_open { + actions_log.push_str(&format!("Port {} is now busy, success!\n", wait_port)); + break; + } + }, + false => { + if port_already_open { + port_already_open = false; + actions_log.push_str(&format!("Port {} is now free\n", wait_port)); + } + } + } + } + + tokio::time::sleep(tokio::time::Duration::from_millis(50)).await; + } + + let filtered_stdout = output_mini_postprocessing(&cfg.output_filter, &accumulated_stdout); + let filtered_stderr = output_mini_postprocessing(&cfg.output_filter, &accumulated_stderr); + let out = format_output(&filtered_stdout, &filtered_stderr); + actions_log.push_str(&out); + + if exit_code == -100000 { + let session: Box = Box::new(CmdlineSession { + cmdline_process: process, + cmdline_string: command_str, + cmdline_workdir: cmdline_workdir.clone(), + cmdline_stdout: stdout_reader, + cmdline_stderr: stderr_reader, + service_name: service_name.to_string(), + }); + gcx.write().await.integration_sessions.insert(session_key.to_string(), Arc::new(AMutex::new(session))); + } + + info!("SERVICE START LOG:\n{}", actions_log); + } + + Ok(actions_log) +} + +#[async_trait] +impl Tool for ToolCmdline { + fn as_any(&self) -> &dyn std::any::Any { self } + + async fn tool_execute( + &mut self, + ccx: Arc>, + tool_call_id: &String, + 
args: &HashMap, + ) -> Result<(bool, Vec), String> { + let gcx = ccx.lock().await.global_context.clone(); + + let mut args_str: HashMap = HashMap::new(); + let valid_params: Vec = self.cfg.parameters.iter().map(|p| p.name.clone()).collect(); + + for (k, v) in args.iter() { + if !valid_params.contains(k) { + return Err(format!("Unexpected argument `{}`", k)); + } + match v { + serde_json::Value::String(s) => { args_str.insert(k.clone(), s.clone()); }, + _ => return Err(format!("argument `{}` is not a string: {:?}", k, v)), + } + } + + for param in &self.cfg.parameters { + if self.cfg.parameters_required.as_ref().map_or(false, |req| req.contains(¶m.name)) && !args_str.contains_key(¶m.name) { + return Err(format!("Missing required argument `{}`", param.name)); + } + } + + let command = _replace_args(self.cfg.command.as_str(), &args_str); + let workdir = _replace_args(self.cfg.command_workdir.as_str(), &args_str); + + let tool_ouput = if self.is_service { + let action = args_str.get("action").cloned().unwrap_or("start".to_string()); + if !["start", "restart", "stop", "status"].contains(&action.as_str()) { + return Err("Tool call is invalid. Param 'action' must be one of 'start', 'restart', 'stop', 'status'. Try again".to_string()); + } + execute_background_command( + gcx, &self.name, &command, &workdir, &self.cfg, action.as_str() + ).await? + + } else { + execute_blocking_command(&command, &self.cfg, &workdir).await? + }; + + let result = vec![ContextEnum::ChatMessage(ChatMessage { + role: "tool".to_string(), + content: ChatContent::SimpleText(tool_ouput), + tool_calls: None, + tool_call_id: tool_call_id.clone(), + ..Default::default() + })]; + + Ok((false, result)) + } + + fn tool_depends_on(&self) -> Vec { + vec![] + } + + fn tool_description(&self) -> ToolDesc { + let parameters_required = self.cfg.parameters_required.clone().unwrap_or_else(|| { + self.cfg.parameters.iter().map(|param| param.name.clone()).collect() + }); + ToolDesc { + name: self.name.clone(), + agentic: true, + experimental: false, + description: self.cfg.description.clone(), + parameters: self.cfg.parameters.clone(), + parameters_required, + } + } +} + +pub const CMDLINE_INTEGRATION_SCHEMA: &str = r#" +fields: + command: + f_type: string_long + f_desc: "The command to execute." + f_placeholder: "echo Hello World" + command_workdir: + f_type: string_long + f_desc: "The working directory for the command." + f_placeholder: "/path/to/workdir" + description: + f_type: string_long + f_desc: "The model will see this description, why the model should call this?" + f_placeholder: "" + parameters: + f_type: "tool_parameters" + f_desc: "The model will fill in those parameters." + timeout: + f_type: integer + f_desc: "The command must immediately return the results, it can't be interactive. If the command runs for too long, it will be terminated and stderr/stdout collected will be presented to the model." + f_default: 10 + output_filter: + f_type: "output_filter" + f_desc: "The output from the command can be long or even quasi-infinite. This section allows to set limits, prioritize top or bottom, or use regexp to show the model the relevant part." + f_placeholder: "filter" +description: | + There you can adapt any command line tool for use by AI model. You can give the model instructions why to call it, which parameters to provide, + set a timeout and restrict the output. If you want a tool that runs in the background such as a web server, use service_* instead. 
+available: + on_your_laptop_possible: true + when_isolated_possible: true +"#; diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index b4272f9b0..fa94cf06f 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -39,7 +39,7 @@ pub fn integration_from_name(n: &str) -> Result { let tool_name = cmdline.strip_prefix("cmdline_").unwrap(); tracing::info!("todo finish me tool_name={}", tool_name); - Err("todo finish me".to_string()) + Ok(Box::new(integr_cmdline::ToolCmdline {..Default::default()}) as Box) }, service if service.starts_with("service_") => { let tool_name = service.strip_prefix("service_").unwrap(); diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 8648911ac..4279ebc7b 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -226,7 +226,8 @@ pub async fn get_integrations_yaml_path(gcx: Arc>) -> Str r } -pub async fn get_vars_for_replacements(gcx: Arc>) -> HashMap { +pub async fn get_vars_for_replacements(gcx: Arc>) -> HashMap +{ let gcx_locked = gcx.read().await; let secrets_yaml_path = gcx_locked.config_dir.join("secrets.yaml"); let variables_yaml_path = gcx_locked.config_dir.join("variables.yaml"); @@ -252,7 +253,8 @@ pub async fn get_vars_for_replacements(gcx: Arc>) -> Hash variables } -pub fn join_config_path(config_dir: &PathBuf, integr_name: &str) -> String { +pub fn join_config_path(config_dir: &PathBuf, integr_name: &str) -> String +{ config_dir.join("integrations.d").join(format!("{}.yaml", integr_name)).to_string_lossy().into_owned() } From a94b3e2e0dd9caf3138af056266f5c38acd9b4c5 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 5 Dec 2024 08:01:12 +0100 Subject: [PATCH 051/185] repair cmdline_* --- src/integrations/docker/integr_docker.rs | 28 +++++++++++----------- src/integrations/integr_abstract.rs | 2 +- src/integrations/integr_chrome.rs | 2 +- src/integrations/integr_cmdline.rs | 4 ++-- src/integrations/integr_cmdline_service.rs | 2 +- src/integrations/integr_github.rs | 4 ++-- src/integrations/integr_gitlab.rs | 4 ++-- src/integrations/integr_pdb.rs | 26 ++++++++++---------- src/integrations/integr_postgres.rs | 2 +- src/integrations/mod.rs | 1 - src/integrations/running_integrations.rs | 2 +- 11 files changed, 38 insertions(+), 39 deletions(-) diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index 42b6ed980..5b586a79e 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -69,7 +69,7 @@ impl SettingsDocker { host: self.ssh_host.clone(), user: self.ssh_user.clone(), port: self.ssh_port.clone(), - identity_file: if !self.ssh_identity_file.is_empty() + identity_file: if !self.ssh_identity_file.is_empty() { Some(self.ssh_identity_file.clone()) } else { None }, }) } else { @@ -102,7 +102,7 @@ impl IntegrationTrait for ToolDocker { serde_json::to_value(&self.settings_docker).unwrap() } - fn integr_upgrade_to_tool(&self) -> Box { + fn integr_upgrade_to_tool(&self, _integr_name: &String) -> Box { Box::new(ToolDocker { settings_docker: self.settings_docker.clone() }) as Box @@ -115,7 +115,7 @@ impl IntegrationTrait for ToolDocker { } impl ToolDocker { - pub async fn command_execute(&self, command: &str, gcx: Arc>, fail_if_stderr_is_not_empty: bool, verbose_error: bool) -> Result<(String, String), String> + pub async fn command_execute(&self, command: &str, gcx: Arc>, fail_if_stderr_is_not_empty: bool, verbose_error: bool) -> 
Result<(String, String), String> { let mut command_args = split_command(&command)?; @@ -177,7 +177,7 @@ impl Tool for ToolDocker { let ccx_locked = ccx.lock().await; ccx_locked.global_context.clone() }; - + let (stdout, _) = self.command_execute(&command, gcx.clone(), true, false).await?; Ok((false, vec![ @@ -228,7 +228,7 @@ fn split_command(command: &str) -> Result, String> { Ok(parsed_args) } -fn command_is_interactive_or_blocking(command_args: &Vec) -> bool +fn command_is_interactive_or_blocking(command_args: &Vec) -> bool { const COMMANDS_THAT_CAN_BE_INTERACTIVE: &[&str] = &["run", "exec"]; const COMMANDS_ALWAYS_BLOCKING: &[&str] = &["attach", "events", "wait"]; @@ -255,13 +255,13 @@ fn command_is_interactive_or_blocking(command_args: &Vec) -> bool subcommand_generic }; - if COMMANDS_THAT_CAN_BE_INTERACTIVE.contains(&subcommand_specific) && - command_contains_flag(command_args, "i", "interactive") + if COMMANDS_THAT_CAN_BE_INTERACTIVE.contains(&subcommand_specific) && + command_contains_flag(command_args, "i", "interactive") { return true; } - if subcommand_specific == "logs" && command_contains_flag(command_args, "f", "follow") { + if subcommand_specific == "logs" && command_contains_flag(command_args, "f", "follow") { return true; } @@ -274,14 +274,14 @@ fn command_is_interactive_or_blocking(command_args: &Vec) -> bool fn command_append_label_if_creates_resource(command_args: &mut Vec, label: &str) -> () { const COMMANDS_FOR_RESOURCE_CREATION: &[&[&str]] = &[ - &["build"], - &["buildx", "build"], - &["image", "build"], - &["builder", "build"], - &["buildx", "b"], + &["build"], + &["buildx", "build"], + &["image", "build"], + &["builder", "build"], + &["buildx", "b"], &["create"], &["container", "create"], - &["network", "create"], + &["network", "create"], &["volume", "create"], &["run"], &["container", "run"], diff --git a/src/integrations/integr_abstract.rs b/src/integrations/integr_abstract.rs index 09e75462b..7a82e76e3 100644 --- a/src/integrations/integr_abstract.rs +++ b/src/integrations/integr_abstract.rs @@ -2,5 +2,5 @@ pub trait IntegrationTrait: Send + Sync { fn integr_schema(&self) -> &str; fn integr_settings_apply(&mut self, value: &serde_json::Value) -> Result<(), String>; fn integr_settings_as_json(&self) -> serde_json::Value; - fn integr_upgrade_to_tool(&self) -> Box; + fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box; // integr_name is sometimes different, "cmdline_compile_by_project" != "cmdline" } diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 96768a76c..5e1c57965 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -138,7 +138,7 @@ impl Integration for ToolChrome { serde_json::to_value(&integration_github).map_err(|e| e.to_string()) } - fn integr_upgrade_to_tool(&self) -> Box { + fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box { Box::new(ToolChrome { integration_chrome: self.integration_chrome.clone(), supports_clicks: false} diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 8bf5414f4..4830dc596 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -72,10 +72,10 @@ impl IntegrationTrait for ToolCmdline { serde_json::to_value(&self.cfg).unwrap() } - fn integr_upgrade_to_tool(&self) -> Box { + fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box { Box::new(ToolCmdline { // is_service: self.is_service, - name: self.name.clone(), + name: integr_name.clone(), cfg: 
self.cfg.clone(), }) as Box } diff --git a/src/integrations/integr_cmdline_service.rs b/src/integrations/integr_cmdline_service.rs index 0cf5883a5..d3a47e9c8 100644 --- a/src/integrations/integr_cmdline_service.rs +++ b/src/integrations/integr_cmdline_service.rs @@ -80,7 +80,7 @@ impl IntegrationTrait for ToolCmdline { serde_json::to_value(&self.cfg).unwrap() } - fn integr_upgrade_to_tool(&self) -> Box { + fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box { Box::new(ToolCmdline { is_service: self.is_service, name: self.name.clone(), diff --git a/src/integrations/integr_github.rs b/src/integrations/integr_github.rs index fd23f2585..57ad7c9f9 100644 --- a/src/integrations/integr_github.rs +++ b/src/integrations/integr_github.rs @@ -42,7 +42,7 @@ impl Integration for ToolGithub { serde_json::to_value(&integration_github).map_err(|e| e.to_string()) } - fn integr_upgrade_to_tool(&self) -> Box { + fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box { Box::new(ToolGithub {integration_github: self.integration_github.clone()}) as Box } @@ -57,7 +57,7 @@ impl Integration for ToolGithub { #[async_trait] impl Tool for ToolGithub { fn as_any(&self) -> &dyn std::any::Any { self } - + async fn tool_execute( &mut self, _ccx: Arc>, diff --git a/src/integrations/integr_gitlab.rs b/src/integrations/integr_gitlab.rs index c76d2b3ef..fb9ec8ed2 100644 --- a/src/integrations/integr_gitlab.rs +++ b/src/integrations/integr_gitlab.rs @@ -41,7 +41,7 @@ impl Integration for ToolGitlab{ serde_json::to_value(&integration_gitlab).map_err(|e| e.to_string()) } - fn integr_upgrade_to_tool(&self) -> Box { + fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box { Box::new(ToolGitlab {integration_gitlab: self.integration_gitlab.clone()}) as Box } @@ -56,7 +56,7 @@ impl Integration for ToolGitlab{ #[async_trait] impl Tool for ToolGitlab { fn as_any(&self) -> &dyn std::any::Any { self } - + async fn tool_execute( &mut self, _ccx: Arc>, diff --git a/src/integrations/integr_pdb.rs b/src/integrations/integr_pdb.rs index 8b7d4544a..cb8e575d8 100644 --- a/src/integrations/integr_pdb.rs +++ b/src/integrations/integr_pdb.rs @@ -82,7 +82,7 @@ impl Integration for ToolPdb { serde_json::to_value(&integration_github).map_err(|e| e.to_string()) } - fn integr_upgrade_to_tool(&self) -> Box { + fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box { Box::new(ToolPdb {settings_pdb: self.settings_pdb.clone()}) as Box } @@ -97,7 +97,7 @@ impl Integration for ToolPdb { #[async_trait] impl Tool for ToolPdb { fn as_any(&self) -> &dyn std::any::Any { self } - + async fn tool_execute( &mut self, ccx: Arc>, @@ -119,14 +119,14 @@ impl Tool for ToolPdb { let output = start_pdb_session(&python_command, &mut command_args, &session_hashmap_key, gcx.clone(), 10).await?; return Ok(tool_answer(output, tool_call_id)); } - + let command_session = { let gcx_locked = gcx.read().await; gcx_locked.integration_sessions.get(&session_hashmap_key) .ok_or("There is no active pdb session in this chat, you can open it by running pdb(\"python -m pdb my_script.py\")")? 
.clone() }; - + let mut command_session_locked = command_session.lock().await; let mut pdb_session = command_session_locked.as_any_mut().downcast_mut::() .ok_or("Failed to downcast to PdbSession")?; @@ -135,7 +135,7 @@ impl Tool for ToolPdb { "kill" => { let mut gcx_locked = gcx.write().await; gcx_locked.integration_sessions.remove(&session_hashmap_key); - "Pdb session has been killed".to_string() + "Pdb session has been killed".to_string() }, "wait" => { if command_args.len() < 2 { @@ -194,10 +194,10 @@ fn split_command(command: &str) -> Result, String> { } async fn start_pdb_session( - python_command: &String, - command_args: &mut Vec, - session_hashmap_key: &String, - gcx: Arc>, + python_command: &String, + command_args: &mut Vec, + session_hashmap_key: &String, + gcx: Arc>, timeout_seconds: u64, ) -> Result { if !(command_args.len() >= 3 && command_args[0] == "python" && command_args[1] == "-m" && command_args[2] == "pdb") { @@ -248,7 +248,7 @@ async fn interact_with_pdb( return Err(format!("There is leftover output from previous commands, run pdb tool again with \"wait n_seconds\" to wait for it or \"kill\" command to kill the session.\nstdout:\n{}\nstderr:\n{}", prev_output, prev_error)); } } - + let (output_main_command, error_main_command) = send_command_and_get_output_and_error( pdb_session, input_command, session_hashmap_key, gcx.clone(), timeout_seconds * 1000, true).await?; let (output_list, error_list) = send_command_and_get_output_and_error( @@ -268,9 +268,9 @@ async fn interact_with_pdb( } async fn send_command_and_get_output_and_error( - pdb_session: &mut PdbSession, - input_command: &str, - session_hashmap_key: &str, + pdb_session: &mut PdbSession, + input_command: &str, + session_hashmap_key: &str, gcx: Arc>, timeout_ms: u64, ask_for_continuation_if_timeout: bool, diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index 928a4ba6c..3f8904615 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -45,7 +45,7 @@ impl IntegrationTrait for ToolPostgres { serde_json::to_value(&self.settings_postgres).unwrap() } - fn integr_upgrade_to_tool(&self) -> Box { + fn integr_upgrade_to_tool(&self, _integr_name: &String) -> Box { Box::new(ToolPostgres { settings_postgres: self.settings_postgres.clone() }) as Box diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index fa94cf06f..1090b1ef7 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -38,7 +38,6 @@ pub fn integration_from_name(n: &str) -> Result Ok(Box::new(docker::integr_docker::ToolDocker {..Default::default() }) as Box), cmdline if cmdline.starts_with("cmdline_") => { let tool_name = cmdline.strip_prefix("cmdline_").unwrap(); - tracing::info!("todo finish me tool_name={}", tool_name); Ok(Box::new(integr_cmdline::ToolCmdline {..Default::default()}) as Box) }, service if service.starts_with("service_") => { diff --git a/src/integrations/running_integrations.rs b/src/integrations/running_integrations.rs index 87d2e81f5..3e3937a21 100644 --- a/src/integrations/running_integrations.rs +++ b/src/integrations/running_integrations.rs @@ -38,7 +38,7 @@ pub async fn load_integration_tools( } }; integr.integr_settings_apply(&rec.config_unparsed); - tools.insert(rec.integr_name.clone(), Arc::new(AMutex::new(integr.integr_upgrade_to_tool()))); + tools.insert(rec.integr_name.clone(), Arc::new(AMutex::new(integr.integr_upgrade_to_tool(&rec.integr_name)))); } for e in error_log { From dad8b2fe0a5ce158c8499ae2f9c8178e549b3296 Mon Sep 17 
00:00:00 2001 From: Oleg Klimov Date: Thu, 5 Dec 2024 08:36:42 +0100 Subject: [PATCH 052/185] to_string_pretty() in links, links.sh more runnable --- examples/links.sh | 12 ++++++++---- src/http/routers/v1/links.rs | 6 +++--- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/examples/links.sh b/examples/links.sh index a1bfb3f94..4cdb3a50e 100644 --- a/examples/links.sh +++ b/examples/links.sh @@ -1,4 +1,5 @@ -# Follow up example +echo +echo Follow up example curl -X POST http://localhost:8001/v1/links \ -H "Content-Type: application/json" \ -d '{ @@ -39,7 +40,8 @@ curl -X POST http://localhost:8001/v1/links \ ] }' -# Configuration goto example +echo +echo Configuration goto example curl -X POST http://localhost:8001/v1/links \ -H "Content-Type: application/json" \ -d '{ @@ -98,7 +100,8 @@ curl -X POST http://localhost:8001/v1/links \ ] }' -# Summarize project example +echo +echo Summarize project example curl -X POST http://localhost:8001/v1/links \ -H "Content-Type: application/json" \ -d '{ @@ -111,7 +114,8 @@ curl -X POST http://localhost:8001/v1/links \ "messages": [] }' -# Example of Save and return in configuration chat +echo +echo Example of Save and return in configuration chat curl -X POST http://localhost:8001/v1/links \ -H "Content-Type: application/json" \ -d '{ diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 1881778ab..b242778c1 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -93,11 +93,11 @@ pub async fn handle_v1_links( goto: None, }); } - + Ok(Response::builder() .status(StatusCode::OK) .header("Content-Type", "application/json") - .body(Body::from(serde_json::json!({"links": links}).to_string())) + .body(Body::from(serde_json::to_string_pretty(&serde_json::json!({"links": links})).unwrap())) .unwrap()) } @@ -140,4 +140,4 @@ fn failed_integration_names_after_last_user_message(messages: &Vec) result.sort(); result.dedup(); result -} \ No newline at end of file +} From 308c1c36c7dabb685eeea6b17b4f62681d0c5adf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Wed, 4 Dec 2024 16:06:55 +0100 Subject: [PATCH 053/185] fix: run project in isolation based on chat post --- src/http/routers/v1/chat.rs | 10 ++-------- src/integrations/docker/docker_container_manager.rs | 3 +-- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index bd5ec0f46..6d6affc66 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -172,15 +172,9 @@ async fn _chat( } } - let docker_tool_maybe = docker_tool_load(gcx.clone()).await - .map_err(|e| info!("No docker tool available: {e}")).ok().map(Arc::new); - // XXX change this for post.isolation, not docker settings - let run_chat_threads_inside_container = docker_tool_maybe.clone() - .map(|docker_tool| docker_tool.settings_docker.run_chat_threads_inside_container) - .unwrap_or(false); - let should_execute_remotely = run_chat_threads_inside_container && !gcx.read().await.cmdline.inside_container; + let should_execute_remotely = chat_post.meta.chat_remote && !gcx.read().await.cmdline.inside_container; if should_execute_remotely { - docker_container_check_status_or_start(gcx.clone(), docker_tool_maybe.clone(), &chat_post.meta.chat_id).await + docker_container_check_status_or_start(gcx.clone(), &chat_post.meta.chat_id).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; } diff --git a/src/integrations/docker/docker_container_manager.rs 
b/src/integrations/docker/docker_container_manager.rs index 648a46503..e5560e2f1 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -76,11 +76,10 @@ impl IntegrationSession for DockerContainerSession { pub async fn docker_container_check_status_or_start( gcx: Arc>, - docker_tool_maybe: Option>, chat_id: &str, ) -> Result<(), String> { - let docker = docker_tool_maybe.ok_or_else(|| "Docker tool not found".to_string())?; + let docker = docker_tool_load(gcx.clone()).await?; let docker_container_session_maybe = { let gcx_locked = gcx.read().await; gcx_locked.integration_sessions.get(&get_session_hashmap_key("docker", &chat_id)).cloned() From 3e1a2c8fbe2097c50cfbf0826b694d0709af3573 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Wed, 4 Dec 2024 20:50:20 +0100 Subject: [PATCH 054/185] fix: get agentic and exploration tools based on chat meta --- src/call_validation.rs | 10 ++++++++++ src/http/routers/v1/chat.rs | 20 ++------------------ 2 files changed, 12 insertions(+), 18 deletions(-) diff --git a/src/call_validation.rs b/src/call_validation.rs index d1dece971..96f9fe826 100644 --- a/src/call_validation.rs +++ b/src/call_validation.rs @@ -218,6 +218,16 @@ pub enum ChatMode { ProjectSummary, } +impl ChatMode { + pub fn has_exploration_tools(&self) -> bool { + *self != ChatMode::NoTools + } + + pub fn has_agentic_tools(&self) -> bool { + matches!(self, ChatMode::Agent | ChatMode::Configure | ChatMode::ProjectSummary) + } +} + impl Default for ChatMode { fn default() -> Self { ChatMode::NoTools diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 6d6affc66..208fb1187 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -180,25 +180,9 @@ async fn _chat( let have_system = !messages.is_empty() && messages[0].role == "system"; if !have_system { - // XXX: make it explicit instead of auto-detector - let mut exploration_tools: bool = false; - let mut agentic_tools: bool = false; - if chat_post.tools.is_some() { - for t in chat_post.tools.as_ref().unwrap() { - let tobj = t.as_object().unwrap(); - if let Some(function) = tobj.get("function") { - if let Some(name) = function.get("name") { - if name.as_str() == Some("web") { // anything that will still be on without ast and vecdb - exploration_tools = true; - } - if name.as_str() == Some("patch") { - agentic_tools = true; - } - } - } - } - } use crate::scratchpads::chat_utils_prompts::{get_default_system_prompt, get_default_system_prompt_from_remote, system_prompt_add_workspace_info}; + let exploration_tools = chat_post.meta.chat_mode.has_exploration_tools(); + let agentic_tools = chat_post.meta.chat_mode.has_agentic_tools(); let system_message_content = if should_execute_remotely { get_default_system_prompt_from_remote(gcx.clone(), exploration_tools, agentic_tools, &chat_post.meta.chat_id).await.map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e) From 8e7d10a7d1d763e90beeacfe6e6c6295953b6436 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Wed, 4 Dec 2024 20:52:42 +0100 Subject: [PATCH 055/185] fix: remote system prompt is already returned in the endpoint --- src/http/routers/v1/chat.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 208fb1187..eddf7a723 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -188,13 +188,14 @@ async fn _chat( 
ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e) )? } else { - get_default_system_prompt(gcx.clone(), exploration_tools, agentic_tools).await + system_prompt_add_workspace_info(gcx.clone(), + &get_default_system_prompt(gcx.clone(), exploration_tools, agentic_tools).await + ).await }; messages.insert(0, ChatMessage { role: "system".to_string(), - // XXX: need remote %WORKSPACE_INFO% as well - content: ChatContent::SimpleText(system_prompt_add_workspace_info(gcx.clone(), &system_message_content).await), + content: ChatContent::SimpleText(system_message_content), ..Default::default() }) } From cb4f39ed97dc21e679e2da1e1970fd7cb97bc014 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Wed, 4 Dec 2024 21:42:40 +0100 Subject: [PATCH 056/185] fix: default docker daemon address is empty string, works on any local OS and remote linux hosts. --- .../docker/docker_ssh_tunnel_utils.rs | 12 ++++++++---- src/integrations/docker/integr_docker.rs | 16 +++++++--------- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/src/integrations/docker/docker_ssh_tunnel_utils.rs b/src/integrations/docker/docker_ssh_tunnel_utils.rs index 3fc0384e0..b74aa595b 100644 --- a/src/integrations/docker/docker_ssh_tunnel_utils.rs +++ b/src/integrations/docker/docker_ssh_tunnel_utils.rs @@ -48,10 +48,14 @@ pub async fn forward_remote_docker_if_needed(docker_daemon_address: &str, ssh_co } } - let remote_port_or_socket = if docker_daemon_address.starts_with("unix://") || docker_daemon_address.starts_with("npipe://") { - docker_daemon_address.split("://").nth(1).unwrap_or_default().to_string() - } else { - docker_daemon_address.split(":").last().unwrap_or_default().to_string() + let remote_port_or_socket = match docker_daemon_address { + "" => "/var/run/docker.sock".to_string(), + _ if docker_daemon_address.starts_with("unix://") || docker_daemon_address.starts_with("npipe://") => { + docker_daemon_address.split("://").nth(1).unwrap_or_default().to_string() + }, + _ => { + docker_daemon_address.split(":").last().unwrap_or_default().to_string() + } }; let ssh_tunnel = ssh_tunnel_open(&mut vec![Port { published: "0".to_string(), target: remote_port_or_socket }], ssh_config).await?; diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index 5b586a79e..e6402705b 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -126,13 +126,11 @@ impl ToolDocker { command_append_label_if_creates_resource(&mut command_args, &self.settings_docker.label); let docker_host = self.get_docker_host(gcx.clone()).await?; - let output = Command::new(&self.settings_docker.docker_cli_path) - .arg("-H") - .arg(&docker_host) - .args(&command_args) - .output() - .await - .map_err(|e| e.to_string())?; + let mut command_process = Command::new(&self.settings_docker.docker_cli_path); + if !docker_host.is_empty() { + command_process.arg("-H").arg(&docker_host); + } + let output = command_process.args(&command_args).output().await.map_err(|e| e.to_string())?; let stdout = String::from_utf8_lossy(&output.stdout).to_string(); let stderr = String::from_utf8_lossy(&output.stderr).to_string(); @@ -301,8 +299,8 @@ pub const DOCKER_INTEGRATION_SCHEMA: &str = r#" fields: docker_daemon_address: f_type: string_long - f_desc: "The address to connect to the Docker daemon." - f_default: "unix:///var/run/docker.sock" + f_desc: "The address to connect to the Docker daemon; specify only if not using the default." 
+ f_default: "" docker_cli_path: f_type: string_long f_desc: "Path to the Docker CLI executable." From e516b91883375a584e95a02f072fd5691c6d0a44 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Thu, 5 Dec 2024 10:40:09 +0100 Subject: [PATCH 057/185] feat: allow counting unstaged changes optionally --- src/git.rs | 7 ++++++- src/http/routers/v1/git.rs | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/src/git.rs b/src/git.rs index 0fb62bff3..fd4167a48 100644 --- a/src/git.rs +++ b/src/git.rs @@ -65,7 +65,7 @@ pub fn stage_all_changes(repository: &Repository) -> Result<(), String> { /// Returns: /// /// A tuple containing the number of new files, modified files, and deleted files. -pub fn count_file_changes(repository: &Repository) -> Result<(usize, usize, usize), String> { +pub fn count_file_changes(repository: &Repository, include_unstaged: bool) -> Result<(usize, usize, usize), String> { let (mut new_files, mut modified_files, mut deleted_files) = (0, 0, 0); let statuses = repository.statuses(None) @@ -75,6 +75,11 @@ pub fn count_file_changes(repository: &Repository) -> Result<(usize, usize, usiz if status.contains(Status::INDEX_NEW) { new_files += 1; } if status.contains(Status::INDEX_MODIFIED) { modified_files += 1;} if status.contains(Status::INDEX_DELETED) { deleted_files += 1; } + if include_unstaged { + if status.contains(Status::WT_NEW) { new_files += 1; } + if status.contains(Status::WT_MODIFIED) { modified_files += 1;} + if status.contains(Status::WT_DELETED) { deleted_files += 1; } + } } Ok((new_files, modified_files, deleted_files)) diff --git a/src/http/routers/v1/git.rs b/src/http/routers/v1/git.rs index 7e78ebfca..aae94f1b7 100644 --- a/src/http/routers/v1/git.rs +++ b/src/http/routers/v1/git.rs @@ -36,7 +36,7 @@ pub async fn handle_v1_git_stage_and_commit( stage_all_changes(&repository) .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; - let (new_files, modified_files, deleted_files) = count_file_changes(&repository) + let (new_files, modified_files, deleted_files) = count_file_changes(&repository, false) .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; let commit_oid = if new_files + modified_files + deleted_files != 0 { From c62d11739cf3ff21c7caa6150fb376b76874ef58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Thu, 5 Dec 2024 11:18:35 +0100 Subject: [PATCH 058/185] feat: list of projects to commit --- src/http/routers/v1/links.rs | 60 ++++++++++++++++++++++++++++++------ 1 file changed, 51 insertions(+), 9 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index b242778c1..759fc27cf 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -37,6 +37,14 @@ pub struct Link { text: String, #[serde(skip_serializing_if = "Option::is_none")] goto: Option, + #[serde(skip_serializing_if = "Option::is_none")] + projects: Option>, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct ProjectCommit { + path: String, + commit_message: String, } pub async fn handle_v1_links( @@ -52,6 +60,7 @@ pub async fn handle_v1_links( action: LinkAction::SummarizeProject, text: "Investigate Project".to_string(), goto: None, + projects: None, }); } @@ -60,16 +69,18 @@ pub async fn handle_v1_links( action: LinkAction::PatchAll, text: "Save and return".to_string(), goto: Some("SETTINGS:DEFAULT".to_string()), + projects: None, }); } if post.meta.chat_mode == ChatMode::Agent { - if let Ok(commit_msg) = 
generate_commit_messages_with_current_changes(gcx.clone()) - .await.map_err(|e| error!(e)) { + let (project_commits, files_changed) = generate_commit_messages_with_current_changes(gcx.clone()).await; + if !project_commits.is_empty() { links.push(Link { action: LinkAction::Commit, - text: format!("git commit -m \"{}\"", commit_msg), + text: format!("Commit {files_changed} files"), goto: None, + projects: Some(project_commits), }); } } @@ -80,6 +91,7 @@ pub async fn handle_v1_links( action: LinkAction::Goto, text: format!("Configure {failed_integr_name}"), goto: Some(format!("SETTINGS:{failed_integr_name}")), + projects: None, }) } } @@ -91,6 +103,7 @@ pub async fn handle_v1_links( action: LinkAction::FollowUp, text: follow_up_message, goto: None, + projects: None, }); } @@ -101,12 +114,41 @@ pub async fn handle_v1_links( .unwrap()) } -async fn generate_commit_messages_with_current_changes(gcx: Arc>) -> Result { - let active_project_path = crate::files_correction::get_active_project_path(gcx.clone()).await.ok_or("No active project found".to_string())?; - let repository = git2::Repository::open(&active_project_path).map_err(|e| e.to_string())?; - let diff = crate::git::git_diff_from_all_changes(&repository)?; - let commit_msg = generate_commit_message_by_diff(gcx.clone(), &diff, &None).await.map_err(|e| e.to_string())?; - Ok(commit_msg) +async fn generate_commit_messages_with_current_changes(gcx: Arc>) -> (Vec, usize) { + let mut project_commits = Vec::new(); + let mut total_file_changes = 0; + + for project_path in crate::files_correction::get_project_dirs(gcx.clone()).await { + let repository = match git2::Repository::open(&project_path) { + Ok(repo) => repo, + Err(e) => { error!("{}", e); continue; } + }; + + let (added, modified, deleted) = match crate::git::count_file_changes(&repository, true) { + Ok((0, 0, 0)) => { continue; } + Ok(changes) => changes, + Err(e) => { error!("{}", e); continue; } + }; + + let diff = match crate::git::git_diff_from_all_changes(&repository) { + Ok(d) if d.is_empty() => { continue; } + Ok(d) => d, + Err(e) => { error!("{}", e); continue; } + }; + + let commit_msg = match generate_commit_message_by_diff(gcx.clone(), &diff, &None).await { + Ok(msg) => msg, + Err(e) => { error!("{}", e); continue; } + }; + + project_commits.push(ProjectCommit { + path: project_path.to_string_lossy().to_string(), + commit_message: commit_msg, + }); + total_file_changes += added + modified + deleted; + } + + (project_commits, total_file_changes) } // TODO: Move all logic below to more appropiate files From ba2c22d95cad6e99e1df3701a5c3a2a1bc09bb9b Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 5 Dec 2024 12:52:48 +0100 Subject: [PATCH 059/185] warnings --- src/http/routers/v1/chat.rs | 1 - src/integrations/integr_cmdline.rs | 2 +- src/scratchpads/chat_generic.rs | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index eddf7a723..19dcb13e2 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -13,7 +13,6 @@ use crate::custom_error::ScratchError; use crate::at_commands::at_commands::AtCommandsContext; use crate::global_context::SharedGlobalContext; use crate::integrations::docker::docker_container_manager::docker_container_check_status_or_start; -use crate::integrations::docker::integr_docker::docker_tool_load; use crate::{caps, scratchpads}; diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 4830dc596..2c64ba803 100644 --- 
a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -222,7 +222,7 @@ impl Tool for ToolCmdline { tool_call_id: &String, args: &HashMap, ) -> Result<(bool, Vec), String> { - let gcx = ccx.lock().await.global_context.clone(); + // let gcx = ccx.lock().await.global_context.clone(); let mut args_str: HashMap = HashMap::new(); let valid_params: Vec = self.cfg.parameters.iter().map(|p| p.name.clone()).collect(); diff --git a/src/scratchpads/chat_generic.rs b/src/scratchpads/chat_generic.rs index b0a5bb027..331e59c0a 100644 --- a/src/scratchpads/chat_generic.rs +++ b/src/scratchpads/chat_generic.rs @@ -105,7 +105,7 @@ impl ScratchpadAbstract for GenericChatScratchpad { } else { (self.messages.clone(), self.messages.len(), false) }; - let mut limited_msgs: Vec = limit_messages_history(&self.t, &messages, undroppable_msg_n, self.post.parameters.max_new_tokens, n_ctx)?; + let limited_msgs: Vec = limit_messages_history(&self.t, &messages, undroppable_msg_n, self.post.parameters.max_new_tokens, n_ctx)?; // if self.supports_tools { // }; sampling_parameters_to_patch.stop = self.dd.stop_list.clone(); From c4e9a1bf87dcc7244b6a10461ba66d55e5543a91 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 5 Dec 2024 12:52:59 +0100 Subject: [PATCH 060/185] cmdline_TEMPLATE --- src/integrations/mod.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index 1090b1ef7..15d18b9ff 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -37,7 +37,7 @@ pub fn integration_from_name(n: &str) -> Result Ok(Box::new(ToolChrome { ..Default::default() }) as Box), "docker" => Ok(Box::new(docker::integr_docker::ToolDocker {..Default::default() }) as Box), cmdline if cmdline.starts_with("cmdline_") => { - let tool_name = cmdline.strip_prefix("cmdline_").unwrap(); + // let tool_name = cmdline.strip_prefix("cmdline_").unwrap(); Ok(Box::new(integr_cmdline::ToolCmdline {..Default::default()}) as Box) }, service if service.starts_with("service_") => { @@ -68,6 +68,7 @@ pub fn integrations_list() -> Vec<&'static str> { // "gitlab", // "pdb", "postgres", + "cmdline_TEMPLATE", // "chrome", "docker" ] From 9551524d9f8af4047cd96220b03594eb5da56e9e Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 5 Dec 2024 17:22:59 +0100 Subject: [PATCH 061/185] ChatMode mode direct translation to prompt --- .../refact/chat_client.py | 5 +++- src/call_validation.rs | 10 ------- src/http/routers/v1/chat.rs | 18 ++++++------- src/http/routers/v1/system_prompt.rs | 3 ++- src/integrations/integr_postgres.rs | 2 +- src/integrations/running_integrations.rs | 5 +++- src/integrations/setting_up_integrations.rs | 2 +- src/scratchpads/chat_generic.rs | 5 +--- src/scratchpads/chat_utils_prompts.rs | 16 +++++------- src/yaml_configs/create_configs.rs | 26 +++++++++---------- src/yaml_configs/customization_compiled_in.rs | 5 +++- src/yaml_configs/customization_loader.rs | 1 + 12 files changed, 47 insertions(+), 51 deletions(-) diff --git a/python_binding_and_cmdline/refact/chat_client.py b/python_binding_and_cmdline/refact/chat_client.py index c14ab4e71..b37de4a39 100644 --- a/python_binding_and_cmdline/refact/chat_client.py +++ b/python_binding_and_cmdline/refact/chat_client.py @@ -241,8 +241,11 @@ async def ask_using_http( } if postprocess_parameters is not None: post_me["postprocess_parameters"] = postprocess_parameters + meta = {} if chat_id is not None: - post_me["chat_id"] = chat_id + meta["chat_id"] = chat_id + meta["chat_mode"] = "AGENT" + 
post_me["meta"] = meta choices: List[Optional[Message]] = [None] * n_answers async with aiohttp.ClientSession() as session: async with session.post(base_url + "/chat", json=post_me) as response: diff --git a/src/call_validation.rs b/src/call_validation.rs index 96f9fe826..d1dece971 100644 --- a/src/call_validation.rs +++ b/src/call_validation.rs @@ -218,16 +218,6 @@ pub enum ChatMode { ProjectSummary, } -impl ChatMode { - pub fn has_exploration_tools(&self) -> bool { - *self != ChatMode::NoTools - } - - pub fn has_agentic_tools(&self) -> bool { - matches!(self, ChatMode::Agent | ChatMode::Configure | ChatMode::ProjectSummary) - } -} - impl Default for ChatMode { fn default() -> Self { ChatMode::NoTools diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 19dcb13e2..8e8f6c886 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -7,13 +7,13 @@ use axum::response::Result; use hyper::{Body, Response, StatusCode}; use tracing::info; -use crate::call_validation::{ChatContent, ChatMessage, ChatPost}; +use crate::call_validation::{ChatContent, ChatMessage, ChatPost, ChatMode}; use crate::caps::CodeAssistantCaps; use crate::custom_error::ScratchError; use crate::at_commands::at_commands::AtCommandsContext; use crate::global_context::SharedGlobalContext; use crate::integrations::docker::docker_container_manager::docker_container_check_status_or_start; -use crate::{caps, scratchpads}; +use crate::scratchpads::chat_utils_prompts::{get_default_system_prompt, get_default_system_prompt_from_remote, system_prompt_add_workspace_info}; pub const CHAT_TOP_N: usize = 7; @@ -24,12 +24,12 @@ pub async fn lookup_chat_scratchpad( ) -> Result<(String, String, serde_json::Value, usize, bool, bool, bool), String> { let caps_locked = caps.read().unwrap(); let (model_name, recommended_model_record) = - caps::which_model_to_use( + crate::caps::which_model_to_use( &caps_locked.code_chat_models, &chat_post.model, &caps_locked.code_chat_default_model, )?; - let (sname, patch) = caps::which_scratchpad_to_use( + let (sname, patch) = crate::caps::which_scratchpad_to_use( &recommended_model_record.supports_scratchpads, &chat_post.scratchpad, &recommended_model_record.default_scratchpad, @@ -179,16 +179,16 @@ async fn _chat( let have_system = !messages.is_empty() && messages[0].role == "system"; if !have_system { - use crate::scratchpads::chat_utils_prompts::{get_default_system_prompt, get_default_system_prompt_from_remote, system_prompt_add_workspace_info}; - let exploration_tools = chat_post.meta.chat_mode.has_exploration_tools(); - let agentic_tools = chat_post.meta.chat_mode.has_agentic_tools(); + let exploration_tools = chat_post.meta.chat_mode != ChatMode::NoTools; + let agentic_tools = matches!(chat_post.meta.chat_mode, ChatMode::Agent | ChatMode::Configure | ChatMode::ProjectSummary); let system_message_content = if should_execute_remotely { + // XXX pass chat_post.meta.chat_mode get_default_system_prompt_from_remote(gcx.clone(), exploration_tools, agentic_tools, &chat_post.meta.chat_id).await.map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e) )? 
} else { system_prompt_add_workspace_info(gcx.clone(), - &get_default_system_prompt(gcx.clone(), exploration_tools, agentic_tools).await + &get_default_system_prompt(gcx.clone(), chat_post.meta.chat_mode.clone()).await ).await }; @@ -200,7 +200,7 @@ async fn _chat( } // chat_post.stream = Some(false); // for debugging 400 errors that are hard to debug with streaming (because "data: " is not present and the error message is ignored by the library) - let mut scratchpad = scratchpads::create_chat_scratchpad( + let mut scratchpad = crate::scratchpads::create_chat_scratchpad( gcx.clone(), caps, model_name.clone(), diff --git a/src/http/routers/v1/system_prompt.rs b/src/http/routers/v1/system_prompt.rs index a54ec8a1a..9623a8c91 100644 --- a/src/http/routers/v1/system_prompt.rs +++ b/src/http/routers/v1/system_prompt.rs @@ -26,10 +26,11 @@ pub async fn handle_v1_system_prompt( Extension(gcx): Extension>>, body_bytes: hyper::body::Bytes, ) -> Result, ScratchError> { + // XXX receive ChatMode let post = serde_json::from_slice::(&body_bytes) .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; - let prompt = get_default_system_prompt(gcx.clone(), post.have_exploration_tools, post.have_agentic_tools).await; + let prompt = get_default_system_prompt(gcx.clone(), crate::call_validation::ChatMode::Agent).await; let prompt_with_workspace_info = system_prompt_add_workspace_info(gcx.clone(), &prompt).await; diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index 3f8904615..54e2a8c86 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -217,7 +217,7 @@ smartlinks: sl_chat: - role: "user" content: | - šŸ”§ The postgres tool should be visible now. To test the tool, list the tables available, briefly desctibe the tables and express + šŸ”§ The postgres tool should be visible now. To test the tool, list the tables available, briefly describe the tables and express satisfaction and relief if it works, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. The current config file is %CURRENT_CONFIG%. 
docker: diff --git a/src/integrations/running_integrations.rs b/src/integrations/running_integrations.rs index 3e3937a21..9de0e25b0 100644 --- a/src/integrations/running_integrations.rs +++ b/src/integrations/running_integrations.rs @@ -37,7 +37,10 @@ pub async fn load_integration_tools( continue; } }; - integr.integr_settings_apply(&rec.config_unparsed); + let should_be_fine = integr.integr_settings_apply(&rec.config_unparsed); + if should_be_fine.is_err() { + tracing::error!("failed to apply settings for integration {}: {:?}", rec.integr_name, should_be_fine.err()); + } tools.insert(rec.integr_name.clone(), Arc::new(AMutex::new(integr.integr_upgrade_to_tool(&rec.integr_name)))); } diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 4279ebc7b..adab40841 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -261,7 +261,7 @@ pub fn join_config_path(config_dir: &PathBuf, integr_name: &str) -> String pub async fn get_config_dirs( gcx: Arc>, ) -> (Vec, PathBuf) { - let (global_config_dir, workspace_folders_arc, integrations_yaml) = { + let (global_config_dir, workspace_folders_arc, _integrations_yaml) = { let gcx_locked = gcx.read().await; (gcx_locked.config_dir.clone(), gcx_locked.documents_state.workspace_folders.clone(), gcx_locked.cmdline.integrations_yaml.clone()) }; diff --git a/src/scratchpads/chat_generic.rs b/src/scratchpads/chat_generic.rs index 331e59c0a..3b50d438d 100644 --- a/src/scratchpads/chat_generic.rs +++ b/src/scratchpads/chat_generic.rs @@ -96,10 +96,7 @@ impl ScratchpadAbstract for GenericChatScratchpad { ccx: Arc>, sampling_parameters_to_patch: &mut SamplingParameters, ) -> Result { - let (n_ctx, gcx) = { - let ccx_locked = ccx.lock().await; - (ccx_locked.n_ctx, ccx_locked.global_context.clone()) - }; + let n_ctx = ccx.lock().await.n_ctx; let (messages, undroppable_msg_n, _any_context_produced) = if self.allow_at { run_at_commands(ccx.clone(), self.t.tokenizer.clone(), sampling_parameters_to_patch.max_new_tokens, &self.messages, &mut self.has_rag_results).await } else { diff --git a/src/scratchpads/chat_utils_prompts.rs b/src/scratchpads/chat_utils_prompts.rs index 8ae4a0c09..c9d49331f 100644 --- a/src/scratchpads/chat_utils_prompts.rs +++ b/src/scratchpads/chat_utils_prompts.rs @@ -11,8 +11,7 @@ use crate::integrations::docker::docker_container_manager::docker_container_get_ pub async fn get_default_system_prompt( gcx: Arc>, - have_exploration_tools: bool, - have_agentic_tools: bool, + chat_mode: crate::call_validation::ChatMode, ) -> String { let tconfig = match crate::yaml_configs::customization_loader::load_customization(gcx.clone(), true).await { Ok(tconfig) => tconfig, @@ -21,18 +20,17 @@ pub async fn get_default_system_prompt( return String::new(); }, }; - let prompt_key = if have_agentic_tools { - "agentic_tools" - } else if have_exploration_tools { - "exploration_tools" - } else { - "default" + let prompt_key = match chat_mode { + crate::call_validation::ChatMode::NoTools => "default", + crate::call_validation::ChatMode::Explore => "exploration_tools", + crate::call_validation::ChatMode::Agent => "agentic_tools", + crate::call_validation::ChatMode::Configure => "configurator", + crate::call_validation::ChatMode::ProjectSummary => "project_summary", }; let system_prompt = tconfig.system_prompts.get(prompt_key).map_or_else(|| { tracing::error!("cannot find system prompt `{}`", prompt_key); String::new() }, |x| x.text.clone()); - // 
tracing::info!("system_prompt:\n{}", system_prompt); system_prompt } diff --git a/src/yaml_configs/create_configs.rs b/src/yaml_configs/create_configs.rs index a6a9eb486..1fc5f86a2 100644 --- a/src/yaml_configs/create_configs.rs +++ b/src/yaml_configs/create_configs.rs @@ -145,19 +145,19 @@ async fn update_checksum(cache_dir: &Path, config_name: String, checksum: &str) // read_yaml_into_value(integrations_enabled_path).await.unwrap_or_else(|_| serde_yaml::Value::Mapping(Default::default())) // } -pub async fn read_yaml_into_value(yaml_path: &PathBuf) -> Result { - let file = std::fs::File::open(&yaml_path).map_err( - |e| format!("Failed to open {}: {}", yaml_path.display(), e) - )?; - - let reader = std::io::BufReader::new(file); - serde_yaml::from_reader(reader).map_err( - |e| { - let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); - format!("Failed to parse {}{}: {}", yaml_path.display(), location, e) - } - ) -} +// pub async fn read_yaml_into_value(yaml_path: &PathBuf) -> Result { +// let file = std::fs::File::open(&yaml_path).map_err( +// |e| format!("Failed to open {}: {}", yaml_path.display(), e) +// )?; + +// let reader = std::io::BufReader::new(file); +// serde_yaml::from_reader(reader).map_err( +// |e| { +// let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); +// format!("Failed to parse {}{}: {}", yaml_path.display(), location, e) +// } +// ) +// } // pub async fn write_yaml_value(path: &Path, value: &serde_yaml::Value) -> Result<(), String> { // let content = serde_yaml::to_string(value).map_err(|e| format!("Failed to serialize YAML: {}", e))?; diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index 4c25662b6..715152fce 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -167,7 +167,10 @@ system_prompts: show: never configurator: text: "%PROMPT_CONFIGURATOR%" - show: experimental + show: never + project_summary: + text: "TBD" + show: never subchat_tool_parameters: diff --git a/src/yaml_configs/customization_loader.rs b/src/yaml_configs/customization_loader.rs index 5fa7d11a1..4f62c8c59 100644 --- a/src/yaml_configs/customization_loader.rs +++ b/src/yaml_configs/customization_loader.rs @@ -251,5 +251,6 @@ mod tests { assert_eq!(config.system_prompts.get("exploration_tools").is_some(), true); assert_eq!(config.system_prompts.get("agentic_tools").is_some(), true); assert_eq!(config.system_prompts.get("configurator").is_some(), true); + assert_eq!(config.system_prompts.get("project_summary").is_some(), true); } } From a5e549a708c96dc837bc98a638708a79946f4a39 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 5 Dec 2024 17:23:11 +0100 Subject: [PATCH 062/185] repair cmdline_service --- src/integrations/integr_cmdline.rs | 94 ++------ src/integrations/integr_cmdline_service.rs | 243 +++++---------------- src/integrations/mod.rs | 7 +- 3 files changed, 78 insertions(+), 266 deletions(-) diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 2c64ba803..0f1d302eb 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -2,7 +2,6 @@ use std::collections::HashMap; use std::sync::Arc; use std::process::Stdio; use tokio::sync::Mutex as AMutex; -use tokio::io::BufReader; use serde::Deserialize; use serde::Serialize; use async_trait::async_trait; @@ -12,33 +11,32 @@ use 
tracing::info; use crate::at_commands::at_commands::AtCommandsContext; use crate::tools::tools_description::{ToolParam, Tool, ToolDesc}; use crate::call_validation::{ChatMessage, ChatContent, ContextEnum}; -use crate::integrations::process_io_utils::blocking_read_until_token_or_timeout; use crate::postprocessing::pp_command_output::{CmdlineOutputFilter, output_mini_postprocessing}; use crate::integrations::integr_abstract::IntegrationTrait; #[derive(Deserialize, Serialize, Clone, Default)] -struct CmdlineToolConfig { - command: String, - command_workdir: String, +pub struct CmdlineToolConfig { + pub command: String, + pub command_workdir: String, - description: String, - parameters: Vec, - parameters_required: Option>, + pub description: String, + pub parameters: Vec, + pub parameters_required: Option>, // blocking #[serde(default = "_default_timeout")] - timeout: u64, + pub timeout: u64, #[serde(default)] - output_filter: CmdlineOutputFilter, + pub output_filter: CmdlineOutputFilter, // background #[serde(default)] - startup_wait_port: Option, + pub startup_wait_port: Option, #[serde(default = "_default_startup_wait")] - startup_wait: u64, + pub startup_wait: u64, #[serde(default)] - startup_wait_keyword: Option, + pub startup_wait_keyword: Option, } fn _default_timeout() -> u64 { @@ -86,39 +84,7 @@ impl IntegrationTrait for ToolCmdline { } } -// pub fn cmdline_tool_from_yaml_value( -// cfg_cmdline_value: &serde_yaml::Value, -// background: bool, -// ) -> Result>>>, String> { -// let mut result = IndexMap::new(); -// let cfgmap = match serde_yaml::from_value::>(cfg_cmdline_value.clone()) { -// Ok(cfgmap) => cfgmap, -// Err(e) => { -// let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); -// return Err(format!("failed to parse cmdline section: {:?}{}", e, location)); -// } -// }; -// for (c_name, mut c_cmd_tool) in cfgmap.into_iter() { -// // if background { -// // c_cmd_tool.parameters.push(ToolParam { -// // name: "action".to_string(), -// // param_type: "string".to_string(), -// // description: "start | stop | restart | status".to_string(), -// // }); -// // } -// let tool = Arc::new(AMutex::new(Box::new( -// ToolCmdline { -// // is_service: background, -// name: c_name.clone(), -// cfg: c_cmd_tool, -// } -// ) as Box)); -// result.insert(c_name, tool); -// } -// Ok(result) -// } - -fn _replace_args(x: &str, args_str: &HashMap) -> String { +pub fn replace_args(x: &str, args_str: &HashMap) -> String { let mut result = x.to_string(); for (key, value) in args_str { result = result.replace(&format!("%{}%", key), value); @@ -126,7 +92,7 @@ fn _replace_args(x: &str, args_str: &HashMap) -> String { result } -fn format_output(stdout_out: &str, stderr_out: &str) -> String { +pub fn format_output(stdout_out: &str, stderr_out: &str) -> String { let mut out = String::new(); if !stdout_out.is_empty() && stderr_out.is_empty() { // special case: just clean output, nice @@ -142,7 +108,7 @@ fn format_output(stdout_out: &str, stderr_out: &str) -> String { out } -async fn create_command_from_string( +pub fn _create_command_from_string( cmd_string: &str, command_workdir: &String, ) -> Result { @@ -159,14 +125,14 @@ async fn create_command_from_string( Ok(cmd) } -async fn execute_blocking_command( +pub async fn execute_blocking_command( command: &str, cfg: &CmdlineToolConfig, command_workdir: &String, ) -> Result { info!("EXEC workdir {}:\n{:?}", command_workdir, command); let command_future = async { - let mut cmd = 
create_command_from_string(command, command_workdir).await?; + let mut cmd = _create_command_from_string(command, command_workdir)?; let t0 = tokio::time::Instant::now(); let result = cmd .stdout(Stdio::piped()) @@ -203,26 +169,16 @@ async fn execute_blocking_command( } } -async fn get_stdout_and_stderr( - timeout_ms: u64, - stdout: &mut BufReader, - stderr: &mut BufReader, -) -> Result<(String, String), String> { - let (stdout_out, stderr_out, _) = blocking_read_until_token_or_timeout(stdout, stderr, timeout_ms, "").await?; - Ok((stdout_out, stderr_out)) -} - #[async_trait] impl Tool for ToolCmdline { fn as_any(&self) -> &dyn std::any::Any { self } async fn tool_execute( &mut self, - ccx: Arc>, + _ccx: Arc>, tool_call_id: &String, args: &HashMap, ) -> Result<(bool, Vec), String> { - // let gcx = ccx.lock().await.global_context.clone(); let mut args_str: HashMap = HashMap::new(); let valid_params: Vec = self.cfg.parameters.iter().map(|p| p.name.clone()).collect(); @@ -243,20 +199,8 @@ impl Tool for ToolCmdline { } } - let command = _replace_args(self.cfg.command.as_str(), &args_str); - let workdir = _replace_args(self.cfg.command_workdir.as_str(), &args_str); - - // let tool_ouput = if self.is_service { - // let action = args_str.get("action").cloned().unwrap_or("start".to_string()); - // if !["start", "restart", "stop", "status"].contains(&action.as_str()) { - // return Err("Tool call is invalid. Param 'action' must be one of 'start', 'restart', 'stop', 'status'. Try again".to_string()); - // } - // execute_background_command( - // gcx, &self.name, &command, &workdir, &self.cfg, action.as_str() - // ).await? - - // } else { - // }; + let command = replace_args(self.cfg.command.as_str(), &args_str); + let workdir = replace_args(self.cfg.command_workdir.as_str(), &args_str); let tool_ouput = execute_blocking_command(&command, &self.cfg, &workdir).await?; diff --git a/src/integrations/integr_cmdline_service.rs b/src/integrations/integr_cmdline_service.rs index d3a47e9c8..df605ad86 100644 --- a/src/integrations/integr_cmdline_service.rs +++ b/src/integrations/integr_cmdline_service.rs @@ -3,68 +3,31 @@ use std::collections::HashMap; use std::future::Future; use std::sync::Arc; use std::process::Stdio; -use indexmap::IndexMap; -use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; use tokio::io::BufReader; -use serde::Deserialize; -use serde::Serialize; +use tokio::sync::{Mutex as AMutex, RwLock as ARwLock}; use async_trait::async_trait; -use tokio::process::Command; -use tracing::info; use process_wrap::tokio::*; use crate::at_commands::at_commands::AtCommandsContext; -use crate::tools::tools_description::{ToolParam, Tool, ToolDesc}; +use crate::tools::tools_description::{Tool, ToolDesc}; use crate::call_validation::{ChatMessage, ChatContent, ContextEnum}; use crate::global_context::GlobalContext; use crate::integrations::process_io_utils::{blocking_read_until_token_or_timeout, is_someone_listening_on_that_tcp_port}; use crate::integrations::sessions::IntegrationSession; -use crate::postprocessing::pp_command_output::{CmdlineOutputFilter, output_mini_postprocessing}; +use crate::postprocessing::pp_command_output::output_mini_postprocessing; use crate::integrations::integr_abstract::IntegrationTrait; +use crate::integrations::integr_cmdline::*; const REALLY_HORRIBLE_ROUNDTRIP: u64 = 3000; // 3000 should be a really bad ping via internet, just in rare case it's a remote port - -#[derive(Deserialize, Serialize, Clone)] -struct CmdlineToolConfig { - command: String, - command_workdir: String, - 
- description: String, - parameters: Vec, - parameters_required: Option>, - - // blocking - #[serde(default = "_default_timeout")] - timeout: u64, - #[serde(default)] - output_filter: CmdlineOutputFilter, - - // background - #[serde(default)] - startup_wait_port: Option, - #[serde(default = "_default_startup_wait")] - startup_wait: u64, - #[serde(default)] - startup_wait_keyword: Option, -} - -fn _default_timeout() -> u64 { - 120 -} - -fn _default_startup_wait() -> u64 { - 10 +#[derive(Default)] +pub struct ToolService { + pub name: String, + pub cfg: CmdlineToolConfig, } -pub struct ToolCmdline { - is_service: bool, - name: String, - cfg: CmdlineToolConfig, -} - -impl IntegrationTrait for ToolCmdline { +impl IntegrationTrait for ToolService { fn integr_settings_apply(&mut self, value: &serde_json::Value) -> Result<(), String> { match serde_json::from_value::(value.clone()) { Ok(x) => self.cfg = x, @@ -81,49 +44,16 @@ impl IntegrationTrait for ToolCmdline { } fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box { - Box::new(ToolCmdline { - is_service: self.is_service, - name: self.name.clone(), + Box::new(ToolService { + name: integr_name.clone(), cfg: self.cfg.clone(), }) as Box } fn integr_schema(&self) -> &str { - CMDLINE_INTEGRATION_SCHEMA - } -} - -pub fn cmdline_tool_from_yaml_value( - cfg_cmdline_value: &serde_yaml::Value, - background: bool, -) -> Result>>>, String> { - let mut result = IndexMap::new(); - let cfgmap = match serde_yaml::from_value::>(cfg_cmdline_value.clone()) { - Ok(cfgmap) => cfgmap, - Err(e) => { - let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); - return Err(format!("failed to parse cmdline section: {:?}{}", e, location)); - } - }; - for (c_name, mut c_cmd_tool) in cfgmap.into_iter() { - if background { - c_cmd_tool.parameters.push(ToolParam { - name: "action".to_string(), - param_type: "string".to_string(), - description: "start | stop | restart | status".to_string(), - }); - } - let tool = Arc::new(AMutex::new(Box::new( - ToolCmdline { - is_service: background, - name: c_name.clone(), - cfg: c_cmd_tool, - } - ) as Box)); - result.insert(c_name, tool); + CMDLINE_SERVICE_INTEGRATION_SCHEMA } - Ok(result) } pub struct CmdlineSession { @@ -141,10 +71,12 @@ impl IntegrationSession for CmdlineSession { fn as_any_mut(&mut self) -> &mut dyn Any { self } + fn is_expired(&self) -> bool { false } + fn try_stop(&mut self) -> Box + Send + '_> { Box::new(async { - info!("SERVICE STOP workdir {}:\n{:?}", self.cmdline_workdir, self.cmdline_string); + tracing::info!("SERVICE STOP workdir {}:\n{:?}", self.cmdline_workdir, self.cmdline_string); let t0 = tokio::time::Instant::now(); match Box::into_pin(self.cmdline_process.kill()).await { Ok(_) => { @@ -159,91 +91,6 @@ impl IntegrationSession for CmdlineSession { } } -fn _replace_args(x: &str, args_str: &HashMap) -> String { - let mut result = x.to_string(); - for (key, value) in args_str { - result = result.replace(&format!("%{}%", key), value); - } - result -} - -fn format_output(stdout_out: &str, stderr_out: &str) -> String { - let mut out = String::new(); - if !stdout_out.is_empty() && stderr_out.is_empty() { - // special case: just clean output, nice - out.push_str(&format!("{}\n", stdout_out)); - } else { - if !stdout_out.is_empty() { - out.push_str(&format!("STDOUT\n```\n{}```\n\n", stdout_out)); - } - if !stderr_out.is_empty() { - out.push_str(&format!("STDERR\n```\n{}```\n\n", stderr_out)); - } - } - out -} - -async fn 
create_command_from_string( - cmd_string: &str, - command_workdir: &String, -) -> Result { - let command_args = shell_words::split(cmd_string) - .map_err(|e| format!("Failed to parse command: {}", e))?; - if command_args.is_empty() { - return Err("Command is empty after parsing".to_string()); - } - let mut cmd = Command::new(&command_args[0]); - if command_args.len() > 1 { - cmd.args(&command_args[1..]); - } - cmd.current_dir(command_workdir); - Ok(cmd) -} - -async fn execute_blocking_command( - command: &str, - cfg: &CmdlineToolConfig, - command_workdir: &String, -) -> Result { - info!("EXEC workdir {}:\n{:?}", command_workdir, command); - let command_future = async { - let mut cmd = create_command_from_string(command, command_workdir).await?; - let t0 = tokio::time::Instant::now(); - let result = cmd - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .output() - .await; - let duration = t0.elapsed(); - info!("EXEC: /finished in {:?}", duration); - - let output = match result { - Ok(output) => output, - Err(e) => { - let msg = format!("cannot run command: '{}'. workdir: '{}'. Error: {}", &command, command_workdir, e); - tracing::error!("{msg}"); - return Err(msg); - } - }; - - let stdout = output_mini_postprocessing(&cfg.output_filter, &String::from_utf8_lossy(&output.stdout).to_string()); - let stderr = output_mini_postprocessing(&cfg.output_filter, &String::from_utf8_lossy(&output.stderr).to_string()); - - let mut out = format_output(&stdout, &stderr); - let exit_code = output.status.code().unwrap_or_default(); - out.push_str(&format!("command was running {:.3}s, finished with exit code {exit_code}\n", duration.as_secs_f64())); - Ok(out) - }; - - let timeout_duration = tokio::time::Duration::from_secs(cfg.timeout); - let result = tokio::time::timeout(timeout_duration, command_future).await; - - match result { - Ok(res) => res, - Err(_) => Err(format!("command timed out after {:?}", timeout_duration)), - } -} - async fn get_stdout_and_stderr( timeout_ms: u64, stdout: &mut BufReader, @@ -303,10 +150,10 @@ async fn execute_background_command( )); } } - info!("SERVICE START workdir {}:\n{:?}", cmdline_workdir, command_str); + tracing::info!("SERVICE START workdir {}:\n{:?}", cmdline_workdir, command_str); actions_log.push_str(&format!("Starting service with the following command line:\n{}\n", command_str)); - let mut command = create_command_from_string(&command_str, cmdline_workdir).await?; + let mut command = _create_command_from_string(&command_str, cmdline_workdir)?; command.stdout(Stdio::piped()); command.stderr(Stdio::piped()); let mut command_wrap = TokioCommandWrap::from(command); @@ -387,14 +234,14 @@ async fn execute_background_command( gcx.write().await.integration_sessions.insert(session_key.to_string(), Arc::new(AMutex::new(session))); } - info!("SERVICE START LOG:\n{}", actions_log); + tracing::info!("SERVICE START LOG:\n{}", actions_log); } Ok(actions_log) } #[async_trait] -impl Tool for ToolCmdline { +impl Tool for ToolService { fn as_any(&self) -> &dyn std::any::Any { self } async fn tool_execute( @@ -403,8 +250,6 @@ impl Tool for ToolCmdline { tool_call_id: &String, args: &HashMap, ) -> Result<(bool, Vec), String> { - let gcx = ccx.lock().await.global_context.clone(); - let mut args_str: HashMap = HashMap::new(); let valid_params: Vec = self.cfg.parameters.iter().map(|p| p.name.clone()).collect(); @@ -424,10 +269,11 @@ impl Tool for ToolCmdline { } } - let command = _replace_args(self.cfg.command.as_str(), &args_str); - let workdir = 
_replace_args(self.cfg.command_workdir.as_str(), &args_str);
+        let command = replace_args(self.cfg.command.as_str(), &args_str);
+        let workdir = replace_args(self.cfg.command_workdir.as_str(), &args_str);
 
-        let tool_ouput = if self.is_service {
+        let tool_ouput = {
+            let gcx = ccx.lock().await.global_context.clone();
             let action = args_str.get("action").cloned().unwrap_or("start".to_string());
             if !["start", "restart", "stop", "status"].contains(&action.as_str()) {
                 return Err("Tool call is invalid. Param 'action' must be one of 'start', 'restart', 'stop', 'status'. Try again".to_string());
             }
             execute_background_command(
                 gcx, &self.name, &command, &workdir, &self.cfg, action.as_str()
             ).await?
-
-        } else {
-            execute_blocking_command(&command, &self.cfg, &workdir).await?
         };
 
         let result = vec![ContextEnum::ChatMessage(ChatMessage {
@@ -470,7 +313,7 @@ impl Tool for ToolCmdline {
     }
 }
 
-pub const CMDLINE_INTEGRATION_SCHEMA: &str = r#"
+pub const CMDLINE_SERVICE_INTEGRATION_SCHEMA: &str = r#"
 fields:
   command:
     f_type: string_long
@@ -487,18 +330,42 @@ fields:
   parameters:
     f_type: "tool_parameters"
     f_desc: "The model will fill in those parameters."
-  timeout:
-    f_type: integer
-    f_desc: "The command must immediately return the results, it can't be interactive. If the command runs for too long, it will be terminated and stderr/stdout collected will be presented to the model."
+  startup_wait_port:
+    f_type: string_short
+    f_desc: "Wait for TCP to become occupied during startup."
+    f_placeholder: "8080"
+  startup_wait:
+    f_type: string_short
+    f_desc: "Max time to wait for service to start."
     f_default: 10
-  output_filter:
-    f_type: "output_filter"
-    f_desc: "The output from the command can be long or even quasi-infinite. This section allows to set limits, prioritize top or bottom, or use regexp to show the model the relevant part."
-    f_placeholder: "filter"
+  startup_wait_keyword:
+    f_type: string
+    f_desc: "Wait until a keyword appears in stdout or stderr at startup."
+    f_placeholder: "Ready"
+  description: |
+    As opposed to a one-shot command line tool, this runs your command in the background, for example a web server.
+    You can give the model instructions on why to call it and which parameters to provide; the model can then start, stop, restart the service or check its status. If you need a command that returns its output immediately, use cmdline_* instead.
 available:
   on_your_laptop_possible: true
   when_isolated_possible: true
 "#;
+
+
+// #[serde(default)]
+// pub startup_wait_port: Option<u16>,
+// #[serde(default = "_default_startup_wait")]
+// pub startup_wait: u64,
+// #[serde(default)]
+// pub startup_wait_keyword: Option<String>,
+
+// timeout:
+//   f_type: integer
+//   f_desc: "The command must immediately return the results, it can't be interactive. If the command runs for too long, it will be terminated and stderr/stdout collected will be presented to the model."
+//   f_default: 10
+// output_filter:
+//   f_type: "output_filter"
+//   f_desc: "The output from the command can be long or even quasi-infinite. This section allows to set limits, prioritize top or bottom, or use regexp to show the model the relevant part."
+// f_placeholder: "filter" diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index 15d18b9ff..fe8ff5fa7 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -15,6 +15,7 @@ pub mod integr_abstract; // pub mod integr_chrome; pub mod integr_postgres; pub mod integr_cmdline; +pub mod integr_cmdline_service; pub mod process_io_utils; pub mod docker; @@ -41,9 +42,8 @@ pub fn integration_from_name(n: &str) -> Result) }, service if service.starts_with("service_") => { - let tool_name = service.strip_prefix("service_").unwrap(); - tracing::info!("todo finish me service tool_name={}", tool_name); - Err("todo finish me".to_string()) + // let tool_name = service.strip_prefix("service_").unwrap(); + Ok(Box::new(integr_cmdline_service::ToolService {..Default::default()}) as Box) }, _ => Err(format!("Unknown integration name: {}", n)), } @@ -69,6 +69,7 @@ pub fn integrations_list() -> Vec<&'static str> { // "pdb", "postgres", "cmdline_TEMPLATE", + "service_TEMPLATE", // "chrome", "docker" ] From 4e483bdda4885b71021593dee97c62e8ec2682a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Fri, 6 Dec 2024 04:01:51 +0100 Subject: [PATCH 063/185] Chrome in docker (#462) * feat: integr chrome works with new config and in docker isolation * fix: repair chrome tool --------- Co-authored-by: Oleg Klimov --- src/integrations/integr_chrome.rs | 162 ++++++++++++++---------------- src/integrations/mod.rs | 6 +- 2 files changed, 80 insertions(+), 88 deletions(-) diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 5e1c57965..9e6cec64e 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -1,5 +1,5 @@ use std::any::Any; -use std::sync::Arc; +use std::sync::{Arc, Mutex}; use std::collections::HashMap; use std::future::Future; use std::time::Duration; @@ -15,7 +15,7 @@ use crate::call_validation::{ChatContent, ChatMessage}; use crate::scratchpads::multimodality::MultimodalElement; use crate::postprocessing::pp_command_output::{CmdlineOutputFilter, output_mini_postprocessing}; use crate::tools::tools_description::{Tool, ToolDesc, ToolParam}; -use crate::integrations::integr_abstract::Integration; +use crate::integrations::integr_abstract::IntegrationTrait; use tokio::time::sleep; use chrono::DateTime; @@ -35,7 +35,7 @@ use image::{ImageFormat, ImageReader}; #[derive(Clone, Serialize, Deserialize, Debug, Default)] -pub struct IntegrationChrome { +pub struct SettingsChrome { pub chrome_path: Option, pub window_width: Option, pub window_height: Option, @@ -48,7 +48,7 @@ fn default_headless() -> bool { true } #[derive(Debug, Default)] pub struct ToolChrome { - pub integration_chrome: IntegrationChrome, + pub settings_chrome: SettingsChrome, pub supports_clicks: bool, } @@ -58,8 +58,8 @@ enum DeviceType { MOBILE, } -impl fmt::Display for DeviceType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { +impl std::fmt::Display for DeviceType { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { DeviceType::DESKTOP => write!(f, "desktop"), DeviceType::MOBILE => write!(f, "mobile"), @@ -122,35 +122,33 @@ impl IntegrationSession for ChromeSession } } -impl Integration for ToolChrome { +impl IntegrationTrait for ToolChrome { fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { - let integration_github = serde_json::from_value::(value.clone()) - .map_err(|e|e.to_string())?; - self.integration_chrome = integration_github; + match 
serde_json::from_value::(value.clone()) { + Ok(settings_chrome) => self.settings_chrome = settings_chrome, + Err(e) => { + tracing::error!("Failed to apply settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } Ok(()) } - fn integr_yaml2json(&self, value: &serde_yaml::Value) -> Result { - let integration_github = serde_yaml::from_value::(value.clone()).map_err(|e| { - let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); - format!("{}{}", e.to_string(), location) - })?; - serde_json::to_value(&integration_github).map_err(|e| e.to_string()) + fn integr_settings_as_json(&self) -> Value { + serde_json::to_value(&self.settings_chrome).unwrap() } - fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box { + fn integr_upgrade_to_tool(&self, _integr_name: &String) -> Box { Box::new(ToolChrome { - integration_chrome: self.integration_chrome.clone(), - supports_clicks: false} - ) as Box + settings_chrome: self.settings_chrome.clone(), + supports_clicks: false, + }) as Box } - fn integr_settings_as_json(&self) -> Result { - serde_json::to_value(&self.integration_chrome).map_err(|e| e.to_string()) + fn integr_schema(&self) -> &str + { + CHROME_INTEGRATION_SCHEMA } - - fn integr_settings_default(&self) -> String { DEFAULT_CHROME_INTEGRATION_YAML.to_string() } - fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/732/732205.png".to_string() } } #[async_trait] @@ -175,7 +173,7 @@ impl Tool for ToolChrome { }; let session_hashmap_key = get_session_hashmap_key("chrome", &chat_id); - let mut tool_log = setup_chrome_session(gcx.clone(), &self.integration_chrome, &session_hashmap_key).await?; + let mut tool_log = setup_chrome_session(gcx.clone(), &self.settings_chrome, &session_hashmap_key).await?; let command_session = { let gcx_locked = gcx.read().await; @@ -259,7 +257,7 @@ impl Tool for ToolChrome { async fn setup_chrome_session( gcx: Arc>, - args: &IntegrationChrome, + args: &SettingsChrome, session_hashmap_key: &String, ) -> Result, String> { let mut setup_log = vec![]; @@ -693,8 +691,8 @@ enum Key { END, } -impl fmt::Display for Key { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { +impl std::fmt::Display for Key { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Key::ENTER => write!(f, "Enter"), Key::ESC => write!(f, "Escape"), @@ -840,60 +838,54 @@ fn parse_single_command(command: &String) -> Result { _ => Err(format!("Unknown command: {:?}.", command_name)), } } -<<<<<<< HEAD - -async fn interact_with_chrome( - gcx: Arc>, - chat_id: &String, - integration_chrome: &IntegrationChrome, - command: &Command, -) -> Result, String> { - let session_hashmap_key = get_session_hashmap_key("chrome", &chat_id); - let setup_log = setup_chrome_session(gcx.clone(), &integration_chrome, &session_hashmap_key).await?; - - let command_session = { - let gcx_locked = gcx.read().await; - gcx_locked.integration_sessions.get(&session_hashmap_key) - .ok_or(format!("Error getting chrome session for chat: {}", chat_id))? 
- .clone() - }; - let mut command_session_locked = command_session.lock().await; - let chrome_session = command_session_locked.as_any_mut().downcast_mut::().ok_or("Failed to downcast to ChromeSession")?; - - let (execute_log, mut multimodal_els) = command.execute(chrome_session).await?; - - let tool_log = setup_log.iter().chain(execute_log.iter()).map(|s| s.clone()).collect::>(); - multimodal_els.push(MultimodalElement::new( - "text".to_string(), tool_log.join("\n") - )?); - - Ok(multimodal_els) -} - -async fn screenshot_jpeg_base64(tab: &Arc, capture_beyond_viewport: bool) -> Result { - let jpeg_data = tab.call_method(Page::CaptureScreenshot { - format: Some(Page::CaptureScreenshotFormatOption::Jpeg), - clip: None, - quality: Some(75), - from_surface: Some(true), - capture_beyond_viewport: Some(capture_beyond_viewport), - }).map_err(|e| e.to_string())?.data; - - MultimodalElement::new("image/jpeg".to_string(), jpeg_data) -} -const DEFAULT_CHROME_INTEGRATION_YAML: &str = r#" -# Chrome integration - -# This can be path to your chrome binary. You can install with "npx @puppeteer/browsers install chrome@stable", read -# more here https://developer.chrome.com/blog/chrome-for-testing/?utm_source=Fibery&utm_medium=iframely -#chrome_path: "/Users/me/my_path/chrome/mac_arm-130.0.6723.69/chrome-mac-arm64/Google Chrome for Testing.app/Contents/MacOS/Google Chrome for Testing" -# Or you can give it ws:// path, read more here https://developer.chrome.com/docs/devtools/remote-debugging/local-server/ -# In that case start chrome with --remote-debugging-port -# chrome_path: "ws://127.0.0.1:6006/" -# window_width: 1024 -# window_height: 768 -# idle_browser_timeout: 600 -"#; -======= ->>>>>>> main +const CHROME_INTEGRATION_SCHEMA: &str = r#" +fields: + chrome_path: + f_type: string_long + f_desc: "Path to your chrome binary. You can install with \"npx @puppeteer/browsers install chrome@stable\", read more here https://developer.chrome.com/blog/chrome-for-testing. You can also give it ws:// path, in that case start chrome with --remote-debugging-port, read more here https://developer.chrome.com/docs/devtools/remote-debugging/local-server" + f_placeholder: "/Users/me/my_path/chrome/mac_arm-130.0.6723.69/chrome-mac-arm64/Google Chrome for Testing.app/Contents/MacOS/Google Chrome for Testing or ws://127.0.0.1:4444/" + window_width: + f_type: string_short + f_desc: "Width of the browser window." + f_default: "1024" + window_height: + f_type: string_short + f_desc: "Height of the browser window." + f_default: "768" + idle_browser_timeout: + f_type: string_short + f_desc: "Idle timeout for the browser in seconds." + f_default: "600" + headless: + f_type: bool + f_desc: "Run Chrome in headless mode." + f_default: "true" +available: + on_your_laptop_possible: true + when_isolated_possible: true +smartlinks: + - sl_label: "Test" + sl_chat: + - role: "user" + content: | + šŸ”§ The chrome tool should be visible now. To test the tool, navigate to a website, take a screenshot, and express satisfaction and relief if it works. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. The current config file is %CURRENT_CONFIG%. 
+docker: + filter_label: "" + filter_image: "standalone-chrome" + new_container_default: + image: "selenium/standalone-chrome:latest" + environment: {} + smartlinks: + - sl_label: "Add Chrome Container" + sl_chat: + - role: "user" + content: | + šŸ”§ Your job is to create a chrome container, using the image and environment from new_container_default section in the current config file: %CURRENT_CONFIG%. Follow the system prompt. + smartlinks_for_each_container: + - sl_label: "Use for integration" + sl_chat: + - role: "user" + content: | + šŸ”§ Your job is to modify chrome config in the current file to connect through websockets to the container, use docker tool to inspect the container if needed. Current config file: %CURRENT_CONFIG%. +"#; \ No newline at end of file diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index fe8ff5fa7..c180dae36 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -12,7 +12,7 @@ pub mod integr_abstract; // pub mod integr_github; // pub mod integr_gitlab; // pub mod integr_pdb; -// pub mod integr_chrome; +pub mod integr_chrome; pub mod integr_postgres; pub mod integr_cmdline; pub mod integr_cmdline_service; @@ -35,7 +35,7 @@ pub fn integration_from_name(n: &str) -> Result Ok(Box::new(ToolGitlab { ..Default::default() }) as Box), // "pdb" => Ok(Box::new(ToolPdb { ..Default::default() }) as Box), "postgres" => Ok(Box::new(integr_postgres::ToolPostgres { ..Default::default() }) as Box), - // "chrome" => Ok(Box::new(ToolChrome { ..Default::default() }) as Box), + "chrome" => Ok(Box::new(integr_chrome::ToolChrome { ..Default::default() }) as Box), "docker" => Ok(Box::new(docker::integr_docker::ToolDocker {..Default::default() }) as Box), cmdline if cmdline.starts_with("cmdline_") => { // let tool_name = cmdline.strip_prefix("cmdline_").unwrap(); @@ -68,9 +68,9 @@ pub fn integrations_list() -> Vec<&'static str> { // "gitlab", // "pdb", "postgres", + "chrome", "cmdline_TEMPLATE", "service_TEMPLATE", - // "chrome", "docker" ] } From 8fc7d4a6fb08799fa4ceff2ca1a71227df37dce9 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Fri, 6 Dec 2024 05:55:23 +0100 Subject: [PATCH 064/185] chrome links --- src/integrations/integr_chrome.rs | 67 ++++++++++++++++++------------- 1 file changed, 40 insertions(+), 27 deletions(-) diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 9e6cec64e..a254dd975 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -36,16 +36,17 @@ use image::{ImageFormat, ImageReader}; #[derive(Clone, Serialize, Deserialize, Debug, Default)] pub struct SettingsChrome { - pub chrome_path: Option, - pub window_width: Option, - pub window_height: Option, - pub idle_browser_timeout: Option, - #[serde(default = "default_headless")] - pub headless: bool, + pub chrome_path: String, + #[serde(default )] + pub window_width: String, + #[serde(default)] + pub window_height: String, + #[serde(default)] + pub idle_browser_timeout: String, + #[serde(default)] + pub headless: String, } -fn default_headless() -> bool { true } - #[derive(Debug, Default)] pub struct ToolChrome { pub settings_chrome: SettingsChrome, @@ -278,37 +279,35 @@ async fn setup_chrome_session( } } - let window_size = if args.window_width.is_some() && args.window_height.is_some() { - Some((args.window_width.unwrap(), args.window_height.unwrap())) - } else if args.window_width.is_some() { - Some((args.window_width.unwrap(), args.window_width.unwrap())) - } else { - None + let window_size = match 
(args.window_width.parse::(), args.window_height.parse::()) { + (Ok(width), Ok(height)) => Some((width, height)), + _ => None, }; let idle_browser_timeout = args.idle_browser_timeout - .map(|timeout| Duration::from_secs(timeout as u64)) + .parse::() + .map(Duration::from_secs) .unwrap_or(Duration::from_secs(600)); - let browser = if args.chrome_path.clone().unwrap_or_default().starts_with("ws://") { - let debug_ws_url: String = args.chrome_path.clone().unwrap(); + let browser = if args.chrome_path.clone().starts_with("ws://") { + let debug_ws_url: String = args.chrome_path.clone(); setup_log.push("Connect to existing web socket.".to_string()); Browser::connect_with_timeout(debug_ws_url, idle_browser_timeout).map_err(|e| e.to_string()) } else { - let path = args.chrome_path.clone().map(PathBuf::from); + let path = PathBuf::from(args.chrome_path.clone()); let launch_options = LaunchOptions { - path, + path: Some(path), window_size, idle_browser_timeout, - headless: args.headless, + headless: args.headless.parse::().unwrap_or(true), ..Default::default() }; - setup_log.push("Start new chrome process.".to_string()); + setup_log.push("Started new chrome process.".to_string()); Browser::new(launch_options).map_err(|e| e.to_string()) }?; // NOTE: we're not register any tabs because they can be used by another chat - setup_log.push("No opened tabs.".to_string()); + setup_log.push("No opened tabs at this moment.".to_string()); let command_session: Box = Box::new(ChromeSession { browser, tabs: HashMap::new() }); gcx.write().await.integration_sessions.insert( @@ -406,7 +405,7 @@ async fn session_open_tab( } })).map_err(|e| e.to_string())?; chrome_session.tabs.insert(tab_id.clone(), tab.clone()); - Ok(format!("opened a new tab: {}\n", tab_lock.state_string())) + Ok(format!("Opened a new tab: {}\n", tab_lock.state_string())) } } } @@ -485,11 +484,11 @@ async fn chrome_command_exec( Ok(multimodal_el) => { multimodal_els.push(multimodal_el); let tab_lock = tab.lock().await; - format!("made a screenshot of {}", tab_lock.state_string()) + format!("Made a screenshot of {}", tab_lock.state_string()) }, Err(e) => { let tab_lock = tab.lock().await; - format!("screenshot failed for {}: {}", tab_lock.state_string(), e.to_string()) + format!("Screenshot failed for {}: {}", tab_lock.state_string(), e.to_string()) }, } }; @@ -849,18 +848,22 @@ fields: f_type: string_short f_desc: "Width of the browser window." f_default: "1024" + f_extra: true window_height: f_type: string_short f_desc: "Height of the browser window." f_default: "768" + f_extra: true idle_browser_timeout: f_type: string_short f_desc: "Idle timeout for the browser in seconds." f_default: "600" + f_extra: true headless: - f_type: bool + f_type: string_short f_desc: "Run Chrome in headless mode." f_default: "true" + f_extra: true available: on_your_laptop_possible: true when_isolated_possible: true @@ -869,7 +872,17 @@ smartlinks: sl_chat: - role: "user" content: | - šŸ”§ The chrome tool should be visible now. To test the tool, navigate to a website, take a screenshot, and express satisfaction and relief if it works. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. The current config file is %CURRENT_CONFIG%. + šŸ”§ The chrome tool should be visible now. To test the tool, navigate to a website, take a screenshot, and express happiness if it works. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. The current config file is %CURRENT_CONFIG%. 
+ - sl_label: "Help me install Chrome for Testing" + sl_chat: + - role: "user" + content: | + šŸ”§ Help user to install Chrome for Testing using npm, once that done rewrite the current config file %CURRENT_CONFIG% to use it. + - sl_label: "Help me connect regular Chrome via ws:// protocol" + sl_chat: + - role: "user" + content: | + šŸ”§ Help user to connect regular Chrome via ws:// protocol, rewrite the current config file %CURRENT_CONFIG% to use it. The `chrome_path` accepts the "ws://..." notation. docker: filter_label: "" filter_image: "standalone-chrome" From 70941b642862aaff38f6891dae9794f1ae360cce Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Fri, 6 Dec 2024 05:55:51 +0100 Subject: [PATCH 065/185] don't stream system prompt if there are a lot of messages already --- src/scratchpads/chat_passthrough.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/scratchpads/chat_passthrough.rs b/src/scratchpads/chat_passthrough.rs index 02eed8b20..5fbcd0961 100644 --- a/src/scratchpads/chat_passthrough.rs +++ b/src/scratchpads/chat_passthrough.rs @@ -214,7 +214,7 @@ impl ScratchpadAbstract for ChatPassthrough { let mut deterministic: Vec = vec![]; let have_system_prompt_in_post = !self.post.messages.is_empty() && self.post.messages[0].get("role") == Some(&serde_json::Value::String("system".to_string())); let have_system_prompt_in_messages = !self.messages.is_empty() && self.messages[0].role == "system"; - if !have_system_prompt_in_post && have_system_prompt_in_messages { + if !have_system_prompt_in_post && have_system_prompt_in_messages && self.post.messages.len() == 1 { // only the user message present in request self.has_rag_results.in_json.insert(0, json!(self.messages[0])); } deterministic.extend(self.has_rag_results.response_streaming()?); From a4995fe4365adaa508ff0e358f3f199ea48527b0 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Fri, 6 Dec 2024 09:13:12 +0100 Subject: [PATCH 066/185] knowledge(), agent system prompt, debugging session --- src/caps.rs | 39 +---- src/http/routers/v1/handlers_memdb.rs | 5 +- src/http/routers/v1/vecdb.rs | 21 +-- src/integrations/integr_postgres.rs | 16 +-- src/tools/tool_knowledge.rs | 133 +++--------------- src/tools/tools_description.rs | 11 +- src/vecdb/vdb_highlev.rs | 19 ++- src/yaml_configs/customization_compiled_in.rs | 11 +- 8 files changed, 77 insertions(+), 178 deletions(-) diff --git a/src/caps.rs b/src/caps.rs index 733446494..98b2bb60b 100644 --- a/src/caps.rs +++ b/src/caps.rs @@ -266,47 +266,20 @@ pub async fn get_api_key( #[allow(dead_code)] async fn get_custom_chat_api_key(gcx: Arc>) -> Result { - let caps = try_load_caps_quickly_if_not_present( - gcx.clone(), 0, - ).await; - - if let Err(err) = caps { - return Err(err); - } - let caps = caps?; - - let api_key = get_api_key_macro!(gcx, caps, chat_apikey); - Ok(api_key) + let caps = try_load_caps_quickly_if_not_present(gcx.clone(), 0).await?; + Ok(get_api_key_macro!(gcx, caps, chat_apikey)) } #[cfg(feature="vecdb")] pub async fn get_custom_embedding_api_key(gcx: Arc>) -> Result { - let caps = try_load_caps_quickly_if_not_present( - gcx.clone(), 0, - ).await; - - if let Err(err) = caps { - return Err(err); - } - let caps = caps?; - - let api_key = get_api_key_macro!(gcx, caps, embedding_apikey); - Ok(api_key) + let caps = try_load_caps_quickly_if_not_present(gcx.clone(), 0).await?; + Ok(get_api_key_macro!(gcx, caps, embedding_apikey)) } #[allow(dead_code)] async fn get_custom_completion_api_key(gcx: Arc>) -> Result { - let caps = try_load_caps_quickly_if_not_present( - 
gcx.clone(), 0, - ).await; - - if let Err(err) = caps { - return Err(err); - } - let caps = caps?; - - let api_key = get_api_key_macro!(gcx, caps, completion_apikey); - Ok(api_key) + let caps = try_load_caps_quickly_if_not_present(gcx.clone(), 0).await?; + Ok(get_api_key_macro!(gcx, caps, completion_apikey)) } diff --git a/src/http/routers/v1/handlers_memdb.rs b/src/http/routers/v1/handlers_memdb.rs index a89b44c2e..a3df4d648 100644 --- a/src/http/routers/v1/handlers_memdb.rs +++ b/src/http/routers/v1/handlers_memdb.rs @@ -145,11 +145,10 @@ pub async fn handle_mem_query( ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) })?; - let vec_db = gcx.read().await.vec_db.clone(); let memories = crate::vecdb::vdb_highlev::memories_search( - vec_db, + gcx.clone(), &post.goal, - post.top_n + post.top_n, ).await.map_err(|e| { ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("{e}")) })?; diff --git a/src/http/routers/v1/vecdb.rs b/src/http/routers/v1/vecdb.rs index 81c3c44b2..9bced7ed4 100644 --- a/src/http/routers/v1/vecdb.rs +++ b/src/http/routers/v1/vecdb.rs @@ -1,11 +1,14 @@ -use crate::caps::get_custom_embedding_api_key; -use crate::custom_error::ScratchError; -use crate::global_context::SharedGlobalContext; -use crate::vecdb::vdb_structs::VecdbSearch; use axum::response::Result; use axum::Extension; use hyper::{Body, Response, StatusCode}; use serde::{Deserialize, Serialize}; + +use crate::caps::get_custom_embedding_api_key; +use crate::custom_error::ScratchError; +use crate::global_context::SharedGlobalContext; +use crate::vecdb::vdb_structs::VecdbSearch; + + #[derive(Serialize, Deserialize, Clone)] struct VecDBPost { query: String, @@ -16,15 +19,15 @@ const NO_VECDB: &str = "Vector db is not running, check if you have --vecdb para pub async fn handle_v1_vecdb_search( - Extension(global_context): Extension, + Extension(gcx): Extension, body_bytes: hyper::body::Bytes, ) -> Result, ScratchError> { let post = serde_json::from_slice::(&body_bytes).map_err(|e| { ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) })?; - let api_key = get_custom_embedding_api_key(global_context.clone()).await?; - let cx_locked = global_context.read().await; + let api_key = get_custom_embedding_api_key(gcx.clone()).await?; + let cx_locked = gcx.read().await; let search_res = match *cx_locked.vec_db.lock().await { Some(ref db) => db.vecdb_search(post.query.to_string(), post.top_n, None, &api_key).await, @@ -53,10 +56,10 @@ pub async fn handle_v1_vecdb_search( pub async fn handle_v1_vecdb_status( - Extension(global_context): Extension, + Extension(gcx): Extension, _: hyper::body::Bytes, ) -> Result, ScratchError> { - let vec_db = global_context.read().await.vec_db.clone(); + let vec_db = gcx.read().await.vec_db.clone(); let status_str = match crate::vecdb::vdb_highlev::get_status(vec_db).await { Ok(Some(status)) => serde_json::to_string_pretty(&status).unwrap(), Ok(None) => "{\"success\": 0, \"detail\": \"turned_off\"}".to_string(), diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index 54e2a8c86..c74f5cada 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -218,7 +218,14 @@ smartlinks: - role: "user" content: | šŸ”§ The postgres tool should be visible now. To test the tool, list the tables available, briefly describe the tables and express - satisfaction and relief if it works, and change nothing. 
If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. + happiness, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. + The current config file is %CURRENT_CONFIG%. + - sl_label: "Look at the project, fill in automatically" + sl_chat: + - role: "user" + content: | + šŸ”§ Your goal is to set up postgres client. Look at the project, especially files like "docker-compose.yaml" or ".env". Call tree() to see what files the project has. + After that is completed, go through the usual plan in the system prompt. The current config file is %CURRENT_CONFIG%. docker: filter_label: "" @@ -242,10 +249,3 @@ docker: content: | šŸ”§ Your job is to modify postgres connection config in the current file to match the variables from the container, use docker tool to inspect the container if needed. Current config file: %CURRENT_CONFIG%. "#; - - -// available: -// on_your_laptop: -// possible: true -// when_isolated: -// possible: true diff --git a/src/tools/tool_knowledge.rs b/src/tools/tool_knowledge.rs index e71f7e7c3..316f18f98 100644 --- a/src/tools/tool_knowledge.rs +++ b/src/tools/tool_knowledge.rs @@ -2,7 +2,6 @@ use std::sync::Arc; use std::collections::{HashMap, HashSet}; use serde_json::Value; use tracing::info; -// use indexmap::IndexMap; use tokio::sync::Mutex as AMutex; use async_trait::async_trait; @@ -10,7 +9,6 @@ use crate::at_commands::at_commands::AtCommandsContext; use crate::tools::tools_description::Tool; use crate::call_validation::{ChatMessage, ChatContent, ContextEnum}; use crate::vecdb::vdb_highlev::memories_search; -// use crate::vecdb::vdb_highlev::ongoing_find; pub struct ToolGetKnowledge; @@ -43,32 +41,36 @@ impl Tool for ToolGetKnowledge { Some(v) => { return Err(format!("argument `im_going_to_apply_to` is not a string: {:?}", v)) }, None => { return Err("argument `im_going_to_apply_to` is missing".to_string()) } }; + let goal = match args.get("goal") { + Some(Value::String(s)) => s.clone(), + Some(v) => { return Err(format!("argument `goal` is not a string: {:?}", v)) }, + None => { return Err("argument `goal` is missing".to_string()) } + }; + let language_slash_framework = match args.get("language_slash_framework") { + Some(Value::String(s)) => s.clone(), + Some(v) => { return Err(format!("argument `language_slash_framework` is not a string: {:?}", v)) }, + None => { return Err("argument `language_slash_framework` is missing".to_string()) } + }; - let vec_db = gcx.read().await.vec_db.clone(); let mem_top_n = 3; - let memories1: crate::vecdb::vdb_structs::MemoSearchResult = memories_search(vec_db.clone(), &im_going_to_use_tools, mem_top_n).await?; - let memories2: crate::vecdb::vdb_structs::MemoSearchResult = memories_search(vec_db.clone(), &im_going_to_apply_to, mem_top_n).await?; - let combined_memories = [memories1.results, memories2.results].concat(); + let memories1: crate::vecdb::vdb_structs::MemoSearchResult = memories_search(gcx.clone(), &im_going_to_use_tools, mem_top_n).await?; + let memories2: crate::vecdb::vdb_structs::MemoSearchResult = memories_search(gcx.clone(), &im_going_to_apply_to, mem_top_n).await?; + let memories3: crate::vecdb::vdb_structs::MemoSearchResult = memories_search(gcx.clone(), &goal, mem_top_n).await?; + let memories4: crate::vecdb::vdb_structs::MemoSearchResult = memories_search(gcx.clone(), &language_slash_framework, mem_top_n).await?; + let combined_memories = [memories1.results, memories2.results, memories3.results, 
memories4.results].concat(); let mut seen_memids = HashSet::new(); let unique_memories: Vec<_> = combined_memories.into_iter() .filter(|m| seen_memids.insert(m.memid.clone())) .collect(); - // TODO: verify it's valid json in payload when accepting the mem into db - - let memories_json = unique_memories.iter().map(|m| { - let payload: serde_json::Value = serde_json::from_str(&m.m_payload).unwrap_or(Value::Object(serde_json::Map::new())); - assert!(payload.is_object(), "Payload is not a dictionary"); - let mut combined = serde_json::Map::new(); - combined.insert("memid".to_string(), Value::String(m.memid.clone())); - combined.extend(payload.as_object().unwrap().clone()); - Value::Object(combined) - }).collect::>(); - - let mut memories_str = serde_json::to_string_pretty(&memories_json).unwrap(); - memories_str.push_str(format!( - "\n\nšŸ’æ Look at relevant successful trajectories, you can recognize them by looking at \"outcome\" especially look at \"SUCCESS\" and \"THUMBS_UP\". Write your own short plan for your next steps that is informed by previous successes." - ).as_str()); + let memories_str = unique_memories.iter().map(|m| { + let payload: String = m.m_payload.clone(); + let mut combined = String::new(); + combined.push_str(&format!("šŸ—ƒļø{}\n", m.memid)); + combined.push_str(&payload); + combined.push_str("\n\n"); + combined + }).collect::(); let mut results = vec![]; results.push(ContextEnum::ChatMessage(ChatMessage { @@ -79,42 +81,6 @@ impl Tool for ToolGetKnowledge { ..Default::default() })); - // let ongoing_maybe: Option = ongoing_find(vec_db.clone(), im_going_to_do.clone()).await?; - // if let Some(ongoing) = ongoing_maybe { - // let mut toplevel = IndexMap::new(); - // toplevel.insert("PROGRESS".to_string(), serde_json::Value::Object(ongoing.ongoing_progress.into_iter().collect())); - // let action_sequences: Vec = ongoing.ongoing_action_sequences - // .into_iter() - // .map(|map| serde_json::Value::Object(map.into_iter().collect())) - // .collect(); - // toplevel.insert("TRIED_ACTION_SEQUENCES".to_string(), serde_json::Value::Array(action_sequences)); - // let output_value: serde_json::Value = indexmap_to_json_value( - // ongoing.ongoing_output - // .into_iter() - // .map(|(k, v)| (k, indexmap_to_json_value(v))) - // .collect() - // ); - // toplevel.insert("OUTPUT".to_string(), output_value); - // results.push(ContextEnum::ChatMessage(ChatMessage { - // role: "user".to_string(), - // content: format!("šŸ’æ An ongoing session with this goal is found, it's your attempt {}. Here is the summary of your progress. Read it and follow the system prompt, especially pay attention to strategy choice:\n\n{}", - // ongoing.ongoing_attempt_n + 1, - // serde_json::to_string_pretty(&toplevel).unwrap() - // ), - // tool_calls: None, - // tool_call_id: String::new(), - // ..Default::default() - // })); - // } else { - // results.push(ContextEnum::ChatMessage(ChatMessage { - // role: "user".to_string(), - // content: format!("šŸ’æ There is no ongoing session with this goal. 
A new empty ongoing session is created, this is your attempt 1."), - // tool_calls: None, - // tool_call_id: String::new(), - // ..Default::default() - // })); - // } - Ok((false, results)) } @@ -122,56 +88,3 @@ impl Tool for ToolGetKnowledge { vec!["vecdb".to_string()] } } - -// fn indexmap_to_json_value(map: IndexMap) -> Value { -// Value::Object(serde_json::Map::from_iter( -// map.into_iter().map(|(k, v)| { -// (k, match v { -// Value::Object(o) => indexmap_to_json_value(IndexMap::from_iter(o)), -// _ => v, -// }) -// }) -// )) -// } - - -// pub struct ToolSaveKnowledge; -// #[async_trait] -// impl Tool for ToolSaveKnowledge { -// async fn execute(&self, ccx: &mut AtCommandsContext, tool_call_id: &String, args: &HashMap) -> Result, String> { -// info!("run @save-knowledge {:?}", args); -// let memory_topic = match args.get("memory_topic") { -// Some(Value::String(s)) => s, -// _ => return Err("argument `memory_topic` is missing or not a string".to_string()), -// }; -// let memory_text = match args.get("memory_text") { -// Some(Value::String(s)) => s, -// _ => return Err("argument `memory_text` is missing or not a string".to_string()), -// }; -// let memory_type = match args.get("memory_type") { -// Some(Value::String(s)) => s, -// _ => return Err("argument `memory_type` is missing or not a string".to_string()), -// }; -// if !["consequence", "reflection", "familiarity", "relationship"].contains(&memory_type.as_str()) { -// return Err(format!("Invalid memory_type: {}. Must be one of: consequence, reflection, familiarity, relationship", memory_type)); -// } -// let memdb = { -// let vec_db = ccx.global_context.read().await.vec_db.clone(); -// let vec_db_guard = vec_db.lock().await; -// let vec_db_ref = vec_db_guard.as_ref().ok_or("vecdb is not available".to_string())?; -// vec_db_ref.memdb.clone() -// }; -// let _memid = memdb.lock().await.permdb_add(memory_type, memory_topic, "current_project", memory_text)?; -// let mut results = vec![]; -// results.push(ContextEnum::ChatMessage(ChatMessage { -// role: "tool".to_string(), -// content: format!("Model will remember it:\n{memory_text}"), -// tool_calls: None, -// tool_call_id: tool_call_id.clone(), -// })); -// Ok(results) -// } -// fn depends_on(&self) -> Vec { -// vec!["vecdb".to_string()] -// } -// } diff --git a/src/tools/tools_description.rs b/src/tools/tools_description.rs index 2c43b6833..230aff330 100644 --- a/src/tools/tools_description.rs +++ b/src/tools/tools_description.rs @@ -287,7 +287,6 @@ tools: - name: "patch" agentic: true - experimental: true description: | Collect context first, then write the necessary changes using the šŸ“-notation before code blocks, then call this function to apply the changes. To make this call correctly, you only need the tickets. @@ -307,7 +306,6 @@ tools: - name: "github" agentic: true - experimental: true description: "Access to gh command line command, to fetch issues, review PRs." parameters: - name: "project_dir" @@ -322,7 +320,6 @@ tools: - name: "gitlab" agentic: true - experimental: true description: "Access to glab command line command, to fetch issues, review PRs." parameters: - name: "project_dir" @@ -337,7 +334,6 @@ tools: - name: "postgres" agentic: true - experimental: true description: "PostgreSQL integration, can run a single query per call." parameters: - name: "query" @@ -368,18 +364,21 @@ tools: parameters: - name: "im_going_to_use_tools" type: "string" - description: "Which tools are you about to use? 
Comma-separated list, examples: hg, git, github, gitlab, rust debugger, patch" + description: "Which tools are you about to use? Comma-separated list, examples: hg, git, gitlab, rust debugger, patch" - name: "im_going_to_apply_to" type: "string" description: "What your actions will be applied to? List all you can identify, starting with the project name. Comma-separated list, examples: project1, file1.cpp, MyClass, PRs, issues" + - name: "goal" + type: "string" + description: "What is your goal here?" - name: "language_slash_framework" type: "string" description: "What programming language and framework is the current project using? Use lowercase, dashes and dots. Examples: python/django, typescript/node.js, rust/tokio, ruby/rails, php/laravel, c++/boost-asio" parameters_required: - "im_going_to_use_tools" - "im_going_to_apply_to" + - "goal" - "language_slash_framework" - "####; diff --git a/src/vecdb/vdb_highlev.rs b/src/vecdb/vdb_highlev.rs index 1db424c41..2650d9781 100644 --- a/src/vecdb/vdb_highlev.rs +++ b/src/vecdb/vdb_highlev.rs @@ -58,14 +58,14 @@ async fn _create_vecdb( ) -> Result<(), String> { info!("vecdb: attempting to launch"); let api_key = get_custom_embedding_api_key(gcx.clone()).await; + if let Err(err) = api_key { + return Err(err.message); + } let (cache_dir, cmdline) = { let gcx_locked = gcx.read().await; (gcx_locked.cache_dir.clone(), gcx_locked.cmdline.clone()) }; - if let Err(err) = api_key { - return Err(err.message); - } let api_key = api_key.unwrap(); let base_dir: PathBuf = match cmdline.vecdb_force_path.as_str() { @@ -418,34 +418,39 @@ pub async fn memories_update( } pub async fn memories_search( - vec_db: Arc>>, + gcx: Arc>, query: &String, top_n: usize, ) -> Result { + let vec_db = gcx.read().await.vec_db.clone(); fn calculate_score(distance: f32, _times_used: i32) -> f32 { distance // distance - (times_used as f32) * 0.01 } let t0 = std::time::Instant::now(); - let (memdb, vecdb_emb_client, constants, cmdline) = { + let (memdb, vecdb_emb_client, constants) = { let vec_db_guard = vec_db.lock().await; let vec_db = vec_db_guard.as_ref().ok_or("VecDb is not initialized")?; ( vec_db.memdb.clone(), vec_db.vecdb_emb_client.clone(), vec_db.constants.clone(), - vec_db.cmdline.clone(), ) }; + let api_key = get_custom_embedding_api_key(gcx.clone()).await; + if let Err(err) = api_key { + return Err(err.message); + } + let embedding = fetch_embedding::get_embedding_with_retry( vecdb_emb_client, &constants.endpoint_embeddings_style, &constants.embedding_model, &constants.endpoint_embeddings_template, vec![query.clone()], - &cmdline.api_key, + &api_key.unwrap(), 5, ).await?; if embedding.is_empty() { diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index 715152fce..dcbb92054 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -83,16 +83,21 @@ PROMPT_EXPLORATION_TOOLS: | PROMPT_AGENTIC_TOOLS: | [mode3] You are Refact Agent, an autonomous bot for coding tasks. - %CD_INSTRUCTIONS% %PROMPT_PINS% %WORKSPACE_INFO% + Good practice using knowledge(): it's the key to successfully completing complex tasks the user might present you with. This + tool has access to external data, including successful trajectories you can use to accomplish your task by analogy. The knowledge() + call should be your first call when you encounter a complex task. All the records from external database start with šŸ—ƒļø and a record + identifier. 
Use good trajectories to your advantage, and help the user better. There might also be instructions on how to deal with certain
+  frameworks and complex systems.
+
   Good practice using problem_statement argument in locate(): you really need to copy the entire user's request, to avoid telephone game situation. Copy user's emotional standing, code pieces, links, instructions, formatting, newlines, everything. It's fine if you need to copy a lot, just copy word-for-word. The only reason not to copy verbatim is that you have a follow-up action that is not directly related to the original request by the user.
 
-  Thinking strategy:
+  Answering strategy:
 
   * Question unrelated to the project => just answer immediately.
@@ -107,6 +112,8 @@ PROMPT_AGENTIC_TOOLS: |
   If the task requires changes, write the changes yourself using šŸ“-notation, then call patch() in parallel for each file to change, and put all tickets you want to apply to a file in a comma-separated list.
 
+  %CD_INSTRUCTIONS%
+
   WHEN USING EXPLORATION TOOLS, USE SEVERAL IN PARALLEL! USE šŸ“ BEFORE ANY CODE BLOCK!
 

From 1b2e8586b4543bd71f9647c22b368e461efb2f19 Mon Sep 17 00:00:00 2001
From: Oleg Klimov
Date: Fri, 6 Dec 2024 13:17:26 +0100
Subject: [PATCH 067/185] call knowledge() more often

---
 src/yaml_configs/customization_compiled_in.rs | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs
index dcbb92054..942bbbf72 100644
--- a/src/yaml_configs/customization_compiled_in.rs
+++ b/src/yaml_configs/customization_compiled_in.rs
@@ -104,7 +104,8 @@ PROMPT_AGENTIC_TOOLS: |
 
   * Related to the project, and user gives a code snippet to rewrite or explain => maybe quickly call definition() for symbols needed, and immediately rewrite user's code, that's an interactive use case.
 
-  * Related to the project, user describes an issue that appears to be somewhere in the code => call locate() to find where exactly in the code that is.
+  * Related to the project, user describes an issue that appears to be somewhere in the code => call knowledge() first, and locate() after that to find where
+  exactly in the code that is.
 
   * User's request likely involves several steps, function calls, agentic tools like browser, database, debugger => then you need to call knowledge() first to get access to the latest and best trajectories accomplishing a similar thing.
 
@@ -114,7 +115,7 @@ PROMPT_AGENTIC_TOOLS: |
 
   %CD_INSTRUCTIONS%
 
-  WHEN USING EXPLORATION TOOLS, USE SEVERAL IN PARALLEL! USE šŸ“ BEFORE ANY CODE BLOCK!
+  WHEN USING EXPLORATION TOOLS, USE SEVERAL IN PARALLEL! USE šŸ“ BEFORE ANY CODE BLOCK! FOR COMPLEX TASKS, CALL knowledge() BEFORE DOING ANYTHING!
PROMPT_CONFIGURATOR: | From fdafffc41cf7c3b2b2abb5bb84275894b80b02b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Thu, 5 Dec 2024 22:36:48 +0100 Subject: [PATCH 068/185] feat: enable --chat-remote in cli to run in isolation --- examples/links.sh | 2 ++ python_binding_and_cmdline/refact/chat_client.py | 4 +++- python_binding_and_cmdline/refact/cli_main.py | 4 +++- python_binding_and_cmdline/refact/cli_settings.py | 3 ++- python_binding_and_cmdline/refact/cli_streaming.py | 3 ++- 5 files changed, 12 insertions(+), 4 deletions(-) diff --git a/examples/links.sh b/examples/links.sh index 4cdb3a50e..c61360147 100644 --- a/examples/links.sh +++ b/examples/links.sh @@ -1,3 +1,5 @@ +#!/usr/bin/env bash + echo echo Follow up example curl -X POST http://localhost:8001/v1/links \ diff --git a/python_binding_and_cmdline/refact/chat_client.py b/python_binding_and_cmdline/refact/chat_client.py index b37de4a39..ac02e954b 100644 --- a/python_binding_and_cmdline/refact/chat_client.py +++ b/python_binding_and_cmdline/refact/chat_client.py @@ -222,7 +222,8 @@ async def ask_using_http( only_deterministic_messages: bool = False, postprocess_parameters: Optional[Dict[str, Any]] = None, callback: Optional[Callable] = None, - chat_id: Optional[str] = None + chat_id: Optional[str] = None, + chat_remote: bool = False, ) -> List[List[Message]]: deterministic: List[Message] = [] subchats: DefaultDict[str, List[Message]] = collections.defaultdict(list) @@ -245,6 +246,7 @@ async def ask_using_http( if chat_id is not None: meta["chat_id"] = chat_id meta["chat_mode"] = "AGENT" + meta["chat_remote"] = chat_remote post_me["meta"] = meta choices: List[Optional[Message]] = [None] * n_answers async with aiohttp.ClientSession() as session: diff --git a/python_binding_and_cmdline/refact/cli_main.py b/python_binding_and_cmdline/refact/cli_main.py index 251052b09..43a5adb55 100644 --- a/python_binding_and_cmdline/refact/cli_main.py +++ b/python_binding_and_cmdline/refact/cli_main.py @@ -203,7 +203,7 @@ def on_submit(buffer): start_streaming() async def asyncfunc(): - await the_chatting_loop(cli_settings.args.model, cli_settings.args.chat_id, max_auto_resubmit=(1 if cli_settings.args.always_pause else 6)) + await the_chatting_loop(cli_settings.args.model, cli_settings.args.chat_id, cli_settings.args.chat_remote, max_auto_resubmit=(1 if cli_settings.args.always_pause else 6)) if len(cli_streaming.streaming_messages) == 0: return # cli_streaming.print_response("\n") # flush_response inside @@ -243,6 +243,7 @@ async def chat_main(): parser.add_argument('--start-with', type=str, default=False, help="Start with messages in a .json file, the format is [msg, msg, ...]") parser.add_argument('--compressor', action='store_true', help="Compress trajectory that comes from reading --start-with and exit") parser.add_argument('--chat-id', type=str, default=None, help="Optional unique id of the chat") + parser.add_argument('--chat-remote', type=bool, default=False, help="Run the chat on isolation in docker") parser.add_argument('question', nargs=argparse.REMAINDER, help="You can continue your question in the command line after --") args_parsed = parser.parse_args(before_minus_minus) arg_question = " ".join(after_minus_minus) @@ -286,6 +287,7 @@ async def chat_main(): path_to_project=args_parsed.path_to_project, always_pause=args_parsed.always_pause, chat_id=chat_id, + chat_remote=args_parsed.chat_remote, ) await actual_chat(lsp_runner, start_with=args_parsed.start_with, caps=caps, arg_question=arg_question, 
run_compressor=args_parsed.compressor) diff --git a/python_binding_and_cmdline/refact/cli_settings.py b/python_binding_and_cmdline/refact/cli_settings.py index c7d80f1a7..7dcbe8c5c 100644 --- a/python_binding_and_cmdline/refact/cli_settings.py +++ b/python_binding_and_cmdline/refact/cli_settings.py @@ -64,12 +64,13 @@ def get_editing_mode(self): class CmdlineArgs: - def __init__(self, caps: Caps, *, model: str, path_to_project: str, always_pause: bool, chat_id: str): + def __init__(self, caps: Caps, *, model: str, path_to_project: str, always_pause: bool, chat_id: str, chat_remote: bool): self.caps = caps self.model = model or caps.code_chat_default_model self.project_path = path_to_project self.always_pause = always_pause self.chat_id = chat_id + self.chat_remote = chat_remote def n_ctx(self): return self.caps.code_chat_models[self.model].n_ctx diff --git a/python_binding_and_cmdline/refact/cli_streaming.py b/python_binding_and_cmdline/refact/cli_streaming.py index 046e9f029..4ca448f8c 100644 --- a/python_binding_and_cmdline/refact/cli_streaming.py +++ b/python_binding_and_cmdline/refact/cli_streaming.py @@ -226,7 +226,7 @@ def process_streaming_data(data: Dict[str, Any], deltas_collector: Optional[chat print_response("unknown streaming data:\n%s" % data) -async def the_chatting_loop(model, chat_id, max_auto_resubmit): +async def the_chatting_loop(model, chat_id, chat_remote, max_auto_resubmit): global streaming_messages global _is_streaming @@ -258,6 +258,7 @@ def callback(data, deltas_collector): only_deterministic_messages=False, callback=callback, chat_id=chat_id, + chat_remote=chat_remote, ) streaming_messages = choices[0] From 3a8d6b97b84b1cee1e7deac3a930f9a63bfbf19a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Thu, 5 Dec 2024 22:39:08 +0100 Subject: [PATCH 069/185] refactor: add as any to integrations to downcast, use &str instead of &String as param --- src/integrations/integr_abstract.rs | 4 +++- src/integrations/integr_chrome.rs | 4 +++- src/integrations/integr_cmdline.rs | 6 ++++-- src/integrations/integr_cmdline_service.rs | 6 ++++-- src/integrations/integr_github.rs | 4 +++- src/integrations/integr_gitlab.rs | 4 +++- src/integrations/integr_pdb.rs | 4 +++- src/integrations/integr_postgres.rs | 4 +++- src/integrations/setting_up_integrations.rs | 4 +--- 9 files changed, 27 insertions(+), 13 deletions(-) diff --git a/src/integrations/integr_abstract.rs b/src/integrations/integr_abstract.rs index 7a82e76e3..9472d133e 100644 --- a/src/integrations/integr_abstract.rs +++ b/src/integrations/integr_abstract.rs @@ -1,6 +1,8 @@ pub trait IntegrationTrait: Send + Sync { + fn as_any(&self) -> &dyn std::any::Any; fn integr_schema(&self) -> &str; fn integr_settings_apply(&mut self, value: &serde_json::Value) -> Result<(), String>; fn integr_settings_as_json(&self) -> serde_json::Value; - fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box; // integr_name is sometimes different, "cmdline_compile_by_project" != "cmdline" + fn can_upgrade_to_tool(&self) -> bool { true } + fn integr_upgrade_to_tool(&self, integr_name: &str) -> Box; // integr_name is sometimes different, "cmdline_compile_by_project" != "cmdline" } diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index a254dd975..47e71cbf5 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -124,6 +124,8 @@ impl IntegrationSession for ChromeSession } impl IntegrationTrait for ToolChrome { + fn as_any(&self) -> &dyn std::any::Any { 
self } + fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { match serde_json::from_value::(value.clone()) { Ok(settings_chrome) => self.settings_chrome = settings_chrome, @@ -139,7 +141,7 @@ impl IntegrationTrait for ToolChrome { serde_json::to_value(&self.settings_chrome).unwrap() } - fn integr_upgrade_to_tool(&self, _integr_name: &String) -> Box { + fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { Box::new(ToolChrome { settings_chrome: self.settings_chrome.clone(), supports_clicks: false, diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 0f1d302eb..9f66336ef 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -55,6 +55,8 @@ pub struct ToolCmdline { } impl IntegrationTrait for ToolCmdline { + fn as_any(&self) -> &dyn std::any::Any { self } + fn integr_settings_apply(&mut self, value: &serde_json::Value) -> Result<(), String> { match serde_json::from_value::(value.clone()) { Ok(x) => self.cfg = x, @@ -70,10 +72,10 @@ impl IntegrationTrait for ToolCmdline { serde_json::to_value(&self.cfg).unwrap() } - fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box { + fn integr_upgrade_to_tool(&self, integr_name: &str) -> Box { Box::new(ToolCmdline { // is_service: self.is_service, - name: integr_name.clone(), + name: integr_name.to_string(), cfg: self.cfg.clone(), }) as Box } diff --git a/src/integrations/integr_cmdline_service.rs b/src/integrations/integr_cmdline_service.rs index df605ad86..c698c89f3 100644 --- a/src/integrations/integr_cmdline_service.rs +++ b/src/integrations/integr_cmdline_service.rs @@ -28,6 +28,8 @@ pub struct ToolService { } impl IntegrationTrait for ToolService { + fn as_any(&self) -> &dyn std::any::Any { self } + fn integr_settings_apply(&mut self, value: &serde_json::Value) -> Result<(), String> { match serde_json::from_value::(value.clone()) { Ok(x) => self.cfg = x, @@ -43,9 +45,9 @@ impl IntegrationTrait for ToolService { serde_json::to_value(&self.cfg).unwrap() } - fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box { + fn integr_upgrade_to_tool(&self, integr_name: &str) -> Box { Box::new(ToolService { - name: integr_name.clone(), + name: integr_name.to_string(), cfg: self.cfg.clone(), }) as Box } diff --git a/src/integrations/integr_github.rs b/src/integrations/integr_github.rs index 57ad7c9f9..e0906b830 100644 --- a/src/integrations/integr_github.rs +++ b/src/integrations/integr_github.rs @@ -27,6 +27,8 @@ pub struct ToolGithub { } impl Integration for ToolGithub { + fn as_any(&self) -> &dyn std::any::Any { self } + fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { let integration_github = serde_json::from_value::(value.clone()) .map_err(|e|e.to_string())?; @@ -42,7 +44,7 @@ impl Integration for ToolGithub { serde_json::to_value(&integration_github).map_err(|e| e.to_string()) } - fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box { + fn integr_upgrade_to_tool(&self, integr_name: &str) -> Box { Box::new(ToolGithub {integration_github: self.integration_github.clone()}) as Box } diff --git a/src/integrations/integr_gitlab.rs b/src/integrations/integr_gitlab.rs index fb9ec8ed2..908cc78d2 100644 --- a/src/integrations/integr_gitlab.rs +++ b/src/integrations/integr_gitlab.rs @@ -26,6 +26,8 @@ pub struct ToolGitlab { } impl Integration for ToolGitlab{ + fn as_any(&self) -> &dyn std::any::Any { self } + fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { let integration_gitlab = 
serde_json::from_value::(value.clone()) .map_err(|e|e.to_string())?; @@ -41,7 +43,7 @@ impl Integration for ToolGitlab{ serde_json::to_value(&integration_gitlab).map_err(|e| e.to_string()) } - fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box { + fn integr_upgrade_to_tool(&self, integr_name: &str) -> Box { Box::new(ToolGitlab {integration_gitlab: self.integration_gitlab.clone()}) as Box } diff --git a/src/integrations/integr_pdb.rs b/src/integrations/integr_pdb.rs index cb8e575d8..5876d2587 100644 --- a/src/integrations/integr_pdb.rs +++ b/src/integrations/integr_pdb.rs @@ -67,6 +67,8 @@ impl IntegrationSession for PdbSession } impl Integration for ToolPdb { + fn as_any(&self) -> &dyn std::any::Any { self } + fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { let settings_pdb = serde_json::from_value::(value.clone()) .map_err(|e|e.to_string())?; @@ -82,7 +84,7 @@ impl Integration for ToolPdb { serde_json::to_value(&integration_github).map_err(|e| e.to_string()) } - fn integr_upgrade_to_tool(&self, integr_name: &String) -> Box { + fn integr_upgrade_to_tool(&self, integr_name: &str) -> Box { Box::new(ToolPdb {settings_pdb: self.settings_pdb.clone()}) as Box } diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index c74f5cada..14e32edae 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -30,6 +30,8 @@ pub struct ToolPostgres { } impl IntegrationTrait for ToolPostgres { + fn as_any(&self) -> &dyn std::any::Any { self } + fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { match serde_json::from_value::(value.clone()) { Ok(settings_postgres) => self.settings_postgres = settings_postgres, @@ -45,7 +47,7 @@ impl IntegrationTrait for ToolPostgres { serde_json::to_value(&self.settings_postgres).unwrap() } - fn integr_upgrade_to_tool(&self, _integr_name: &String) -> Box { + fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { Box::new(ToolPostgres { settings_postgres: self.settings_postgres.clone() }) as Box diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index adab40841..f03d8e810 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -20,7 +20,7 @@ pub struct YamlError { pub error_msg: String, } -#[derive(Serialize, Default)] +#[derive(Serialize, Default, Debug)] pub struct IntegrationRecord { pub project_path: String, pub integr_name: String, @@ -416,10 +416,8 @@ pub async fn integration_config_save( #[cfg(test)] mod tests { // use super::*; - use crate::integrations::integr_abstract::IntegrationTrait; use crate::integrations::yaml_schema::ISchema; use serde_yaml; - use indexmap::IndexMap; use std::fs::File; use std::io::Write; From 7795364c51804ef01bfd2c583992eb8217a53ac3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Thu, 5 Dec 2024 22:40:01 +0100 Subject: [PATCH 070/185] feat: load integrations apart from tools before upgrading, integrations may not need to be upgraded to tool --- src/integrations/running_integrations.rs | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/src/integrations/running_integrations.rs b/src/integrations/running_integrations.rs index 9de0e25b0..b166a0ea1 100644 --- a/src/integrations/running_integrations.rs +++ b/src/integrations/running_integrations.rs @@ -6,13 +6,28 @@ use tokio::sync::Mutex as AMutex; use 
crate::tools::tools_description::Tool; use crate::global_context::GlobalContext; - +use crate::integrations::integr_abstract::IntegrationTrait; pub async fn load_integration_tools( gcx: Arc>, _current_project: String, _allow_experimental: bool, ) -> IndexMap>>> { + let integraions_map = load_integrations(gcx.clone(), _current_project, _allow_experimental).await; + let mut tools = IndexMap::new(); + for (name, integr) in integraions_map { + if integr.can_upgrade_to_tool() { + tools.insert(name.clone(), Arc::new(AMutex::new(integr.integr_upgrade_to_tool(&name)))); + } + } + tools +} + +pub async fn load_integrations( + gcx: Arc>, + _current_project: String, + _allow_experimental: bool, +) -> IndexMap> { // XXX filter _workspace_folders_arc that fit _current_project let (config_dirs, global_config_dir) = crate::integrations::setting_up_integrations::get_config_dirs(gcx.clone()).await; let integrations_yaml_path = crate::integrations::setting_up_integrations::get_integrations_yaml_path(gcx.clone()).await; @@ -22,7 +37,7 @@ pub async fn load_integration_tools( let vars_for_replacements = crate::integrations::setting_up_integrations::get_vars_for_replacements(gcx.clone()).await; let records = crate::integrations::setting_up_integrations::read_integrations_d(&config_dirs, &global_config_dir, &integrations_yaml_path, &vars_for_replacements, &lst, &mut error_log); - let mut tools = IndexMap::new(); + let mut integrations_map = IndexMap::new(); for rec in records { if !rec.on_your_laptop { continue; @@ -41,7 +56,7 @@ pub async fn load_integration_tools( if should_be_fine.is_err() { tracing::error!("failed to apply settings for integration {}: {:?}", rec.integr_name, should_be_fine.err()); } - tools.insert(rec.integr_name.clone(), Arc::new(AMutex::new(integr.integr_upgrade_to_tool(&rec.integr_name)))); + integrations_map.insert(rec.integr_name.clone(), integr); } for e in error_log { @@ -53,5 +68,5 @@ pub async fn load_integration_tools( ); } - tools + integrations_map } From 347942cab906f5687c38ac3b73918bedfe796b8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Thu, 5 Dec 2024 22:40:37 +0100 Subject: [PATCH 071/185] feat: separate docker and isolation configurations --- src/http/routers/v1/docker.rs | 6 +- .../docker/docker_container_manager.rs | 45 ++++----- src/integrations/docker/integr_docker.rs | 76 ++------------ src/integrations/docker/integr_isolation.rs | 99 +++++++++++++++++++ src/integrations/docker/mod.rs | 29 +++++- src/integrations/mod.rs | 5 +- 6 files changed, 162 insertions(+), 98 deletions(-) create mode 100644 src/integrations/docker/integr_isolation.rs diff --git a/src/http/routers/v1/docker.rs b/src/http/routers/v1/docker.rs index 6ad626bd7..12fad10f4 100644 --- a/src/http/routers/v1/docker.rs +++ b/src/http/routers/v1/docker.rs @@ -8,7 +8,7 @@ use tokio::sync::RwLock as ARwLock; use crate::custom_error::ScratchError; use crate::global_context::GlobalContext; -use crate::integrations::docker::integr_docker::docker_tool_load; +use crate::integrations::docker::docker_and_isolation_load; #[derive(Serialize, Deserialize, Clone, Debug)] #[serde(rename_all = "lowercase")] @@ -55,7 +55,7 @@ pub async fn handle_v1_docker_container_action( let post = serde_json::from_slice::(&body_bytes) .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; - let docker = docker_tool_load(gcx.clone()).await + let (docker, _) = docker_and_isolation_load(gcx.clone()).await .map_err(|e| 
ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Cannot load docker tool: {}", e)))?; let docker_command = match post.action { @@ -79,7 +79,7 @@ pub async fn handle_v1_docker_container_list( let post = serde_json::from_slice::(&body_bytes) .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; - let docker = docker_tool_load(gcx.clone()).await + let (docker, _) = docker_and_isolation_load(gcx.clone()).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Cannot load docker tool: {}", e)))?; let docker_command = match post.label { diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index e5560e2f1..1d9cc2d69 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -17,13 +17,12 @@ use crate::http::routers::v1::lsp_like_handlers::LspLikeInit; use crate::http::routers::v1::sync_files::SyncFilesExtractTarPost; use crate::integrations::sessions::get_session_hashmap_key; use crate::integrations::sessions::IntegrationSession; -use crate::integrations::docker::docker_ssh_tunnel_utils::{ssh_tunnel_open, SshTunnel}; -use crate::integrations::docker::integr_docker::{docker_tool_load, ToolDocker}; - -use super::docker_ssh_tunnel_utils::ssh_tunnel_check_status; +use crate::integrations::docker::docker_ssh_tunnel_utils::{ssh_tunnel_open, SshTunnel, ssh_tunnel_check_status}; +use crate::integrations::docker::integr_docker::ToolDocker; +use crate::integrations::docker::docker_and_isolation_load; +use crate::integrations::docker::integr_isolation::SettingsIsolation; pub const DEFAULT_CONTAINER_LSP_PATH: &str = "/usr/local/bin/refact-lsp"; -pub const TARGET_LSP_PORT: &str = "8001"; #[derive(Clone, Debug)] @@ -79,7 +78,8 @@ pub async fn docker_container_check_status_or_start( chat_id: &str, ) -> Result<(), String> { - let docker = docker_tool_load(gcx.clone()).await?; + let (docker, isolation_maybe) = docker_and_isolation_load(gcx.clone()).await?; + let isolation = isolation_maybe.ok_or_else(|| "No isolation tool available".to_string())?; let docker_container_session_maybe = { let gcx_locked = gcx.read().await; gcx_locked.integration_sessions.get(&get_session_hashmap_key("docker", &chat_id)).cloned() @@ -115,27 +115,25 @@ pub async fn docker_container_check_status_or_start( const LSP_PORT: &str = "8001"; let mut ports_to_forward = if ssh_config_maybe.is_some() { - docker.settings_docker.ports.iter() + isolation.ports.iter() .map(|p| Port {published: "0".to_string(), target: p.target.clone()}).collect::>() } else { - docker.settings_docker.ports.clone() + isolation.ports.clone() }; ports_to_forward.insert(0, Port {published: "0".to_string(), target: LSP_PORT.to_string()}); - let container_id = docker_container_create(&docker, &chat_id, &ports_to_forward, LSP_PORT, gcx.clone()).await?; + let container_id = docker_container_create(&docker, &isolation, &chat_id, &ports_to_forward, LSP_PORT, gcx.clone()).await?; docker_container_sync_yaml_configs(&docker, &container_id, gcx.clone()).await?; docker_container_start(gcx.clone(), &docker, &container_id).await?; let exposed_ports = docker_container_get_exposed_ports(&docker, &container_id, &ports_to_forward, gcx.clone()).await?; let host_lsp_port = exposed_ports.iter().find(|p| p.target == LSP_PORT) .ok_or_else(|| "No LSP port exposed".to_string())?.published.clone(); - let keep_containers_alive_for_x_minutes = 
docker.settings_docker.keep_containers_alive_for_x_minutes; - let connection = match ssh_config_maybe { Some(ssh_config) => { let mut ports_to_forward_through_ssh = exposed_ports.into_iter() .map(|exposed_port| { - let matched_external_port = docker.settings_docker.ports.iter() + let matched_external_port = isolation.ports.iter() .find(|configured_port| configured_port.target == exposed_port.target) .map_or_else(|| "0".to_string(), |forwarded_port| forwarded_port.published.clone()); Port { @@ -157,21 +155,13 @@ pub async fn docker_container_check_status_or_start( internal_port.to_string() } }; - docker_container_sync_workspace(gcx.clone(), &docker, &container_id, &lsp_port_to_connect).await?; - - if !docker.settings_docker.command.is_empty() { - let cmd_to_execute = format!("exec --detach {} {}", container_id, docker.settings_docker.command); - match docker.command_execute(&cmd_to_execute, gcx.clone(), false, true).await { - Ok((cmd_stdout, cmd_stderr)) => { info!("Command executed: {cmd_stdout}\n{cmd_stderr}") }, - Err(e) => { error!("Command execution failed: {}", e) }, - }; - } + docker_container_sync_workspace(gcx.clone(), &docker, &isolation, &container_id, &lsp_port_to_connect).await?; let session: Arc>> = Arc::new(AMutex::new(Box::new(DockerContainerSession { container_id, connection, last_usage_ts: SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs(), - session_timeout_after_inactivity: Duration::from_secs(60 * keep_containers_alive_for_x_minutes), + session_timeout_after_inactivity: Duration::from_secs(60 * isolation.keep_containers_alive_for_x_minutes), weak_gcx: Arc::downgrade(&gcx), }))); @@ -217,17 +207,17 @@ pub async fn docker_container_get_host_lsp_port_to_connect( async fn docker_container_create( docker: &ToolDocker, + isolation: &SettingsIsolation, chat_id: &str, ports_to_forward: &Vec, lsp_port: &str, gcx: Arc>, ) -> Result { - let docker_image_id = docker.settings_docker.docker_image_id.clone(); + let docker_image_id = isolation.docker_image_id.clone(); if docker_image_id.is_empty() { return Err("No image ID to run container from, please specify one.".to_string()); } - let workspace_folder = docker.settings_docker.container_workspace_folder.clone(); - let host_lsp_path = docker.settings_docker.host_lsp_path.clone(); + let host_lsp_path = isolation.host_lsp_path.clone(); let (address_url, api_key) = { let gcx_locked = gcx.read().await; @@ -340,6 +330,7 @@ async fn docker_container_start( async fn docker_container_sync_workspace( gcx: Arc>, docker: &ToolDocker, + isolation: &SettingsIsolation, container_id: &str, lsp_port_to_connect: &str, ) -> Result<(), String> { @@ -348,7 +339,7 @@ async fn docker_container_sync_workspace( .into_iter() .next() .ok_or_else(|| "No workspace folders found".to_string())?; - let container_workspace_folder = PathBuf::from(&docker.settings_docker.container_workspace_folder); + let container_workspace_folder = PathBuf::from(&isolation.container_workspace_folder); let temp_tar_file = tempfile::Builder::new().suffix(".tar").tempfile() .map_err(|e| format!("Error creating temporary tar file: {}", e))?.into_temp_path(); @@ -449,7 +440,7 @@ async fn docker_container_kill( gcx: Arc>, container_id: &str, ) -> Result<(), String> { - let docker = docker_tool_load(gcx.clone()).await?; + let (docker, _) = docker_and_isolation_load(gcx.clone()).await?; docker.command_execute(&format!("container stop {container_id}"), gcx.clone(), true, true).await?; info!("Stopped docker container {container_id}."); diff --git 
a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index e6402705b..4501514d0 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -10,13 +10,12 @@ use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ChatContent, ChatMessage, ContextEnum}; use crate::global_context::GlobalContext; use crate::integrations::integr_abstract::IntegrationTrait; -use crate::integrations::running_integrations::load_integration_tools; use crate::tools::tools_description::Tool; use crate::integrations::docker::docker_ssh_tunnel_utils::{SshConfig, forward_remote_docker_if_needed}; -use crate::integrations::docker::docker_container_manager::Port; #[derive(Clone, Serialize, Deserialize, Default, Debug)] pub struct SettingsDocker { + pub label: String, pub docker_daemon_address: String, pub docker_cli_path: String, pub remote_docker: bool, @@ -25,43 +24,18 @@ pub struct SettingsDocker { #[serde(serialize_with = "serialize_num_to_str", deserialize_with = "deserialize_str_to_num")] pub ssh_port: u16, pub ssh_identity_file: String, - pub container_workspace_folder: String, - pub docker_image_id: String, - pub host_lsp_path: String, - pub run_chat_threads_inside_container: bool, - pub label: String, - pub command: String, - #[serde(serialize_with = "serialize_num_to_str", deserialize_with = "deserialize_str_to_num")] - pub keep_containers_alive_for_x_minutes: u64, - #[serde(serialize_with = "serialize_ports", deserialize_with = "deserialize_ports")] - pub ports: Vec, } -fn serialize_num_to_str(num: &T, serializer: S) -> Result { +pub fn serialize_num_to_str(num: &T, serializer: S) -> Result { serializer.serialize_str(&num.to_string()) } -fn deserialize_str_to_num<'de, T, D>(deserializer: D) -> Result +pub fn deserialize_str_to_num<'de, T, D>(deserializer: D) -> Result where T: std::str::FromStr, T::Err: std::fmt::Display, D: serde::Deserializer<'de>, { String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom) } -fn serialize_ports(ports: &Vec, serializer: S) -> Result { - let ports_str = ports.iter().map(|port| format!("{}:{}", port.published, port.target)) - .collect::>().join(","); - serializer.serialize_str(&ports_str) -} - -fn deserialize_ports<'de, D: serde::Deserializer<'de>>(deserializer: D) -> Result, D::Error> { - let ports_str = String::deserialize(deserializer)?; - ports_str.split(',').filter(|s| !s.is_empty()).map(|port_str| { - let (published, target) = port_str.split_once(':') - .ok_or_else(|| serde::de::Error::custom("expected format 'published:target'"))?; - Ok(Port { published: published.to_string(), target: target.to_string() }) - }).collect() -} - impl SettingsDocker { pub fn get_ssh_config(&self) -> Option { if self.remote_docker { @@ -84,6 +58,8 @@ pub struct ToolDocker { } impl IntegrationTrait for ToolDocker { + fn as_any(&self) -> &dyn std::any::Any { self } + fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { match serde_json::from_value::(value.clone()) { Ok(settings_docker) => { @@ -102,7 +78,7 @@ impl IntegrationTrait for ToolDocker { serde_json::to_value(&self.settings_docker).unwrap() } - fn integr_upgrade_to_tool(&self, _integr_name: &String) -> Box { + fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { Box::new(ToolDocker { settings_docker: self.settings_docker.clone() }) as Box @@ -200,13 +176,6 @@ impl Tool for ToolDocker { } } -pub async fn docker_tool_load(gcx: Arc>) -> Result { - let tools = 
load_integration_tools(gcx.clone(), "".to_string(), true).await; - let docker_tool = tools.get("docker").cloned().ok_or("Docker integration not found")? - .lock().await.as_any().downcast_ref::().cloned().unwrap(); - Ok(docker_tool) -} - fn parse_command(args: &HashMap) -> Result{ return match args.get("command") { Some(Value::String(s)) => Ok(s.to_string()), @@ -297,6 +266,10 @@ fn command_append_label_if_creates_resource(command_args: &mut Vec, labe pub const DOCKER_INTEGRATION_SCHEMA: &str = r#" fields: + label: + f_type: string_short + f_desc: "Label for the Docker container." + f_default: "refact" docker_daemon_address: f_type: string_long f_desc: "The address to connect to the Docker daemon; specify only if not using the default." @@ -326,35 +299,6 @@ fields: f_type: string_long f_desc: "Path to the SSH identity file to connect to remote Docker." f_label: "SSH Identity File" - container_workspace_folder: - f_type: string_long - f_desc: "The workspace folder inside the container." - f_default: "/app" - docker_image_id: - f_type: string_long - f_desc: "The Docker image ID to use." - host_lsp_path: - f_type: string_long - f_desc: "Path to the LSP on the host." - f_default: "/opt/refact/bin/refact-lsp" - run_chat_threads_inside_container: - f_type: bool - f_desc: "Whether to run chat threads inside the container." - f_default: "false" - label: - f_type: string_short - f_desc: "Label for the Docker container." - f_default: "refact" - command: - f_type: string_long - f_desc: "Command to run inside the Docker container." - keep_containers_alive_for_x_minutes: - f_type: string_short - f_desc: "How long to keep containers alive in minutes." - f_default: "60" - ports: - f_type: string_long - f_desc: "Comma separated published:target notation for ports to publish, example '8080:3000,5000:5432'" available: on_your_laptop_possible: true when_isolated_possible: false diff --git a/src/integrations/docker/integr_isolation.rs b/src/integrations/docker/integr_isolation.rs new file mode 100644 index 000000000..7e3e464c5 --- /dev/null +++ b/src/integrations/docker/integr_isolation.rs @@ -0,0 +1,99 @@ +use serde::{Serialize, Deserialize}; +use serde_json::Value; + +use crate::integrations::docker::integr_docker::{serialize_num_to_str, deserialize_str_to_num}; +use crate::integrations::docker::docker_container_manager::Port; +use crate::integrations::integr_abstract::IntegrationTrait; +use crate::tools::tools_description::Tool; + +#[derive(Clone, Serialize, Deserialize, Default, Debug)] +pub struct SettingsIsolation { + pub container_workspace_folder: String, + pub docker_image_id: String, + pub host_lsp_path: String, + #[serde(serialize_with = "serialize_ports", deserialize_with = "deserialize_ports")] + pub ports: Vec, + #[serde(serialize_with = "serialize_num_to_str", deserialize_with = "deserialize_str_to_num")] + pub keep_containers_alive_for_x_minutes: u64, +} + +fn serialize_ports(ports: &Vec, serializer: S) -> Result { + let ports_str = ports.iter().map(|port| format!("{}:{}", port.published, port.target)) + .collect::>().join(","); + serializer.serialize_str(&ports_str) +} +fn deserialize_ports<'de, D: serde::Deserializer<'de>>(deserializer: D) -> Result, D::Error> { + let ports_str = String::deserialize(deserializer)?; + ports_str.split(',').filter(|s| !s.is_empty()).map(|port_str| { + let (published, target) = port_str.split_once(':') + .ok_or_else(|| serde::de::Error::custom("expected format 'published:target'"))?; + Ok(Port { published: published.to_string(), target: target.to_string() }) + 
}).collect() +} + +#[derive(Clone, Default, Debug)] +pub struct IntegrationIsolation { + pub settings_isolation: SettingsIsolation, +} + +impl IntegrationTrait for IntegrationIsolation { + fn as_any(&self) -> &dyn std::any::Any { self } + + fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { + match serde_json::from_value::(value.clone()) { + Ok(settings_isolation) => { + tracing::info!("Isolation settings applied: {:?}", settings_isolation); + self.settings_isolation = settings_isolation + }, + Err(e) => { + tracing::error!("Failed to apply settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } + Ok(()) + } + + fn integr_settings_as_json(&self) -> Value { + serde_json::to_value(&self.settings_isolation).unwrap() + } + + fn can_upgrade_to_tool(&self) -> bool { false } + + fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { + unimplemented!("Isolation cannot be upgraded to a tool, its configuration is used to run the project in isolation.") + } + + fn integr_schema(&self) -> &str + { + ISOLATION_INTEGRATION_SCHEMA + } +} + +pub const ISOLATION_INTEGRATION_SCHEMA: &str = r#" +fields: + container_workspace_folder: + f_type: string_long + f_desc: "The workspace folder inside the container." + f_default: "/app" + docker_image_id: + f_type: string_long + f_desc: "The Docker image ID to use." + host_lsp_path: + f_type: string_long + f_desc: "Path to the LSP on the host." + f_default: "/opt/refact/bin/refact-lsp" + command: + f_type: string_long + f_desc: "Command to run inside the Docker container." + keep_containers_alive_for_x_minutes: + f_type: string_short + f_desc: "How long to keep containers alive in minutes." + f_default: "60" + ports: + f_type: string_long + f_desc: "Comma separated published:target notation for ports to publish, example '8080:3000,5000:5432'" +available: + on_your_laptop_possible: true + when_isolated_possible: false +smartlinks: [] +"#; \ No newline at end of file diff --git a/src/integrations/docker/mod.rs b/src/integrations/docker/mod.rs index 13d50dba3..ec3e0d645 100644 --- a/src/integrations/docker/mod.rs +++ b/src/integrations/docker/mod.rs @@ -1,3 +1,30 @@ +use std::sync::Arc; + +use tokio::sync::RwLock as ARwLock; +use crate::global_context::GlobalContext; +use crate::integrations::integr_abstract::IntegrationTrait; +use crate::integrations::running_integrations::load_integrations; +use crate::integrations::docker::integr_docker::ToolDocker; +use crate::integrations::docker::integr_isolation::{SettingsIsolation, IntegrationIsolation}; + pub mod integr_docker; +pub mod integr_isolation; pub mod docker_ssh_tunnel_utils; -pub mod docker_container_manager; \ No newline at end of file +pub mod docker_container_manager; + +pub async fn docker_and_isolation_load(gcx: Arc>) -> Result<(ToolDocker, Option), String> { + let integrations = load_integrations(gcx.clone(), "".to_string(), true).await; + + let docker_tool = integrations.get("docker") + .ok_or("Docker integration not found".to_string())? + .integr_upgrade_to_tool("docker") + .as_any().downcast_ref::() + .ok_or("Failed to downcast docker tool".to_string())? 
+ .clone(); + + let isolation_integration = integrations.get("isolation") + .and_then(|integration| integration.as_any().downcast_ref::()) + .map(|isolation| isolation.settings_isolation.clone()); + + Ok((docker_tool, isolation_integration)) +} \ No newline at end of file diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index c180dae36..46bf9bc15 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -45,6 +45,7 @@ pub fn integration_from_name(n: &str) -> Result) }, + "isolation" => Ok(Box::new(docker::integr_isolation::IntegrationIsolation {..Default::default()}) as Box), _ => Err(format!("Unknown integration name: {}", n)), } } @@ -71,7 +72,9 @@ pub fn integrations_list() -> Vec<&'static str> { "chrome", "cmdline_TEMPLATE", "service_TEMPLATE", - "docker" + // "chrome", + "docker", + "isolation" ] } From 06c5d114797e29a6ee2ed3db0692af425e628a49 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Fri, 6 Dec 2024 11:56:03 +0100 Subject: [PATCH 072/185] fix: if docker container execution fails or docker tool load fails, return empty list and a flag to indicate the failure --- src/http/routers/v1/docker.rs | 45 ++++++++++++++++++++++++++--------- 1 file changed, 34 insertions(+), 11 deletions(-) diff --git a/src/http/routers/v1/docker.rs b/src/http/routers/v1/docker.rs index 12fad10f4..f7916db2e 100644 --- a/src/http/routers/v1/docker.rs +++ b/src/http/routers/v1/docker.rs @@ -31,6 +31,13 @@ pub struct DockerContainerListPost { pub image: Option, } +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct DockerContainerListResponse { + pub container_list: Vec, + pub has_connection_to_docker_daemon: bool, + pub docker_error: String, +} + #[derive(Serialize, Deserialize, Clone, Debug, Default)] pub struct DockerContainerListOutput { id: String, @@ -79,16 +86,20 @@ pub async fn handle_v1_docker_container_list( let post = serde_json::from_slice::(&body_bytes) .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; - let (docker, _) = docker_and_isolation_load(gcx.clone()).await - .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Cannot load docker tool: {}", e)))?; + let docker = match docker_and_isolation_load(gcx.clone()).await { + Ok((docker, _)) => docker, + Err(e) => return Ok(docker_container_list_response(vec![], false, &e)), + }; let docker_command = match post.label { Some(label) => format!("container list --all --no-trunc --format json --filter label={label}"), None => "container list --all --no-trunc --format json".to_string(), }; - let (unparsed_output, _) = docker.command_execute(&docker_command, gcx.clone(), true, false).await - .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Command {} failed: {}", docker_command, e)))?; + let unparsed_output = match docker.command_execute(&docker_command, gcx.clone(), true, false).await { + Ok((unparsed_output, _)) => unparsed_output, + Err(e) => return Ok(docker_container_list_response(vec![], false, &e)), + }; let mut output: Vec = unparsed_output.lines().map(|line| serde_json::from_str(line)).collect::, _>>() .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Container list JSON problem: {}", e)))?; @@ -105,16 +116,14 @@ pub async fn handle_v1_docker_container_list( }).collect::, ScratchError>>()?; if container_ids.len() == 0 { - return Ok(Response::builder() - .status(StatusCode::OK) - .header("Content-Type", "application/json") - 
.body(Body::from(serde_json::to_string(&serde_json::json!({"containers": Vec::::new()})).unwrap())) - .unwrap()) + return Ok(docker_container_list_response(vec![], true, "")); } let inspect_command = format!("container inspect --format json {}", container_ids.join(" ")); - let (inspect_unparsed_output, _) = docker.command_execute(&inspect_command, gcx.clone(), true, false).await - .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Command {} failed: {}", inspect_command, e)))?; + let inspect_unparsed_output = match docker.command_execute(&inspect_command, gcx.clone(), true, false).await { + Ok((inspect_unparsed_output, _)) => inspect_unparsed_output, + Err(e) => return Ok(docker_container_list_response(vec![], false, &e)), + }; let inspect_output = serde_json::from_str::>(&inspect_unparsed_output) .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Container inspect JSON problem: {}", e)))?; @@ -147,6 +156,20 @@ pub async fn handle_v1_docker_container_list( .unwrap()) } +fn docker_container_list_response( + container_list: Vec, + has_connection_to_daemon: bool, + error: &str, +) -> Response { + let response = DockerContainerListResponse { + container_list, + has_connection_to_docker_daemon: has_connection_to_daemon, + docker_error: error.to_string(), + }; + Response::builder().status(StatusCode::OK).header("Content-Type", "application/json") + .body(Body::from(serde_json::to_string(&response).unwrap())).unwrap() +} + fn extract_string_field<'a>(container: &'a serde_json::Value, field_path: &[&str], error_message: &str) -> Result { field_path.iter().fold(container, |acc, &key| &acc[key]).as_str().map(ToString::to_string) .ok_or_else(|| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("{}:\n{:?}", error_message, container))) From 7373bf3778d7493688d48b24156b820312396b51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Fri, 6 Dec 2024 19:48:42 +0100 Subject: [PATCH 073/185] fix: same response format for ok list of containers --- src/http/routers/v1/docker.rs | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/http/routers/v1/docker.rs b/src/http/routers/v1/docker.rs index f7916db2e..190980230 100644 --- a/src/http/routers/v1/docker.rs +++ b/src/http/routers/v1/docker.rs @@ -33,7 +33,7 @@ pub struct DockerContainerListPost { #[derive(Serialize, Deserialize, Clone, Debug)] pub struct DockerContainerListResponse { - pub container_list: Vec, + pub containers: Vec, pub has_connection_to_docker_daemon: bool, pub docker_error: String, } @@ -128,7 +128,7 @@ pub async fn handle_v1_docker_container_list( let inspect_output = serde_json::from_str::>(&inspect_unparsed_output) .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Container inspect JSON problem: {}", e)))?; - let response_body: Vec = inspect_output.into_iter() + let containers: Vec = inspect_output.into_iter() .map(|container| { let mut container_name = extract_string_field(&container, &["Name"], "Missing container name")?; if container_name.starts_with('/') { container_name = container_name[1..].to_string() }; @@ -149,20 +149,16 @@ pub async fn handle_v1_docker_container_list( }) }).collect::, ScratchError>>()?; - Ok(Response::builder() - .status(StatusCode::OK) - .header("Content-Type", "application/json") - .body(Body::from(serde_json::to_string(&serde_json::json!({"containers": response_body})).unwrap())) - .unwrap()) + Ok(docker_container_list_response(containers, true, "")) } fn 
docker_container_list_response( - container_list: Vec, + containers: Vec, has_connection_to_daemon: bool, error: &str, ) -> Response { let response = DockerContainerListResponse { - container_list, + containers, has_connection_to_docker_daemon: has_connection_to_daemon, docker_error: error.to_string(), }; From f6ed23475353535474e4366fa92b5d4f6f53587f Mon Sep 17 00:00:00 2001 From: JegernOUTT Date: Fri, 6 Dec 2024 01:26:17 +1030 Subject: [PATCH 074/185] Add project summary feature to chat module Introduce `project_summary_chat` module and integrate it with the existing chat system. Implement asynchronous function `mix_config_messages` to load project-specific configurations and available integration tools, adding them to chat messages. Update HTTP router to include a new endpoint `/chat-project-summary` for handling project summaries. Add default prompt configuration for project summaries in `customization_compiled_in.rs`. This enhancement allows generating a concise project overview and identifying suitable tools within the project's context. --- src/http/routers/v1.rs | 3 +- src/http/routers/v1/chat.rs | 13 +++++ src/integrations/mod.rs | 1 + src/integrations/project_summary_chat.rs | 54 +++++++++++++++++++ src/yaml_configs/customization_compiled_in.rs | 27 +++++++++- 5 files changed, 96 insertions(+), 2 deletions(-) create mode 100644 src/integrations/project_summary_chat.rs diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index 72a9a2014..862ec237d 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -18,7 +18,7 @@ use crate::http::routers::v1::at_commands::{handle_v1_command_completion, handle use crate::http::routers::v1::at_tools::{handle_v1_tools, handle_v1_tools_check_if_confirmation_needed, handle_v1_tools_execute}; use crate::http::routers::v1::caps::handle_v1_caps; use crate::http::routers::v1::caps::handle_v1_ping; -use crate::http::routers::v1::chat::{handle_v1_chat, handle_v1_chat_completions, handle_v1_chat_configuration}; +use crate::http::routers::v1::chat::{handle_v1_chat, handle_v1_chat_completions, handle_v1_chat_configuration, handle_v1_chat_project_summary}; use crate::http::routers::v1::chat_based_handlers::handle_v1_commit_message_from_diff; use crate::http::routers::v1::dashboard::get_dashboard_plots; use crate::http::routers::v1::docker::{handle_v1_docker_container_action, handle_v1_docker_container_list}; @@ -86,6 +86,7 @@ pub fn make_v1_router() -> Router { .route("/chat", telemetry_post!(handle_v1_chat)) .route("/chat/completions", telemetry_post!(handle_v1_chat_completions)) // standard .route("/chat-configuration", telemetry_post!(handle_v1_chat_configuration)) + .route("/chat-project-summary", telemetry_post!(handle_v1_chat_project_summary)) .route("/telemetry-network", telemetry_post!(handle_v1_telemetry_network)) .route("/snippet-accepted", telemetry_post!(handle_v1_snippet_accepted)) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 8e8f6c886..2609f0651 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -71,6 +71,19 @@ pub async fn handle_v1_chat_configuration( _chat(gcx, &mut chat_post, &mut messages, true).await } +pub async fn handle_v1_chat_project_summary( + Extension(gcx): Extension, + body_bytes: hyper::body::Bytes, +) -> Result, ScratchError> { + let mut chat_post = serde_json::from_slice::(&body_bytes).map_err(|e| { + info!("chat handler cannot parse input:\n{:?}", body_bytes); + ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) + })?; + let 
mut messages = deserialize_messages_from_post(&chat_post.messages)?; + crate::integrations::project_summary_chat::mix_config_messages(gcx.clone(), &mut messages, &chat_post.meta.current_config_file).await; + _chat(gcx, &mut chat_post, &mut messages, true).await +} + pub async fn handle_v1_chat( // less-standard openai-style handler that sends role="context_*" messages first, rewrites the user message Extension(gcx): Extension, diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index 46bf9bc15..36a3371c1 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -21,6 +21,7 @@ pub mod process_io_utils; pub mod docker; pub mod sessions; pub mod config_chat; +pub mod project_summary_chat; pub mod yaml_schema; pub mod setting_up_integrations; pub mod running_integrations; diff --git a/src/integrations/project_summary_chat.rs b/src/integrations/project_summary_chat.rs new file mode 100644 index 000000000..afcc09441 --- /dev/null +++ b/src/integrations/project_summary_chat.rs @@ -0,0 +1,54 @@ +use std::sync::Arc; +use std::fs; +use tokio::sync::RwLock as ARwLock; +use std::collections::HashMap; +use itertools::Itertools; +use crate::global_context::GlobalContext; +use crate::call_validation::{ChatContent, ChatMessage, ContextFile}; +use crate::scratchpads::chat_utils_prompts::system_prompt_add_workspace_info; + +pub async fn mix_config_messages( + gcx: Arc>, + messages: &mut Vec, + current_config_file: &String, +) { + let custom: crate::yaml_configs::customization_loader::CustomizationYaml = match crate::yaml_configs::customization_loader::load_customization(gcx.clone(), true).await { + Ok(x) => x, + Err(why) => { + tracing::error!("Failed to load customization.yaml, will use compiled-in default for the configurator system prompt:\n{:?}", why); + crate::yaml_configs::customization_loader::load_and_mix_with_users_config( + crate::yaml_configs::customization_compiled_in::COMPILED_IN_INITIAL_USER_YAML, + "", "", true, true, &HashMap::new(), + ).unwrap() + } + }; + let sp: &crate::yaml_configs::customization_loader::SystemPrompt = custom.system_prompts.get("project_summary").unwrap(); + let mut sp_text = sp.text.clone(); + sp_text = system_prompt_add_workspace_info(gcx.clone(), &sp_text.replace("%CONFIG_PATH%", current_config_file)).await; + + let available_integrations = crate::integrations::setting_up_integrations::integrations_all_with_icons( + gcx.clone() + ).await; + let mut available_integrations_text: String = "Choose tools from this list:\n".to_string(); + for integration in available_integrations.integrations.iter().map(|x| x.integr_name.clone()).unique() { + available_integrations_text.push_str(&format!("- {}\n", integration)) + } + + if messages.is_empty() { + messages.push(ChatMessage { + role: "system".to_string(), + content: ChatContent::SimpleText(sp_text), + tool_calls: None, + tool_call_id: String::new(), + usage: None, + }); + messages.push(ChatMessage { + role: "user".to_string(), + content: ChatContent::SimpleText(available_integrations_text), + tool_calls: None, + tool_call_id: String::new(), + usage: None, + }); + }; +} + diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index 942bbbf72..c502556da 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -163,6 +163,31 @@ PROMPT_CONFIGURATOR: | - ask the user if they want to change anything - write updated configs using šŸ“REWRITE_WHOLE_FILE +PROMPT_PROJECT_SUMMARY: | + You are Refact Agent, a 
coding assistant. + Your task is to make a summary of the project you're working with and also choose tools from the given list which could be useful to work with the project. + Select only those tools which are really using inside the project. + + %PROMPT_PINS% + %WORKSPACE_INFO% + + Plan to follow: + 1. Look at the current project by calling tree(). + 2. After investigating the project's tree, call cat() to look inside documentation (especially *.md) files like README.md. + 2. Also use cat() to look inside configuration files like Cargo.toml, package.json, requirements.txt, .... + 3. Write everything you've gathered about the project and list tools which could be useful + 4. Ask the user if they want to change anything + 5. Write the project summary and tools list in the YAML format using šŸ“REWRITE_WHOLE_FILE + + The project summary config format is the following YAML: + ``` + project_summary: + + recommended_tools: + - tool_name: + - tool_name: + ``` + Put the generated config to this path: %CONFIG_PATH% system_prompts: default: @@ -177,7 +202,7 @@ system_prompts: text: "%PROMPT_CONFIGURATOR%" show: never project_summary: - text: "TBD" + text: "%PROMPT_PROJECT_SUMMARY%" show: never From 18fc5fcc2a03442cc55b4db481147f49d53c7883 Mon Sep 17 00:00:00 2001 From: JegernOUTT Date: Fri, 6 Dec 2024 21:28:02 +1030 Subject: [PATCH 075/185] Refactor chat handlers and improve tool integration filtering - Simplify chat handler functions by consolidating message deserialization within the `_chat` function. - Improve tool filtering by chat mode with `available_tools_by_chat_mode` function. - Update system prompt handling to include available integrations directly. - Enhance project summary steps in YAML customization with clearer instructions. --- src/http/routers/v1/chat.rs | 90 ++++++++++++------- src/integrations/project_summary_chat.rs | 50 ++++++----- src/yaml_configs/customization_compiled_in.rs | 21 +++-- 3 files changed, 97 insertions(+), 64 deletions(-) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 2609f0651..f549e5b7a 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -1,21 +1,42 @@ use std::sync::Arc; use std::sync::RwLock as StdRwLock; use tokio::sync::Mutex as AMutex; +use tokio::sync::RwLock as ARwLock; use axum::Extension; use axum::response::Result; use hyper::{Body, Response, StatusCode}; -use tracing::info; +use serde_json::Value; +use tracing::{info}; use crate::call_validation::{ChatContent, ChatMessage, ChatPost, ChatMode}; use crate::caps::CodeAssistantCaps; use crate::custom_error::ScratchError; use crate::at_commands::at_commands::AtCommandsContext; -use crate::global_context::SharedGlobalContext; +use crate::global_context::{GlobalContext, SharedGlobalContext}; use crate::integrations::docker::docker_container_manager::docker_container_check_status_or_start; use crate::scratchpads::chat_utils_prompts::{get_default_system_prompt, get_default_system_prompt_from_remote, system_prompt_add_workspace_info}; +pub fn available_tools_by_chat_mode(current_tools: Vec, chat_mode: &ChatMode) -> Vec { + match chat_mode { + ChatMode::Explore | ChatMode::Agent | ChatMode::NoTools => current_tools, + ChatMode::Configure | ChatMode::ProjectSummary => { + let valid_tool_names = ["cat", "tree", "patch", "search", "knowledge"]; + current_tools + .into_iter() + .filter(|x| { + x.get("function") + .and_then(|x| x.get("name")) + .and_then(|tool_name| tool_name.as_str()) + .map(|tool_name_str| valid_tool_names.contains(&tool_name_str)) + 
.unwrap_or(false) + }) + .collect() + } + } +} + pub const CHAT_TOP_N: usize = 7; pub async fn lookup_chat_scratchpad( @@ -50,38 +71,21 @@ pub async fn handle_v1_chat_completions( Extension(gcx): Extension, body_bytes: hyper::body::Bytes, ) -> Result, ScratchError> { - let mut chat_post = serde_json::from_slice::(&body_bytes).map_err(|e| { - info!("chat handler cannot parse input:\n{:?}", body_bytes); - ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) - })?; - let mut messages = deserialize_messages_from_post(&chat_post.messages)?; - _chat(gcx, &mut chat_post, &mut messages, false).await + _chat(gcx, &body_bytes, false).await } pub async fn handle_v1_chat_configuration( Extension(gcx): Extension, body_bytes: hyper::body::Bytes, ) -> Result, ScratchError> { - let mut chat_post = serde_json::from_slice::(&body_bytes).map_err(|e| { - info!("chat handler cannot parse input:\n{:?}", body_bytes); - ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) - })?; - let mut messages = deserialize_messages_from_post(&chat_post.messages)?; - crate::integrations::config_chat::mix_config_messages(gcx.clone(), &mut messages, &chat_post.meta.current_config_file).await; - _chat(gcx, &mut chat_post, &mut messages, true).await + _chat(gcx, &body_bytes, true).await } pub async fn handle_v1_chat_project_summary( Extension(gcx): Extension, body_bytes: hyper::body::Bytes, ) -> Result, ScratchError> { - let mut chat_post = serde_json::from_slice::(&body_bytes).map_err(|e| { - info!("chat handler cannot parse input:\n{:?}", body_bytes); - ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) - })?; - let mut messages = deserialize_messages_from_post(&chat_post.messages)?; - crate::integrations::project_summary_chat::mix_config_messages(gcx.clone(), &mut messages, &chat_post.meta.current_config_file).await; - _chat(gcx, &mut chat_post, &mut messages, true).await + _chat(gcx, &body_bytes, true).await } pub async fn handle_v1_chat( @@ -89,12 +93,7 @@ pub async fn handle_v1_chat( Extension(gcx): Extension, body_bytes: hyper::body::Bytes, ) -> Result, ScratchError> { - let mut chat_post: ChatPost = serde_json::from_slice::(&body_bytes).map_err(|e| { - info!("chat handler cannot parse input:\n{:?}", body_bytes); - ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) - })?; - let mut messages = deserialize_messages_from_post(&chat_post.messages)?; - _chat(gcx, &mut chat_post, &mut messages, true).await + _chat(gcx, &body_bytes, true).await } pub fn deserialize_messages_from_post(messages: &Vec) -> Result, ScratchError> { @@ -109,18 +108,41 @@ pub fn deserialize_messages_from_post(messages: &Vec) -> Resu } async fn _chat( - gcx: SharedGlobalContext, - chat_post: &mut ChatPost, - messages: &mut Vec, - allow_at: bool, + gcx: Arc>, + body_bytes: &hyper::body::Bytes, + allow_at: bool ) -> Result, ScratchError> { + let mut chat_post: ChatPost = serde_json::from_slice::(&body_bytes).map_err(|e| { + info!("chat handler cannot parse input:\n{:?}", body_bytes); + ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) + })?; + let mut messages = deserialize_messages_from_post(&chat_post.messages)?; + match chat_post.meta.chat_mode { + ChatMode::Explore | ChatMode::Agent | ChatMode::NoTools => {}, + ChatMode::Configure => { + crate::integrations::config_chat::mix_config_messages( + gcx.clone(), + &mut messages, + &chat_post.meta.current_config_file + ).await; + } + ChatMode::ProjectSummary => { + 
crate::integrations::project_summary_chat::mix_config_messages( + gcx.clone(), + &mut messages, + &chat_post.meta.current_config_file + ).await; + } + } + // converts tools into openai style if let Some(tools) = &mut chat_post.tools { - for tool in tools { + for tool in &mut *tools { if let Some(function) = tool.get_mut("function") { function.as_object_mut().unwrap().remove("agentic"); } } + chat_post.tools = Some(available_tools_by_chat_mode(tools.clone(), &chat_post.meta.chat_mode)); } let caps = crate::global_context::try_load_caps_quickly_if_not_present(gcx.clone(), 0).await?; @@ -217,7 +239,7 @@ async fn _chat( gcx.clone(), caps, model_name.clone(), - chat_post, + &mut chat_post, &messages, &scratchpad_name, &scratchpad_patch, diff --git a/src/integrations/project_summary_chat.rs b/src/integrations/project_summary_chat.rs index afcc09441..d373d78ce 100644 --- a/src/integrations/project_summary_chat.rs +++ b/src/integrations/project_summary_chat.rs @@ -1,10 +1,9 @@ use std::sync::Arc; -use std::fs; use tokio::sync::RwLock as ARwLock; use std::collections::HashMap; use itertools::Itertools; use crate::global_context::GlobalContext; -use crate::call_validation::{ChatContent, ChatMessage, ContextFile}; +use crate::call_validation::{ChatContent, ChatMessage}; use crate::scratchpads::chat_utils_prompts::system_prompt_add_workspace_info; pub async fn mix_config_messages( @@ -21,34 +20,37 @@ pub async fn mix_config_messages( "", "", true, true, &HashMap::new(), ).unwrap() } - }; - let sp: &crate::yaml_configs::customization_loader::SystemPrompt = custom.system_prompts.get("project_summary").unwrap(); - let mut sp_text = sp.text.clone(); - sp_text = system_prompt_add_workspace_info(gcx.clone(), &sp_text.replace("%CONFIG_PATH%", current_config_file)).await; - + }; let available_integrations = crate::integrations::setting_up_integrations::integrations_all_with_icons( gcx.clone() ).await; let mut available_integrations_text: String = "Choose tools from this list:\n".to_string(); - for integration in available_integrations.integrations.iter().map(|x| x.integr_name.clone()).unique() { + for integration in available_integrations.integrations + .iter() + .map(|x| x.integr_name.clone()) + .filter(|x| !x.contains("_TEMPLATE")) + .unique() { available_integrations_text.push_str(&format!("- {}\n", integration)) } - - if messages.is_empty() { - messages.push(ChatMessage { - role: "system".to_string(), - content: ChatContent::SimpleText(sp_text), - tool_calls: None, - tool_call_id: String::new(), - usage: None, - }); - messages.push(ChatMessage { - role: "user".to_string(), - content: ChatContent::SimpleText(available_integrations_text), - tool_calls: None, - tool_call_id: String::new(), - usage: None, - }); + let sp: &crate::yaml_configs::customization_loader::SystemPrompt = custom.system_prompts.get("project_summary").unwrap(); + let mut sp_text = sp.text.clone(); + sp_text = system_prompt_add_workspace_info(gcx.clone(), &sp_text + .replace("%CONFIG_PATH%", current_config_file) + .replace("%AVAILABLE_INTEGRATIONS%", &available_integrations_text) + ).await; + + let system_message = ChatMessage { + role: "system".to_string(), + content: ChatContent::SimpleText(sp_text), + tool_calls: None, + tool_call_id: String::new(), + usage: None, }; + + if !messages.is_empty() { + messages[0] = system_message; + } else { + messages.push(system_message) + } } diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index c502556da..e58b972f7 100644 --- 
a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -167,17 +167,25 @@ PROMPT_PROJECT_SUMMARY: | You are Refact Agent, a coding assistant. Your task is to make a summary of the project you're working with and also choose tools from the given list which could be useful to work with the project. Select only those tools which are really using inside the project. + %AVAILABLE_INTEGRATIONS% %PROMPT_PINS% %WORKSPACE_INFO% Plan to follow: - 1. Look at the current project by calling tree(). - 2. After investigating the project's tree, call cat() to look inside documentation (especially *.md) files like README.md. - 2. Also use cat() to look inside configuration files like Cargo.toml, package.json, requirements.txt, .... - 3. Write everything you've gathered about the project and list tools which could be useful - 4. Ask the user if they want to change anything - 5. Write the project summary and tools list in the YAML format using šŸ“REWRITE_WHOLE_FILE + 1. Explore the Project Structure: + - Use the tree() command to display the directory structure of the current project. + 2. Investigate Key Files: + - Use cat() to review content in important documentation files, such as README.md and other .md files. + - Use cat() to examine configuration files, including Cargo.toml, package.json, requirements.txt, etc. + 3. Summarize Findings: + - Write a detailed summary of the gathered information about the project. + - Compile a list of tools that might be useful for working on the project. + 4. Get User Feedback: + - Ask the user if they would like to make changes or updates to any part of the project setup or tool list. + 5. Prepare YAML Output: + - Organize the project summary and tools list in YAML format. + - Use the tag šŸ“REWRITE_WHOLE_FILE to indicate the final structured YAML content. The project summary config format is the following YAML: ``` @@ -188,6 +196,7 @@ PROMPT_PROJECT_SUMMARY: | - tool_name: ``` Put the generated config to this path: %CONFIG_PATH% + Strictly follow the plan! system_prompts: default: From 8c2fbad869f46107dfd484ecbdb39630615deb66 Mon Sep 17 00:00:00 2001 From: JegernOUTT Date: Fri, 6 Dec 2024 21:48:44 +1030 Subject: [PATCH 076/185] Integrate project info into system prompts and YAML customization Add functionality to read and insert project information into system prompts using a new async function, `read_project_info`. Update the YAML configuration to include the `%PROJECT_INFO%` placeholder. Adjust system prompts to conditionally replace `%PROJECT_INFO%` with actual project data or an empty string if unavailable. 
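
The substitution behavior this commit describes can be illustrated with a minimal, self-contained Rust sketch. This is not the code added by the patch (the patch introduces `read_project_info` and does the replacement inline in `system_prompt_add_workspace_info`); the `expand_project_info` helper below and its inputs are hypothetical, shown only to make the placeholder-handling explicit.

```
// Sketch of the %PROJECT_INFO% substitution described above.
// `expand_project_info` is a hypothetical helper, not part of the patch.
fn expand_project_info(system_prompt: &str, project_info: Option<String>) -> String {
    if system_prompt.contains("%PROJECT_INFO%") {
        // Insert the project text when it could be read, otherwise drop the placeholder.
        system_prompt.replace("%PROJECT_INFO%", project_info.as_deref().unwrap_or(""))
    } else {
        system_prompt.to_string()
    }
}

fn main() {
    let prompt = "You are a coding assistant.\n%PROJECT_INFO%\nAnswer briefly.";
    // With project data available:
    println!("{}", expand_project_info(prompt, Some("Rust LSP server for Refact".to_string())));
    // Without project data the placeholder simply disappears:
    println!("{}", expand_project_info(prompt, None));
}
```
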
--- src/scratchpads/chat_utils_prompts.rs | 27 +++++++++++++++++-- src/yaml_configs/customization_compiled_in.rs | 4 +++ 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/src/scratchpads/chat_utils_prompts.rs b/src/scratchpads/chat_utils_prompts.rs index c9d49331f..e5f130ef7 100644 --- a/src/scratchpads/chat_utils_prompts.rs +++ b/src/scratchpads/chat_utils_prompts.rs @@ -1,3 +1,4 @@ +use std::fs; use std::sync::Arc; use std::path::PathBuf; use tokio::sync::RwLock as ARwLock; @@ -99,14 +100,36 @@ pub async fn system_prompt_add_workspace_info( (workspace_dirs, active_file_path) } - let mut system_prompt = system_prompt.clone(); + async fn read_project_info( + gcx: Arc>, + ) -> Option { + let (config_dirs, _) = crate::integrations::setting_up_integrations::get_config_dirs(gcx.clone()).await; + let mut project_info = None; + for config_path in config_dirs + .iter() + .map(|x| x.join("project_summary.yaml")) + .filter(|x| !x.exists()) { + if let Some(text) = fs::read_to_string(&config_path).ok() { + project_info = Some(text); + break; + } + } + project_info + } + let mut system_prompt = system_prompt.clone(); if system_prompt.contains("%WORKSPACE_INFO%") { let (workspace_dirs, active_file_path) = workspace_files_info(&gcx).await; let info = _workspace_info(&workspace_dirs, &active_file_path).await; system_prompt = system_prompt.replace("%WORKSPACE_INFO%", &info); } + if system_prompt.contains("%PROJECT_INFO%") { + if let Some(project_info) = read_project_info(gcx.clone()).await { + system_prompt = system_prompt.replace("%PROJECT_INFO%", &project_info); + } else { + system_prompt = system_prompt.replace("%PROJECT_INFO%", ""); + } + } system_prompt } - diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index e58b972f7..dd19e96c5 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -68,6 +68,7 @@ PROMPT_EXPLORATION_TOOLS: | %PROMPT_PINS% %WORKSPACE_INFO% + %PROJECT_INFO% Good thinking strategy for the answers: is it a question related to the current project? Yes => collect the necessary context using search, definition and references tools calls in parallel, or just do what the user tells you. @@ -85,6 +86,7 @@ PROMPT_AGENTIC_TOOLS: | %PROMPT_PINS% %WORKSPACE_INFO% + %PROJECT_INFO% Good practice using knowledge(): it's the key to successfully completing complex tasks the user might present you with. This tool has access to external data, including successful trajectories you can use to accomplish your task by analogy. The knowledge() @@ -124,6 +126,7 @@ PROMPT_CONFIGURATOR: | %PROMPT_PINS% %WORKSPACE_INFO% + %PROJECT_INFO% The integration config format is the following YAML: ``` @@ -171,6 +174,7 @@ PROMPT_PROJECT_SUMMARY: | %PROMPT_PINS% %WORKSPACE_INFO% + %PROJECT_INFO% Plan to follow: 1. Explore the Project Structure: From 70a1d307f98caae596375f52e48df237aac8522d Mon Sep 17 00:00:00 2001 From: JegernOUTT Date: Fri, 6 Dec 2024 22:02:16 +1030 Subject: [PATCH 077/185] Remove PROJECT_INFO variable placeholder from PROMPT_CONFIGURATOR --- src/yaml_configs/customization_compiled_in.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index dd19e96c5..0f46b94e5 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -174,7 +174,6 @@ PROMPT_PROJECT_SUMMARY: | %PROMPT_PINS% %WORKSPACE_INFO% - %PROJECT_INFO% Plan to follow: 1. 
Explore the Project Structure: From 34835d3a25f8cd3ca5c10f2df62b20e64c3d0c48 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sat, 7 Dec 2024 08:36:59 +0100 Subject: [PATCH 078/185] fix tests and warnings --- src/http/routers/v1/system_prompt.rs | 2 +- src/integrations/docker/mod.rs | 2 +- src/integrations/integr_cmdline.rs | 15 +++++---------- src/integrations/integr_cmdline_service.rs | 21 ++------------------- src/integrations/mod.rs | 2 +- src/integrations/setting_up_integrations.rs | 3 ++- src/integrations/yaml_schema.rs | 3 +++ src/vecdb/vdb_highlev.rs | 3 +-- 8 files changed, 16 insertions(+), 35 deletions(-) diff --git a/src/http/routers/v1/system_prompt.rs b/src/http/routers/v1/system_prompt.rs index 9623a8c91..11057f95a 100644 --- a/src/http/routers/v1/system_prompt.rs +++ b/src/http/routers/v1/system_prompt.rs @@ -27,7 +27,7 @@ pub async fn handle_v1_system_prompt( body_bytes: hyper::body::Bytes, ) -> Result, ScratchError> { // XXX receive ChatMode - let post = serde_json::from_slice::(&body_bytes) + let _post = serde_json::from_slice::(&body_bytes) .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; let prompt = get_default_system_prompt(gcx.clone(), crate::call_validation::ChatMode::Agent).await; diff --git a/src/integrations/docker/mod.rs b/src/integrations/docker/mod.rs index ec3e0d645..6b9c46b57 100644 --- a/src/integrations/docker/mod.rs +++ b/src/integrations/docker/mod.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use tokio::sync::RwLock as ARwLock; use crate::global_context::GlobalContext; -use crate::integrations::integr_abstract::IntegrationTrait; +// use crate::integrations::integr_abstract::IntegrationTrait; use crate::integrations::running_integrations::load_integrations; use crate::integrations::docker::integr_docker::ToolDocker; use crate::integrations::docker::integr_isolation::{SettingsIsolation, IntegrationIsolation}; diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 9f66336ef..664026187 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -25,8 +25,8 @@ pub struct CmdlineToolConfig { pub parameters_required: Option>, // blocking - #[serde(default = "_default_timeout")] - pub timeout: u64, + #[serde(default)] + pub timeout: String, #[serde(default)] pub output_filter: CmdlineOutputFilter, @@ -39,10 +39,6 @@ pub struct CmdlineToolConfig { pub startup_wait_keyword: Option, } -fn _default_timeout() -> u64 { - 120 -} - fn _default_startup_wait() -> u64 { 10 } @@ -162,7 +158,7 @@ pub async fn execute_blocking_command( Ok(out) }; - let timeout_duration = tokio::time::Duration::from_secs(cfg.timeout); + let timeout_duration = tokio::time::Duration::from_secs(cfg.timeout.parse::().unwrap_or(10)); let result = tokio::time::timeout(timeout_duration, command_future).await; match result { @@ -249,14 +245,13 @@ fields: description: f_type: string_long f_desc: "The model will see this description, why the model should call this?" - f_placeholder: "" parameters: f_type: "tool_parameters" f_desc: "The model will fill in those parameters." timeout: - f_type: integer + f_type: string_short f_desc: "The command must immediately return the results, it can't be interactive. If the command runs for too long, it will be terminated and stderr/stdout collected will be presented to the model." - f_default: 10 + f_default: "10" output_filter: f_type: "output_filter" f_desc: "The output from the command can be long or even quasi-infinite. 
This section allows to set limits, prioritize top or bottom, or use regexp to show the model the relevant part." diff --git a/src/integrations/integr_cmdline_service.rs b/src/integrations/integr_cmdline_service.rs index c698c89f3..f73c4d989 100644 --- a/src/integrations/integr_cmdline_service.rs +++ b/src/integrations/integr_cmdline_service.rs @@ -175,7 +175,7 @@ async fn execute_background_command( let mut exit_code: i32 = -100000; loop { - if t0.elapsed() >= tokio::time::Duration::from_secs(cfg.startup_wait) { + if t0.elapsed() >= tokio::time::Duration::from_secs(cfg.startup_wait.to_string().parse::().unwrap_or(10)) { actions_log.push_str(&format!("Timeout {:.2}s reached while waiting for the service to start.\n\n", t0.elapsed().as_secs_f64())); break; } @@ -339,7 +339,7 @@ fields: startup_wait: f_type: string_short f_desc: "Max time to wait for service to start." - f_default: 10 + f_default: "10" startup_wait_keyword: f_type: string f_desc: "Wait until a keyword appears in stdout or stderr at startup." @@ -354,20 +354,3 @@ available: on_your_laptop_possible: true when_isolated_possible: true "#; - - -// #[serde(default)] -// pub startup_wait_port: Option, -// #[serde(default = "_default_startup_wait")] -// pub startup_wait: u64, -// #[serde(default)] -// pub startup_wait_keyword: Option, - -// timeout: -// f_type: integer -// f_desc: "The command must immediately return the results, it can't be interactive. If the command runs for too long, it will be terminated and stderr/stdout collected will be presented to the model." -// f_default: 10 -// output_filter: -// f_type: "output_filter" -// f_desc: "The output from the command can be long or even quasi-infinite. This section allows to set limits, prioritize top or bottom, or use regexp to show the model the relevant part." 
-// f_placeholder: "filter" diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index 36a3371c1..100f83c21 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -51,7 +51,7 @@ pub fn integration_from_name(n: &str) -> Result String +pub fn icon_from_name(_n: &str) -> String { // match n { // // "github" => Box::new(ToolGithub { ..Default::default() }) as Box, diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index f03d8e810..afa877c4d 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -425,7 +425,7 @@ mod tests { async fn test_integration_schemas() { let integrations = crate::integrations::integrations_list(); for name in integrations { - let mut integration_box = crate::integrations::integration_from_name(name).unwrap(); + let integration_box = crate::integrations::integration_from_name(name).unwrap(); let schema_json = { let y: serde_yaml::Value = serde_yaml::from_str(integration_box.integr_schema()).unwrap(); let j = serde_json::to_value(y).unwrap(); @@ -433,6 +433,7 @@ mod tests { }; let schema_yaml: serde_yaml::Value = serde_json::from_value(schema_json.clone()).unwrap(); let compare_me1 = serde_yaml::to_string(&schema_yaml).unwrap(); + eprintln!("schema_json {:?}", schema_json); let schema_struct: ISchema = serde_json::from_value(schema_json).unwrap(); let schema_struct_yaml = serde_json::to_value(&schema_struct).unwrap(); let compare_me2 = serde_yaml::to_string(&schema_struct_yaml).unwrap(); diff --git a/src/integrations/yaml_schema.rs b/src/integrations/yaml_schema.rs index 0efd06838..9a5d13a72 100644 --- a/src/integrations/yaml_schema.rs +++ b/src/integrations/yaml_schema.rs @@ -47,7 +47,9 @@ pub struct ISchemaDocker { pub filter_label: String, pub filter_image: String, pub new_container_default: DockerService, + #[serde(default, skip_serializing_if="is_empty")] pub smartlinks: Vec, + #[serde(default, skip_serializing_if="is_empty")] pub smartlinks_for_each_container: Vec, } @@ -57,6 +59,7 @@ pub struct ISchema { #[serde(default, skip_serializing_if="is_default")] pub description: String, pub available: ISchemaAvailable, + #[serde(default, skip_serializing_if="is_empty")] pub smartlinks: Vec, #[serde(skip_serializing_if = "Option::is_none")] pub docker: Option, diff --git a/src/vecdb/vdb_highlev.rs b/src/vecdb/vdb_highlev.rs index 2650d9781..be3b02900 100644 --- a/src/vecdb/vdb_highlev.rs +++ b/src/vecdb/vdb_highlev.rs @@ -31,7 +31,7 @@ pub struct VecDb { vecdb_emb_client: Arc>, vecdb_handler: Arc>, pub vectorizer_service: Arc>, - cmdline: CommandLine, // TODO: take from command line what's needed, don't store a copy + // cmdline: CommandLine, // TODO: take from command line what's needed, don't store a copy constants: VecdbConstants, } @@ -245,7 +245,6 @@ impl VecDb { vecdb_emb_client: Arc::new(AMutex::new(reqwest::Client::new())), vecdb_handler, vectorizer_service, - cmdline: cmdline.clone(), constants: constants.clone(), }) } From de7785ba29b388efc2671d917552b28fc78a78b8 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sat, 7 Dec 2024 09:56:33 +0100 Subject: [PATCH 079/185] integration yaml problems => links --- src/http/routers/v1/chat.rs | 9 ++++----- src/http/routers/v1/links.rs | 19 ++++++++++++++++++- src/integrations/docker/mod.rs | 8 +++++--- src/integrations/mod.rs | 1 + src/integrations/running_integrations.rs | 17 +++++++++++------ 5 files changed, 39 insertions(+), 15 deletions(-) diff --git a/src/http/routers/v1/chat.rs 
b/src/http/routers/v1/chat.rs index f549e5b7a..e8e49e223 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -7,7 +7,6 @@ use axum::Extension; use axum::response::Result; use hyper::{Body, Response, StatusCode}; use serde_json::Value; -use tracing::{info}; use crate::call_validation::{ChatContent, ChatMessage, ChatPost, ChatMode}; use crate::caps::CodeAssistantCaps; @@ -113,7 +112,7 @@ async fn _chat( allow_at: bool ) -> Result, ScratchError> { let mut chat_post: ChatPost = serde_json::from_slice::(&body_bytes).map_err(|e| { - info!("chat handler cannot parse input:\n{:?}", body_bytes); + tracing::warn!("chat handler cannot parse input:\n{:?}", body_bytes); ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) })?; let mut messages = deserialize_messages_from_post(&chat_post.messages)?; @@ -121,8 +120,8 @@ async fn _chat( ChatMode::Explore | ChatMode::Agent | ChatMode::NoTools => {}, ChatMode::Configure => { crate::integrations::config_chat::mix_config_messages( - gcx.clone(), - &mut messages, + gcx.clone(), + &mut messages, &chat_post.meta.current_config_file ).await; } @@ -134,7 +133,7 @@ async fn _chat( ).await; } } - + // converts tools into openai style if let Some(tools) = &mut chat_post.tools { for tool in &mut *tools { diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 759fc27cf..6ce06ebbb 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -39,6 +39,7 @@ pub struct Link { goto: Option, #[serde(skip_serializing_if = "Option::is_none")] projects: Option>, + tooltip: String, } #[derive(Serialize, Deserialize, Debug)] @@ -58,9 +59,10 @@ pub async fn handle_v1_links( if post.messages.is_empty() && project_summarization_is_missing(gcx.clone()).await { links.push(Link { action: LinkAction::SummarizeProject, - text: "Investigate Project".to_string(), + text: "Initial project summarization".to_string(), goto: None, projects: None, + tooltip: format!("Project summary is a starting point for Refact Agent."), }); } @@ -70,6 +72,7 @@ pub async fn handle_v1_links( text: "Save and return".to_string(), goto: Some("SETTINGS:DEFAULT".to_string()), projects: None, + tooltip: format!(""), }); } @@ -81,6 +84,7 @@ pub async fn handle_v1_links( text: format!("Commit {files_changed} files"), goto: None, projects: Some(project_commits), + tooltip: format!(""), }); } } @@ -92,10 +96,22 @@ pub async fn handle_v1_links( text: format!("Configure {failed_integr_name}"), goto: Some(format!("SETTINGS:{failed_integr_name}")), projects: None, + tooltip: format!(""), }) } } + let (_, integration_yaml_errors) = crate::integrations::running_integrations::load_integrations(gcx.clone(), "".to_string(), true).await; + for e in integration_yaml_errors { + links.push(Link { + action: LinkAction::Goto, + text: format!("Syntax error in {}", crate::nicer_logs::last_n_chars(&e.integr_config_path, 20)), + goto: Some(format!("SETTINGS:{}", e.integr_config_path)), + projects: None, + tooltip: format!("Error at line {}: {}", e.error_line, e.error_msg), + }); + } + if post.meta.chat_mode != ChatMode::NoTools && links.is_empty() { let follow_up_message = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.meta.chat_id).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error generating follow-up message: {}", e)))?; @@ -104,6 +120,7 @@ pub async fn handle_v1_links( text: follow_up_message, goto: None, projects: None, + tooltip: format!(""), }); } diff --git 
a/src/integrations/docker/mod.rs b/src/integrations/docker/mod.rs index 6b9c46b57..7cfdcd616 100644 --- a/src/integrations/docker/mod.rs +++ b/src/integrations/docker/mod.rs @@ -12,8 +12,10 @@ pub mod integr_isolation; pub mod docker_ssh_tunnel_utils; pub mod docker_container_manager; -pub async fn docker_and_isolation_load(gcx: Arc>) -> Result<(ToolDocker, Option), String> { - let integrations = load_integrations(gcx.clone(), "".to_string(), true).await; +pub async fn docker_and_isolation_load(gcx: Arc>) -> Result<(ToolDocker, Option), String> +{ + // XXX: why load all integrations if we need one or two? + let (integrations, _yaml_errors) = load_integrations(gcx.clone(), "".to_string(), true).await; let docker_tool = integrations.get("docker") .ok_or("Docker integration not found".to_string())? @@ -27,4 +29,4 @@ pub async fn docker_and_isolation_load(gcx: Arc>) -> Resu .map(|isolation| isolation.settings_isolation.clone()); Ok((docker_tool, isolation_integration)) -} \ No newline at end of file +} diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index 100f83c21..ba01f4a0f 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -51,6 +51,7 @@ pub fn integration_from_name(n: &str) -> Result String { // match n { diff --git a/src/integrations/running_integrations.rs b/src/integrations/running_integrations.rs index b166a0ea1..3514f35c5 100644 --- a/src/integrations/running_integrations.rs +++ b/src/integrations/running_integrations.rs @@ -13,7 +13,7 @@ pub async fn load_integration_tools( _current_project: String, _allow_experimental: bool, ) -> IndexMap>>> { - let integraions_map = load_integrations(gcx.clone(), _current_project, _allow_experimental).await; + let (integraions_map, _yaml_errors) = load_integrations(gcx.clone(), _current_project, _allow_experimental).await; let mut tools = IndexMap::new(); for (name, integr) in integraions_map { if integr.can_upgrade_to_tool() { @@ -27,7 +27,7 @@ pub async fn load_integrations( gcx: Arc>, _current_project: String, _allow_experimental: bool, -) -> IndexMap> { +) -> (IndexMap>, Vec) { // XXX filter _workspace_folders_arc that fit _current_project let (config_dirs, global_config_dir) = crate::integrations::setting_up_integrations::get_config_dirs(gcx.clone()).await; let integrations_yaml_path = crate::integrations::setting_up_integrations::get_integrations_yaml_path(gcx.clone()).await; @@ -54,19 +54,24 @@ pub async fn load_integrations( }; let should_be_fine = integr.integr_settings_apply(&rec.config_unparsed); if should_be_fine.is_err() { - tracing::error!("failed to apply settings for integration {}: {:?}", rec.integr_name, should_be_fine.err()); + // tracing::warn!("failed to apply settings for integration {}: {:?}", rec.integr_name, should_be_fine.err()); + error_log.push(crate::integrations::setting_up_integrations::YamlError { + integr_config_path: rec.integr_config_path.clone(), + error_line: 0, + error_msg: format!("failed to apply settings: {:?}", should_be_fine.err()), + }); } integrations_map.insert(rec.integr_name.clone(), integr); } - for e in error_log { + for e in error_log.iter() { tracing::error!( "{}:{} {:?}", - crate::nicer_logs::last_n_chars(&&e.integr_config_path, 30), + crate::nicer_logs::last_n_chars(&e.integr_config_path, 30), e.error_line, e.error_msg, ); } - integrations_map + (integrations_map, error_log) } From fccf2ef286f54f51a0f7c8b82a72bfd125b413c3 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sat, 7 Dec 2024 10:20:16 +0100 Subject: [PATCH 080/185] Uppercase chat mode --- 
src/call_validation.rs | 14 +++++++------- src/http/routers/v1/chat.rs | 14 +++++++------- src/http/routers/v1/links.rs | 8 ++++---- src/http/routers/v1/system_prompt.rs | 2 +- src/scratchpads/chat_utils_prompts.rs | 10 +++++----- src/tools/tool_patch.rs | 2 +- 6 files changed, 25 insertions(+), 25 deletions(-) diff --git a/src/call_validation.rs b/src/call_validation.rs index d1dece971..85f6c5c57 100644 --- a/src/call_validation.rs +++ b/src/call_validation.rs @@ -209,18 +209,18 @@ pub struct ChatMeta { } #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] -#[serde(rename_all = "UPPERCASE")] +#[allow(non_camel_case_types)] pub enum ChatMode { - NoTools, - Explore, - Agent, - Configure, - ProjectSummary, + NO_TOOLS, + EXPLORE, + AGENT, + CONFIGURE, + PROJECT_SUMMARY, } impl Default for ChatMode { fn default() -> Self { - ChatMode::NoTools + ChatMode::NO_TOOLS } } diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index e8e49e223..335d11949 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -19,8 +19,8 @@ use crate::scratchpads::chat_utils_prompts::{get_default_system_prompt, get_defa pub fn available_tools_by_chat_mode(current_tools: Vec, chat_mode: &ChatMode) -> Vec { match chat_mode { - ChatMode::Explore | ChatMode::Agent | ChatMode::NoTools => current_tools, - ChatMode::Configure | ChatMode::ProjectSummary => { + ChatMode::EXPLORE | ChatMode::AGENT | ChatMode::NO_TOOLS => current_tools, + ChatMode::CONFIGURE | ChatMode::PROJECT_SUMMARY => { let valid_tool_names = ["cat", "tree", "patch", "search", "knowledge"]; current_tools .into_iter() @@ -117,15 +117,15 @@ async fn _chat( })?; let mut messages = deserialize_messages_from_post(&chat_post.messages)?; match chat_post.meta.chat_mode { - ChatMode::Explore | ChatMode::Agent | ChatMode::NoTools => {}, - ChatMode::Configure => { + ChatMode::EXPLORE | ChatMode::AGENT | ChatMode::NO_TOOLS => {}, + ChatMode::CONFIGURE => { crate::integrations::config_chat::mix_config_messages( gcx.clone(), &mut messages, &chat_post.meta.current_config_file ).await; } - ChatMode::ProjectSummary => { + ChatMode::PROJECT_SUMMARY => { crate::integrations::project_summary_chat::mix_config_messages( gcx.clone(), &mut messages, @@ -213,8 +213,8 @@ async fn _chat( let have_system = !messages.is_empty() && messages[0].role == "system"; if !have_system { - let exploration_tools = chat_post.meta.chat_mode != ChatMode::NoTools; - let agentic_tools = matches!(chat_post.meta.chat_mode, ChatMode::Agent | ChatMode::Configure | ChatMode::ProjectSummary); + let exploration_tools = chat_post.meta.chat_mode != ChatMode::NO_TOOLS; + let agentic_tools = matches!(chat_post.meta.chat_mode, ChatMode::AGENT | ChatMode::CONFIGURE | ChatMode::PROJECT_SUMMARY); let system_message_content = if should_execute_remotely { // XXX pass chat_post.meta.chat_mode get_default_system_prompt_from_remote(gcx.clone(), exploration_tools, agentic_tools, &chat_post.meta.chat_id).await.map_err(|e| diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 6ce06ebbb..12a581022 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -66,7 +66,7 @@ pub async fn handle_v1_links( }); } - if post.meta.chat_mode == ChatMode::Configure && !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { + if post.meta.chat_mode == ChatMode::CONFIGURE && !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { links.push(Link { action: LinkAction::PatchAll, text: "Save and return".to_string(), 
@@ -76,7 +76,7 @@ pub async fn handle_v1_links( }); } - if post.meta.chat_mode == ChatMode::Agent { + if post.meta.chat_mode == ChatMode::AGENT { let (project_commits, files_changed) = generate_commit_messages_with_current_changes(gcx.clone()).await; if !project_commits.is_empty() { links.push(Link { @@ -89,7 +89,7 @@ pub async fn handle_v1_links( } } - if post.meta.chat_mode != ChatMode::Configure { + if post.meta.chat_mode != ChatMode::CONFIGURE { for failed_integr_name in failed_integration_names_after_last_user_message(&post.messages) { links.push(Link { action: LinkAction::Goto, @@ -112,7 +112,7 @@ pub async fn handle_v1_links( }); } - if post.meta.chat_mode != ChatMode::NoTools && links.is_empty() { + if post.meta.chat_mode != ChatMode::NO_TOOLS && links.is_empty() { let follow_up_message = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.meta.chat_id).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error generating follow-up message: {}", e)))?; links.push(Link { diff --git a/src/http/routers/v1/system_prompt.rs b/src/http/routers/v1/system_prompt.rs index 11057f95a..9ad4dd643 100644 --- a/src/http/routers/v1/system_prompt.rs +++ b/src/http/routers/v1/system_prompt.rs @@ -30,7 +30,7 @@ pub async fn handle_v1_system_prompt( let _post = serde_json::from_slice::(&body_bytes) .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; - let prompt = get_default_system_prompt(gcx.clone(), crate::call_validation::ChatMode::Agent).await; + let prompt = get_default_system_prompt(gcx.clone(), crate::call_validation::ChatMode::AGENT).await; let prompt_with_workspace_info = system_prompt_add_workspace_info(gcx.clone(), &prompt).await; diff --git a/src/scratchpads/chat_utils_prompts.rs b/src/scratchpads/chat_utils_prompts.rs index e5f130ef7..a26373fdf 100644 --- a/src/scratchpads/chat_utils_prompts.rs +++ b/src/scratchpads/chat_utils_prompts.rs @@ -22,11 +22,11 @@ pub async fn get_default_system_prompt( }, }; let prompt_key = match chat_mode { - crate::call_validation::ChatMode::NoTools => "default", - crate::call_validation::ChatMode::Explore => "exploration_tools", - crate::call_validation::ChatMode::Agent => "agentic_tools", - crate::call_validation::ChatMode::Configure => "configurator", - crate::call_validation::ChatMode::ProjectSummary => "project_summary", + crate::call_validation::ChatMode::NO_TOOLS => "default", + crate::call_validation::ChatMode::EXPLORE => "exploration_tools", + crate::call_validation::ChatMode::AGENT => "agentic_tools", + crate::call_validation::ChatMode::CONFIGURE => "configurator", + crate::call_validation::ChatMode::PROJECT_SUMMARY => "project_summary", }; let system_prompt = tconfig.system_prompts.get(prompt_key).map_or_else(|| { tracing::error!("cannot find system prompt `{}`", prompt_key); diff --git a/src/tools/tool_patch.rs b/src/tools/tool_patch.rs index 401869de1..b46e074d9 100644 --- a/src/tools/tool_patch.rs +++ b/src/tools/tool_patch.rs @@ -126,7 +126,7 @@ fn return_cd_instruction_or_error( #[async_trait] impl Tool for ToolPatch { fn as_any(&self) -> &dyn std::any::Any { self } - + async fn tool_execute( &mut self, ccx: Arc>, From 17d2074cfc89d51bbaed22c400d439d2190950dd Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sat, 7 Dec 2024 12:02:25 +0100 Subject: [PATCH 081/185] few prints and fixes --- src/http/routers/v1/chat.rs | 4 +++- src/integrations/project_summary_chat.rs | 6 ++++-- src/scratchpad_abstract.rs | 1 + 3 files changed, 8 
insertions(+), 3 deletions(-) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 335d11949..0b0633f94 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -116,6 +116,8 @@ async fn _chat( ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) })?; let mut messages = deserialize_messages_from_post(&chat_post.messages)?; + + tracing::info!("\n\n new chat_mode {:?}\n", chat_post.meta.chat_mode); match chat_post.meta.chat_mode { ChatMode::EXPLORE | ChatMode::AGENT | ChatMode::NO_TOOLS => {}, ChatMode::CONFIGURE => { @@ -126,7 +128,7 @@ async fn _chat( ).await; } ChatMode::PROJECT_SUMMARY => { - crate::integrations::project_summary_chat::mix_config_messages( + crate::integrations::project_summary_chat::mix_project_summary_messages( gcx.clone(), &mut messages, &chat_post.meta.current_config_file diff --git a/src/integrations/project_summary_chat.rs b/src/integrations/project_summary_chat.rs index d373d78ce..769082727 100644 --- a/src/integrations/project_summary_chat.rs +++ b/src/integrations/project_summary_chat.rs @@ -6,7 +6,7 @@ use crate::global_context::GlobalContext; use crate::call_validation::{ChatContent, ChatMessage}; use crate::scratchpads::chat_utils_prompts::system_prompt_add_workspace_info; -pub async fn mix_config_messages( +pub async fn mix_project_summary_messages( gcx: Arc>, messages: &mut Vec, current_config_file: &String, @@ -20,7 +20,7 @@ pub async fn mix_config_messages( "", "", true, true, &HashMap::new(), ).unwrap() } - }; + }; let available_integrations = crate::integrations::setting_up_integrations::integrations_all_with_icons( gcx.clone() ).await; @@ -39,6 +39,8 @@ pub async fn mix_config_messages( .replace("%AVAILABLE_INTEGRATIONS%", &available_integrations_text) ).await; + tracing::info!("PROJECT_SUMMARY PROMPT\n{}", sp_text); + let system_message = ChatMessage { role: "system".to_string(), content: ChatContent::SimpleText(sp_text), diff --git a/src/scratchpad_abstract.rs b/src/scratchpad_abstract.rs index dd9e9fa99..fe6e2cacb 100644 --- a/src/scratchpad_abstract.rs +++ b/src/scratchpad_abstract.rs @@ -24,6 +24,7 @@ impl FinishReason { match s { "" => FinishReason::None, "stop" => FinishReason::Stop, + "tool_calls" => FinishReason::Stop, "length" => FinishReason::Length, "scratchpad-stop" => FinishReason::ScratchpadStop, _ => { From 1d896a914fbd2fb42bf68fa8c1527b85d23568f5 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sat, 7 Dec 2024 15:45:30 +0100 Subject: [PATCH 082/185] links produces current_config_file --- src/http/routers/v1/links.rs | 62 +++++++++++++++++++++++------------- 1 file changed, 39 insertions(+), 23 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 12a581022..073ffb88e 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -33,13 +33,19 @@ enum LinkAction { #[derive(Serialize, Deserialize, Debug)] pub struct Link { + // XXX rename: + // link_action + // link_text + // link_goto + // link_tooltip action: LinkAction, text: String, #[serde(skip_serializing_if = "Option::is_none")] goto: Option, #[serde(skip_serializing_if = "Option::is_none")] - projects: Option>, - tooltip: String, + // projects: Option>, + current_config_file: Option, // XXX rename + link_tooltip: String, } #[derive(Serialize, Deserialize, Debug)] @@ -55,15 +61,19 @@ pub async fn handle_v1_links( let post = serde_json::from_slice::(&body_bytes) .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; 
let mut links = Vec::new(); + tracing::info!("for links, post.meta.chat_mode == {:?}", post.meta.chat_mode); - if post.messages.is_empty() && project_summarization_is_missing(gcx.clone()).await { - links.push(Link { - action: LinkAction::SummarizeProject, - text: "Initial project summarization".to_string(), - goto: None, - projects: None, - tooltip: format!("Project summary is a starting point for Refact Agent."), - }); + if post.messages.is_empty() { + let (is_missing, summary_path) = project_summarization_is_missing(gcx.clone()).await; + if is_missing { + links.push(Link { + action: LinkAction::SummarizeProject, + text: "Initial project summarization".to_string(), + goto: None, + current_config_file: summary_path, + link_tooltip: format!("Project summary is a starting point for Refact Agent."), + }); + } } if post.meta.chat_mode == ChatMode::CONFIGURE && !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { @@ -71,8 +81,8 @@ pub async fn handle_v1_links( action: LinkAction::PatchAll, text: "Save and return".to_string(), goto: Some("SETTINGS:DEFAULT".to_string()), - projects: None, - tooltip: format!(""), + current_config_file: None, + link_tooltip: format!(""), }); } @@ -83,8 +93,9 @@ pub async fn handle_v1_links( action: LinkAction::Commit, text: format!("Commit {files_changed} files"), goto: None, - projects: Some(project_commits), - tooltip: format!(""), + // projects: Some(project_commits), + current_config_file: None, + link_tooltip: format!(""), }); } } @@ -95,8 +106,8 @@ pub async fn handle_v1_links( action: LinkAction::Goto, text: format!("Configure {failed_integr_name}"), goto: Some(format!("SETTINGS:{failed_integr_name}")), - projects: None, - tooltip: format!(""), + current_config_file: None, + link_tooltip: format!(""), }) } } @@ -107,8 +118,8 @@ pub async fn handle_v1_links( action: LinkAction::Goto, text: format!("Syntax error in {}", crate::nicer_logs::last_n_chars(&e.integr_config_path, 20)), goto: Some(format!("SETTINGS:{}", e.integr_config_path)), - projects: None, - tooltip: format!("Error at line {}: {}", e.error_line, e.error_msg), + current_config_file: None, + link_tooltip: format!("Error at line {}: {}", e.error_line, e.error_msg), }); } @@ -119,8 +130,8 @@ pub async fn handle_v1_links( action: LinkAction::FollowUp, text: follow_up_message, goto: None, - projects: None, - tooltip: format!(""), + current_config_file: None, + link_tooltip: format!(""), }); } @@ -169,14 +180,19 @@ async fn generate_commit_messages_with_current_changes(gcx: Arc>) -> bool { +async fn project_summarization_is_missing(gcx: Arc>) -> (bool, Option) { match crate::files_correction::get_active_project_path(gcx.clone()).await { Some(active_project_path) => { - !active_project_path.join(".refact").join("project_summary.yaml").exists() + let summary_path = active_project_path.join(".refact").join("project_summary.yaml"); + if !summary_path.exists() { + (true, Some(summary_path.to_string_lossy().to_string())) + } else { + (false, Some(summary_path.to_string_lossy().to_string())) + } } None => { tracing::info!("No projects found, project summarization is not relevant."); - false + (false, None) } } } From 548e7db6dafc6b5d68d27dc560c1617ce87fcf49 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sat, 7 Dec 2024 15:45:45 +0100 Subject: [PATCH 083/185] less logs --- src/integrations/setting_up_integrations.rs | 10 +++++----- src/restream.rs | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/integrations/setting_up_integrations.rs 
b/src/integrations/setting_up_integrations.rs index afa877c4d..257b5d1f5 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -89,7 +89,7 @@ pub fn read_integrations_d( for (path_str, integr_name, project_path) in files_to_read { let path = PathBuf::from(&path_str); - let short_pp = if project_path.is_empty() { format!("global") } else { crate::nicer_logs::last_n_chars(&project_path, 15) }; + // let short_pp = if project_path.is_empty() { format!("global") } else { crate::nicer_logs::last_n_chars(&project_path, 15) }; let mut rec: IntegrationRecord = Default::default(); rec.project_path = project_path.clone(); rec.integr_name = integr_name.clone(); @@ -99,7 +99,7 @@ pub fn read_integrations_d( match fs::read_to_string(&path) { Ok(file_content) => match serde_yaml::from_str::(&file_content) { Ok(yaml_value) => { - tracing::info!("{} has {}", short_pp, integr_name); + // tracing::info!("{} has {}", short_pp, integr_name); rec.config_unparsed = serde_json::to_value(yaml_value.clone()).unwrap(); } Err(e) => { @@ -122,7 +122,7 @@ pub fn read_integrations_d( } } } else { - tracing::info!("{} no config file for {}", short_pp, integr_name); + // tracing::info!("{} no config file for {}", short_pp, integr_name); } result.push(rec); } @@ -205,8 +205,8 @@ pub fn read_integrations_d( rec.on_your_laptop = available.get("on_your_laptop").and_then(|v| v.as_bool()).unwrap_or(false); rec.when_isolated = available.get("when_isolated").and_then(|v| v.as_bool()).unwrap_or(false); } else { - let short_pp = if rec.project_path.is_empty() { format!("global") } else { crate::nicer_logs::last_n_chars(&rec.project_path, 15) }; - tracing::info!("{} no 'available' mapping in `{}` will default to true", short_pp, rec.integr_name); + // let short_pp = if rec.project_path.is_empty() { format!("global") } else { crate::nicer_logs::last_n_chars(&rec.project_path, 15) }; + // tracing::info!("{} no 'available' mapping in `{}` will default to true", short_pp, rec.integr_name); rec.on_your_laptop = true; rec.when_isolated = true; } diff --git a/src/restream.rs b/src/restream.rs index fe9164e91..0df24e866 100644 --- a/src/restream.rs +++ b/src/restream.rs @@ -449,8 +449,8 @@ pub async fn scratchpad_interaction_stream( try_insert_usage(&mut value); value["created"] = json!(t1.duration_since(std::time::UNIX_EPOCH).unwrap().as_millis() as f64 / 1000.0); let value_str = format!("data: {}\n\n", serde_json::to_string(&value).unwrap()); - let last_60_chars: String = crate::nicer_logs::first_n_chars(&value_str, 60); - info!("yield: {:?}", last_60_chars); + // let last_60_chars: String = crate::nicer_logs::first_n_chars(&value_str, 60); + // info!("yield: {:?}", last_60_chars); yield Result::<_, String>::Ok(value_str); }, Err(err_str) => { From d89df6870178e06a24bb5faf0c01a0b49d1ad285 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sat, 7 Dec 2024 17:13:08 +0100 Subject: [PATCH 084/185] remove /v1/chat-* --- src/http/routers/v1.rs | 4 +--- src/http/routers/v1/chat.rs | 14 -------------- 2 files changed, 1 insertion(+), 17 deletions(-) diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index 862ec237d..74257ddf2 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -18,7 +18,7 @@ use crate::http::routers::v1::at_commands::{handle_v1_command_completion, handle use crate::http::routers::v1::at_tools::{handle_v1_tools, handle_v1_tools_check_if_confirmation_needed, handle_v1_tools_execute}; use crate::http::routers::v1::caps::handle_v1_caps; use 
crate::http::routers::v1::caps::handle_v1_ping; -use crate::http::routers::v1::chat::{handle_v1_chat, handle_v1_chat_completions, handle_v1_chat_configuration, handle_v1_chat_project_summary}; +use crate::http::routers::v1::chat::{handle_v1_chat, handle_v1_chat_completions}; use crate::http::routers::v1::chat_based_handlers::handle_v1_commit_message_from_diff; use crate::http::routers::v1::dashboard::get_dashboard_plots; use crate::http::routers::v1::docker::{handle_v1_docker_container_action, handle_v1_docker_container_list}; @@ -85,8 +85,6 @@ pub fn make_v1_router() -> Router { .route("/chat", telemetry_post!(handle_v1_chat)) .route("/chat/completions", telemetry_post!(handle_v1_chat_completions)) // standard - .route("/chat-configuration", telemetry_post!(handle_v1_chat_configuration)) - .route("/chat-project-summary", telemetry_post!(handle_v1_chat_project_summary)) .route("/telemetry-network", telemetry_post!(handle_v1_telemetry_network)) .route("/snippet-accepted", telemetry_post!(handle_v1_snippet_accepted)) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 0b0633f94..5693bde00 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -73,20 +73,6 @@ pub async fn handle_v1_chat_completions( _chat(gcx, &body_bytes, false).await } -pub async fn handle_v1_chat_configuration( - Extension(gcx): Extension, - body_bytes: hyper::body::Bytes, -) -> Result, ScratchError> { - _chat(gcx, &body_bytes, true).await -} - -pub async fn handle_v1_chat_project_summary( - Extension(gcx): Extension, - body_bytes: hyper::body::Bytes, -) -> Result, ScratchError> { - _chat(gcx, &body_bytes, true).await -} - pub async fn handle_v1_chat( // less-standard openai-style handler that sends role="context_*" messages first, rewrites the user message Extension(gcx): Extension, From 92bc8866b7712bc9c8f540daee6910d312bf287b Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sat, 7 Dec 2024 17:13:55 +0100 Subject: [PATCH 085/185] PROJECT_SUMMARY in system prompt --- src/http/routers/v1/links.rs | 23 +---- src/scratchpads/chat_utils_prompts.rs | 84 ++++++++++++++----- src/yaml_configs/customization_compiled_in.rs | 11 ++- 3 files changed, 73 insertions(+), 45 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 073ffb88e..b5824382e 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -64,8 +64,9 @@ pub async fn handle_v1_links( tracing::info!("for links, post.meta.chat_mode == {:?}", post.meta.chat_mode); if post.messages.is_empty() { - let (is_missing, summary_path) = project_summarization_is_missing(gcx.clone()).await; - if is_missing { + + let (already_exists, summary_path) = crate::scratchpads::chat_utils_prompts::dig_for_project_summarization_file(gcx.clone()).await; + if !already_exists { links.push(Link { action: LinkAction::SummarizeProject, text: "Initial project summarization".to_string(), @@ -179,24 +180,6 @@ async fn generate_commit_messages_with_current_changes(gcx: Arc>) -> (bool, Option) { - match crate::files_correction::get_active_project_path(gcx.clone()).await { - Some(active_project_path) => { - let summary_path = active_project_path.join(".refact").join("project_summary.yaml"); - if !summary_path.exists() { - (true, Some(summary_path.to_string_lossy().to_string())) - } else { - (false, Some(summary_path.to_string_lossy().to_string())) - } - } - None => { - tracing::info!("No projects found, project summarization is not relevant."); - (false, None) - } - } -} - fn 
failed_integration_names_after_last_user_message(messages: &Vec) -> Vec { let last_user_msg_index = messages.iter().rposition(|m| m.role == "user").unwrap_or(0); let tool_calls = messages[last_user_msg_index..].iter().filter(|m| m.role == "assistant") diff --git a/src/scratchpads/chat_utils_prompts.rs b/src/scratchpads/chat_utils_prompts.rs index a26373fdf..809a9ece0 100644 --- a/src/scratchpads/chat_utils_prompts.rs +++ b/src/scratchpads/chat_utils_prompts.rs @@ -87,6 +87,62 @@ async fn _workspace_info( info } +pub async fn dig_for_project_summarization_file(gcx: Arc>) -> (bool, Option) { + match crate::files_correction::get_active_project_path(gcx.clone()).await { + Some(active_project_path) => { + let summary_path = active_project_path.join(".refact").join("project_summary.yaml"); + if !summary_path.exists() { + (false, Some(summary_path.to_string_lossy().to_string())) + } else { + (true, Some(summary_path.to_string_lossy().to_string())) + } + } + None => { + tracing::info!("No projects found, project summarization is not relevant."); + (false, None) + } + } +} + +async fn _read_project_summary( + gcx: Arc>, +) -> Option { + let (exists, summary_path_option) = dig_for_project_summarization_file(gcx).await; + if exists { + if let Some(summary_path) = summary_path_option { + match fs::read_to_string(summary_path) { + Ok(content) => { + match serde_yaml::from_str::(&content) { + Ok(yaml) => { + if let Some(project_summary) = yaml.get("project_summary") { + match serde_yaml::to_string(project_summary) { + Ok(summary_str) => return Some(summary_str), + Err(e) => { + tracing::error!("Failed to convert project summary to string: {}", e); + return None; + } + } + } else { + tracing::error!("Key 'project_summary' not found in YAML file."); + return None; + } + }, + Err(e) => { + tracing::error!("Failed to parse project summary YAML file: {}", e); + return None; + } + } + }, + Err(e) => { + tracing::error!("Failed to read project summary file: {}", e); + return None; + } + } + } + } + None +} + pub async fn system_prompt_add_workspace_info( gcx: Arc>, system_prompt: &String, @@ -100,36 +156,22 @@ pub async fn system_prompt_add_workspace_info( (workspace_dirs, active_file_path) } - async fn read_project_info( - gcx: Arc>, - ) -> Option { - let (config_dirs, _) = crate::integrations::setting_up_integrations::get_config_dirs(gcx.clone()).await; - let mut project_info = None; - for config_path in config_dirs - .iter() - .map(|x| x.join("project_summary.yaml")) - .filter(|x| !x.exists()) { - if let Some(text) = fs::read_to_string(&config_path).ok() { - project_info = Some(text); - break; - } - } - project_info - } - let mut system_prompt = system_prompt.clone(); if system_prompt.contains("%WORKSPACE_INFO%") { let (workspace_dirs, active_file_path) = workspace_files_info(&gcx).await; let info = _workspace_info(&workspace_dirs, &active_file_path).await; system_prompt = system_prompt.replace("%WORKSPACE_INFO%", &info); } - if system_prompt.contains("%PROJECT_INFO%") { - if let Some(project_info) = read_project_info(gcx.clone()).await { - system_prompt = system_prompt.replace("%PROJECT_INFO%", &project_info); + + if system_prompt.contains("%PROJECT_SUMMARY%") { + if let Some(project_info) = _read_project_summary(gcx.clone()).await { + system_prompt = system_prompt.replace("%PROJECT_SUMMARY%", &project_info); } else { - system_prompt = system_prompt.replace("%PROJECT_INFO%", ""); + system_prompt = system_prompt.replace("%PROJECT_SUMMARY%", ""); } } + tracing::info!("system_prompt\n{}", system_prompt); + 
system_prompt } diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index 0f46b94e5..ef85b49ac 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -68,7 +68,8 @@ PROMPT_EXPLORATION_TOOLS: | %PROMPT_PINS% %WORKSPACE_INFO% - %PROJECT_INFO% + + %PROJECT_SUMMARY% Good thinking strategy for the answers: is it a question related to the current project? Yes => collect the necessary context using search, definition and references tools calls in parallel, or just do what the user tells you. @@ -86,7 +87,8 @@ PROMPT_AGENTIC_TOOLS: | %PROMPT_PINS% %WORKSPACE_INFO% - %PROJECT_INFO% + + %PROJECT_SUMMARY% Good practice using knowledge(): it's the key to successfully completing complex tasks the user might present you with. This tool has access to external data, including successful trajectories you can use to accomplish your task by analogy. The knowledge() @@ -126,7 +128,8 @@ PROMPT_CONFIGURATOR: | %PROMPT_PINS% %WORKSPACE_INFO% - %PROJECT_INFO% + + %PROJECT_SUMMARY% The integration config format is the following YAML: ``` @@ -167,7 +170,7 @@ PROMPT_CONFIGURATOR: | - write updated configs using šŸ“REWRITE_WHOLE_FILE PROMPT_PROJECT_SUMMARY: | - You are Refact Agent, a coding assistant. + You are Refact Agent, a coding assistant. Your task is to make a summary of the project you're working with and also choose tools from the given list which could be useful to work with the project. Select only those tools which are really using inside the project. %AVAILABLE_INTEGRATIONS% From ca5b78f65e1bd46ae7749e95e4ccdbe00bffcc2e Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sat, 7 Dec 2024 17:48:01 +0100 Subject: [PATCH 086/185] recommended tools 1 --- src/http/routers/v1/links.rs | 38 +++++++++++++++++-- src/scratchpads/chat_utils_prompts.rs | 53 ++++++++++++++------------- 2 files changed, 62 insertions(+), 29 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index b5824382e..f6b5488af 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -1,4 +1,5 @@ use std::sync::Arc; +use std::fs; use axum::Extension; use axum::http::{Response, StatusCode}; use hyper::Body; @@ -64,16 +65,47 @@ pub async fn handle_v1_links( tracing::info!("for links, post.meta.chat_mode == {:?}", post.meta.chat_mode); if post.messages.is_empty() { - - let (already_exists, summary_path) = crate::scratchpads::chat_utils_prompts::dig_for_project_summarization_file(gcx.clone()).await; + let (already_exists, summary_path_option) = crate::scratchpads::chat_utils_prompts::dig_for_project_summarization_file(gcx.clone()).await; if !already_exists { + // doesn't exist links.push(Link { action: LinkAction::SummarizeProject, text: "Initial project summarization".to_string(), goto: None, - current_config_file: summary_path, + current_config_file: summary_path_option, link_tooltip: format!("Project summary is a starting point for Refact Agent."), }); + } else { + // exists + if let Some(summary_path) = summary_path_option { + match fs::read_to_string(&summary_path) { + Ok(content) => { + match serde_yaml::from_str::(&content) { + Ok(yaml) => { + if let Some(recommended_tools) = yaml.get("recommended_tools").and_then(|rt| rt.as_sequence()) { + for tool in recommended_tools { + if let Some(tool_name) = tool.get("tool_name").and_then(|tn| tn.as_str()) { + links.push(Link { + action: LinkAction::Goto, + text: format!("Configure {tool_name}"), + goto: 
Some(format!("SETTINGS:{tool_name}")), + current_config_file: None, + link_tooltip: format!(""), + }); + } + } + } + }, + Err(e) => { + tracing::error!("Failed to parse project summary YAML file: {}", e); + } + } + }, + Err(e) => { + tracing::error!("Failed to read project summary file: {}", e); + } + } + } } } diff --git a/src/scratchpads/chat_utils_prompts.rs b/src/scratchpads/chat_utils_prompts.rs index 809a9ece0..44b1b9e10 100644 --- a/src/scratchpads/chat_utils_prompts.rs +++ b/src/scratchpads/chat_utils_prompts.rs @@ -105,42 +105,36 @@ pub async fn dig_for_project_summarization_file(gcx: Arc> } async fn _read_project_summary( - gcx: Arc>, + summary_path: String, ) -> Option { - let (exists, summary_path_option) = dig_for_project_summarization_file(gcx).await; - if exists { - if let Some(summary_path) = summary_path_option { - match fs::read_to_string(summary_path) { - Ok(content) => { - match serde_yaml::from_str::(&content) { - Ok(yaml) => { - if let Some(project_summary) = yaml.get("project_summary") { - match serde_yaml::to_string(project_summary) { - Ok(summary_str) => return Some(summary_str), - Err(e) => { - tracing::error!("Failed to convert project summary to string: {}", e); - return None; - } - } - } else { - tracing::error!("Key 'project_summary' not found in YAML file."); + match fs::read_to_string(summary_path) { + Ok(content) => { + match serde_yaml::from_str::(&content) { + Ok(yaml) => { + if let Some(project_summary) = yaml.get("project_summary") { + match serde_yaml::to_string(project_summary) { + Ok(summary_str) => return Some(summary_str), + Err(e) => { + tracing::error!("Failed to convert project summary to string: {}", e); return None; } - }, - Err(e) => { - tracing::error!("Failed to parse project summary YAML file: {}", e); - return None; } + } else { + tracing::error!("Key 'project_summary' not found in YAML file."); + return None; } }, Err(e) => { - tracing::error!("Failed to read project summary file: {}", e); + tracing::error!("Failed to parse project summary YAML file: {}", e); return None; } } + }, + Err(e) => { + tracing::error!("Failed to read project summary file: {}", e); + return None; } } - None } pub async fn system_prompt_add_workspace_info( @@ -164,8 +158,15 @@ pub async fn system_prompt_add_workspace_info( } if system_prompt.contains("%PROJECT_SUMMARY%") { - if let Some(project_info) = _read_project_summary(gcx.clone()).await { - system_prompt = system_prompt.replace("%PROJECT_SUMMARY%", &project_info); + let (exists, summary_path_option) = dig_for_project_summarization_file(gcx.clone()).await; + if exists { + if let Some(summary_path) = summary_path_option { + if let Some(project_info) = _read_project_summary(summary_path).await { + system_prompt = system_prompt.replace("%PROJECT_SUMMARY%", &project_info); + } else { + system_prompt = system_prompt.replace("%PROJECT_SUMMARY%", ""); + } + } } else { system_prompt = system_prompt.replace("%PROJECT_SUMMARY%", ""); } From ed8fa48cba068cb3f4d91bf0a87f2f8a71a69cc5 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sat, 7 Dec 2024 19:59:45 +0100 Subject: [PATCH 087/185] prompts for project summary, for follow-ups --- src/agentic/generate_follow_up_message.rs | 70 +++++++++++++++---- src/http/routers/v1/links.rs | 28 +++++--- src/integrations/project_summary_chat.rs | 24 +++---- src/yaml_configs/customization_compiled_in.rs | 45 ++++++------ 4 files changed, 104 insertions(+), 63 deletions(-) diff --git a/src/agentic/generate_follow_up_message.rs b/src/agentic/generate_follow_up_message.rs index 
2ed5261d1..5da90fb31 100644 --- a/src/agentic/generate_follow_up_message.rs +++ b/src/agentic/generate_follow_up_message.rs @@ -1,5 +1,6 @@ use std::sync::Arc; use tokio::sync::{RwLock as ARwLock, Mutex as AMutex}; +use serde_json::Value; use crate::global_context::GlobalContext; use crate::at_commands::at_commands::AtCommandsContext; @@ -7,28 +8,56 @@ use crate::subchat::subchat_single; use crate::call_validation::ChatMessage; pub async fn generate_follow_up_message( - mut messages: Vec, - gcx: Arc>, - model_name: &str, + mut messages: Vec, + gcx: Arc>, + model_name: &str, chat_id: &str, -) -> Result { - if messages.first().map(|m| m.role == "system").unwrap_or(false) { - messages.remove(0); +) -> Result, String> { + let last_assistant_msg_text; + if let Some(last_assistant_msg) = messages.iter().rev().find(|m| m.role == "assistant").cloned() { + // messages.clear(); + // messages.push(last_assistant_msg); + last_assistant_msg_text = last_assistant_msg.content.content_text_only(); + } else { + return Err(format!("The last message is not role=assistant")); } - messages.insert(0, ChatMessage::new( - "system".to_string(), - "Generate a 2-3 word user response, like 'Can you fix it?' for errors or 'Proceed' for plan validation".to_string(), - )); + + messages = vec![ + ChatMessage::new( + "system".to_string(), + concat!( + "Super simple job today, generate follow-ups!\n", + ).to_string(), + ), + ChatMessage::new( + "user".to_string(), + last_assistant_msg_text + ), + ChatMessage::new( + "user".to_string(), + concat!( + "Generate up to 3 most likely short follow-ups to the message above, in 3 words or less, like 'Yes' 'No' 'Fix it' 'Never mind' etc.\n", + "If there are no simple answers, or the conversation is over, just give an empty list. Output must be this simple json:\n", + "\n", + "[\"Follow up 1\", \"Follow up 2\"]\n", + "\n", + "Don't write backquotes, just this format.\n", + ).to_string(), + ), + ]; + + tracing::info!("follow-up model says1 {:?}", messages); + let ccx = Arc::new(AMutex::new(AtCommandsContext::new( gcx.clone(), - 1024, + 8000, 1, false, messages.clone(), chat_id.to_string(), false, ).await)); - let new_messages = subchat_single( + let updated_messages: Vec> = subchat_single( ccx.clone(), model_name, messages, @@ -42,6 +71,17 @@ pub async fn generate_follow_up_message( None, None, ).await?; - new_messages.into_iter().next().map(|x| x.into_iter().last().map(|last_m| { - last_m.content.content_text_only() })).flatten().ok_or("No commit message found".to_string()) -} \ No newline at end of file + let response = updated_messages.into_iter().next().map(|x| x.into_iter().last().map(|last_m| { + last_m.content.content_text_only() })).flatten().ok_or("No commit message found".to_string())?; + + tracing::info!("follow-up model says2 {:?}", response); + + let parsed_response: Value = serde_json::from_str(&response).map_err(|e| e.to_string())?; + let follow_ups = parsed_response.as_array() + .ok_or("Invalid JSON format")? 
+ .iter() + .map(|v| v.as_str().unwrap_or("").to_string()) + .collect(); + + Ok(follow_ups) +} diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index f6b5488af..694748334 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -63,6 +63,7 @@ pub async fn handle_v1_links( .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; let mut links = Vec::new(); tracing::info!("for links, post.meta.chat_mode == {:?}", post.meta.chat_mode); + let (integrations_map, integration_yaml_errors) = crate::integrations::running_integrations::load_integrations(gcx.clone(), "".to_string(), true).await; if post.messages.is_empty() { let (already_exists, summary_path_option) = crate::scratchpads::chat_utils_prompts::dig_for_project_summarization_file(gcx.clone()).await; @@ -85,6 +86,9 @@ pub async fn handle_v1_links( if let Some(recommended_tools) = yaml.get("recommended_tools").and_then(|rt| rt.as_sequence()) { for tool in recommended_tools { if let Some(tool_name) = tool.get("tool_name").and_then(|tn| tn.as_str()) { + + // integrations_map + links.push(Link { action: LinkAction::Goto, text: format!("Configure {tool_name}"), @@ -145,7 +149,6 @@ pub async fn handle_v1_links( } } - let (_, integration_yaml_errors) = crate::integrations::running_integrations::load_integrations(gcx.clone(), "".to_string(), true).await; for e in integration_yaml_errors { links.push(Link { action: LinkAction::Goto, @@ -156,18 +159,23 @@ pub async fn handle_v1_links( }); } - if post.meta.chat_mode != ChatMode::NO_TOOLS && links.is_empty() { - let follow_up_message = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.meta.chat_id).await + if post.meta.chat_mode != ChatMode::NO_TOOLS && links.is_empty() && post.messages.len() > 2 { + let follow_up_messages: Vec = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.meta.chat_id).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error generating follow-up message: {}", e)))?; - links.push(Link { - action: LinkAction::FollowUp, - text: follow_up_message, - goto: None, - current_config_file: None, - link_tooltip: format!(""), - }); + for follow_up_message in follow_up_messages { + tracing::info!("follow-up {:?}", follow_up_message); + links.push(Link { + action: LinkAction::FollowUp, + text: follow_up_message, + goto: None, + current_config_file: None, + link_tooltip: format!(""), + }); + } } + tracing::info!("generated links2: {:?}", links); + Ok(Response::builder() .status(StatusCode::OK) .header("Content-Type", "application/json") diff --git a/src/integrations/project_summary_chat.rs b/src/integrations/project_summary_chat.rs index 769082727..16a74edc9 100644 --- a/src/integrations/project_summary_chat.rs +++ b/src/integrations/project_summary_chat.rs @@ -1,7 +1,6 @@ use std::sync::Arc; use tokio::sync::RwLock as ARwLock; use std::collections::HashMap; -use itertools::Itertools; use crate::global_context::GlobalContext; use crate::call_validation::{ChatContent, ChatMessage}; use crate::scratchpads::chat_utils_prompts::system_prompt_add_workspace_info; @@ -21,25 +20,18 @@ pub async fn mix_project_summary_messages( ).unwrap() } }; - let available_integrations = crate::integrations::setting_up_integrations::integrations_all_with_icons( - gcx.clone() - ).await; - let mut available_integrations_text: String = "Choose tools from this list:\n".to_string(); - for integration in 
available_integrations.integrations - .iter() - .map(|x| x.integr_name.clone()) - .filter(|x| !x.contains("_TEMPLATE")) - .unique() { + + let available_integrations: Vec<&str> = crate::integrations::integrations_list(); + let mut available_integrations_text = String::new(); + for integration in available_integrations.iter() { available_integrations_text.push_str(&format!("- {}\n", integration)) } + let sp: &crate::yaml_configs::customization_loader::SystemPrompt = custom.system_prompts.get("project_summary").unwrap(); let mut sp_text = sp.text.clone(); - sp_text = system_prompt_add_workspace_info(gcx.clone(), &sp_text - .replace("%CONFIG_PATH%", current_config_file) - .replace("%AVAILABLE_INTEGRATIONS%", &available_integrations_text) - ).await; - - tracing::info!("PROJECT_SUMMARY PROMPT\n{}", sp_text); + sp_text = sp_text.replace("%CONFIG_PATH%", current_config_file); + sp_text = sp_text.replace("%AVAILABLE_INTEGRATIONS%", &available_integrations_text); + sp_text = system_prompt_add_workspace_info(gcx.clone(), &sp_text).await; // print inside let system_message = ChatMessage { role: "system".to_string(), diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index ef85b49ac..11ea2235c 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -169,41 +169,42 @@ PROMPT_CONFIGURATOR: | - ask the user if they want to change anything - write updated configs using šŸ“REWRITE_WHOLE_FILE + PROMPT_PROJECT_SUMMARY: | - You are Refact Agent, a coding assistant. - Your task is to make a summary of the project you're working with and also choose tools from the given list which could be useful to work with the project. - Select only those tools which are really using inside the project. - %AVAILABLE_INTEGRATIONS% + [mode3summary] You are Refact Agent, a coding assistant. Your task today is to make a summary of the project and recommend integrations for it. %PROMPT_PINS% %WORKSPACE_INFO% Plan to follow: - 1. Explore the Project Structure: - - Use the tree() command to display the directory structure of the current project. - 2. Investigate Key Files: - - Use cat() to review content in important documentation files, such as README.md and other .md files. - - Use cat() to examine configuration files, including Cargo.toml, package.json, requirements.txt, etc. - 3. Summarize Findings: - - Write a detailed summary of the gathered information about the project. - - Compile a list of tools that might be useful for working on the project. - 4. Get User Feedback: - - Ask the user if they would like to make changes or updates to any part of the project setup or tool list. - 5. Prepare YAML Output: - - Organize the project summary and tools list in YAML format. - - Use the tag šŸ“REWRITE_WHOLE_FILE to indicate the final structured YAML content. + 1. Call tree() and check out structure of the current project. + 2. Call cat() for several key files in parallel: README.md and other .md files, configuration files such as Cargo.toml, package.json, requirements.txt. + 3. Recommend integrations to set up and turn on. That's a tricky one, let's look at it in detail. + + Potential Refact Agent integrations: + %AVAILABLE_INTEGRATIONS% + + Most of those integrations are easy, you can just repeat the name. But two of those are special: cmdline_TEMPLATE and service_TEMPLATE. 
Those can integrate + a blocking command line utility (such as cmake) and a blocking background command (such as hypercorn server that runs forever until you hit Ctrl+C), respectively. + Think of typical command line things that might be required for the project, how do you run the webserver, how do you compile the project? + Turn those things into recommendations, replace _TEMPLATE with lowercase name with underscores, don't overthink it, "cargo build" should become "cmdline_cargo_build", etc. + Recommendations here means just a list. The user will fill in the settings later. + + 4. Write a summary in natural language to the user, get their feedback, just ask if it looks alright, or if any of it needs improving. + 5. Finally use šŸ“REWRITE_WHOLE_FILE to overwrite the YAML config here: %CONFIG_PATH% + 6. Stop. The project summary config format is the following YAML: ``` - project_summary: + project_summary: | - recommended_tools: - - tool_name: - - tool_name: + + recommended_integrations: ["integr1", "integr2", "cmdline_something_useful"] ``` - Put the generated config to this path: %CONFIG_PATH% + Strictly follow the plan! + system_prompts: default: text: "%PROMPT_DEFAULT%" From 2548810eb441374be614667a770f565ea96646e1 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sun, 8 Dec 2024 08:04:34 +0100 Subject: [PATCH 088/185] improve follow-up more --- src/agentic/generate_follow_up_message.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/agentic/generate_follow_up_message.rs b/src/agentic/generate_follow_up_message.rs index 5da90fb31..495394c03 100644 --- a/src/agentic/generate_follow_up_message.rs +++ b/src/agentic/generate_follow_up_message.rs @@ -36,8 +36,11 @@ pub async fn generate_follow_up_message( ChatMessage::new( "user".to_string(), concat!( - "Generate up to 3 most likely short follow-ups to the message above, in 3 words or less, like 'Yes' 'No' 'Fix it' 'Never mind' etc.\n", - "If there are no simple answers, or the conversation is over, just give an empty list. Output must be this simple json:\n", + "Generate up to 3 most likely short follow-ups by the user to the robot message above, in 3 words or less, like 'Fix it' 'Go ahead' 'Never mind' etc.\n", + "If the previous message is an open question, return empty list. If there are no simple answers, return empty list. 
If there is no question, or the conversation is over, return an empty list.\n", + "If you see clear options for the answer to the robot's question, put first the option that allows robot to continue.\n", + "\n", + "Output must be this simple json:\n", "\n", "[\"Follow up 1\", \"Follow up 2\"]\n", "\n", From 5a4cb030de734de648a2bc02c62bbd4abdf24615 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sun, 8 Dec 2024 08:06:01 +0100 Subject: [PATCH 089/185] minor --- src/http/routers/v1/chat.rs | 4 ++-- src/http/routers/v1/links.rs | 3 ++- src/integrations/config_chat.rs | 1 - src/integrations/project_summary_chat.rs | 1 + 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/http/routers/v1/chat.rs index 5693bde00..4ac284c64 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -21,14 +21,14 @@ pub fn available_tools_by_chat_mode(current_tools: Vec, chat_mode: &ChatM match chat_mode { ChatMode::EXPLORE | ChatMode::AGENT | ChatMode::NO_TOOLS => current_tools, ChatMode::CONFIGURE | ChatMode::PROJECT_SUMMARY => { - let valid_tool_names = ["cat", "tree", "patch", "search", "knowledge"]; + let config_tools_whitelist = ["cat", "tree", "bash"]; current_tools .into_iter() .filter(|x| { x.get("function") .and_then(|x| x.get("name")) .and_then(|tool_name| tool_name.as_str()) - .map(|tool_name_str| valid_tool_names.contains(&tool_name_str)) + .map(|tool_name_str| config_tools_whitelist.contains(&tool_name_str)) .unwrap_or(false) }) .collect() diff --git a/src/http/routers/v1/links.rs index 694748334..98eb999f7 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -137,7 +137,7 @@ pub async fn handle_v1_links( } } - if post.meta.chat_mode != ChatMode::CONFIGURE { + if post.meta.chat_mode == ChatMode::AGENT { for failed_integr_name in failed_integration_names_after_last_user_message(&post.messages) { links.push(Link { action: LinkAction::Goto, @@ -159,6 +159,7 @@ pub async fn handle_v1_links( }); } + // hmm maybe (post.meta.chat_mode == ChatMode::EXPLORE || post.meta.chat_mode == ChatMode::AGENT) if post.meta.chat_mode != ChatMode::NO_TOOLS && links.is_empty() && post.messages.len() > 2 { let follow_up_messages: Vec = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.meta.chat_id).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error generating follow-up message: {}", e)))?; diff --git a/src/integrations/config_chat.rs index 85e1a2dd1..bb43a8563 100644 --- a/src/integrations/config_chat.rs +++ b/src/integrations/config_chat.rs @@ -49,7 +49,6 @@ pub async fn mix_config_messages( }; let sp: &crate::yaml_configs::customization_loader::SystemPrompt = custom.system_prompts.get("configurator").unwrap(); - // let json_vec = context_file_vec.iter().map(|p| serde_json::json!(p)).collect::>(); messages.insert(0, ChatMessage { role: "context_file".to_string(), content: ChatContent::SimpleText(serde_json::to_string(&context_file_vec).unwrap()), diff --git a/src/integrations/project_summary_chat.rs index 16a74edc9..aa093e6d1 100644 --- a/src/integrations/project_summary_chat.rs +++ b/src/integrations/project_summary_chat.rs @@ -5,6 +5,7 @@ use crate::global_context::GlobalContext; use crate::call_validation::{ChatContent, ChatMessage}; use crate::scratchpads::chat_utils_prompts::system_prompt_add_workspace_info; + pub async fn mix_project_summary_messages(
gcx: Arc>, messages: &mut Vec, From 4783d2d70616b72b521f0aa916894a3cdefd13e3 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sun, 8 Dec 2024 10:03:30 +0100 Subject: [PATCH 090/185] system prompt inserting overhaul 1 --- src/http/routers/v1/chat.rs | 70 +++++++++---------------- src/scratchpads/chat_passthrough.rs | 30 +++++++---- src/scratchpads/chat_utils_prompts.rs | 75 ++++++++++++++++++++++++--- 3 files changed, 114 insertions(+), 61 deletions(-) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 4ac284c64..47afae898 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -14,12 +14,13 @@ use crate::custom_error::ScratchError; use crate::at_commands::at_commands::AtCommandsContext; use crate::global_context::{GlobalContext, SharedGlobalContext}; use crate::integrations::docker::docker_container_manager::docker_container_check_status_or_start; -use crate::scratchpads::chat_utils_prompts::{get_default_system_prompt, get_default_system_prompt_from_remote, system_prompt_add_workspace_info}; pub fn available_tools_by_chat_mode(current_tools: Vec, chat_mode: &ChatMode) -> Vec { match chat_mode { - ChatMode::EXPLORE | ChatMode::AGENT | ChatMode::NO_TOOLS => current_tools, + ChatMode::EXPLORE | ChatMode::AGENT | ChatMode::NO_TOOLS => { + current_tools + }, ChatMode::CONFIGURE | ChatMode::PROJECT_SUMMARY => { let config_tools_whitelist = ["cat", "tree", "bash"]; current_tools @@ -32,7 +33,7 @@ pub fn available_tools_by_chat_mode(current_tools: Vec, chat_mode: &ChatM .unwrap_or(false) }) .collect() - } + }, } } @@ -104,32 +105,27 @@ async fn _chat( let mut messages = deserialize_messages_from_post(&chat_post.messages)?; tracing::info!("\n\n new chat_mode {:?}\n", chat_post.meta.chat_mode); - match chat_post.meta.chat_mode { - ChatMode::EXPLORE | ChatMode::AGENT | ChatMode::NO_TOOLS => {}, - ChatMode::CONFIGURE => { - crate::integrations::config_chat::mix_config_messages( - gcx.clone(), - &mut messages, - &chat_post.meta.current_config_file - ).await; - } - ChatMode::PROJECT_SUMMARY => { - crate::integrations::project_summary_chat::mix_project_summary_messages( - gcx.clone(), - &mut messages, - &chat_post.meta.current_config_file - ).await; - } - } - // converts tools into openai style - if let Some(tools) = &mut chat_post.tools { - for tool in &mut *tools { - if let Some(function) = tool.get_mut("function") { - function.as_object_mut().unwrap().remove("agentic"); + if chat_post.meta.chat_mode == ChatMode::NO_TOOLS { + chat_post.tools = None; + } else { + if let Some(tools) = &mut chat_post.tools { + for tool in &mut *tools { + if let Some(function) = tool.get_mut("function") { + function.as_object_mut().unwrap().remove("agentic"); + } } + chat_post.tools = Some(available_tools_by_chat_mode(tools.clone(), &chat_post.meta.chat_mode)); + } else { + // TODO at some point, get rid of /tools call on client, make so we can have chat_post.tools==None and just fill the tools here + chat_post.tools = Some(available_tools_by_chat_mode(vec![], &chat_post.meta.chat_mode)); } - chat_post.tools = Some(available_tools_by_chat_mode(tools.clone(), &chat_post.meta.chat_mode)); + tracing::info!("tools [{}]\n", chat_post.tools.as_ref().map_or("".to_string(), |tools| { + tools.iter() + .filter_map(|tool| tool.get("function").and_then(|f| f.get("name")).and_then(|n| n.as_str())) + .collect::>() + .join(", ") + })); } let caps = crate::global_context::try_load_caps_quickly_if_not_present(gcx.clone(), 0).await?; @@ -199,27 +195,9 @@ async fn _chat( .map_err(|e| 
ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; } - let have_system = !messages.is_empty() && messages[0].role == "system"; - if !have_system { - let exploration_tools = chat_post.meta.chat_mode != ChatMode::NO_TOOLS; - let agentic_tools = matches!(chat_post.meta.chat_mode, ChatMode::AGENT | ChatMode::CONFIGURE | ChatMode::PROJECT_SUMMARY); - let system_message_content = if should_execute_remotely { - // XXX pass chat_post.meta.chat_mode - get_default_system_prompt_from_remote(gcx.clone(), exploration_tools, agentic_tools, &chat_post.meta.chat_id).await.map_err(|e| - ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e) - )? - } else { - system_prompt_add_workspace_info(gcx.clone(), - &get_default_system_prompt(gcx.clone(), chat_post.meta.chat_mode.clone()).await - ).await - }; - messages.insert(0, ChatMessage { - role: "system".to_string(), - content: ChatContent::SimpleText(system_message_content), - ..Default::default() - }) - } + // SYSTEM PROMPT WAS HERE + // chat_post.stream = Some(false); // for debugging 400 errors that are hard to debug with streaming (because "data: " is not present and the error message is ignored by the library) let mut scratchpad = crate::scratchpads::create_chat_scratchpad( diff --git a/src/scratchpads/chat_passthrough.rs b/src/scratchpads/chat_passthrough.rs index 5fbcd0961..70844cc8c 100644 --- a/src/scratchpads/chat_passthrough.rs +++ b/src/scratchpads/chat_passthrough.rs @@ -12,6 +12,7 @@ use crate::call_validation::{ChatMessage, ChatPost, SamplingParameters}; use crate::scratchpad_abstract::{FinishReason, HasTokenizerAndEot, ScratchpadAbstract}; use crate::scratchpads::chat_utils_limit_history::limit_messages_history; use crate::scratchpads::scratchpad_utils::HasRagResults; +use crate::scratchpads::chat_utils_prompts::prepend_the_right_system_prompt_and_maybe_more_initial_messages; use crate::scratchpads::passthrough_convert_messages::convert_messages_to_openai_format; use crate::tools::tools_description::{tool_description_list_from_yaml, tools_merged_and_filtered}; use crate::tools::tools_execute::{run_tools_locally, run_tools_remotely}; @@ -105,7 +106,8 @@ impl ScratchpadAbstract for ChatPassthrough { let style = self.post.style.clone(); let at_tools = tools_merged_and_filtered(gcx.clone(), self.supports_clicks).await?; - // TODO? Maybe we should execute at commands remotely. 
+ let messages = prepend_the_right_system_prompt_and_maybe_more_initial_messages(gcx.clone(), self.messages.clone(), &self.post, &mut self.has_rag_results).await; + let (mut messages, undroppable_msg_n, _any_context_produced) = if self.allow_at && !should_execute_remotely { run_at_commands(ccx.clone(), self.t.tokenizer.clone(), sampling_parameters_to_patch.max_new_tokens, &self.messages, &mut self.has_rag_results).await } else { @@ -211,14 +213,24 @@ impl ScratchpadAbstract for ChatPassthrough { } fn response_spontaneous(&mut self) -> Result, String> { - let mut deterministic: Vec = vec![]; - let have_system_prompt_in_post = !self.post.messages.is_empty() && self.post.messages[0].get("role") == Some(&serde_json::Value::String("system".to_string())); - let have_system_prompt_in_messages = !self.messages.is_empty() && self.messages[0].role == "system"; - if !have_system_prompt_in_post && have_system_prompt_in_messages && self.post.messages.len() == 1 { // only the user message present in request - self.has_rag_results.in_json.insert(0, json!(self.messages[0])); - } - deterministic.extend(self.has_rag_results.response_streaming()?); - Ok(deterministic) + // let mut deterministic: Vec = vec![]; + // let mut cursor = 0; + // while cursor < self.messages.len() { + + // } + + + + // let have_system_prompt_in_post = !self.post.messages.is_empty() && self.post.messages[0].get("role") == Some(&serde_json::Value::String("system".to_string())); + // let have_system_prompt_in_messages = !self.messages.is_empty() && self.messages[0].role == "system"; + // if !have_system_prompt_in_post && have_system_prompt_in_messages && self.post.messages.len() == 1 { // only the user message present in request + + // self.has_rag_results.in_json.insert(0, json!(self.messages[0])); + + // } + // deterministic.extend(self.has_rag_results.response_streaming()?); + // Ok(deterministic) + self.has_rag_results.response_streaming() } fn streaming_finished(&mut self, finish_reason: FinishReason) -> Result { diff --git a/src/scratchpads/chat_utils_prompts.rs b/src/scratchpads/chat_utils_prompts.rs index 44b1b9e10..d025583f7 100644 --- a/src/scratchpads/chat_utils_prompts.rs +++ b/src/scratchpads/chat_utils_prompts.rs @@ -4,15 +4,18 @@ use std::path::PathBuf; use tokio::sync::RwLock as ARwLock; use tracing::info; +use crate::call_validation; use crate::global_context::GlobalContext; use crate::http::http_post_json; use crate::http::routers::v1::system_prompt::{SystemPromptPost, SystemPromptResponse}; use crate::integrations::docker::docker_container_manager::docker_container_get_host_lsp_port_to_connect; +use crate::scratchpads::scratchpad_utils::HasRagResults; +use crate::call_validation::{ChatMessage, ChatContent, ChatMode}; pub async fn get_default_system_prompt( gcx: Arc>, - chat_mode: crate::call_validation::ChatMode, + chat_mode: ChatMode, ) -> String { let tconfig = match crate::yaml_configs::customization_loader::load_customization(gcx.clone(), true).await { Ok(tconfig) => tconfig, @@ -22,11 +25,11 @@ pub async fn get_default_system_prompt( }, }; let prompt_key = match chat_mode { - crate::call_validation::ChatMode::NO_TOOLS => "default", - crate::call_validation::ChatMode::EXPLORE => "exploration_tools", - crate::call_validation::ChatMode::AGENT => "agentic_tools", - crate::call_validation::ChatMode::CONFIGURE => "configurator", - crate::call_validation::ChatMode::PROJECT_SUMMARY => "project_summary", + ChatMode::NO_TOOLS => "default", + ChatMode::EXPLORE => "exploration_tools", + ChatMode::AGENT => "agentic_tools", + 
ChatMode::CONFIGURE => "configurator", + ChatMode::PROJECT_SUMMARY => "project_summary", }; let system_prompt = tconfig.system_prompts.get(prompt_key).map_or_else(|| { tracing::error!("cannot find system prompt `{}`", prompt_key); @@ -176,3 +179,63 @@ pub async fn system_prompt_add_workspace_info( system_prompt } + +pub async fn prepend_the_right_system_prompt_and_maybe_more_initial_messages( + gcx: Arc>, + mut messages: Vec, + chat_post: &call_validation::ChatPost, + stream_back_to_user: &mut HasRagResults, +) -> Vec { + let have_system = !messages.is_empty() && messages[0].role == "system"; + if have_system { + return messages; + } + if messages.len() == 0 { + tracing::error!("What's that? Messages list is empty"); + return messages; + } + + let exploration_tools = chat_post.meta.chat_mode != ChatMode::NO_TOOLS; + let agentic_tools = matches!(chat_post.meta.chat_mode, ChatMode::AGENT | ChatMode::CONFIGURE | ChatMode::PROJECT_SUMMARY); + + if chat_post.meta.chat_remote { + // XXX this should call a remote analog of prepend_the_right_system_prompt_and_maybe_more_initial_messages + let _ = get_default_system_prompt_from_remote(gcx.clone(), exploration_tools, agentic_tools, &chat_post.meta.chat_id).await.map_err(|e| + tracing::error!("failed to get default system prompt from remote: {}", e) + ); + return messages; + } + + match chat_post.meta.chat_mode { + ChatMode::EXPLORE | ChatMode::AGENT | ChatMode::NO_TOOLS => { + let system_message_content = system_prompt_add_workspace_info(gcx.clone(), + &get_default_system_prompt(gcx.clone(), chat_post.meta.chat_mode.clone()).await + ).await; + let msg = ChatMessage { + role: "system".to_string(), + content: ChatContent::SimpleText(system_message_content), + ..Default::default() + }; + stream_back_to_user.push_in_json(serde_json::json!(msg)); + messages.insert(0, msg); + }, + ChatMode::CONFIGURE => { + crate::integrations::config_chat::mix_config_messages( + gcx.clone(), + &chat_post.meta, + &mut messages, + stream_back_to_user, + ).await; + }, + ChatMode::PROJECT_SUMMARY => { + crate::integrations::project_summary_chat::mix_project_summary_messages( + gcx.clone(), + &chat_post.meta, + &mut messages, + stream_back_to_user, + ).await; + }, + } + tracing::info!("\n\nSYSTEM PROMPT MIXER chat_mode={:?}\n{:#?}", chat_post.meta.chat_mode, messages); + messages +} From 3c0fb4d255cca58472e3ffc662338bd4dbdd0853 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sun, 8 Dec 2024 12:48:13 +0100 Subject: [PATCH 091/185] system prompt inserting overhaul 2 (CONFIGURE works) --- src/integrations/config_chat.rs | 122 +++++++++++++----- src/integrations/project_summary_chat.rs | 10 +- src/integrations/setting_up_integrations.rs | 5 +- src/scratchpads/chat_passthrough.rs | 3 +- src/scratchpads/chat_utils_prompts.rs | 2 - src/yaml_configs/customization_compiled_in.rs | 27 +--- 6 files changed, 103 insertions(+), 66 deletions(-) diff --git a/src/integrations/config_chat.rs b/src/integrations/config_chat.rs index bb43a8563..f75dc0672 100644 --- a/src/integrations/config_chat.rs +++ b/src/integrations/config_chat.rs @@ -4,40 +4,88 @@ use tokio::sync::RwLock as ARwLock; use std::collections::HashMap; use crate::global_context::GlobalContext; -use crate::call_validation::{ChatContent, ChatMessage, ContextFile}; +use crate::call_validation::{ChatContent, ChatMessage, ContextFile, ChatMeta}; +use crate::scratchpads::scratchpad_utils::HasRagResults; +use crate::integrations::yaml_schema::ISchema; pub async fn mix_config_messages( gcx: Arc>, + chat_meta: &ChatMeta, messages: &mut 
Vec, - current_config_file: &String, + stream_back_to_user: &mut HasRagResults, ) { - let config_dir = gcx.read().await.config_dir.clone(); - let file_path = config_dir.join("integrations.d"); + assert!(messages[0].role != "system"); // we are here to add this, can't already exist let mut context_file_vec = Vec::new(); - if let Ok(entries) = fs::read_dir(&file_path) { - for entry in entries { - if let Ok(entry) = entry { - let path = entry.path(); - if path.extension().and_then(|s| s.to_str()) == Some("yaml") { - if let Ok(file_content) = fs::read_to_string(&path) { - let context_file = ContextFile { - file_name: path.to_string_lossy().to_string(), - file_content, - line1: 0, - line2: 0, - symbols: vec![], - gradient_type: -1, - usefulness: 100.0, - }; - context_file_vec.push(context_file); - } + let all_integrations = crate::integrations::setting_up_integrations::integrations_all_with_icons(gcx.clone()).await; + for ig in all_integrations.integrations { + if !ig.integr_config_exists { + continue; + } + let file_content = match fs::read_to_string(&ig.integr_config_path) { + Ok(content) => content, + Err(err) => { + tracing::error!("Failed to read file for integration {}: {:?}", ig.integr_config_path, err); + continue; + } + }; + let context_file = ContextFile { + file_name: ig.integr_name.clone(), + file_content, + line1: 0, + line2: 0, + symbols: vec![], + gradient_type: -1, + usefulness: 100.0, + }; + context_file_vec.push(context_file); + } + + tracing::info!("post.integr_config_path {:?}", chat_meta.current_config_file); + + let schema_message = match crate::integrations::setting_up_integrations::integration_config_get( + chat_meta.current_config_file.clone(), + ).await { + Ok(the_get) => { + let mut schema_struct: ISchema = serde_json::from_value(the_get.integr_schema).unwrap(); // will not fail because we have test_integration_schemas() + schema_struct.docker = None; + schema_struct.smartlinks.clear(); + tracing::info!("schema_struct {}", serde_json::to_string_pretty(&schema_struct).unwrap()); + tracing::info!("sample values {}", serde_json::to_string_pretty(&the_get.integr_values).unwrap()); + let mut msg = format!( + "This is the data schema for the {}\n\n{}\n\n", + chat_meta.current_config_file, + serde_json::to_string(&schema_struct).unwrap(), + ); + if the_get.integr_config_exists { + msg.push_str(format!("\n\nThis is how the system loads the YAML so you can detect which fields are not loaded in reality:\n\n{}\n\n", serde_json::to_string(&the_get.integr_values).unwrap()).as_str()); + } else { + let mut yaml_value = serde_yaml::to_value(&the_get.integr_values).unwrap(); + if let serde_yaml::Value::Mapping(ref mut map) = yaml_value { + let mut available_map = serde_yaml::Mapping::new(); + available_map.insert(serde_yaml::Value::String("on_your_laptop".to_string()), serde_yaml::Value::Bool(schema_struct.available.on_your_laptop_possible)); + available_map.insert(serde_yaml::Value::String("when_isolated".to_string()), serde_yaml::Value::Bool(schema_struct.available.when_isolated_possible)); + map.insert(serde_yaml::Value::String("available".to_string()), serde_yaml::Value::Mapping(available_map)); } + msg.push_str(format!("\n\nThe file doesn't exist, so here is a sample YAML to give you an idea how this config might look in YAML:\n\n{}\n\n", serde_yaml::to_string(&yaml_value).unwrap()).as_str()); + } + ChatMessage { + role: "user".to_string(), + content: ChatContent::SimpleText(msg), + tool_calls: None, + tool_call_id: String::new(), + usage: None, } + }, + Err(e) => { + 
tracing::error!("Failed to load integrations: {}", e); + return; } - } - let custom: crate::yaml_configs::customization_loader::CustomizationYaml = match crate::yaml_configs::customization_loader::load_customization(gcx, true).await { + }; + + // XXX should be a better way to load the prompt + let custom: crate::yaml_configs::customization_loader::CustomizationYaml = match crate::yaml_configs::customization_loader::load_customization(gcx.clone(), true).await { Ok(x) => x, Err(why) => { tracing::error!("Failed to load customization.yaml, will use compiled-in default for the configurator system prompt:\n{:?}", why); @@ -49,27 +97,37 @@ pub async fn mix_config_messages( }; let sp: &crate::yaml_configs::customization_loader::SystemPrompt = custom.system_prompts.get("configurator").unwrap(); - messages.insert(0, ChatMessage { + let context_file_message = ChatMessage { role: "context_file".to_string(), content: ChatContent::SimpleText(serde_json::to_string(&context_file_vec).unwrap()), tool_calls: None, tool_call_id: String::new(), usage: None, - }); - messages.insert(0, ChatMessage { + }; + let system_message = ChatMessage { role: "system".to_string(), - content: ChatContent::SimpleText(sp.text.clone()), + content: ChatContent::SimpleText( + crate::scratchpads::chat_utils_prompts::system_prompt_add_workspace_info(gcx.clone(), &sp.text).await + ), tool_calls: None, tool_call_id: String::new(), usage: None, - }); + }; + + // Interestingly, here you can stream messages to user or not, and both options will work -- this function will be called or not called again the next chat call. + if messages.len() == 1 { + stream_back_to_user.push_in_json(serde_json::json!(system_message)); + stream_back_to_user.push_in_json(serde_json::json!(context_file_message)); + stream_back_to_user.push_in_json(serde_json::json!(schema_message)); + } else { + tracing::error!("more than 1 message when mixing configurtion chat context, bad things might happen!"); + } + + messages.splice(0..0, vec![system_message, context_file_message, schema_message]); for msg in messages.iter_mut() { if let ChatContent::SimpleText(ref mut content) = msg.content { - *content = content.replace("%CURRENT_CONFIG%", current_config_file); + *content = content.replace("%CURRENT_CONFIG%", &chat_meta.current_config_file); } } - - tracing::info!("AAAAA\n{:#?}", messages); } - diff --git a/src/integrations/project_summary_chat.rs b/src/integrations/project_summary_chat.rs index aa093e6d1..85b020e02 100644 --- a/src/integrations/project_summary_chat.rs +++ b/src/integrations/project_summary_chat.rs @@ -2,15 +2,19 @@ use std::sync::Arc; use tokio::sync::RwLock as ARwLock; use std::collections::HashMap; use crate::global_context::GlobalContext; -use crate::call_validation::{ChatContent, ChatMessage}; +use crate::call_validation::{ChatContent, ChatMessage, ChatMeta}; use crate::scratchpads::chat_utils_prompts::system_prompt_add_workspace_info; +use crate::scratchpads::scratchpad_utils::HasRagResults; pub async fn mix_project_summary_messages( gcx: Arc>, + chat_meta: &ChatMeta, messages: &mut Vec, - current_config_file: &String, + stream_back_to_user: &mut HasRagResults, ) { + assert!(messages[0].role != "system"); // we are here to add this, can't already exist + let custom: crate::yaml_configs::customization_loader::CustomizationYaml = match crate::yaml_configs::customization_loader::load_customization(gcx.clone(), true).await { Ok(x) => x, Err(why) => { @@ -30,7 +34,7 @@ pub async fn mix_project_summary_messages( let sp: 
&crate::yaml_configs::customization_loader::SystemPrompt = custom.system_prompts.get("project_summary").unwrap(); let mut sp_text = sp.text.clone(); - sp_text = sp_text.replace("%CONFIG_PATH%", current_config_file); + sp_text = sp_text.replace("%CONFIG_PATH%", &chat_meta.current_config_file); sp_text = sp_text.replace("%AVAILABLE_INTEGRATIONS%", &available_integrations_text); sp_text = system_prompt_add_workspace_info(gcx.clone(), &sp_text).await; // print inside diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 257b5d1f5..9f8c0fa6c 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -309,6 +309,7 @@ pub struct IntegrationGetResult { pub project_path: String, pub integr_name: String, pub integr_config_path: String, + pub integr_config_exists: bool, pub integr_schema: serde_json::Value, pub integr_values: serde_json::Value, pub error_log: Vec, @@ -318,6 +319,7 @@ pub async fn integration_config_get( integr_config_path: String, ) -> Result { let sanitized_path = crate::files_correction::canonical_path(&integr_config_path); + let exists = sanitized_path.exists(); let integr_name = sanitized_path.file_stem().and_then(|s| s.to_str()).unwrap_or_default().to_string(); if integr_name.is_empty() { return Err(format!("can't derive integration name from file name")); @@ -328,6 +330,7 @@ pub async fn integration_config_get( project_path: project_path.clone(), integr_name: integr_name.clone(), integr_config_path: integr_config_path.clone(), + integr_config_exists: exists, integr_schema: serde_json::Value::Null, integr_values: serde_json::Value::Null, error_log: Vec::new(), @@ -344,7 +347,7 @@ pub async fn integration_config_get( "on_your_laptop": false, "when_isolated": false }); - if sanitized_path.exists() { + if exists { match fs::read_to_string(&sanitized_path) { Ok(content) => { match serde_yaml::from_str::(&content) { diff --git a/src/scratchpads/chat_passthrough.rs b/src/scratchpads/chat_passthrough.rs index 70844cc8c..69b9384e6 100644 --- a/src/scratchpads/chat_passthrough.rs +++ b/src/scratchpads/chat_passthrough.rs @@ -107,9 +107,8 @@ impl ScratchpadAbstract for ChatPassthrough { let at_tools = tools_merged_and_filtered(gcx.clone(), self.supports_clicks).await?; let messages = prepend_the_right_system_prompt_and_maybe_more_initial_messages(gcx.clone(), self.messages.clone(), &self.post, &mut self.has_rag_results).await; - let (mut messages, undroppable_msg_n, _any_context_produced) = if self.allow_at && !should_execute_remotely { - run_at_commands(ccx.clone(), self.t.tokenizer.clone(), sampling_parameters_to_patch.max_new_tokens, &self.messages, &mut self.has_rag_results).await + run_at_commands(ccx.clone(), self.t.tokenizer.clone(), sampling_parameters_to_patch.max_new_tokens, &messages, &mut self.has_rag_results).await } else { (self.messages.clone(), self.messages.len(), false) }; diff --git a/src/scratchpads/chat_utils_prompts.rs b/src/scratchpads/chat_utils_prompts.rs index d025583f7..751c5d4e7 100644 --- a/src/scratchpads/chat_utils_prompts.rs +++ b/src/scratchpads/chat_utils_prompts.rs @@ -175,8 +175,6 @@ pub async fn system_prompt_add_workspace_info( } } - tracing::info!("system_prompt\n{}", system_prompt); - system_prompt } diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index 11ea2235c..ebd944bb0 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ 
-131,32 +131,7 @@ PROMPT_CONFIGURATOR: | %PROJECT_SUMMARY% - The integration config format is the following YAML: - ``` - integration_name: - field1: "value1" - field2: "value2" - available: - on_your_laptop: - - project_pattern: "*my_workspace/my_project1" - enable: true - - project_pattern: "*my_project2" - enable: true - when_isolated: - - image_pattern: "docker_image_for_my_project1_*" - enable: true - docker: - new_container_default: - image: "name_like_on_docker_hub:latest" - environment: - VARIABLE1: "VALUE1" - existing_containers: - my_container1: - image: "my_image1:latest" - environment: - VARIABLE2: "VALUE2" - ``` - The first user message will have all the exiting configs, docker images and containers. + The first couple of messages will have all the existing configs and the current config file schema. The next user message will start with šŸ”§ and it will specify your exact mission for this chat. From e587eb2189ff9e979193cd472dc991cddd82c7dd Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sun, 8 Dec 2024 13:43:35 +0100 Subject: [PATCH 092/185] system prompt inserting overhaul 3 (PROJECT_SUMMARY works) --- src/agentic/generate_follow_up_message.rs | 27 +++++++++---------- src/integrations/config_chat.rs | 2 +- src/integrations/project_summary_chat.rs | 10 ++++--- src/tools/tool_patch_aux/ast_lint.rs | 2 +- src/yaml_configs/customization_compiled_in.rs | 3 ++- 5 files changed, 23 insertions(+), 21 deletions(-) diff --git a/src/agentic/generate_follow_up_message.rs b/src/agentic/generate_follow_up_message.rs index 495394c03..530b1bb02 100644 --- a/src/agentic/generate_follow_up_message.rs +++ b/src/agentic/generate_follow_up_message.rs @@ -22,22 +22,15 @@ pub async fn generate_follow_up_message( return Err(format!("The last message is not role=assistant")); } + // If the robot message is an open question, return empty list. + messages = vec![ ChatMessage::new( "system".to_string(), concat!( - "Super simple job today, generate follow-ups!\n", - ).to_string(), - ), - ChatMessage::new( - "user".to_string(), - last_assistant_msg_text - ), - ChatMessage::new( - "user".to_string(), - concat!( - "Generate up to 3 most likely short follow-ups by the user to the robot message above, in 3 words or less, like 'Fix it' 'Go ahead' 'Never mind' etc.\n", - "If the previous message is an open question, return empty list. If there are no simple answers, return empty list. If the is no question, or the conversation is over, return an empty list.\n", + "Super simple job today, generate follow-ups! In the first message you will receive a question or statement generated by a robot.\n", + "Generate up to 3 most likely short follow-ups by the user to the robot message, in 3 words or less, like 'Go ahead' 'Never mind' etc.\n", + "If there are no simple answers, return empty list. 
If the is no question, or the conversation is over, return an empty list.\n", "If you see clear options for the asnwer to the robot's question, put first the option that allows robot to continue.\n", "\n", "Output must be this simple json:\n", @@ -47,10 +40,13 @@ pub async fn generate_follow_up_message( "Don't write backquotes, just this format.\n", ).to_string(), ), + ChatMessage::new( + "user".to_string(), + concat!( + ).to_string(), + ), ]; - tracing::info!("follow-up model says1 {:?}", messages); - let ccx = Arc::new(AMutex::new(AtCommandsContext::new( gcx.clone(), 8000, @@ -63,7 +59,7 @@ pub async fn generate_follow_up_message( let updated_messages: Vec> = subchat_single( ccx.clone(), model_name, - messages, + messages.clone(), vec![], None, false, @@ -77,6 +73,7 @@ pub async fn generate_follow_up_message( let response = updated_messages.into_iter().next().map(|x| x.into_iter().last().map(|last_m| { last_m.content.content_text_only() })).flatten().ok_or("No commit message found".to_string())?; + tracing::info!("follow-up model says1 {:?}", messages); tracing::info!("follow-up model says2 {:?}", response); let parsed_response: Value = serde_json::from_str(&response).map_err(|e| e.to_string())?; diff --git a/src/integrations/config_chat.rs b/src/integrations/config_chat.rs index f75dc0672..e9d64778e 100644 --- a/src/integrations/config_chat.rs +++ b/src/integrations/config_chat.rs @@ -71,7 +71,7 @@ pub async fn mix_config_messages( msg.push_str(format!("\n\nThe file doesn't exist, so here is a sample YAML to give you an idea how this config might look in YAML:\n\n{}\n\n", serde_yaml::to_string(&yaml_value).unwrap()).as_str()); } ChatMessage { - role: "user".to_string(), + role: "cd_instruction".to_string(), content: ChatContent::SimpleText(msg), tool_calls: None, tool_call_id: String::new(), diff --git a/src/integrations/project_summary_chat.rs b/src/integrations/project_summary_chat.rs index 85b020e02..38d0f18a7 100644 --- a/src/integrations/project_summary_chat.rs +++ b/src/integrations/project_summary_chat.rs @@ -15,6 +15,8 @@ pub async fn mix_project_summary_messages( ) { assert!(messages[0].role != "system"); // we are here to add this, can't already exist + + // XXX should be a better way to load the prompt let custom: crate::yaml_configs::customization_loader::CustomizationYaml = match crate::yaml_configs::customization_loader::load_customization(gcx.clone(), true).await { Ok(x) => x, Err(why) => { @@ -46,10 +48,12 @@ pub async fn mix_project_summary_messages( usage: None, }; - if !messages.is_empty() { - messages[0] = system_message; + if messages.len() == 1 { + stream_back_to_user.push_in_json(serde_json::json!(system_message)); } else { - messages.push(system_message) + tracing::error!("more than 1 message when mixing configurtion chat context, bad things might happen!"); } + + messages.splice(0..0, vec![system_message]); } diff --git a/src/tools/tool_patch_aux/ast_lint.rs b/src/tools/tool_patch_aux/ast_lint.rs index a490a6783..026cf5216 100644 --- a/src/tools/tool_patch_aux/ast_lint.rs +++ b/src/tools/tool_patch_aux/ast_lint.rs @@ -14,7 +14,7 @@ pub async fn parse_and_get_error_symbols( let (mut parser, _language) = match get_ast_parser_by_filename(&path) { Ok(x) => x, Err(err) => { - tracing::error!("Error getting parser: {}", err.message); + tracing::info!("Error getting parser: {}", err.message); return Err(format!("Error getting parser: {}", err.message)); } }; diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index 
ebd944bb0..81a7b34da 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -162,11 +162,12 @@ PROMPT_PROJECT_SUMMARY: | Most of those integrations are easy, you can just repeat the name. But two of those are special: cmdline_TEMPLATE and service_TEMPLATE. Those can integrate a blocking command line utility (such as cmake) and a blocking background command (such as hypercorn server that runs forever until you hit Ctrl+C), respectively. Think of typical command line things that might be required for the project, how do you run the webserver, how do you compile the project? + For webserver to work you most likely need a service_* so it runs in the background and you can open and navigate web pages at the same time. Turn those things into recommendations, replace _TEMPLATE with lowercase name with underscores, don't overthink it, "cargo build" should become "cmdline_cargo_build", etc. Recommendations here means just a list. The user will fill in the settings later. 4. Write a summary in natural language to the user, get their feedback, just ask if it looks alright, or if any of it needs improving. - 5. Finally use šŸ“REWRITE_WHOLE_FILE to overwrite the YAML config here: %CONFIG_PATH% + 5. Finally use šŸ“REWRITE_WHOLE_FILE to overwrite %CONFIG_PATH% 6. Stop. The project summary config format is the following YAML: From 3dfa84bc10bd7befb2b07c81418af8648aa30166 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sun, 8 Dec 2024 13:53:31 +0100 Subject: [PATCH 093/185] some project summary debugging --- src/agentic/generate_follow_up_message.rs | 6 +++--- src/yaml_configs/customization_compiled_in.rs | 6 ++++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/agentic/generate_follow_up_message.rs b/src/agentic/generate_follow_up_message.rs index 530b1bb02..a66549ba4 100644 --- a/src/agentic/generate_follow_up_message.rs +++ b/src/agentic/generate_follow_up_message.rs @@ -29,9 +29,9 @@ pub async fn generate_follow_up_message( "system".to_string(), concat!( "Super simple job today, generate follow-ups! In the first message you will receive a question or statement generated by a robot.\n", - "Generate up to 3 most likely short follow-ups by the user to the robot message, in 3 words or less, like 'Go ahead' 'Never mind' etc.\n", - "If there are no simple answers, return empty list. If the is no question, or the conversation is over, return an empty list.\n", - "If you see clear options for the asnwer to the robot's question, put first the option that allows robot to continue.\n", + "Generate up to 3 most likely short follow-ups by the user to the robot message, in 3 words or less, like 'Go ahead' 'Looks fantastic!' 'Never mind' etc.\n", + "Put first the option that allows robot to continue.\n", + "If there are no simple answers possible, return empty list. If the is no question, return an empty list.\n", "\n", "Output must be this simple json:\n", "\n", diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index 81a7b34da..4121a91eb 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -161,7 +161,7 @@ PROMPT_PROJECT_SUMMARY: | Most of those integrations are easy, you can just repeat the name. But two of those are special: cmdline_TEMPLATE and service_TEMPLATE. 
Those can integrate a blocking command line utility (such as cmake) and a blocking background command (such as hypercorn server that runs forever until you hit Ctrl+C), respectively. - Think of typical command line things that might be required for the project, how do you run the webserver, how do you compile the project? + Think of typical command line things that might be required to work on the project, how do you run the webserver, how do you compile it? For webserver to work you most likely need a service_* so it runs in the background and you can open and navigate web pages at the same time. Turn those things into recommendations, replace _TEMPLATE with lowercase name with underscores, don't overthink it, "cargo build" should become "cmdline_cargo_build", etc. Recommendations here means just a list. The user will fill in the settings later. @@ -170,12 +170,14 @@ PROMPT_PROJECT_SUMMARY: | 5. Finally use šŸ“REWRITE_WHOLE_FILE to overwrite %CONFIG_PATH% 6. Stop. + The file %CONFIG_PATH% does not exist. Don't try to cat() this file. Your job is to write it using šŸ“REWRITE_WHOLE_FILE. + The project summary config format is the following YAML: ``` project_summary: | - recommended_integrations: ["integr1", "integr2", "cmdline_something_useful"] + recommended_integrations: ["integr1", "integr2", "cmdline_something_useful", "service_something_background"] ``` Strictly follow the plan! From 8f07749480578f60bade294e995d63c30e275752 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sun, 8 Dec 2024 13:59:50 +0100 Subject: [PATCH 094/185] whoops, follow-up fix --- src/agentic/generate_follow_up_message.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/agentic/generate_follow_up_message.rs b/src/agentic/generate_follow_up_message.rs index a66549ba4..a5ff51bdc 100644 --- a/src/agentic/generate_follow_up_message.rs +++ b/src/agentic/generate_follow_up_message.rs @@ -31,6 +31,7 @@ pub async fn generate_follow_up_message( "Super simple job today, generate follow-ups! In the first message you will receive a question or statement generated by a robot.\n", "Generate up to 3 most likely short follow-ups by the user to the robot message, in 3 words or less, like 'Go ahead' 'Looks fantastic!' 'Never mind' etc.\n", "Put first the option that allows robot to continue.\n", + "All the follow-ups must mean different things, not 3 ways to say \"yes\".\n", "If there are no simple answers possible, return empty list. 
If the is no question, return an empty list.\n", "\n", "Output must be this simple json:\n", @@ -42,8 +43,7 @@ pub async fn generate_follow_up_message( ), ChatMessage::new( "user".to_string(), - concat!( - ).to_string(), + last_assistant_msg_text, ), ]; From cb3fa7546d3b302d443d14ae552fdc67ca133185 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sun, 8 Dec 2024 15:08:32 +0100 Subject: [PATCH 095/185] tool recommendations fixed --- src/http/routers/v1/links.rs | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 98eb999f7..9f3c81e93 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -83,19 +83,21 @@ pub async fn handle_v1_links( Ok(content) => { match serde_yaml::from_str::(&content) { Ok(yaml) => { - if let Some(recommended_tools) = yaml.get("recommended_tools").and_then(|rt| rt.as_sequence()) { - for tool in recommended_tools { - if let Some(tool_name) = tool.get("tool_name").and_then(|tn| tn.as_str()) { - - // integrations_map - - links.push(Link { - action: LinkAction::Goto, - text: format!("Configure {tool_name}"), - goto: Some(format!("SETTINGS:{tool_name}")), - current_config_file: None, - link_tooltip: format!(""), - }); + if let Some(recommended_integrations) = yaml.get("recommended_integrations").and_then(|rt| rt.as_sequence()) { + for igname_value in recommended_integrations { + if let Some(igname) = igname_value.as_str() { + if !integrations_map.contains_key(igname) { + tracing::info!("tool {} not present => link", igname); + links.push(Link { + action: LinkAction::Goto, + text: format!("Configure {igname}"), + goto: Some(format!("SETTINGS:{igname}")), + current_config_file: None, + link_tooltip: format!(""), + }); + } else { + tracing::info!("tool {} present => happy", igname); + } } } } From 320fe6cbb48bc2607f54dd9f39f8f618942e8749 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Sun, 8 Dec 2024 17:08:46 +0100 Subject: [PATCH 096/185] debug session for CONFIGURE, now writes variables.yaml --- .../refact/chat_client.py | 1 + src/http/routers/v1/chat.rs | 19 ++++++++-- src/integrations/config_chat.rs | 37 +++++++++++++++++-- src/integrations/docker/integr_docker.rs | 1 - src/integrations/integr_chrome.rs | 2 +- src/integrations/integr_postgres.rs | 7 ++-- src/integrations/setting_up_integrations.rs | 7 ++-- src/yaml_configs/customization_compiled_in.rs | 27 +++++++++----- 8 files changed, 76 insertions(+), 25 deletions(-) diff --git a/python_binding_and_cmdline/refact/chat_client.py b/python_binding_and_cmdline/refact/chat_client.py index ac02e954b..357b2b2f9 100644 --- a/python_binding_and_cmdline/refact/chat_client.py +++ b/python_binding_and_cmdline/refact/chat_client.py @@ -247,6 +247,7 @@ async def ask_using_http( meta["chat_id"] = chat_id meta["chat_mode"] = "AGENT" meta["chat_remote"] = chat_remote + # meta["current_config_file"] = "/Users/user/.config/refact/integrations.d/postgres.yaml" post_me["meta"] = meta choices: List[Optional[Message]] = [None] * n_answers async with aiohttp.ClientSession() as session: diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 47afae898..919c2ff37 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -21,15 +21,28 @@ pub fn available_tools_by_chat_mode(current_tools: Vec, chat_mode: &ChatM ChatMode::EXPLORE | ChatMode::AGENT | ChatMode::NO_TOOLS => { current_tools }, - ChatMode::CONFIGURE | ChatMode::PROJECT_SUMMARY => { - let config_tools_whitelist = 
["cat", "tree", "bash"]; + ChatMode::CONFIGURE => { + let blacklist = vec!["tree", "patch", "locate", "knowledge"]; current_tools .into_iter() .filter(|x| { x.get("function") .and_then(|x| x.get("name")) .and_then(|tool_name| tool_name.as_str()) - .map(|tool_name_str| config_tools_whitelist.contains(&tool_name_str)) + .map(|tool_name_str| !blacklist.contains(&tool_name_str)) + .unwrap_or(true) + }) + .collect() + }, + ChatMode::PROJECT_SUMMARY => { + let whitelist = vec!["cat", "tree", "bash"]; + current_tools + .into_iter() + .filter(|x| { + x.get("function") + .and_then(|x| x.get("name")) + .and_then(|tool_name| tool_name.as_str()) + .map(|tool_name_str| whitelist.contains(&tool_name_str)) .unwrap_or(false) }) .collect() diff --git a/src/integrations/config_chat.rs b/src/integrations/config_chat.rs index e9d64778e..cd6dba8c4 100644 --- a/src/integrations/config_chat.rs +++ b/src/integrations/config_chat.rs @@ -16,6 +16,7 @@ pub async fn mix_config_messages( stream_back_to_user: &mut HasRagResults, ) { assert!(messages[0].role != "system"); // we are here to add this, can't already exist + tracing::info!("post.integr_config_path {:?}", chat_meta.current_config_file); let mut context_file_vec = Vec::new(); let all_integrations = crate::integrations::setting_up_integrations::integrations_all_with_icons(gcx.clone()).await; @@ -42,7 +43,32 @@ pub async fn mix_config_messages( context_file_vec.push(context_file); } - tracing::info!("post.integr_config_path {:?}", chat_meta.current_config_file); + let (config_dirs, global_config_dir) = crate::integrations::setting_up_integrations::get_config_dirs(gcx.clone()).await; + let mut variables_yaml_instruction = String::new(); + for dir in config_dirs.iter().chain(std::iter::once(&global_config_dir)) { + let variables_path = dir.join("variables.yaml"); + if variables_path.exists() { + match fs::read_to_string(&variables_path) { + Ok(file_content) => { + let context_file = ContextFile { + file_name: variables_path.to_string_lossy().to_string(), + file_content, + line1: 0, + line2: 0, + symbols: vec![], + gradient_type: -1, + usefulness: 100.0, + }; + context_file_vec.push(context_file); + } + Err(err) => { + tracing::error!("Failed to read variables.yaml in dir {}: {:?}", dir.display(), err); + } + } + } else { + variables_yaml_instruction.push_str(format!("{}\n", variables_path.display()).as_str()); + } + } let schema_message = match crate::integrations::setting_up_integrations::integration_config_get( chat_meta.current_config_file.clone(), @@ -59,7 +85,7 @@ pub async fn mix_config_messages( serde_json::to_string(&schema_struct).unwrap(), ); if the_get.integr_config_exists { - msg.push_str(format!("\n\nThis is how the system loads the YAML so you can detect which fields are not loaded in reality:\n\n{}\n\n", serde_json::to_string(&the_get.integr_values).unwrap()).as_str()); + msg.push_str(format!("This is how the system loads the YAML so you can detect which fields are not loaded in reality:\n\n{}\n\n", serde_json::to_string(&the_get.integr_values).unwrap()).as_str()); } else { let mut yaml_value = serde_yaml::to_value(&the_get.integr_values).unwrap(); if let serde_yaml::Value::Mapping(ref mut map) = yaml_value { @@ -68,7 +94,10 @@ pub async fn mix_config_messages( available_map.insert(serde_yaml::Value::String("when_isolated".to_string()), serde_yaml::Value::Bool(schema_struct.available.when_isolated_possible)); map.insert(serde_yaml::Value::String("available".to_string()), serde_yaml::Value::Mapping(available_map)); } - msg.push_str(format!("\n\nThe 
file doesn't exist, so here is a sample YAML to give you an idea how this config might look in YAML:\n\n{}\n\n", serde_yaml::to_string(&yaml_value).unwrap()).as_str()); + msg.push_str(format!("The file doesn't exist, so here is a sample YAML to give you an idea how this config might look in YAML:\n\n{}\n\n", serde_yaml::to_string(&yaml_value).unwrap()).as_str()); + } + if !variables_yaml_instruction.is_empty() { + msg.push_str(format!("Pay attention to variables.yaml files, you see the existing ones above, but also here are all the other paths they can potentially exist:\n{}\n\n", variables_yaml_instruction).as_str()); } ChatMessage { role: "cd_instruction".to_string(), @@ -79,7 +108,7 @@ pub async fn mix_config_messages( } }, Err(e) => { - tracing::error!("Failed to load integrations: {}", e); + tracing::error!("Failed to load integration {}: {}", chat_meta.current_config_file, e); return; } }; diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index 4501514d0..b2485b0ac 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -309,5 +309,4 @@ smartlinks: content: | šŸ”§ The docker tool should be visible now. To test the tool, list the running containers, briefly describe the containers and express satisfaction and relief if it works, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. - The current config file is %CURRENT_CONFIG%. "#; \ No newline at end of file diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 47e71cbf5..06601f6a3 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -874,7 +874,7 @@ smartlinks: sl_chat: - role: "user" content: | - šŸ”§ The chrome tool should be visible now. To test the tool, navigate to a website, take a screenshot, and express happiness if it works. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. The current config file is %CURRENT_CONFIG%. + šŸ”§ The chrome tool should be visible now. To test the tool, navigate to a website, take a screenshot, and express happiness if it works. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. - sl_label: "Help me install Chrome for Testing" sl_chat: - role: "user" diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index 14e32edae..7fa12bc2e 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -221,14 +221,13 @@ smartlinks: content: | šŸ”§ The postgres tool should be visible now. To test the tool, list the tables available, briefly describe the tables and express happiness, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. - The current config file is %CURRENT_CONFIG%. - sl_label: "Look at the project, fill in automatically" sl_chat: - role: "user" - content: | + content: > šŸ”§ Your goal is to set up postgres client. Look at the project, especially files like "docker-compose.yaml" or ".env". Call tree() to see what files the project has. After that is completed, go through the usual plan in the system prompt. - The current config file is %CURRENT_CONFIG%. + Keep POSTGRES_HOST POSTGRES_PORT POSTGRES_USER POSTGRES_PASSWORD POSTGRES_DB in variables.yaml so they can be reused by command line tools later. 
docker: filter_label: "" filter_image: "postgres" @@ -251,3 +250,5 @@ docker: content: | šŸ”§ Your job is to modify postgres connection config in the current file to match the variables from the container, use docker tool to inspect the container if needed. Current config file: %CURRENT_CONFIG%. "#; + +// To think about: PGPASSWORD PGHOST PGUSER PGPORT PGDATABASE maybe tell the model to set that in variables.yaml as well diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 9f8c0fa6c..e73a8e23e 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -54,11 +54,12 @@ pub fn read_integrations_d( if integrations_yaml_path.is_empty() { project_config_dirs.push("".to_string()); // global } + for project_config_dir in project_config_dirs { // Read config_folder/integr_name.yaml and make a record, even if the file doesn't exist - let dir_path = if project_config_dir == "" { global_config_dir.clone() } else { PathBuf::from(project_config_dir.clone()) }; + let config_dir = if project_config_dir == "" { global_config_dir.clone() } else { PathBuf::from(project_config_dir.clone()) }; for integr_name in lst.iter() { - let path_str = join_config_path(&dir_path, integr_name); + let path_str = join_config_path(&config_dir, integr_name); let path = PathBuf::from(path_str.clone()); let (_integr_name, project_path) = match split_path_into_project_and_integration(&path) { Ok(x) => x, @@ -70,7 +71,7 @@ pub fn read_integrations_d( files_to_read.push((path_str, integr_name.to_string(), project_path)); } // Find special files that start with cmdline_* and service_* - if let Ok(entries) = fs::read_dir(dir_path.join("integrations.d")) { + if let Ok(entries) = fs::read_dir(config_dir.join("integrations.d")) { let mut entries: Vec<_> = entries.filter_map(Result::ok).collect(); entries.sort_by_key(|entry| entry.file_name()); for entry in entries { diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index 4121a91eb..9c1e5fd8e 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -86,9 +86,6 @@ PROMPT_AGENTIC_TOOLS: | [mode3] You are Refact Agent, an autonomous bot for coding tasks. %PROMPT_PINS% - %WORKSPACE_INFO% - - %PROJECT_SUMMARY% Good practice using knowledge(): it's the key to successfully completing complex tasks the user might present you with. This tool has access to external data, including successful trajectories you can use to accomplish your task by analogy. The knowledge() @@ -119,6 +116,12 @@ PROMPT_AGENTIC_TOOLS: | %CD_INSTRUCTIONS% + - below general information about the current project - + + %WORKSPACE_INFO% + + %PROJECT_SUMMARY% + WHEN USING EXPLORATION TOOLS, USE SEVERAL IN PARALLEL! USE šŸ“ BEFORE ANY CODE BLOCK! FOR COMPLEX TASKS, CALL knowledege() BEFORE DOING ANYTHING! @@ -136,13 +139,17 @@ PROMPT_CONFIGURATOR: | The next user message will start with šŸ”§ and it will specify your exact mission for this chat. 
Your approximate plan: - - look at the current project by calling tree() - - using cat() look inside files like Cargo.toml package.json that might help you with your mission - - derive as much information as possible from the project itself - - write a markdown table that has 2 columns, key parameters on lhs, and values you were able to derive from the project (or just reasonable defaults) on rhs - - write 1 paragraph explanation of what you are about to do - - ask the user if they want to change anything - - write updated configs using šŸ“REWRITE_WHOLE_FILE + - Look at the current project by calling tree() + - Using cat() look inside files like Cargo.toml package.json that might help you with your mission + - Derive as much information as possible from the project itself + - Keep reusable things like hosts and usernames (such as POSTGRES_HOST) in variables.yaml they all will become environment variables for command line tools + - Write a markdown table that has 2 columns, key parameters on lhs, and values you were able to derive from the project (or just reasonable defaults) on rhs + - Write 1 paragraph explanation of what you are about to do + - Ask the user if they want to change anything + - Write updated configs using šŸ“REWRITE_WHOLE_FILE + - You can't check if the tool in question works or not in the same thread, user will have to accept the changes, and test again later by starting a new chat. + + The current config file is %CURRENT_CONFIG% but rewrite variables.yaml as neeeded, you can use $VARIABLE for any string fields in config files. PROMPT_PROJECT_SUMMARY: | From c4ecfaec14f8781ddf40d7264a0f00aaf42591d8 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Mon, 9 Dec 2024 06:33:20 +0100 Subject: [PATCH 097/185] CONFIGURE debugging --- src/http/routers/v1/chat.rs | 2 +- src/integrations/config_chat.rs | 2 +- src/integrations/integr_chrome.rs | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 919c2ff37..8c0ca896f 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -22,7 +22,7 @@ pub fn available_tools_by_chat_mode(current_tools: Vec, chat_mode: &ChatM current_tools }, ChatMode::CONFIGURE => { - let blacklist = vec!["tree", "patch", "locate", "knowledge"]; + let blacklist = vec!["tree", "patch", "locate", "knowledge", "search"]; current_tools .into_iter() .filter(|x| { diff --git a/src/integrations/config_chat.rs b/src/integrations/config_chat.rs index cd6dba8c4..32592ddc3 100644 --- a/src/integrations/config_chat.rs +++ b/src/integrations/config_chat.rs @@ -32,7 +32,7 @@ pub async fn mix_config_messages( } }; let context_file = ContextFile { - file_name: ig.integr_name.clone(), + file_name: ig.integr_config_path.clone(), file_content, line1: 0, line2: 0, diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 06601f6a3..4e9da2110 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -190,7 +190,7 @@ impl Tool for ToolChrome { let parsed_command = match parse_single_command(&command.to_string()) { Ok(command) => command, Err(e) => { - tool_log.push(format!("failed to parse command `{}`: {}.", command, e)); + tool_log.push(format!("Failed to parse command `{}`: {}.", command, e)); break } }; @@ -200,7 +200,7 @@ impl Tool for ToolChrome { mutlimodal_els.extend(command_multimodal_els); }, Err(e) => { - tool_log.push(format!("failed to execute command `{}`: {}.", command, e)); + 
tool_log.push(format!("Failed to execute command `{}`: {}.", command, e)); break } }; @@ -849,17 +849,17 @@ fields: window_width: f_type: string_short f_desc: "Width of the browser window." - f_default: "1024" + f_default: "" f_extra: true window_height: f_type: string_short f_desc: "Height of the browser window." - f_default: "768" + f_default: "" f_extra: true idle_browser_timeout: f_type: string_short f_desc: "Idle timeout for the browser in seconds." - f_default: "600" + f_default: "" f_extra: true headless: f_type: string_short @@ -874,7 +874,7 @@ smartlinks: sl_chat: - role: "user" content: | - šŸ”§ The chrome tool should be visible now. To test the tool, navigate to a website, take a screenshot, and express happiness if it works. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. + šŸ”§ The chrome tool should be visible now. To test the tool, navigate to a website like https://example.com/ take a screenshot, and express happiness if it works. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. - sl_label: "Help me install Chrome for Testing" sl_chat: - role: "user" From acbc7ee8a112933319ff6f608c6145b4596f9b8e Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Mon, 9 Dec 2024 06:34:42 +0100 Subject: [PATCH 098/185] environment variables to cmdline_* --- src/integrations/integr_cmdline.rs | 17 ++++++++++++----- src/integrations/integr_cmdline_service.rs | 8 +++++--- src/yaml_configs/customization_compiled_in.rs | 3 ++- 3 files changed, 19 insertions(+), 9 deletions(-) diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 664026187..db3ccebe9 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -106,9 +106,10 @@ pub fn format_output(stdout_out: &str, stderr_out: &str) -> String { out } -pub fn _create_command_from_string( +pub fn create_command_from_string( cmd_string: &str, command_workdir: &String, + env_variables: &HashMap, ) -> Result { let command_args = shell_words::split(cmd_string) .map_err(|e| format!("Failed to parse command: {}", e))?; @@ -120,6 +121,9 @@ pub fn _create_command_from_string( cmd.args(&command_args[1..]); } cmd.current_dir(command_workdir); + for (key, value) in env_variables { + cmd.env(key, value); + } Ok(cmd) } @@ -127,10 +131,12 @@ pub async fn execute_blocking_command( command: &str, cfg: &CmdlineToolConfig, command_workdir: &String, + env_variables: &HashMap, ) -> Result { info!("EXEC workdir {}:\n{:?}", command_workdir, command); + let command_future = async { - let mut cmd = _create_command_from_string(command, command_workdir)?; + let mut cmd = create_command_from_string(command, command_workdir, env_variables)?; let t0 = tokio::time::Instant::now(); let result = cmd .stdout(Stdio::piped()) @@ -173,11 +179,11 @@ impl Tool for ToolCmdline { async fn tool_execute( &mut self, - _ccx: Arc>, + ccx: Arc>, tool_call_id: &String, args: &HashMap, ) -> Result<(bool, Vec), String> { - + let gcx = ccx.lock().await.global_context.clone(); let mut args_str: HashMap = HashMap::new(); let valid_params: Vec = self.cfg.parameters.iter().map(|p| p.name.clone()).collect(); @@ -199,8 +205,9 @@ impl Tool for ToolCmdline { let command = replace_args(self.cfg.command.as_str(), &args_str); let workdir = replace_args(self.cfg.command_workdir.as_str(), &args_str); + let env_variables = crate::integrations::setting_up_integrations::get_vars_for_replacements(gcx.clone()).await; - let tool_ouput = 
execute_blocking_command(&command, &self.cfg, &workdir).await?; + let tool_ouput = execute_blocking_command(&command, &self.cfg, &workdir, &env_variables).await?; let result = vec![ContextEnum::ChatMessage(ChatMessage { role: "tool".to_string(), diff --git a/src/integrations/integr_cmdline_service.rs b/src/integrations/integr_cmdline_service.rs index f73c4d989..8c2397753 100644 --- a/src/integrations/integr_cmdline_service.rs +++ b/src/integrations/integr_cmdline_service.rs @@ -109,6 +109,7 @@ async fn execute_background_command( cmdline_workdir: &String, cfg: &CmdlineToolConfig, action: &str, + env_variables: &HashMap, ) -> Result { let session_key = format!("custom_service_{service_name}"); let mut session_mb = gcx.read().await.integration_sessions.get(&session_key).cloned(); @@ -155,7 +156,7 @@ async fn execute_background_command( tracing::info!("SERVICE START workdir {}:\n{:?}", cmdline_workdir, command_str); actions_log.push_str(&format!("Starting service with the following command line:\n{}\n", command_str)); - let mut command = _create_command_from_string(&command_str, cmdline_workdir)?; + let mut command = create_command_from_string(&command_str, cmdline_workdir, env_variables)?; command.stdout(Stdio::piped()); command.stderr(Stdio::piped()); let mut command_wrap = TokioCommandWrap::from(command); @@ -252,6 +253,7 @@ impl Tool for ToolService { tool_call_id: &String, args: &HashMap, ) -> Result<(bool, Vec), String> { + let gcx = ccx.lock().await.global_context.clone(); let mut args_str: HashMap = HashMap::new(); let valid_params: Vec = self.cfg.parameters.iter().map(|p| p.name.clone()).collect(); @@ -273,15 +275,15 @@ impl Tool for ToolService { let command = replace_args(self.cfg.command.as_str(), &args_str); let workdir = replace_args(self.cfg.command_workdir.as_str(), &args_str); + let env_variables = crate::integrations::setting_up_integrations::get_vars_for_replacements(gcx.clone()).await; let tool_ouput = { - let gcx = ccx.lock().await.global_context.clone(); let action = args_str.get("action").cloned().unwrap_or("start".to_string()); if !["start", "restart", "stop", "status"].contains(&action.as_str()) { return Err("Tool call is invalid. Param 'action' must be one of 'start', 'restart', 'stop', 'status'. Try again".to_string()); } execute_background_command( - gcx, &self.name, &command, &workdir, &self.cfg, action.as_str() + gcx, &self.name, &command, &workdir, &self.cfg, action.as_str(), &env_variables, ).await? }; diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index 9c1e5fd8e..edabf2ef2 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -147,7 +147,8 @@ PROMPT_CONFIGURATOR: | - Write 1 paragraph explanation of what you are about to do - Ask the user if they want to change anything - Write updated configs using šŸ“REWRITE_WHOLE_FILE - - You can't check if the tool in question works or not in the same thread, user will have to accept the changes, and test again later by starting a new chat. + + You can't check if the tool in question works or not in the same thread, user will have to accept the changes, and test again later by starting a new chat. The current config file is %CURRENT_CONFIG% but rewrite variables.yaml as neeeded, you can use $VARIABLE for any string fields in config files. 
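
A minimal sketch of the mechanism the patch above wires up: values collected from variables.yaml are handed to the command builder as plain environment variables. Everything below is illustrative only; load_variables_yaml, the variables.yaml path and the printenv target are assumptions invented for the sketch, std::process::Command stands in for the tokio command the real tool builds, and serde_yaml is simply assumed to be available because it is already used elsewhere in this crate. Only the cmd.env(key, value) loop mirrors the actual change in create_command_from_string().

    use std::collections::HashMap;
    use std::process::Command;

    // Hypothetical helper for this sketch: read a flat `variables.yaml` (string -> string map).
    fn load_variables_yaml(path: &str) -> HashMap<String, String> {
        std::fs::read_to_string(path)
            .ok()
            .and_then(|text| serde_yaml::from_str::<HashMap<String, String>>(&text).ok())
            .unwrap_or_default()
    }

    fn main() {
        let env_variables = load_variables_yaml("variables.yaml");
        // `printenv` is only a stand-in; the real code assembles the tool's own command line.
        let mut cmd = Command::new("printenv");
        cmd.current_dir(".");
        for (key, value) in &env_variables {
            cmd.env(key, value); // the same env(key, value) call the patch adds
        }
        let output = cmd.output().expect("failed to spawn command");
        println!("{}", String::from_utf8_lossy(&output.stdout));
    }

With this in place, anything kept in variables.yaml (POSTGRES_HOST, tokens, and so on) reaches cmdline_* and service_* tools as ordinary environment variables instead of being repeated in each tool's YAML.
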
From 35ad9f3f536eea258c421fdb22cbe9e3fd6c6dd8 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Mon, 9 Dec 2024 07:13:51 +0100 Subject: [PATCH 099/185] repair actions in service_ --- src/http/routers/v1/chat.rs | 4 ++-- src/integrations/integr_cmdline_service.rs | 18 +++++++++++------- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/src/http/routers/v1/chat.rs b/src/http/routers/v1/chat.rs index 8c0ca896f..caac66740 100644 --- a/src/http/routers/v1/chat.rs +++ b/src/http/routers/v1/chat.rs @@ -117,7 +117,7 @@ async fn _chat( })?; let mut messages = deserialize_messages_from_post(&chat_post.messages)?; - tracing::info!("\n\n new chat_mode {:?}\n", chat_post.meta.chat_mode); + tracing::info!("chat_mode {:?}\n", chat_post.meta.chat_mode); if chat_post.meta.chat_mode == ChatMode::NO_TOOLS { chat_post.tools = None; @@ -133,7 +133,7 @@ async fn _chat( // TODO at some point, get rid of /tools call on client, make so we can have chat_post.tools==None and just fill the tools here chat_post.tools = Some(available_tools_by_chat_mode(vec![], &chat_post.meta.chat_mode)); } - tracing::info!("tools [{}]\n", chat_post.tools.as_ref().map_or("".to_string(), |tools| { + tracing::info!("tools [{}]", chat_post.tools.as_ref().map_or("".to_string(), |tools| { tools.iter() .filter_map(|tool| tool.get("function").and_then(|f| f.get("name")).and_then(|n| n.as_str())) .collect::>() diff --git a/src/integrations/integr_cmdline_service.rs b/src/integrations/integr_cmdline_service.rs index 8c2397753..c2dece2f2 100644 --- a/src/integrations/integr_cmdline_service.rs +++ b/src/integrations/integr_cmdline_service.rs @@ -9,7 +9,7 @@ use async_trait::async_trait; use process_wrap::tokio::*; use crate::at_commands::at_commands::AtCommandsContext; -use crate::tools::tools_description::{Tool, ToolDesc}; +use crate::tools::tools_description::{Tool, ToolParam, ToolDesc}; use crate::call_validation::{ChatMessage, ChatContent, ContextEnum}; use crate::global_context::GlobalContext; use crate::integrations::process_io_utils::{blocking_read_until_token_or_timeout, is_someone_listening_on_that_tcp_port}; @@ -120,7 +120,7 @@ async fn execute_background_command( let session_arc = session_mb.clone().unwrap(); let mut session_locked = session_arc.lock().await; let session = session_locked.as_any_mut().downcast_mut::().unwrap(); - actions_log.push_str(&format!("Currently have service running, workdir {}:\n{}\n", session.cmdline_workdir, session.cmdline_string)); + actions_log.push_str(&format!("Currently the service is running.\nworkdir: {}\ncommand line: {}\n\n", session.cmdline_workdir, session.cmdline_string)); let (stdout_out, stderr_out) = get_stdout_and_stderr(100, &mut session.cmdline_stdout, &mut session.cmdline_stderr).await?; let filtered_stdout = output_mini_postprocessing(&cfg.output_filter, &stdout_out); let filtered_stderr = output_mini_postprocessing(&cfg.output_filter, &stderr_out); @@ -255,12 +255,8 @@ impl Tool for ToolService { ) -> Result<(bool, Vec), String> { let gcx = ccx.lock().await.global_context.clone(); let mut args_str: HashMap = HashMap::new(); - let valid_params: Vec = self.cfg.parameters.iter().map(|p| p.name.clone()).collect(); for (k, v) in args.iter() { - if !valid_params.contains(k) { - return Err(format!("Unexpected argument `{}`", k)); - } match v { serde_json::Value::String(s) => { args_str.insert(k.clone(), s.clone()); }, _ => return Err(format!("argument `{}` is not a string: {:?}", k, v)), @@ -303,15 +299,23 @@ impl Tool for ToolService { } fn tool_description(&self) -> ToolDesc { + 
let mut parameters = self.cfg.parameters.clone(); + parameters.push(ToolParam { + name: "action".to_string(), + param_type: "string".to_string(), + description: "Action to perform: start, restart, stop, status".to_string(), + }); + let parameters_required = self.cfg.parameters_required.clone().unwrap_or_else(|| { self.cfg.parameters.iter().map(|param| param.name.clone()).collect() }); + ToolDesc { name: self.name.clone(), agentic: true, experimental: false, description: self.cfg.description.clone(), - parameters: self.cfg.parameters.clone(), + parameters, parameters_required, } } From 1445e5c98c4b5d4a77d901c59949d75968b14ff8 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Mon, 9 Dec 2024 07:15:11 +0100 Subject: [PATCH 100/185] response_message_n_choices default implementation --- src/scratchpad_abstract.rs | 6 ++++-- src/scratchpads/chat_generic.rs | 8 -------- src/scratchpads/chat_llama2.rs | 7 ------- src/scratchpads/chat_passthrough.rs | 8 -------- src/scratchpads/code_completion_fim.rs | 8 -------- src/scratchpads/code_completion_replace.rs | 8 -------- 6 files changed, 4 insertions(+), 41 deletions(-) diff --git a/src/scratchpad_abstract.rs b/src/scratchpad_abstract.rs index fe6e2cacb..446afff34 100644 --- a/src/scratchpad_abstract.rs +++ b/src/scratchpad_abstract.rs @@ -98,9 +98,11 @@ pub trait ScratchpadAbstract: Send { fn response_message_n_choices( &mut self, - choices: Vec, + choices: Vec, // XXX replace with Value finish_reasons: Vec, - ) -> Result; + ) -> Result { + Err("not implemented".to_string()) + } fn response_message_streaming( &mut self, diff --git a/src/scratchpads/chat_generic.rs b/src/scratchpads/chat_generic.rs index 3b50d438d..ab8f14ee3 100644 --- a/src/scratchpads/chat_generic.rs +++ b/src/scratchpads/chat_generic.rs @@ -170,14 +170,6 @@ impl ScratchpadAbstract for GenericChatScratchpad { self.dd.response_streaming(delta, finish_reason) } - fn response_message_n_choices( - &mut self, - _choices: Vec, - _finish_reasons: Vec, - ) -> Result { - Err("not implemented".to_string()) - } - fn response_message_streaming( &mut self, _delta: &Value, diff --git a/src/scratchpads/chat_llama2.rs b/src/scratchpads/chat_llama2.rs index ee5d1d611..b962c7626 100644 --- a/src/scratchpads/chat_llama2.rs +++ b/src/scratchpads/chat_llama2.rs @@ -152,13 +152,6 @@ impl ScratchpadAbstract for ChatLlama2 { ) -> Result<(Value, FinishReason), String> { self.dd.response_streaming(delta, finish_reason) } - fn response_message_n_choices( - &mut self, - _choices: Vec, - _finish_reasons: Vec - ) -> Result { - Err("not implemented".to_string()) - } fn response_message_streaming( &mut self, diff --git a/src/scratchpads/chat_passthrough.rs b/src/scratchpads/chat_passthrough.rs index 69b9384e6..ede7fa4a1 100644 --- a/src/scratchpads/chat_passthrough.rs +++ b/src/scratchpads/chat_passthrough.rs @@ -195,14 +195,6 @@ impl ScratchpadAbstract for ChatPassthrough { Err("not implemented".to_string()) } - fn response_message_n_choices( - &mut self, - _choices: Vec, - _finish_reasons: Vec, - ) -> Result { - Err("not implemented".to_string()) - } - fn response_message_streaming( &mut self, json: &Value, diff --git a/src/scratchpads/code_completion_fim.rs b/src/scratchpads/code_completion_fim.rs index b85efdd09..f7880cbd7 100644 --- a/src/scratchpads/code_completion_fim.rs +++ b/src/scratchpads/code_completion_fim.rs @@ -332,14 +332,6 @@ impl ScratchpadAbstract for FillInTheMiddleScratchpad { }), finish_reason)) } - fn response_message_n_choices( - &mut self, - _choices: Vec, - _finish_reasons: Vec - ) -> 
Result { - Err("not implemented".to_string()) - } - fn response_message_streaming( &mut self, _delta: &Value, diff --git a/src/scratchpads/code_completion_replace.rs b/src/scratchpads/code_completion_replace.rs index 39405cb3b..a6f3068a6 100644 --- a/src/scratchpads/code_completion_replace.rs +++ b/src/scratchpads/code_completion_replace.rs @@ -814,14 +814,6 @@ impl ScratchpadAbstract for CodeCompletionReplaceScratchpad { Err("not implemented".to_string()) } - fn response_message_n_choices( - &mut self, - _choices: Vec, - _token_limit_hit: Vec, - ) -> Result { - Err("not implemented".to_string()) - } - fn response_message_streaming( &mut self, _delta: &Value, From 986b1374eb2feb615901fe033e355bb8f7cacb26 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Mon, 9 Dec 2024 07:15:20 +0100 Subject: [PATCH 101/185] agent prompt tune --- src/yaml_configs/customization_compiled_in.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index edabf2ef2..bc3fbb49d 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -89,7 +89,7 @@ PROMPT_AGENTIC_TOOLS: | Good practice using knowledge(): it's the key to successfully completing complex tasks the user might present you with. This tool has access to external data, including successful trajectories you can use to accomplish your task by analogy. The knowledge() - call should be your first call when you encounter a complex task. All the records from external database start with šŸ—ƒļø and a record + call should be your first call when you encounter an agentic task. All the records from external database start with šŸ—ƒļø and a record identifier. Use good trajectories to your advantage, and help user better. There might be also instructions on how to deal with certain frameworks and complex systems. @@ -108,8 +108,9 @@ PROMPT_AGENTIC_TOOLS: | * Related to the project, user describes an issue that appears to be somewhere in the code => call knowledge() first, and locate() after that to find where exactly in the code that is. - * User's request likely involves several steps, function calls, agentic tools like browser, database, debugger => then you need to call knowledge() first - to get access to the latest and best trajectories accomplishing a similar thing. + * User's request is likely agentic, implying actions from you, especially modifying files autonomously, using tools like browser, + database, debugger => then you need to call knowledge() first to get access to the latest and best trajectories accomplishing a + similar thing. If the task requires changes, write the changes yourself using šŸ“-notation, then call patch() in parallel for each file to change, and put all tickets you want to apply to a file in a comma-separated list. 
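
The trait-default pattern introduced in PATCH 100 above (response_message_n_choices gains a default body, so the scratchpads can drop their identical "not implemented" stubs) can be shown in a self-contained sketch. Every name below (Scratchpad, respond, ChatLike, CompletionLike) is invented for the illustration and is not a name from this codebase.

    // The trait supplies a fallback body, so implementors override it only when they support it.
    trait Scratchpad {
        fn respond(&mut self, choices: Vec<String>) -> Result<String, String> {
            let _ = choices; // the fallback ignores its input
            Err("not implemented".to_string())
        }
    }

    struct ChatLike;        // keeps the default
    struct CompletionLike;  // overrides it

    impl Scratchpad for ChatLike {}

    impl Scratchpad for CompletionLike {
        fn respond(&mut self, choices: Vec<String>) -> Result<String, String> {
            Ok(choices.join("\n"))
        }
    }

    fn main() {
        let mut a = ChatLike;
        let mut b = CompletionLike;
        assert!(a.respond(vec!["x".to_string()]).is_err());
        assert_eq!(b.respond(vec!["x".to_string(), "y".to_string()]).unwrap(), "x\ny");
    }

The design choice is the usual one for Rust traits: the common fallback lives in the trait itself, and only the scratchpads that actually support multiple choices provide an override.
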
From bfdbcc0a8d5599f82eba70b7ac8f94d4b952e79a Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Mon, 9 Dec 2024 07:29:20 +0100 Subject: [PATCH 102/185] minor --- src/vecdb/vdb_highlev.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/vecdb/vdb_highlev.rs b/src/vecdb/vdb_highlev.rs index be3b02900..d3019e484 100644 --- a/src/vecdb/vdb_highlev.rs +++ b/src/vecdb/vdb_highlev.rs @@ -19,9 +19,9 @@ use crate::vecdb::vdb_thread::{vecdb_start_background_tasks, vectorizer_enqueue_ fn model_to_rejection_threshold(embedding_model: &str) -> f32 { match embedding_model { - "text-embedding-3-small" => 0.60, + "text-embedding-3-small" => 0.63, "thenlper_gte" => 0.25, - _ => 0.60, + _ => 0.63, } } From dda0229a7bdd56f22e53fa9f0dd48320454b40fe Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Mon, 9 Dec 2024 08:05:48 +0100 Subject: [PATCH 103/185] knowledge() call system prompt debugging --- src/http/routers/v1/links.rs | 26 +++++++++---------- src/yaml_configs/customization_compiled_in.rs | 12 ++------- 2 files changed, 15 insertions(+), 23 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 9f3c81e93..4f98e2500 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -125,19 +125,19 @@ pub async fn handle_v1_links( }); } - if post.meta.chat_mode == ChatMode::AGENT { - let (project_commits, files_changed) = generate_commit_messages_with_current_changes(gcx.clone()).await; - if !project_commits.is_empty() { - links.push(Link { - action: LinkAction::Commit, - text: format!("Commit {files_changed} files"), - goto: None, - // projects: Some(project_commits), - current_config_file: None, - link_tooltip: format!(""), - }); - } - } + // if post.meta.chat_mode == ChatMode::AGENT { + // let (project_commits, files_changed) = generate_commit_messages_with_current_changes(gcx.clone()).await; + // if !project_commits.is_empty() { + // links.push(Link { + // action: LinkAction::Commit, + // text: format!("Commit {files_changed} files"), + // goto: None, + // // projects: Some(project_commits), + // current_config_file: None, + // link_tooltip: format!(""), + // }); + // } + // } if post.meta.chat_mode == ChatMode::AGENT { for failed_integr_name in failed_integration_names_after_last_user_message(&post.messages) { diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index bc3fbb49d..8c5443775 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -102,15 +102,7 @@ PROMPT_AGENTIC_TOOLS: | * Question unrelated to the project => just answer immediately. - * Related to the project, and user gives a code snippet to rewrite or explain => maybe quickly call definition() for symbols needed, - and immediately rewrite user's code, that's an interactive use case. - - * Related to the project, user describes an issue that appears to be somewhere in the code => call knowledge() first, and locate() after that to find where - exactly in the code that is. - - * User's request is likely agentic, implying actions from you, especially modifying files autonomously, using tools like browser, - database, debugger => then you need to call knowledge() first to get access to the latest and best trajectories accomplishing a - similar thing. + * Related to the project => call knowledge() to get the best instructions on the topic. 
If the task requires changes, write the changes yourself using šŸ“-notation, then call patch() in parallel for each file to change, and put all tickets you want to apply to a file in a comma-separated list. @@ -123,7 +115,7 @@ PROMPT_AGENTIC_TOOLS: | %PROJECT_SUMMARY% - WHEN USING EXPLORATION TOOLS, USE SEVERAL IN PARALLEL! USE šŸ“ BEFORE ANY CODE BLOCK! FOR COMPLEX TASKS, CALL knowledege() BEFORE DOING ANYTHING! + WHEN USING EXPLORATION TOOLS, USE SEVERAL IN PARALLEL! USE šŸ“ BEFORE ANY CODE BLOCK! FOR ANY QUESTION RELATED TO THE PROJECT, CALL knowledege() BEFORE DOING ANYTHING! PROMPT_CONFIGURATOR: | From 2c275c6feea9099989f5e96e02e9e61e05c973a8 Mon Sep 17 00:00:00 2001 From: Nick Frolov Date: Mon, 9 Dec 2024 10:52:10 +0100 Subject: [PATCH 104/185] if chrome_path is empty, headless_chrome package finds chrome --- src/integrations/integr_chrome.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 4e9da2110..538db831e 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -296,14 +296,20 @@ async fn setup_chrome_session( setup_log.push("Connect to existing web socket.".to_string()); Browser::connect_with_timeout(debug_ws_url, idle_browser_timeout).map_err(|e| e.to_string()) } else { - let path = PathBuf::from(args.chrome_path.clone()); + + // let path = PathBuf::from(args.chrome_path.clone()); + let mut path: Option = None; + if !args.chrome_path.is_empty() { + path = Some(PathBuf::from(args.chrome_path.clone())); + } let launch_options = LaunchOptions { - path: Some(path), + path, window_size, idle_browser_timeout, headless: args.headless.parse::().unwrap_or(true), ..Default::default() }; + setup_log.push("Started new chrome process.".to_string()); Browser::new(launch_options).map_err(|e| e.to_string()) }?; From 6be67f0f2868fb2e31b0c650701a3b71257db980 Mon Sep 17 00:00:00 2001 From: Dimitry Ageev Date: Mon, 9 Dec 2024 11:12:04 +0100 Subject: [PATCH 105/185] Chrome new commands (#450) * tab_id as first arg of a command * add eval command, unified TabArgs * get css styles for given element * add click at element --- src/integrations/integr_chrome.rs | 276 ++++++++++++++++++++++-------- 1 file changed, 204 insertions(+), 72 deletions(-) diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 538db831e..5b3c004db 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -26,6 +26,8 @@ use headless_chrome::browser::tab::point::Point; use headless_chrome::protocol::cdp::Page; use headless_chrome::protocol::cdp::Emulation; use headless_chrome::protocol::cdp::types::Event; +use headless_chrome::protocol::cdp::DOM::Enable as DOMEnable; +use headless_chrome::protocol::cdp::CSS::Enable as CSSEnable; use serde::{Deserialize, Serialize}; use base64::Engine; @@ -225,18 +227,21 @@ impl Tool for ToolChrome { fn tool_description(&self) -> ToolDesc { let mut supported_commands = vec![ - "open_tab ", - "navigate_to ", + "open_tab ", + "navigate_to ", "screenshot ", // "html ", "reload ", - "press_key_at ", - "type_text_at ", + "press_key_at ", + "type_text_at ", "tab_log ", + "eval ", + "styles ", + "click_at_element ", ]; if self.supports_clicks { supported_commands.extend(vec![ - "click_at ", + "click_at_point ", ]); } let description = format!( @@ -432,13 +437,16 @@ async fn session_get_tab_arc( enum Command { OpenTab(OpenTabArgs), NavigateTo(NavigateToArgs), - Screenshot(ScreenshotArgs), - 
Html(HtmlArgs), - Reload(ReloadArgs), - ClickAt(ClickAtArgs), + Screenshot(TabArgs), + Html(TabArgs), + Reload(TabArgs), + ClickAtPoint(ClickAtPointArgs), + ClickAtElement(TabElementArgs), TypeTextAt(TypeTextAtArgs), PressKeyAt(PressKeyAtArgs), - TabLog(TabLogArgs), + TabLog(TabArgs), + Eval(EvalArgs), + Styles(TabElementArgs), } async fn chrome_command_exec( @@ -554,7 +562,7 @@ async fn chrome_command_exec( }; tool_log.push(log); }, - Command::ClickAt(args) => { + Command::ClickAtPoint(args) => { let tab = { let mut chrome_session_locked = chrome_session.lock().await; let chrome_session = chrome_session_locked.as_any_mut().downcast_mut::().ok_or("Failed to downcast to ChromeSession")?; @@ -581,6 +589,29 @@ async fn chrome_command_exec( }; tool_log.push(log); }, + Command::ClickAtElement(args) => { + let tab = { + let mut chrome_session_locked = chrome_session.lock().await; + let chrome_session = chrome_session_locked.as_any_mut().downcast_mut::().ok_or("Failed to downcast to ChromeSession")?; + session_get_tab_arc(chrome_session, &args.tab_id).await? + }; + let log = { + let tab_lock = tab.lock().await; + match { + let element = tab_lock.headless_tab.find_element(&args.selector).map_err(|e| e.to_string())?; + element.click().map_err(|e| e.to_string())?; + Ok::<(), String>(()) + } { + Ok(_) => { + format!("clicked `{}` at {}", args.selector, tab_lock.state_string()) + }, + Err(e) => { + format!("click at element `{}` failed at {}: {}", args.selector, tab_lock.state_string(), e.to_string()) + }, + } + }; + tool_log.push(log); + }, Command::TypeTextAt(args) => { let tab = { let mut chrome_session_locked = chrome_session.lock().await; @@ -643,41 +674,77 @@ async fn chrome_command_exec( let filter = CmdlineOutputFilter::default(); let filtered_log = output_mini_postprocessing(&filter, tab_log.as_str()); tool_log.push(filtered_log.clone()); - } + }, + Command::Eval(args) => { + let tab = { + let mut chrome_session_locked = chrome_session.lock().await; + let chrome_session = chrome_session_locked.as_any_mut().downcast_mut::().ok_or("Failed to downcast to ChromeSession")?; + session_get_tab_arc(chrome_session, &args.tab_id).await? + }; + let log = { + let tab_lock = tab.lock().await; + match tab_lock.headless_tab.evaluate(args.expression.as_str(), false) { + Ok(result) => { + format!("eval result at {}: {:?}", tab_lock.state_string(), result) + }, + Err(e) => { + format!("eval failed at {}: {}", tab_lock.state_string(), e.to_string()) + }, + } + }; + tool_log.push(log); + }, + Command::Styles(args) => { + let tab = { + let mut chrome_session_locked = chrome_session.lock().await; + let chrome_session = chrome_session_locked.as_any_mut().downcast_mut::().ok_or("Failed to downcast to ChromeSession")?; + session_get_tab_arc(chrome_session, &args.tab_id).await? 
+ }; + let log = { + let tab_lock = tab.lock().await; + match { + tab_lock.headless_tab.call_method(DOMEnable(None)).map_err(|e| e.to_string())?; + tab_lock.headless_tab.call_method(CSSEnable(None)).map_err(|e| e.to_string())?; + let element = tab_lock.headless_tab.find_element(&args.selector).map_err(|e| e.to_string())?; + let computed_styles = element.get_computed_styles().map_err(|e| e.to_string())?; + Ok::(computed_styles.iter() + .map(|s| format!("{}: {}", s.name, s.value)) + .collect::>().join("\n")) + } { + Ok(styles_str) => { + format!("styles for element `{}` at {}:\n{}", args.selector, tab_lock.state_string(), styles_str) + }, + Err(e) => { + format!("styles get failed at {}: {}", tab_lock.state_string(), e.to_string()) + }, + } + }; + tool_log.push(log); + }, } Ok((tool_log, multimodal_els)) } #[derive(Debug)] -struct OpenTabArgs { - device: DeviceType, - tab_id: String, -} - -#[derive(Debug)] -struct NavigateToArgs { - uri: String, - tab_id: String, -} - -#[derive(Debug)] -struct ScreenshotArgs { +struct TabArgs { tab_id: String, } #[derive(Debug)] -struct HtmlArgs { +struct OpenTabArgs { + device: DeviceType, tab_id: String, } #[derive(Debug)] -struct ReloadArgs { +struct NavigateToArgs { + uri: String, tab_id: String, } #[derive(Debug)] -struct ClickAtArgs { +struct ClickAtPointArgs { point: Point, tab_id: String, } @@ -718,9 +785,15 @@ struct PressKeyAtArgs { } #[derive(Debug)] -struct TabLogArgs { - // wait_secs: u32, +struct EvalArgs { + tab_id: String, + expression: String, +} + +#[derive(Debug)] +struct TabElementArgs { tab_id: String, + selector: String, } fn parse_single_command(command: &String) -> Result { @@ -733,84 +806,117 @@ fn parse_single_command(command: &String) -> Result { match command_name.as_str() { "open_tab" => { - if parsed_args.len() < 2 { - return Err(format!("`open_tab` requires 2 arguments: `` and `tab_id`. Provided: {:?}", parsed_args)); + match parsed_args.as_slice() { + [tab_id, device_str] => { + let device = match device_str.as_str() { + "desktop" => DeviceType::DESKTOP, + "mobile" => DeviceType::MOBILE, + _ => return Err(format!("unknown device type: {}. Should be either `desktop` or `mobile`.", parsed_args[0])) + }; + Ok(Command::OpenTab(OpenTabArgs { + device: device.clone(), + tab_id: tab_id.clone(), + })) + }, + _ => { + Err("Missing one or several arguments `tab_id`, ``".to_string()) + } } - let device = match parsed_args[0].as_str() { - "desktop" => DeviceType::DESKTOP, - "mobile" => DeviceType::MOBILE, - _ => return Err(format!("unknown device type: {}. Should be either `desktop` or `mobile`.", parsed_args[0])) - }; - Ok(Command::OpenTab(OpenTabArgs { - device, - tab_id: parsed_args[1].clone(), - })) }, "navigate_to" => { - if parsed_args.len() < 2 { - return Err(format!("`navigate_to` requires 2 arguments: `uri` and `tab_id`. Provided: {:?}", parsed_args)); + match parsed_args.as_slice() { + [tab_id, uri] => { + Ok(Command::NavigateTo(NavigateToArgs { + uri: uri.clone(), + tab_id: tab_id.clone(), + })) + }, + _ => { + Err("Missing one or several arguments `tab_id`, `uri`".to_string()) + } } - Ok(Command::NavigateTo(NavigateToArgs { - uri: parsed_args[0].clone(), - tab_id: parsed_args[1].clone(), - })) }, "screenshot" => { - if parsed_args.len() < 1 { - return Err(format!("`screenshot` requires 1 argument: `tab_id`. 
Provided: {:?}", parsed_args)); + match parsed_args.as_slice() { + [tab_id] => { + Ok(Command::Screenshot(TabArgs { + tab_id: tab_id.clone(), + })) + }, + _ => { + Err("Missing one or several arguments `tab_id`".to_string()) + } } - Ok(Command::Screenshot(ScreenshotArgs { - tab_id: parsed_args[0].clone(), - })) }, "html" => { - if parsed_args.len() < 1 { - return Err(format!("`html` requires 1 argument: `tab_id`. Provided: {:?}", parsed_args)); + match parsed_args.as_slice() { + [tab_id] => { + Ok(Command::Html(TabArgs { + tab_id: tab_id.clone(), + })) + }, + _ => { + Err("Missing one or several arguments `tab_id`".to_string()) + } } - Ok(Command::Html(HtmlArgs { - tab_id: parsed_args[0].clone(), - })) }, "reload" => { - if parsed_args.len() < 1 { - return Err(format!("`reload` requires 1 argument: `tab_id`. Provided: {:?}", parsed_args)); + match parsed_args.as_slice() { + [tab_id] => { + Ok(Command::Reload(TabArgs { + tab_id: tab_id.clone(), + })) + }, + _ => { + Err("Missing one or several arguments `tab_id`".to_string()) + } } - Ok(Command::Reload(ReloadArgs { - tab_id: parsed_args[0].clone(), - })) }, - "click_at" => { + "click_at_point" => { match parsed_args.as_slice() { - [x_str, y_str, tab_id] => { + [tab_id, x_str, y_str] => { let x = x_str.parse::().map_err(|e| format!("Failed to parse x: {}", e))?; let y = y_str.parse::().map_err(|e| format!("Failed to parse y: {}", e))?; let point = Point { x, y }; - Ok(Command::ClickAt(ClickAtArgs { + Ok(Command::ClickAtPoint(ClickAtPointArgs { point, tab_id: tab_id.clone(), })) }, _ => { - Err("Missing one or several arguments 'x', 'y', 'tab_id'".to_string()) + Err("Missing one or several arguments `tab_id`, `x`, 'y`".to_string()) + } + } + }, + "click_at_element" => { + match parsed_args.as_slice() { + [tab_id, selector] => { + Ok(Command::ClickAtElement(TabElementArgs { + selector: selector.clone(), + tab_id: tab_id.clone(), + })) + }, + _ => { + Err("Missing one or several arguments `tab_id`, `selector`".to_string()) } } }, "type_text_at" => { match parsed_args.as_slice() { - [text, tab_id] => { + [tab_id, text] => { Ok(Command::TypeTextAt(TypeTextAtArgs { text: text.clone(), tab_id: tab_id.clone(), })) }, _ => { - Err("Missing one or several arguments 'text', 'tab_id'".to_string()) + Err("Missing one or several arguments `tab_id`, `text`".to_string()) } } }, "press_key_at" => { match parsed_args.as_slice() { - [key_str, tab_id] => { + [tab_id, key_str] => { let key = match key_str.to_lowercase().as_str() { "enter" => Key::ENTER, "esc" => Key::ESC, @@ -826,19 +932,45 @@ fn parse_single_command(command: &String) -> Result { })) }, _ => { - Err("Missing one or several arguments 'key', 'tab_id'".to_string()) + Err("Missing one or several arguments `tab_id`, `key`".to_string()) } } }, "tab_log" => { match parsed_args.as_slice() { [tab_id] => { - Ok(Command::TabLog(TabLogArgs { + Ok(Command::TabLog(TabArgs { + tab_id: tab_id.clone(), + })) + }, + _ => { + Err("Missing one or several arguments `tab_id`".to_string()) + } + } + }, + "eval" => { + match parsed_args.as_slice() { + [tab_id, expression] => { + Ok(Command::Eval(EvalArgs { + expression: expression.clone(), + tab_id: tab_id.clone(), + })) + }, + _ => { + Err("Missing one or several arguments `tab_id`, `expression`.".to_string()) + } + } + }, + "styles" => { + match parsed_args.as_slice() { + [tab_id, selector] => { + Ok(Command::Styles(TabElementArgs { + selector: selector.clone(), tab_id: tab_id.clone(), })) }, _ => { - Err("Missing one or several arguments 'tab_id'".to_string()) + 
Err("Missing one or several arguments `tab_id`, `selector`.".to_string()) } } }, From 2e9f09ed056336c1c364aea2e9cafe930888ad8c Mon Sep 17 00:00:00 2001 From: Nick Frolov Date: Mon, 9 Dec 2024 12:20:40 +0100 Subject: [PATCH 106/185] chrome - change chrome_path placeholder and field description --- src/integrations/integr_chrome.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 5b3c004db..e01a38430 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -982,8 +982,8 @@ const CHROME_INTEGRATION_SCHEMA: &str = r#" fields: chrome_path: f_type: string_long - f_desc: "Path to your chrome binary. You can install with \"npx @puppeteer/browsers install chrome@stable\", read more here https://developer.chrome.com/blog/chrome-for-testing. You can also give it ws:// path, in that case start chrome with --remote-debugging-port, read more here https://developer.chrome.com/docs/devtools/remote-debugging/local-server" - f_placeholder: "/Users/me/my_path/chrome/mac_arm-130.0.6723.69/chrome-mac-arm64/Google Chrome for Testing.app/Contents/MacOS/Google Chrome for Testing or ws://127.0.0.1:4444/" + f_desc: "Path to Google Chrome or Chromium binary. If empty, it searches for Google Chrome in your system" + f_placeholder: "" window_width: f_type: string_short f_desc: "Width of the browser window." From fe247951a4a03c374942fd0475343bd0d32a7daa Mon Sep 17 00:00:00 2001 From: JegernOUTT Date: Mon, 9 Dec 2024 20:10:27 +1030 Subject: [PATCH 107/185] Refactor patch processing to handle tickets sequentially and accumulate diff chunks --- src/http/routers/v1/patch.rs | 49 ++++++++++++++++++++---------------- 1 file changed, 27 insertions(+), 22 deletions(-) diff --git a/src/http/routers/v1/patch.rs b/src/http/routers/v1/patch.rs index c3510307c..eb3704ae0 100644 --- a/src/http/routers/v1/patch.rs +++ b/src/http/routers/v1/patch.rs @@ -188,36 +188,41 @@ pub async fn handle_v1_patch_apply_all( )?; filename_by_ticket.insert(ticket.filename_before.clone(), ticket); } - let mut active_tickets = filename_by_ticket.values().cloned().collect::>(); - let active_indices = active_tickets.iter().map(|ticket| ticket.id.clone()).collect::>(); let mut usage = ChatUsage { ..Default::default() }; - let diff_chunks_maybe = process_tickets( - ccx.clone(), - &mut active_tickets, - active_indices, - ¶ms, - &"patch_123".to_string(), - &mut usage, - ).await; - if !active_tickets.is_empty() { - let bad_ticket_ids = active_tickets.iter().map(|ticket| ticket.id.clone()).join(", "); - return Err(ScratchError::new( - StatusCode::UNPROCESSABLE_ENTITY, format!("Couldn't process some of the tickets: {bad_ticket_ids}" - ))) + let mut all_diff_chunks = vec![]; + for ticket in filename_by_ticket.into_values() { + let mut tickets = vec![ticket]; + let indices = tickets.iter().map(|ticket| ticket.id.clone()).collect::>(); + + let diff_chunks_maybe = process_tickets( + ccx.clone(), + &mut tickets, + indices, + ¶ms, + &"patch_123".to_string(), + &mut usage, + ).await; + if !tickets.is_empty() { + let bad_ticket_ids = tickets.iter().map(|ticket| ticket.id.clone()).join(", "); + return Err(ScratchError::new( + StatusCode::UNPROCESSABLE_ENTITY, format!("Couldn't process some of the tickets: {bad_ticket_ids}" + ))) + } + let mut diff_chunks = diff_chunks_maybe.map_err(|(e, _)| + ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, e) + )?; + diff_apply(global_context.clone(), &mut diff_chunks).await.map_err(|err| 
ScratchError::new( + StatusCode::UNPROCESSABLE_ENTITY, format!("Couldn't apply the diff: {err}")) + )?; + all_diff_chunks.extend(diff_chunks); } - let mut diff_chunks = diff_chunks_maybe.map_err(|(e, _)| - ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, e) - )?; - diff_apply(global_context.clone(), &mut diff_chunks).await.map_err(|err| ScratchError::new( - StatusCode::UNPROCESSABLE_ENTITY, format!("Couldn't apply the diff: {err}")) - )?; Ok(Response::builder() .status(StatusCode::OK) .header("Content-Type", "application/json") .body(Body::from(serde_json::to_string_pretty(&PatchApplyAllResponse { - chunks: diff_chunks + chunks: all_diff_chunks }).unwrap())) .unwrap()) } From 96f2e4ca12c1dd7cedd4a93ee22f48032e969b4d Mon Sep 17 00:00:00 2001 From: Sergey Vakhreev Date: Tue, 10 Dec 2024 00:25:59 +1030 Subject: [PATCH 108/185] Icons (#465) * add icons * more integration icons * Add base64-encoded icon support to integration schemas Introduce a new field for base64-encoded icons in integration schemas. Modify the build script to generate Rust files containing base64 encoded icon data from PNG files in the assets directory. Update existing integration schema constants to use these generated icon constants. Adjust Cargo.toml to update the `base64` crate version and declare it as a build dependency. * Remove base64 icon embedding and refactor integration schemas * Remove unused base64 build dependency from Cargo.toml * Remove redundant comments in build script to enhance readability --- Cargo.toml | 2 +- assets/integrations/chrome.png | Bin 0 -> 24800 bytes assets/integrations/cmdline.png | Bin 0 -> 45232 bytes assets/integrations/docker.png | Bin 0 -> 4213 bytes assets/integrations/github.png | Bin 0 -> 27482 bytes assets/integrations/gitlab.png | Bin 0 -> 19378 bytes assets/integrations/isolation.png | Bin 0 -> 4213 bytes assets/integrations/pdb.png | Bin 0 -> 21820 bytes assets/integrations/postgres.png | Bin 0 -> 46631 bytes assets/integrations/service.png | Bin 0 -> 45232 bytes build.rs | 49 +++++++++++- src/http/routers/v1.rs | 3 +- src/http/routers/v1/v1_integrations.rs | 80 ++++++-------------- src/integrations/config_chat.rs | 2 +- src/integrations/setting_up_integrations.rs | 14 ++-- 15 files changed, 85 insertions(+), 65 deletions(-) create mode 100644 assets/integrations/chrome.png create mode 100644 assets/integrations/cmdline.png create mode 100644 assets/integrations/docker.png create mode 100644 assets/integrations/github.png create mode 100644 assets/integrations/gitlab.png create mode 100644 assets/integrations/isolation.png create mode 100644 assets/integrations/pdb.png create mode 100644 assets/integrations/postgres.png create mode 100644 assets/integrations/service.png diff --git a/Cargo.toml b/Cargo.toml index e7b2730af..752ef5366 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -102,7 +102,7 @@ hashbrown = "0.14" shell-words = "1.1.0" sha2 = "0.10.8" glob = "0.3.1" -base64 = "0.21.7" +base64 = "0.22.1" image = "0.25.2" headless_chrome = "1.0.15" nix = { version = "0.29.0", features = ["signal"] } diff --git a/assets/integrations/chrome.png b/assets/integrations/chrome.png new file mode 100644 index 0000000000000000000000000000000000000000..488e9b5fd321db9254bb81362f5a2b6e4c0a40fb GIT binary patch literal 24800 zcmXtf1z1#F*Y=qqrMr=o4y8*NI+O+pNkuxPL0|xBX_W4il%s;i8$4kq~X z0n;KJeE-N<$-or=@OmEpApK4y7T|}JZu0tW+D?{kp62i00iK?oJT{JYu5Znq-|;xT zw@TlapaKATKv_;&$17v+w|9z8@4V3A_19m|uO;`y2RPDehQF2IJcSQ1T6Zxk5Jx{_ zVPTp0@k7`NN4cYIg4tGJqQqs^-WK);3!29`P{bBqbFc{X;MA&nW?$XGG%PGrHgxoO 
z<@q0NCjHk`Pb2f1DEAUUYHD0`(xOM)c^x&$f z3Ls}Dhr;2xfS&{jWTIbKCB5Xvrkj!-=nbv>TXO+2qSa+KU^4)h@#m`$iB}~re+uIq zeqhJUrVt!Gzon3FJpTTAl(Ehs$kn;nbIwL)#NyQ(2fn3O5%~?EcfNdr2v}q#Fct|& zkOM?&-lwcmYssRa|JtiTU+H~qF`FVy`6wbIu8Ix#?{Nb7tRblU^rAD)TaAgNvm^Zq zHg2*2aIji})+4}?qyCV{()yv6XQ}^*52aGg=kmTX><%29nBja|tsuyi9_9Gj9&PHD z@Wc;H4&^0N1)NU^d=L3c$bq>^D@YfmFRkDm^`MeHA@jpqzY@+DWePQW!gQ;}`b`LQ z0APRUFw$yA1}k8d&W-q+vOnI>dk2JLP4`XF*_)JPD8f<0G2+SOEIDk*gD-DHR_@XH zaHR^@e}g>8*zs%6BxHV|JO39*kv=9DsR%3u*w^Jz@zk*M_=csc7tSlQjBYRo3e=D2tRp9c4Ib0B1TN0TYs()~$#m)cZrAU6ca*`6H4cMc;iuS}!7l7$ANZA)CpvYwgsK_$T** zE__SOqBN&wO6^pooPfLVkFNKIvrd3jl@&&&ucY3p#z~+*;v7$B*2+<@tJEe6HUb%| zQyn3=@ewwMLD3i%;*$@$&XZ56?2gnWc_V<2$&8flDGHs{piVjB34xrg9vUbTFCx0} z-D*bC!TPjb4c_|tiATclp!_;axc-cxy5ei;?HFY+vMi#Sgm%xgXKg(W)FthMC4bGU zJ~CJ6`*$JA?8m798V;H)$&zkco|l3mHB5nzz8Bm{L}|)6bpSwUcjVCqX}OwGI6js< zfL@{*_iUF0#8mPKb|tWZG6tnBQ};&?T_m$q{pT#%IRrq@nqV#=(E?@9(^;hwwSCuF z@+2qdrr0Hawb~H)y5GXZfb2)jZe#S)Ms&P+Gir-0?@yJ?w|^SAZHRQlbmvE_#y|Ma zvT+F+1YvXGZX&hvH3?s`F^2IC-ajuzXg!kO;@#KdoxazBe*W&$A3HgRx}a-@tU*$~ z?UFo?TR>MVqS{FMvSqzaO9#IR00?8}b^+MmM)1Mco}}e>-jh>@>)FF8*pS4xTb@<2 zRZeT5f84}=KDN)-2S<`F%`NmzjMGF|$Kn+kXhE)4m6)9KE^trjqkI1Yc1*^Q>5H6yJEc`8@YOeJ1 zLxMbxwKVY}%(#9(0L}mNX_dZfA!`*<I+pBdXTV#4ad>#QN z5NmdRMJ(0i)c6(%?g!>_&$-U??+O6tLw(az630ap;0?ZO^0V@%)h2$%51b>1;EL3a zZ<2?X?|@3%A#4Jd?neL70vd#w&Qf2L9S#$xwJY1apoG~xtZq8E)Gb&tqbqz8%~ z=sUOTcHu93QRehAo%o;aL{|x5^b1fVPTDW_ z^DO74l0i$5>P4-u@h1Pdr=ZQ22I8UX^yg2au)ib+vvJn?XsGPhrlbUcA!QI8V+?dh zMU?K;R!K0^i)@az?_=q)1UefGh&R`h*ZMg8w~ORpkX}LClL1Z&w0^3w>3IxWJwqcG zLh@J2M4{DF)p^|zksm&YYqoa;DnY{-!B*~{O9RG$S%@7AQgU|z_fDRs>;25_U1kFC zG5+lq2Z3)T3W{KrGyQRXJ7$`81C%v9!gOTsG?rBiGEHBfzP+Y2Wq2@f{q6HENKQ0+ z@hbabCEcr+U?`)4WoOmqrK3oP%qJ)+zUM2~nJ5X)cmvvK5ZCc1--IxR7dBJI-MJ($;8;}6yW%41Wz?@_!HYD+rJTBP?&P>eb z`pLoxT>9W0xN!gbmxy-CBI7$s<43+F%Ptx(z}({Wyw_pE@_oK?l}$s3-hm7UMNFn_ zO*%5D5)w1&!KcW@(rf2}%C=h6QZ{c0U)DvsGcp8e^)L?WdvlZm3OR1pBl8&_JgXq= zV6fExmU)xI7o@M~vSkeQOj~q|Y#qs*AQWzHcSwoWsxrptU-=eQl4&Tne`)?Vq#%wB z&2z`?fDG)*0yR2;)J5MkC>E2(k1$b+SVk6-nRS9AcKInS`GWtpFE<5R(Dn1<19NE^ zGqeEMTf66RMUFZZo48*fNwTNtrX;anwpIyzX=o_oZEtC5ItQyPHI5$S#efEPzg2J? 
zi1Tl=_L|qa{?C(8+Jzu|$i5^xp5vIy(79DH@Q1Kw1J>-{ZkuH@w@Gv8bGIl(88(LL zy9FCJi*nZ1WjxR3J>GCgk5pNS$Wjd+MdTe8;;8>tx8#e`;}=AzRIZR%DP0-y0`AGx zls^3g3_$S19=W|n^Rs6gqqrk5^X_q4;^m!99^OhP2&F|q(O)Y5F=<+Ay0FbXUT5i{ zwXLZA!P2KPE+$W5F#P3Ge0siIm7bDd!ZsXm)=Q$JaWtHT0U0kvMK}6wYO`5Gj0BdL zN!tYcx5vLRqfH+0I&xf5=GDvs=7#$De0ys7c}^=_i7-&D_5)xfU+yFZMH%#34EU&K z|L!VBmP~SxO2^udem?UrJUv$3o@e7}054Ai^^{B#?_r+)<& zKk2sLamc^bRw7W$C_neRtG`@-=;xMoSpjZ2 z6?xtdt>&?Yn!N*c#k}B|kVz}~ou39eUYGf6b#+`XNu8~PiUDpecTZnENXD82w4)xm zqrbyWb)M|sI1aFHBuFf}xQL3c%EOrnpKR2M-hhn@>%Tlm^II@S^j3l<90hY8)FrWo zJiSL7uT>!>^Op5|x1AX+XJ%+_>4u&~Gf)mw?QXp?Z?sluv1kYb?>g*wCoa^wU#0%x zSsh7y7klxa)h66Hsh?p(xRHL~3ww=R6lwiiy1DEJ5mF1Cs!iTxIH9g)GZnLrMnL+v zdu@F4ZQ@wC0joH?VCHsdCA{Wt(#{~sI_!@kc&!j}M6dE)APxqEfG%o7Qgx~v(%DJi zTD)qA$WjY9kg+|P2@?Y(;b!Q~I%l*v_64OehFJe?>m4L|l30b_-y-Sk6J5^=9$#~oKGHyys zEk_d>q(|%ptr{Zwr+*Q{HkA#xfZXn!Q+=NO@V{a}xKS1zcHF-EH1KOS5Ii{I-8E*w<_te9fV_6A9sC&kZ_{s| zBGgBSz?HbO1l>C|Vz&YD`WOZUG6yG)sKz6osL)BLJgOax^nXy>yLPL#mZ8QDB+VE* z+uzhg4R^3}|F-xLin(AOUi-R~`)Pr+3>C0{sDIm-WYzcYPXpl!uK0#~m$=)aRWoVL zf#t*=+anKDaBaIUhrkH1JG8;aYsX)FKJkRx{Nb{n1k9gyd+@ta|2TTYd;vfdt#>VP zoSIF9M39YM#HB1+m#p59kW>5FR^2Lf5$82%Z`pjbvX}sDG%j}+2SN8+XgF$=M*0_0 zUW*Mw?EL#z-Lx-tNORa6LV{)i4BF{icz;p4eY^K!9zbNuAH@zq?6fv6L(aGncA#*^ zlkqmPh-1H(zg;8Z`CI8H{HUASS`u^_hx&7OJ0R$i)g z^m;lP3J7(^_T6BycyJ!VI_Cj|RRs+9sgjyWN&VgA29x|a0!i4zdAgQfy%!>I1>oMW zY40a?s_)Y@VRxVhRRKmeq0E+tJQ{yXM>lx2CSbi0Pi#BR@ZxE&>QFFedU_z=C9wVA z+2g5qBy-0=|Hy`p;kF&KI8kvhz$phsDKyHBIOF0zWZmf1x~~Ale|`6keKkzX%vMaZ z89L_z+$89IbjYdonJ=SGjsdrVzQT~CLc>zZRi}GBkMm=pKtW>9*Pc9)YdYwUKvZ?W z^R&H`D5A%zTX_As+6ya&)LDT)0hXnYW{bGgcg;Bg5$D)+)9W2Tf<~_d9%n@=bf!WR z%x+-{uJkj%Fp$eaj+y^UKh>Y}gN=Xv7#+0`lRS5QRk0R%?Wq{rmY}C!88fN${@e7) z;|?f6;yK|ZEDbu<__A>;`>^oV9Ut(mlIe-S_p?z)%|hM-dN5;1O0(5mO-5g|NB5+R zQA3#iM<{L;oknGsk5$?>R0P@P_ULDCWLEqQR5hpuxYCNZB{mK@eK-wO#m6v^)c1PJCtkRT1vAtBGo7#Uq%fs@S2XD=4CeaQk40CO^A!6JrSw% zoVk`zaJLPDmLalwu*Q(yBgqN*!a=IAbEnpSTP^ar!Ct+rHU&BfE&ejL7Cp#M{9bgo z3xZx&aC9-H+rhvebkK%j=K*r4*K-b{=b>-&-*z0qNO%Anih^2D+fzMWRpG6(?qM8> z~g$^p6%i83AnJKTcBP-Xc{J;K(JVoHuHOLuXRuOPb&Na-Ahw|NYQ(# zLq)ehxlUIpN(5X$@cwyq!810ma7_I$Tm2+x?l@+ z9A@AT;#;PuIBwSt0UkmWI1tTGm(r-A3IN^~w+1a|y(R&BM1f|(xZM_SOfZ;dS^@c) zNPbuL1mC?vX7Cxzycm!#T2r&oyRTBM*+}hm_+q-DFhCHH*0=@(>d~D0=-ivZOa*3$ zSY`O)#q#&!KLVP!IQ*C(gY-Gj_-poY07h+5BxVMJe}JoM!#}`OwWYSBtJ)?g(0E%> it>T1p70id#Gn!pua0%_98dy96038i|WT~3{%l`pEThprm literal 0 HcmV?d00001 diff --git a/assets/integrations/cmdline.png b/assets/integrations/cmdline.png new file mode 100644 index 0000000000000000000000000000000000000000..fedf0d38039eeafd0db85b77aeea656ca4f23437 GIT binary patch literal 45232 zcmeEt`9GBZ7xpNkR77RpMIu{5w#iP(zOR*?B3srmDk{ zrnv&VGxhF@3-E)|S^0%B2via-%;3x2PqS^ojeq4R0W#Iz?-Mx45O`&bbzXQBU?W_Gl zoqCD(#?AXTwz`*>fh&Tv)s&3`CO2pNL)fv9ZSrd08+9?t@xQtWyzRV^Zl>k@y>2yc zCd!wWe#?4!uoukDz3@(wt%v#8cdox@pVCg>7PT-HGZo9b-wof)yLK-sgBz6Nnen6< ztWT*@*ziem9Bm~=!`<^*JlKhFw&}`nagYE1>;HKNXu{@igZ@d-+_(5{1PT*`f~-?O zPscCa_XWL+r2u79f!c5VSKV5DJ?!OwyA-!gKv+sFJBZ$m<{qe>Ui=9*Y~C95RQvYA zcOMGBO>`$*_<>Jb? 
znO{CbH)MRom#dTBewV(M$^9X9&XiW@pr6vCV37N@MnJ1Ll|t(6kzjj%KKr}?-fR~U zxotLYcC&7WR4?n*j86w8LPvpOtkJ@EIII(x8zAH); zdCrRU>pX{OBvvO@qt=OR_EBm%PAY`-WZs0YIpsejBEGDv1Y|ltO7&HCbd^pkw^f=8 zhO&14^s4gZ>n1?*M}%;}D5;jShgtm(l60pctCC1H-*pR;(W?R>5~rC3zH$$7!Bk)o zwoY^H3e1CyXLo$r*NrXf-+1h_Wj)OKPjqQ_1hyy3;ZXO7Vp=19{wjAE$mq6QpKR#t z?RWC3TsvF82=52c%-RBX;~IL=3L_SQn8~j5M+09+Wu!EE5*>AO8e3xp89add{rlQW zhBX>l%I4+huq4~z#oj65efBXtQuY?M%Zp!_Wl^@2ZR_FB7~UbRQHhSNv)VCd#w zBT360UOo5j2?i8nn3KdijQ3=1{#4=5GN{he#fo}QLw|>LA{sr3y6($*3diFyhs5nm zFzhMj$lTqBu^*1tzv{p*N zH@&bg$s$w|&3-)&Rmwu25h=P-6z29JCnz!PdTY_FUZ=)4(rJ46hPDD3M}{*twt8kM z(28}n?SE6A!DaaA1hDt4MbGh3YH4QXHmnANf&I4cL>|2fescIvk>I48DVvCcZl)|<53>H@B3n)o0v&us z#T^#*-dxSV=FOn&*41|A+-R{tyQRsv(7M-SlaH$(ANsKN3O``_%NTjDl&$EIh6EpD zib1`EZnC{jiC21%y-tS2ZwRx})g3L7$fP;gOCp zn>{>#m(KALsPFra*G8spIGpSH_iu^c4i)h(0%;S;8{3mu?~0Sbgtjl1KlRx4nol-R zn?7k-J*dMD_rSxWU)Qk+x9H=#L2(s&3JS__*wt`Y0!rHQ-_AYPkzmmmA)BkA*{6M# z7-?f;)3JL!i8s0(_WE2DLNjq1(nmLfqqL$}a_byuSU_Q3>Wmg?<%SxDbALpeg;M%a z@!F2>^NbWd3r31Cn4}l|V#KA16+G!oror;(Pwx$mf#n#!_}dIR9c%J<0 zzkSSt-haQeU%zE53YE=5%T!laZ%-YaM|*}W(?TaFho_~gzWjylhUUBq=e~4_hK6JD zI{V@^{{6d|^B#5oK6ZUkB0V~E$CQ8zD&VJ*SU)B?*dwf0`WS3c>WIu9_XiZV9-3HX zh#?SbL(tIPT2i;QbVKd>_-;Y1(&;~3oZ`V#G%lF1Brh*_Zt$@+;BvQGutva7N(#zK zrGOZjH`hr&@yODXdOdQ`$vrb`0~ybjoWR5V6&q_?)AzYv>46aCN;+NR3fEfS+346nFz>i&>0y+^EZmI0>@H3V-(uism?f(Swud^c`xjroyXhuUa^-DMEymGeEv;_4^p-sW`O ze9pD?^}xe_X`ZMfvn+DkC!jK9nBb$|II;6M6zfZxj&p|tA7H_#(7`Ad!ynwH2N5Bq zlJRr{9_e6ml#5i56F&naZcz^5(hCY>7*0a3W}#PorXCfImMTu!25#t)37%YU<-De0 zx8x$O#FYmr)9`YKaZ7C#j1NlC%L3t=f%7~qT6!!qI~uSuSy?6|EY{HCHJuMzET-?B zy99ccUR^UoAYwzr$dg5w?_4B@UZb*U|EaeeXwRAS^{Lyl*RQgDK8X4g18|`qLt9RH zI})?AH-)>Kw|~d|1@Cs-*;YB6^i)6g^8<54YKTc_j1B)+`VR#9$AJQ_Y#FSO@oZ+y z_k2w?*#8X_=cz=P_>K8~7vcZ?n1fZLgHfyF8e2F${ZsLWZ-2-ClQ{4pd6MVJC8U4- zU`VKm-=TV>!=7p>uG9!0%3fsx0(EruOqN!j%e5xF^zNbR_j-dV}kjI zPxL@;2q=cc$OIg7k(b1hiRWeKrxEr0t5ES&9TMG0g9LZ)$|aEaNpW>eC!sM#UT(6v zi9FV11tv6y;!1MA&}rj2Gan~B;XQ}|mdJ8PA%@?EkRA6obX8Bue>Xj%gRWyCl1?60 z`NQZ2=)bDn{gZP{=JxiP?M&r9%I;+R1$6}Fmmw7zWdMcxy8iKKBS~#sFU~Ps#NEKy zeWvr*QmhTrUg`I$n!>il?ORFIJO8L|7Zt(>Cf|`~zWJPb^ z4?h3S{c#kTc7F zhB~xOf^I(umj(Uxm6dip-IuL|=k}M*P{oJ$rU-@jvkJ_!ra6H*!WGVilC86!5g{)U z4@*LDac3022&=^ZvgC`a_aO4SQxA7mb>*EIm(H42AvtkBLq9a5E$uL?^~fK%ehFP= zv^PvbQuh^|iS7rmGrOj)@`vdzFCFDqv&=?2yEmciO{-HnJF7AFS8R18K6w-nm@{U} zQVmejYo6;+wmNb#WYr)?ru;f3ll-xdD$2C=)x{&hPEAdb*4v%40uR`rC++ssvmv=; zA)&%-3xz))Kpzc0b-Zg)<)egs;I42YULl^CHJ8uFv_A(YY6(C~Z<|(0{>ERe5;ML| zGMC(IY^<0lu1Kw_NR5$PpCw%OoQT;A)H?~$wN)w$Q4$l&GE&MiQ%W%sO*8q85B8bZ ze5#z*@3b5osI7s*3ng_Qd}c(1NDHI)PorMyP3SoN>wcWlFN9z2B!d1~A*=;Pm{dNx z`D)aM=$=@6w1RPgC#Ze#-5jN~?%TI%DpGauC+Yukos_16!MdAIuwA{Y$-RzeQ9PV* zL3T>s(%0qY$`2p0M)$<330T)Zf#sTum_7|C_vSND&n>^i8Jy=LB7lw6Irf)>A%K^i zUe}NU5lUB%IKRm6?sLTU#A;k=wG+u?rnx)hLWT^n887Ur*}X^pi3Tj?5kYKC)Ici&X-{yid8U0qdJnQN2r_U*>zn!aXmn%vaX*vL%2v0j{^ zqGC^;QBU6RvjU@M1%|-uX9S{rlCE%8uTWO+gDm{RQlI+S)-krl$bp@gEtdtHuJ_J? 
z`9f0?ZhNJp;u%b&jt0(Lq<#nR76nLVt4S8A0WZjBYWUSeN5^O1lM#&-h9+Z=;l*%6 zlNYfM5O5O{BLw^>t9s7Rhn>e#G5tAV!%6)PoNrt`NVBU@fJ|ErGyzKO z;jNu6*{VU#_mcs_9lAmw{o1expLM&tZQ!7a43X)ALboy>4NV2Vx}unHKu}mZ3*u=E zu@#Kt!ValY`z?$cJkFBMUir=F<|>yBJCe>13OWsXbQ&ys8YJv(u`Ec>eVs`Vq+_X7 z&UjVa!&A%>itc(Er_EbhvFANDI_5qw9r$xZ*6fDQs_Sg)=bz9HUXw9QkKb94b&i30 zjzK*PF`I3RZ~VNWa7x~8@3P`f>v#Kh z#S=Hl$D;*{GUSDVfhYFe!*gFdUdu4sKX3}26_@NtSUfe?fsR=M+OJAeZ{hADWH>N3I!di4z7S`JOfy^mqRoYakAbs zU#Gtc%P<&7v6?vg#jc2&|Dtjh!L;6cULA1a-EZ?pKiS0OL0Pe(qe=CH2Z$vk>>PCx zApPHbNmAE7gsg9-*=c4+>aHY^p;@OVszIymT2OWhhvIWh8lJFUYHgk7O`N~+Egf1F zbH@V@qX+7Das19<6dqfj!svL4GbZ-Nx|{pPbjhR~V;4VVft{+SZJ$xB0_< z!R6)seDMw@ka)eGf@C5?x8&o2tg;hlk>QW((S99?`4=1AZ&Ro5EMfr%fFnqFNB;me zZY`P;+<53P(hS{=3ndRdj8jS+*jw33MCR#Cq8vGZ$EU&gDCU+Xs~af@Vv7-<9I z!@5c`z2Fv3lL8+hi2SaDlB1>Ijg8IG%&jsPJd9(q)~5l+sg(Eqw?MUYma$wKhxA(t z5WV??(po=rZco&;e%EwF$H{)R!^yLfi9gEj(&@SR!f0cuNtx_sC~00txI5MXLUxf$ zMlF7J&N}JfBCizC`TM;;Z96aetZOJ34y3Bey#1*53hmv8!(7mH&y+XFj=~h9L;$Te2X~Bc6C!SZ-8> zq=_lkW!b{rcYum%J%?bMO%;6?kyG((*8yObmu(`&xdYMrwr)=*4#t|vQ0VE7Dq(se zcVsm}i;oVSsKNDJ9e9Q>zfU(W&S2YE_R4Mi2Rhc~h`8F)j7R?4cs1Uk=Te`J_Qio% zlzT=vSX;~fG+Ali@E&!ZV;c|@eUmEy;qN-7OgAYN1o}_s20x4+>t|wXQDowdUH|x8 z3>&d8Hki09{Fof`$RS230Gh2W|K43b7>PukOwQWOosMdPtD2kN4<3JUx}`$HegDIb zM5d!3wMzF~$o|}_op<|LXnAAff?4dM)IimjO?yq4OJ_9NeQF==3OukNKeTB6$U4L9 zEo~#zakxsw+nJ&s_fgZd3QC#!>epWyTnIyBv ziV}3lVH~~612BV%9f^&2M1aEf?A{I|Q)aotIyq9q9-sUz9Rr4Y)?1_Zziq4!m73WO zV>-FUj+nBrEi!Y)xFXC5(h~&w@lg41%55K;{#1E0cZ}y~JyVk=zJ3p4+4!dZ_3rUg zbeAuXNm_dTtIdkO=nV4VXYetxg^l@$%<`$NGeEGA5m-FU5JV0WkC->Dl?3X-3Dm9_ zMf#m}s{fXl4)$YrZt<;2Ce?E&&dkUK9ml`-3|6<|glLH=HK}K0aP%zi_W=swH#Pe8 ztiPPm8e_7aDADE>IU7GBb=8~Y2V*kQJzh2^nKCn)KYJW2>F~3tt?1^HN^nzILxFqM zyg8%AJs@#F$~3_3L|9LhK?&5)(G6=rYzp)$0i-W|0Nmwe z`+?|rKc)Ys4nIIEzl~wgLq8AM{Klx23im~W;_iys@yIZw9ud{|G6_B1_MN0tJJdNg zZW@B4ll-LeSF5d@InjjITDPV6Drb8*on~TDG2FNL?7z0vEXcs`W|$xYS8^(h$4sN6 z(ElF7Hx7unN)*|1Zj?YK+Ep#@YK~2h@=pc+QS517Ja^R405JC!&k zcA=2#Oixe0*sL#Ro#+28fUZLAz|Py#z!UHXI6V(+v1?zUy1Hhqz94fnmoQs0Gi7V= z{CTAU+TMd21j=2*TWSd~SibAvwFx9r`_8=J8XHb_evqJ}ySibqt|JFo)sqGvex4)u zkrlr6I$dK)RM(@SpyZ+9TIBRToDd_LY#nl)jF%akni4yFi@hCn@UJ)(1N6m|8cgWU z*S-MRmj)mKixU;-DTT6HqIKt3>%C~du^TcQ8+6l!j?SfqjxS;xLqaWK0BS1+?v&J$ z7W6`KkH8bP!PtM1>*4g5Xr6J0Tt5g2ndd@D;E6J!YjF#af6>gTdqs3yFiTzO^=@iB zHkUAy%OTeE%lLtM8zqRlaMiMN=;Nci>ruE;te*_=8>V<}Twc?GHAqzfJvBD%yMR|Z z+)dk7Iy(B$7?H&&IqYV^_xa{WtuAruJhiqjIOIwe%6}>~#Rk$p_jfT;k2!UG^fPR> zt=C~VXcHBDkY#rkwS7$TKUiTa6P~NhxdQ^xe64o#^u*3)$RF%gq6_m+^4#2T##Uxx zof(scgI?(!6?=QMp66@*skKMVHTzpKgtH*RyWND1w0XB*2};zLXq@zq2U(t{JR!fw zZ84=c?M^qtI$qkkaqAfJsSFW| zMZq7&f#IdFn;(Io9qF<}EOx@y+e^~?Hg|aL_4bSc<%4%<&G-&Yf{RFv>+(kB26{GN zVEfBSqeQ3aI7!Rj#(f(6z?rqOqZiQ*-1K$w^*l}8pI5Ftv@QgQV123lowkZ} zEvX8R7MAFh3?CAeJXhWRkr6AzvvT);P4nmZHWtwkYe?Id*LLS$%81@CATwFAut)yG zbmLwQ5WBLi*?doOFZM8te0-;=+&^ybL?Jj31QKVYz2QDKF_u45)>zDx6==DfeCKEF zyX?g7ilVDL30)RGOd{4HL(68HC_;_Z&wQ`s>rU*|3@ZB5ywmgcYwYyS#NRK5pyy?H z2onDLh%Uz7O^_TGD=hK{fIMhDk7q!Ft@VNO1n^an{28E+# z=o!Y-cx;rEAn`lMkPgGl!qS8cP9dFmk_>AhXU%>~JIXA*)pWpc;aa@$gM2Judk7)# zf1fqhmDH+kG{pAWTBf4WV<0_0r4dgI-bO8-3FdjdV96H@2YnQ}$<=nV0l9Zd_qTax zxeSU#)&Q%LFi8sh1GFkhy6*dJbQ@gc1@uAR_kwS_`OF?b$@xlqi*J1?k}+t0y&ROkJ%Y~Vs2icm%__9PMK)^?_-9ez061pQYmq*YO$ZadZV+;ejZ0T8~?x|biO zyJ!9ztSfB@(N$e!S+xn+J)+(|Hb8X9tfyZ@qHlOyV04Nad0oEtTxBc$6b#6jS)-R9 zYn0(gF%kI$bYt4%d;qDss%p{C{LD!l{N`Q{?hMa|L-k;N|Q zGSKZ)pMi(H)%lA0I-saEEijChwngqOBgajF4Gn*Vv_VOeKXv-AH)|QJ?5*dTaAmy<6 zMVsea&%9B7SR&IyThs#DM#dYH$C0;Kh^SPph2WYG(9p9KHxIW3_!ZDE%ILddSy;BS zqm5WdaJ9>XcfZx2cZN_$sk`#dTc@!D0QCtXmB`rkGTl{Cg?;mE(NNI3B5>} 
z!?d-sl5Fct_~eOmhaRfW5OTTT1r5y;)A>jf^%+h3YrtO9vYn3$yyJMiS8GBaaSVw8l_Fs5ay!9ESO zx0;3B*Yo^-2q`rNiq;#0k%@_Ho3cj!PCwt3pOtrF+Xa{XpTLalOpKbAif^!G1s`WN zOh#R2_$EzB-}*(v&JCO3(9_e)N8YiUJJE-rzdz8CyGHH|xc-$&zvdSHwC07Z^Zyumfx zAk)X<+8Rg>CK43s(whS&qMF~&Xq$Ur4~{H7g69QKr_jXmqs{RSldFJkX|n``X%o`t zi+6qcfMu<1cST9!kc)5oH6umBpaHmW%Pt$iNt=41C}3qVDLE)W>I zmtG|KWxoj~37+m{k&BMDW?viw6W`BY;;nUo`oRZe0&uD$&)@GACYHRd#Y%8H=)qNjn?L`0#x+ITG0MfiVEcFA0YN`kL)rgB5Ji zt^YJOs$=4+`zSvJrTWd=?=*MbQ8F#F;W{e;pC{RgA?}k|x+#O8EcU*~|4~4e zX|u4jR<-Z+C_rJ_!V$yk-+&aeirrsvok&G2%j*8hEoZj}hOW!c2O8EC#b%(LZ(l54 zXT9Fe8}`lNI6lH|7w*3{_SQf2m)CY9fGEaPK`&vsfVguyX&0(&#Tn=#`Mnb$aJ@aA8Tx>1>^|XMwlVBWu*Gb70TVDjJIG>(xv5g_EZog;hM=`n$tvtX??qG z>|+4g4M~(ImspX{z{wly^Bb2yAR|le47o9Q8=&+|-e5DC8)x3!Vq?_iPwGWm*3PF= zZ=Vl!-$M1na-G_4-|qf#4w9VygJ{9epyK?d?5#?zDnd@M?47POd%pl48;3tmK<%}I zjn0aUL~7-`HwUl0>~qCC_Q~I9XJ)Zql7+E%vK)^eKxUcw9v#dPYYS#y`4MXsch1-g zJpIpju8zmxhq=z>K`*D{AQ)J%OIh{N%;k5*$%n)bokOIl3KV#!iP&uaub-fIRy#-V zoFh;xZQBKQ9}#cfKy9_yiEmm7f*h5RG!AdXexRequWz5urjXVMQ z_a+p96i@gx7eRYJ#z-(MTsq|XO>VxU5UpYI*&bK~wbBt6qIfsDS!V5WFloL2bb*Uh z3gddxngPDb1JqQB`)aZEGdpFk)UZEm=J1Bqj>mtFfP=|NPK z@!BM+T(j`DkeO4|dCP~tgyq0YKsuWv%@~V^$O*x;!4BywY3?p&JY>%};#JrO8!0aCh zuk~a+dBR*IR6Cw}Sr%qH(_}Pve%34yLTUW1q4YwL<7Zfh%yEhECXBRdK*g?nrUX$Q zaVwnekH53?Pxp$I=URKra{Cz|VrH%i%@Mqmvbc`f;}Y7tm%WhyNtNCwE(CH)H2vnH@=9kuaKMtw$ApqtowBbf<}_ZD|kufr*J3 zq5}Txj)@7pu-G_I{WE|w$yx%y9|Ws*i_ma^^b_MwAWfJA7vtRk>qrN~?g~kunJ@$O$x?S|z-Uq3SjUj*3i5?P%!T0Vf1c5aM z$zMmwe;I*$Hn1q#_{j>;@FFOd$=?Ok_iY@4?`d-0H_@V;N`NU#~=2qaRuYt-x2ek;}oF zQbFjB!G_50b;|fbs`w$`K0ynt_UX;%1)ib5JapSUyMLI6#eEOjn5RC(+Yu`aXJ)Jw z`QEW>0^8&0=m>|Gn!d2faC37rHZ{mLSI`{h(R6P#L_m^sVPx$QyhqN2)<bVs-%gXs*>O*g1>?oj*A#3Fo8F&Eh8WjYK zRO5{jWOz_ks4tny9uR6Ur@+`KPr)8~_7-^h?lbjimmr+)`4T#kol6w=K9SFBx8xwA zz{IeU7tl=%A zwVxlnv}k=xQjdSB&iW>R0}tw~&2~}qmcZUT<4=<80HhY(fnPa7DQMhy%O}h7Q<02v zU)M0k;TOD_TBF705R4ut^dAbNTs;|J+K|Hp)bGX1U4wCFSQCzEoP_wnvqWNtzsE$K zZ+p)iAmG^QYuZEX=;j~=9^~!XZ@LA~Pu1IG~V86#~J%tM>Yr6+0+QNQn8= zmIlWKH!(Hf>GZnt{rur!%5rR8&5`t>ufOX^dkVapW_qWD{xKkzZ!s|9hbrT&&m7H@5w39`-PXi*);)WHy3)E);tP$D4qpX zD%-~Z0}?pA$N(Vn4;~1!*iwOhNT3Z8wA3!P;(H3=COUCxdH){NL-(gO>!~mXx5(BT_(;&o6bEGoqnA$tJf(ve@Pqu6A&Zb&F9=p<3g?u2s8zYcKv(W zTG2=uuOALHQTBC_s|8dE&F395VwQhghRD(##*`)@Sqh=nz2GI|2FUUSSpt{A^z2BHf%Zst(Q1sRJVP*5foZe8*oPq<5ZP34g zsjSU))*rNzncZuYalf`N>nd~4+gIUP-ZNu1s&NawrkN*|9bT+&?S#cvUL@|FG;W{F z(;X~4(n~Rh=16D&HOB3SyqVwOVH4X^3XnLhTCTxZe`;<-y$?{UEDU{^WAlz+Hx&c0 z9CE(@9zKB~!rTOs?h7~UiILas#NwCeiiR|Tl~ctT=!2eqyvogJ@gAGJ1U&(h&i3kQ z?}ko#rEk0(ZbU#D-mh7a4<2X8dNn#E0zI3N4W!9!W8?O7{s4YJdqI6{V^+5>pFQD8 z_zDp7bB0u|*Zdcgv7QaL5_y6`J%sTvx-2by5)l7S7cdXek7XVoX7yz!q1UNW`47Tw zb18M4qO#}Jm~4Yw+gsJ2m-=(~d(j)rb`>^M8WE;XAzO2`wfu(pUhF2urf%EkCqQ7Y zXS^KPt^kEi^jOxHUX1+uGr-NYGI^8{tZdp7A-ZwX{&7e6se-4-r>C>PV9?{#4!U>Z z7vx;p`Oh{!VU~uEGM?p*hK76br&Z{<-O1LRjh%tP?uUPzNg-PSyMcHl;E)94Y+gKr<#Kr=~i%Jsyuw=|fPcb_sa{@l=D&8`xZ}RxD40#kyI&U3v zlesZR+Zts~881Y~qzabKm6~n=(o~g;eB21%*hsr9&uo9NqzcYF1hPorCprz=z)Qn+ z6>zOIaZ6|bFy{6{#uKBbFO&FFmmNz4pJpTFw9GwCmB zXFsXFlBOT(rIgKzOEGR?j}`GF45)pA0Uq0 z0n&0sN*a#*?S#QY4$m?DKr_Op98u!p_jc zt9HTU?w6csD?qQ0u}!aZHqAnIREH~|>79en}=7G^v*_)F_)@7TcK z*28w^Obo4_X9E6ki^qJ^EvKWXu(YUQfe%cqtp4OyUw%Y2B%U1}rUFX+_EN&|GMoFd zu+3R^nj735Cz>L0M_DJWXH~IjXVY{mXnENpP*5Y+px^b4v7^bIeMJz6*|Yq03=p}% zR(hFw^ttmN9vrM?H{gu{b(7!d9EZd__^xGrU<4-664@AGQcP;$H!gMQ)Zgr*A#+Z@ zBxarDlLKveZ#9d5y~7rhpj(|j`pKW9weA~)v7s0;*Ezg#13=|ppD954VS7>60Jyp@ zN4u)as$F)zR6k1{xOH@Zg5Qx-4W}nW!{W%}ky$frKK+#$XXN<(f z=r@T@=@(`tsq(jzzTkMe>N79rA15YH^Nr|ga+0Y&oBpLchj|Haxs zM>Lt-&l`Sy0!bES+ZW?mJg7u`#tyk$VmcXDsjCmRal!9#`Ky`9&d!~v)ehzh-V8_n 
z94iGZVkhra)HfsTJ1=knQP5zeArN*i@I`+;lWoYN_~Q(z(*;#Z#!Fp!)Cs%AA&&g( ziuk5Iw@!&M<>HVrU|S0~L-oJVH9mIVuG@=>vLHkO9q6NqH5{9lcv^&fFsjTsGeiZL z6>lIKql#-CUf)T+!(_Wla_>3ZW=i>j3Cg9SYo(!ZF#Oo%^1PBod!^V9#|Ug=qg}_h zJ5v;ckvjZIo9_Tc_V&h{gkpluz)zJUp#QHrjO_e|UcSWq?)9BW zjTG{E8FA%gV}{Y8%90M~tZuL^o}jWlj~+FjK`|OR3GE;FL(n6$98%{W2_}_-_rU>G;zsoGjf%2f{&6iKIdas z;aTVbb$l{n0Ljk~S$cf|G_0sNd1~&7q$76wvYTK>?^~E^^j-qJWaS4s4jUuBN3;=7 zt+X#RWHW#K^*5od;{x}dP;MxoWXZ% zk4s2~i+B?Q6Jng&M)&vjyah5YgTyn`b?5R9_V)HH4F334)w=fb(FrpR(pE=0OazMp zvtGpMUcY(UHW0xzVcwHFh&bY=pc#LXBhw_v6j6a1tJG7<9vFZP*6x{$bmRfYJ)Bf- zjmror%F#xst}5!GJp-QsmPt46#}SLN)jlgyqDFY>YkXT#O4b}SKtNEz41o4{NyNO$ z#e$#Vi4g|=LxwMxQ8Ug3D=a3251d!}08uSl*p)503JPQ4=8Qu)-M!u}xnYy&KL^|L zj>&zE6SF`dBTj|E2fKR4aR&R^n=oBa>c@W)D^B5nrsWj~QEjVlX_R|ag-cHHJ>I&U z?b#0i&I9L4;G~NaX5YuraqY`%7Hu2FnV{v9RbP1ZllWJ&!1D8P15e64ejq)0Uwsl+ z0SVprjakJNJno?tZ7Qw-+^;_u;#%(~KXaX;1;(T%_Re4Y&FPni5*zX8*>j z8JpOYxOADdOC}y|kC*aM)Ad|^840!5@g76^Az~L}s#q~DV$VK-7_`zj5#=|oH(S=Y z`Q0O!4;noA;EU|K5y&t>yZ} zm|3+OYMY8NNrIn*zHt2AiT*?=&%dUzELpz?L*v z<2R`#t>1P%0Jw-YEiYhp_*xn**1Y$oQ+BMK9=}2m>Y#(|6HOcoQ3|pr)H@QCRypP-f0^R9Uf0Yr+(7~2|e)D;)^>=;t!r}XDR`d58=0V zuSJhb#KL_2d?WVrSaKZ#p9tFl79+zU7vt`|y}<#hUH+!S4xF2-TVjk*oj@goLLfm) zia)7){C@VQFlCUC+?$xuDT!e;i#EI{tMaekOuhQs8fU;wS!Fv_ZB+C|;M>EA{}J!u z9AJQ69@X>giLUH(b-wlSY*o2G758KjVXHvEnNY&=Tbli?q}+#a5QB?sAl6DD z^=s$ra=AMv&pz14=&~#3{9f!-39bNo)9fwcA571_f~5lJ*OGd!!97*vkC+7Cy}w?- z2ue1dK3VH~M&)+;{>pVDHdvJek21gw)|J)F?4b50k`^^pg|l?p9bUeG2d_H)%{52& z#~p=@IlSM>ewe$Pn3)y0mo=FJrRMSs0FmDu64qn^szb9ZF2X02)Vn@D-`3+KsXk{5 zl)cbay`6T>c;lwT3U`(pTJd&v5ARI9l!>U#b3Ry z<2enF9s2mN;N|qyv6#XCbRep#dB{?a$Su#Q&m);pL> z%*$1pPtux;5T(dx{xg0loFO~P)ipIemTZ&DS1CZu*!_m;sLsWxj@Qo$MzZ00F?j<= z?;p!#e8W$@E?9rS5dF~o%^UXV%aYTf-0ugQx8mF8O>{1G6~0=u;j#b54Gc5Bw?9C* zV-Q`O`Lm$me8x^#xM&#j;Q;w-L_qqQOU_WCw8wOz3Bq(8i7Oo;pa_bJglQi)cia}{ z-d6yZ04MHmbCKUVavVr|LdjUdCu)G5EY*3GSL^x?4KJG{T1t^J0hcwE6gG)+lar&~dk$-tzAnlFf>R~x1AmgXO~`J$Wpij#X28PLnZVVIpxt(+ zv6-2NwO!QGz~{jMy=1#o^aRCKdb!s0%T0ED(JZsZ@l{v*9AJ>#q(4j%i_*H44SV2I?bsAE%_}G)l!adJYPp=9u6ip{L$%@LrNqiW27&+oPF! znN{g2Kz$E}T%@WH)|^iQ^iQnc>vP@@DF3DQ;_X&F>nc6b3-gVV{!+)+EScPZrPG-5Gl1T>Wld<%kZreCBJR zzkD1>Il0l<$p;oz)9&qwISDTICML=9&vuyc-i^Rqmszc&n!(ft(ss>WXQ%-mc+#lL zE`Jxr=p3_fPwL!_zZADoTsbhzMc(DYiIEn>4u;}FjUPXLd=LRVjn4tRw|dmg_H1nh zVH^)h5ykwu_j=l5hOh+T&Q(x4K zyQ{d~swLT>T@3a&>i0>LsiiBr`XxBw=PP>3_^Z^g-H-tw?-~+Zs!-$aok~os+!09m z1{MF-oT2-|PjBSH;m@!({u$D5C-Vd|15|2fgAhksQ@{bn{*z0fa2O65rIDL`of}xu zI>$$oztLf)iHmZ{!e(Wu2>-J1$3Q0K-uZHmL!bHM<8u1F((vG;C#%Wb_gU50F8sB7 zCn0OPrXrlHCPah2)JK{!b0L+e)3LG*8R))DwNwAk4}(`hPhFz;>~G(g|N13Fsf>~0 zAWRE5kXOBbSHxUFrC&=zN;iM@vf+Un6+jdan;B$-kV+3FFY8Pu6o?_VeM#0~qFltf zPQUrcz4FjWki)^k5W9Mn40I9TivA3(#&HoWAT3*toGC(|m6_pmlEvNnM zVH$Q3DQ2u!xv9P+-IC${=8z&{V~b}l(0PDL1+XEJtHR@jeGHKHF%peA;DLR=qX}pC z_NY6^4LHKLlf!>4L6g(K*}0&&I{X$0gg&9DOuTSLk4)t6!Qr#n=IZH}v9YEu$4|IE zxvM|t<_V)@(bA>82TT@a=NHGZUZ`Ykpzx&DLtEUkwhxEF7- zGQKFjyo#39)oGV@p?B|SJ5dF z5&hSJW#qo|Wv@?o7yC~pQ8Kk^a%$=nUf=32?OrnNlD86bulK>h0Io~b?vK;H@};h? zZzw+4yIhWDwtubMcztkUsPv5hBOK#4I(7Lq4flsHNFeSv{6km$58}wjai?pVV0gXH zY7YStqy`w*IG^$+Xsy9<)&hu97z_lPda!^5+F5DSSVJ+GshqNoida<(v$kdE3cPMn zHqyU6HFCBN==<07Ydi}!#T98JC?I=)4*Pbx&H!OrGF*vPLNwq}R%tSxrvCmiNuv~? 
zkq$qbCosmJvw?hoXjq8IZ6es!j5mXa^{uSzO8lkS@>YSiBp~gvs!G?>Hp`m5zHl6J zxt;v|-Ie&+{52;t&@RNT$k^>=YD+42s?G)kADnf8XwI2|8nx5j(ZF%eY~bj!{qGc*IjYPWTj@_5o1In4CDojs20?Z{agCax z_U;f#crpr8Y?13C#fr>NqX5l2mTcLC352(mH8eC_v2CQ~4p)C16h~cl6ZVlao>xk0 zpyZuREpq&+dyIjZa{+*2bqr~XeaNe8_13y^ibp>(0UMM}=*cp2w}Y+PbwprnbhPhB z{OI_2q22LkyTLnV*4gsgb*gmW^FOmdV1B3pS{?^)4#98xV56Cto;<@3n=7+SPuVk+ zuDds?{&^N^)M|K>+x~?lmj_|L1?-G)cN1pE7A3qfDCs)N-xH^z7w%5|torBF@BRA? z)Y6T;Ta+N?dUYI8_RqdVv=giMoY`EX>bWC(H(yWd?6Qc{^F9opty0yNs z_xK2NvyaFfcAHZ+B4k#NKXDnx_@C$+2Jkmoq4;Ig1UrO2Ec>64JCX$THs-%@Ve~@x z7;S>q^Z%D6v1~HHWG_gA-8nF@UKDlwy2P&87Km%kABD4HW9`oz0SPTI^!n6uYoMe} z_yRf8TB!j$6pwwKhVP`l2-Yte4DGm|)_*^FGmzy&KFH0A z-h+Ri=r)d~Jh4N$n#+Nl$^cK6D)1DV6)y>&n7W+ES;ZDKKV~I$x2U8D5v%*_J$uP* zk$PK(OIq_cI=wgq@2aJBJ3Z>jdz3A&ov%<;01Z_)GD_NRBG@YK_dN4ij?u?tWhla6iid0O z&-43-dtUc-&iU-~dB2aQwvl^=UF~lr#9C=;Y7V{#0pZHkE*yCxbE0bq(fRQVsn0*d z(%s$tBLIbYiKFt}16?u};y9M@lKSdGP2>k?neO0snNule{qhQ>?#v|6uhZpVD9WsF z5L}`J$bi)IOs|6NYfI0R^!4L6s^%_8o$e0S{MqjOm*b=b^QyB%#_gMhw3^l&&IbQ; z&YkagAT4D$$2^& zst~2vu50$T`*BRSKR1S&;@jQ8%vIUy?Ce{W(=R?(*iGgH^VX;!p^J#k_Y?&df%VoG z78V(HRWtaBK7WtKYu!sB?ATn{8-1kB9?kp^8`Y+bFBW#GgsQNlI-vw>y?Rl?&RbX4 z{UPo1n{R)sD)FrtMXHl9Jx)lz@_PsNV+yXkd5h=3RBZ!6p2Wk8H4op-@UZ)-4E&yJ zciF5vcnV2e&1?|YZe+NZ0z8i27iVds78Vg%{q$n>6JlOg3qEfQcpprDIz2gn)`Jaw zRAXGPlZofbbdD=a&E|=$WMqw6iwi#9n|_h1$A^?y=3O=8A#-aLutDQsSebVyVgb|@ zZ+jQz^v%tFISq}E=OAJY8ae_dLfX*hbWZ{j9Ze%6>GgI|pTAz-i#L+{W3-L3KX26B zLCoj{M(M88^Qbr%VKjSU)RHHbIkt6(!0QBD=o$O?)7W(Ojo)0Anq*-7{ooN%Z2GUp8l;i>95#=aSw))`e=n1X%+oDZf~>MnQ`;~D@r2>h z$64UQPd4l}tWk|AOC)A;qPTZf>0L~{Wg?c!RoG^UvoDSjK)PQI`>BSbZ{89_n(Q3a zI#%0c{e2N2&V;vEF}2_s&LEY8n1AF)En*Eb5zVn4$%B2*u`-57X9{~RYa{;>JlBR3 z!#WVR)nC8p#Gb=O&RmCpl2KfkuDV3xyX1p;rS#Trg-UfvhV(FYjH;eKy%tO4GXsu5 z2C}c6o^>}h1Fm7nq_M?)?%utNBVRgo_V-}z72}_jk$ip8b@|fk*RMM-#AvUAs1D1M zN_|8|I8kZ;BuxVKvy6;fWXxGFX_G5ZaXGzMM`72E4E>Urn$@cpJ*vm@-Z^yb?ZjVd z13xe6PhPwRPUz)R!ez<*JsRJz79t;DG*zfPU-Vm!o&OPX16AUmizO!w=iEFDYS%vNSJ9efLSmyKo{HsrOirpk?QysKCGI5g_(d>O+81e4`9V}gd>d&4-g zEAwBYyGXGx>fCST4zV9fC&!k5!6h4YNvFtXY&{3n#RpNVanlsx90u)TzBY?L7d&uO zvi7lvR#vG@&cS9o&>{88MKOhsHIk{gW_K9b3oWxE_~HP;rZSLacVm;^-d!R$l9 zh>95(bvx7X+|v1bb@D~nUs3FrgTsW?K6s-cX2iS1`n7G&0mv4DOyl1cd&?I|@_Aw5 zYCKO0XOta9d-4|L}6PjQy%`+8d6fK1Aumy?>d(TJDwgT!h+yok<(-iRrP1zW}Vi`md7Fz(2*5n6k@n+OF6S!BVv1u%l8gS^N4DEi2VY zF~6RUGN0dm%8g|nR4AZdqg@#1$53%%b@umE!fh)B8QAIWy|Xguz3h7z<)G1D`~2=c z2KPZTO|>1{iiBOUQ}BDgut8cs8XiGvP^P?>3wM&10{8;R&~mLVhVDs-GqQ$JZF?u9 zuu+`VdX2{~Hj^}4)o_Xh+XOD|^w2qL^yRoF&iHWC59L?!prPnGIH|r zh7_p`KZbkFv-$z3|IEYK6Oc0t} z`AA^CSgB@h5%Pc~vJX<{tbZZdBd_AcuJ_Gfv7n&K6VbbbGah)q%^}gx>6nZ=h!wkI z-rv2Qof5qnVozpudA&(ra4V#ibn4NW?)6qpFbd(e z_qz%5C@g+f_?9ZWOTQx33ZgFyv5$K0bD>cFA72{qj*8;u9>xb(*$ucD%KwBwbn=Z8 zOBe|g`s9U5fN`kw$~j<+VuQ$D^@^z7P`k`u2Sew6#iy2*##xjo<85nSe(6!2iO&~n zcpR^g?b-P=7$0(Gzk&9^;w!Jj>+<&!t1N|BbT$6CG+x(v&(_>b3GUfHB?t-FJh{%tH=21j-WjPlLm@%k+xs*` zjMv(#9fgS!9A|TGBEn*e@OKzLxXIcjaXoA0SeoY|t7v3>lQcuP^PC-W^rLshy53yHs9Ir1s*0%>R+IXm4< zz9^s1w$cJ%BgDQ<*j8Fi*Ly?i0R5xVsFb0nsYTKo`ghx<`>r;eh+};$Qo1pp<`jbc zzRqOf#&ZT7-=XVJ8>rps86*+?k%Bm8WXKoWM}~(p+8KQekeuzsc62fnHYi|Mql`jD zC)qYQ>6JBp6fv@W5yQbt9y*TVm6|49x~@Yg;;aIpZ{OIHdLWY@Xa^j~=gbNQW>~!qBN(_{QTbVDuVq@M&Q1SY}3Q$zUtD8gZRGpg9G6iv)S# z949!H<866|-_4cZ7MxlZ$@``UDT7%hw;R#ZO};ES@hYHP3%2z1%Adwa z?-z%!1aTGIH#3_Hq3$uy*My21kQ1hXb2%BW^Q?s4T4>eQF#_l|SF>_4QOkwe8!Z<> z+HlFt=BYn5e4+HF#4TsO@R*@@2LT;Gey=b3h{@bGl~hQ>LM`b~lo5|!yvi=l+SEnlNK zvCPSX?b{>g#4hTa-&uQv)p=06mH4XOG={QPjg($}lP-KXS9{Hj@#K3WUHJQf^q=lY z7l8bL@pM_)<8UTm57dM7M>0SNQpgAwoRlGOVr-n1(TW1HvnPj3u1ODxn7%mCjaEfk 
z%0*qXqjF+h^Zd3@k}woOKne}^i*8=Xnw5_l<@)>k(@=nLjY9Ua*TEXTJfd$uJYnCE zZzIx42Zr32QdKP1W^KaNUybsJB`4Cw9V;9qs5u%>*WTffpt1=b_!tcoGjiW?Ub>z; zag!`S!trtI!ePj-3h~Zg?#eQ=v9lX_$D_ZS&UmUaQDLjW8@ra+t^i1xx8_%=9)6f{ zCP$yM7Qw@FYj;?vv1__S@97!9UrgaacqP0X>e+gqpXVdFtD!?HrCy2o*O1k)jg^&t zq;i0yJ-w1UAvHBMI_gx=DTHnU_$ZO-g}|C+RifO3g+Dpi{I~E(APraBmJI)ig=&m2E%>fk z^zeIc=_5iUw;HI#e*0re*5Gv|t2K!f(~VQvZ+eO>7j3^6$s*BfLG>ank8X*18~xIk zR|I=V9RjLD{otDfidd)st&aH`!t}h#<05Xv&*}negVDP=l@#e)3T7VKX0T<^b)G)z zQm9h7Y3n&}m~Z;HIe|fuW!PsNlt*$Fh=OmrZajpv*BKCEnnyUe5xK}51oY6z>$jr!!RP5=FpFuRMT-EC zg-PQk;mNEdU{hyMNGYx_G>}&InFNq8lp2nQK1pXFiK4_@$c@K8f*wHaxUhCR2x2B) zBzRg~U5zS#2Q!6&PH?FxY8uhN`kP!_opcd26lmHjSN4z+aj}k#5`8?_sL!sO=q)kF z`3|(0sjeXkbuPC#=sG@>2Oz8#2wQu2soFO)FUBievxwh=Q^I36Yom5YO=_zV$782k zRCf$o8&FA)Y#;2nc|KBj%mD37f^jof5(NZhyRk0j8k^{Fs3r78-;>Tpta4%9{s+KB zK`9@4(C(|wP@*l1uqH2H;ZhK>2#KqH2n)MlJ5{UeI_+JV@kEvA-JtjGVghx7txvdSU7Wbd{7|#q#Q2ux61DGx`^#`y&?4B!R`aHI>Jm?xMiZapWp+04Sz~r zKsk81dMQZ*L?T3>YX^h?Hsrpk2$qcCmZHjHxZgh(>0H_wFE#_<_Y#BDpRMMnc6RRs z{I9xzk>!1NYZ*7<+3RrE5AiXnlcw|SBDqkp(Qut||HR-vf3fapF&;6T#%+=hqGbq~ zn28t~b-)8UIhAgF-jqh71jJpHjB(57!!Hvu-Qd8OJ-4*7>L0u(laDMOBIY9TF)c5k zqw~I}FN*yBT*lG`^-bIzq#0!PlH;57<2Cq^;5t* zfAynYMPn24Q-%wVto8x<(0rt_^T~Q8#7k3AOYYpcAPsMP<38}!)s-hvMiFUPwuOui4yI3jc3lgQ`ICz5NxL{ZtC-#bwL$Z!popBB&9DbU z=G>E>Z~VwsW^h*^mE6K01Gq(u2$9IJ{g5%xZ?f~ZEkh)A94&K1K`-_UowOt{1UPjJ zirk=1*TDJR_om1Dco9iS40TYx2VnvHY-1kfLn^{@1Lmxv%(^fA1Y(17ggcU;8*XIM z?4BAgsul6=7UYS)JwSwTV5@L4>ukLXxz6C{*K+e|V$aD~T{jMnDc8 zn-OZwjA^SvF%*>&*4)yE^Y7(veK>N@;3_F$>0$ALyBO6IJoFkKJm0@g5AEi39-e1h ze_`t##qT0)@XUqooIEKZJVRltaL(R<|5ceI>Tqxi zJ=e%xFq73x$dvJ3&hoTTVSdLQ@jx{znep@P3>@ZBRRy`Q^oB@Tfvsr}7c=%jIw<=K z@ux_XApKp@z^`{u>`e#W`3BYMqLa=P9A;5<+uyjxODxWFU^nK4F|}&FFMN ztbtn@a=;~1)eXK;W&m@0jY4N#kwlx{-}vs6=JXjDtLi-xWqKvlwMksLHJF3A<6T)2 z5ku;iFj$l^OoO_@^LILa-~yR!gbm0*shdHmQ}_0H>Xo3E9mScAYkJq72f6_-RsLY2 zWgxzCs?aJ1Ocy5mLHK=L8uq!7GC*WM31*_3a#E@2s0EQZhK@Uxb{E`pvsAtswUGQvkE84Wy$QG6PN@+f zfFFz!wB;Gak3fHK0ZB1Fcc%*VI8~oqMS%LJzOxr1g2GH~!LU*Ffj)N&X4=koNH z4cQ#@L7?~}vGXeNHOQRMS#uCL$GN_Wdu2!54O4EMvnF;ePJDUD;kC*CEP>nhf0w`r z#CpVEc?o{BacC6LY{j9 zj>qfI;01n7$^@Po{2N_Z*Um{XDYlR{!MdQc{i!bnF+|LgckE6hv(IkigZH34m1!3P zPr!X5f8)HVUuqW3o@{WN6~WWYTL03JFDw|pl9ln<_{3r%FkzVNP{z75E{PZSk@+pe zql!k0y@t=G=FQ#bU&O#8R2*1@OO)}t0f<^82-7*CGtWygA@5`o!~OV8nQ}1dUg;Ph^o`nS*Hm|tJ=hkF&=C9VQ`6_jf2-?a1QHUDdU)NS07 zHYVVm#+Efc33Mhzbb~+s!gtXmTDL)Rs%FO1I2dWzOJU@y@`pu=S|$%2T7j!$eCI|e z+!p3I6a{mPLivJ$OHGZvwG5Ak93W@2UDJ%@3D-i@%=_BM`~F}F1r7U~@f`u~gd)86 z$Cz<y8*2)N0j$9BGG;s5d;m|0;$Yz91la!;1`=Fks=3M-e^2Hu=K@tlW7D2CwQsG^+nf06Q2(UXf}2iuPuQ zk{^6bL#GYz20@EpY^5^U?83z89nwRjy5BazRBPv~u~@@;hsI z!8qqm6_XZrS|CWQ_uP7Npph7>JYwbJwh7bH*2IjIc_?|HOfhehbg{G$al|y&itlI%Mph;YY|ccu~q@iFXWT7q|m_2mIx) z4IG5yVk~G?F8;@+|BpF-DV+yt9_s#hIn$o830#rsN+WLAlsH=m1FS^cz;#{>ZOFe1 z_CF7v3h0Z&>0~U^hYzh*sW;PsP(Y9uG4K_NYOp2AMGw=QGc!IrUT8rM zW~}k;=rgTWTU=8@0c4^({vgY`iY7y6ew^%o4o`01xOu;sGK^VAQM9%xJ~?#-`K3FQ zPl9uri9Zz)?qVX6kx%9M$6s?Xwn`%~mzbcp1=%+Ccq%4>{a$1j0~h?K)VSp7Z+hVI zOwM#14#Z-Hr998K)qMsz5Ghl&CHa(=EB3UiNdPyuZ=OFIA=g3;IWo(VstAx2d{=5$ zLX>QfLORY*+V=lg`Rn`SiEsm1bNm+}k}!)deYcP3(lA~^x$oqqb7)ePi}?lL+pJaj z;iiD2LbnNzQZ8%eoYFs6fVqYg+lxngF#lr?YW_cSgSVB>H(mLj>1Amo5aOwa?SK&S zUkzGE2_(p2cRVB@>y2GH$)4Q_hzXgm;pG6X7USzB0VKw#civ0SAx>=iVc=C->izpY z85kn|*A{e6?w?{bX7da0nw*@s8TT7_xu?ApM zQ0HSAcm!Nr?!H1VpltLU ziIWH)1!6&kUGM##_nN2CJu;r&A_@uk1t6;7@zc@evaVyY>)U;esDfYqbp>X(74KR1 znZ(7%!S?PW10HaNOm0z~34I?*-B%C3fMZIy*9<>NXir8oe>n^P{1-BAU= z(Ayp?J(WOn6&T)5rD6=h8L{d^ntG1z;W?wv(}RID9noBzu+UM(XCil`D+R#GKpn);*3z3_Sk7 zJ{?nE{7R&%UNkcuS%?SB9M(8-a&|PY6jATLJzqQ7-Z?5Omt`YM4_tFpM@G-)^Buh{ 
zUwrZ)R?y#=Wbj?vtiHAP+7ixZ3c3wMMuB4q8($SZ35LlE-EalWuGPr1A>Y>!MhV#V zv_~>#(#sbG*)-*)A%m$n z?!K(s`W}_jIG_l|YGw@IMFU=s8@L^IYzu*j;N*Jvzx&=RzaspjER_H(Jrro!=vLk@ zgQ-HP;zDlK&6w$PPQ`)Qk4P0>($HxpQC-OJkB9i}()jXAgXYkGS15O(+nazZnAcok z{I~F{)}l{~tiXmbr;h1ad-G9sDJ?67bxy)HFau!w5XIg+sh;{}U_dCKb6psrA2kW`9=crt8ei=(C6+lzTl^b)9FU!8HgXJCu+GY zO5kTv1e?k2LH+t~S5DGmcpOohX9{2@QC)#@_6C)5ds<@t#-URp!DsC7qF-Sj4i>VK z=tIO~eW#n}{Q%G6nk0IVEKY9RBcJVGyhEssO_YY8?$31zmf8B*$Z`#1ZK@ML%A5XcXjKBRKw++3YGJA6tooxU8#*$MdA`N}&-U_Xxcu0CO$8Fw zo!?t>kr^?WqxPzDK%ElBi0(|^L;@RH@<0`)!GQl8J^<5b1buBPd>*I<|Ei<+sDur^ zKu}_X;Y{13Vb#?`GWWOaWY+mX`n^No?ct`}oT=Kfm)Y%4@>rP01)iEX?4bSFou90TOIH9JQAn zdNgGJ+Ux>6MY2!AA}?4pH1(Y-;~cSusyYQ=;IEq1|L44sFI<*2$uoijSVCwFUQtF$ zd%=+=-n8<}2RNTnwzdqM=Y-iK&Kv77xCKqgUV=pt_4O;gz~8o;!J+R=^4L3>fegE2 zw+_2;8hj6$M_ybQqRg~m_~rY}4+zN7mVe=4xpth6lRi&Ot1u@np1951)v>1-IBfyQwou{(><_3*IRuqF4fbYoK zGFz@Rz7j|XUz=9hE9Z%K?=V>pLPdwuOl6x-I4Fw-G@<>iy}@^@6`}gpJii zd=qC7yNArPbLZ~Ri6kEc+uLgidQ-_?Yd{8=g_rr$&2V(3ld+YtZu5i&iq9`04YW+( z@sbJT>h0)okP{>DV?eGf>FZZ`c{%aStOi9HFj2%ZpxVXVHBy0Rj z_=tz>je>ay5u4AvB-EnMwIJ^0LR;K3HMym~uuUt+X@jkYbtfuBDxWlPz>-BaLgRj) z@ZQ=PC8DRkpsErS$7g-xgtP z=~gz-Hi`ds7YUKC8Zo_oPVV93u6_7hUpM}sHiZ6&?LKGdOOPEuz<6~oNUohs`u-p) zxKiWJgsdYW6Z8~%pI~smbS{-jvjWkv#86 zpy?(-3$DP{b!VM2}D=0}%6P@PzOw0XqOq`=z>=_!v#z z`0nr{Tk6YT@JX27=PjMQ;CDj*cy{4S8WpVS014v^-Al?CfiG_x+ou1QQ7h%B8XC10x{()}t>7v0zF;-BYby)d8!wvfMUr1a#VKqk)9 z8778xYM3oDIXLHTRL>DeNdKk^#+rIl_n#h$V*J)$Yp`CU4ZbiAqIbJvN2-^qjsXub zhAR)h9Z2yRIy2kb+LNkuU~Nc7i|dd3=N9oZ56dLY}~SCbPh^M8{L1fSMVB}0MD z=fr*b9zkU4q>2LcB{_2h{ZmQ}>e!#5N-U17k!W^nw)8pzjTV{o6@7SqDi|oV?q=}NR7`g~yaA!|m z=u0GCd(IdUMPCoWmH;{aQ8Kk5(JK1LzEUlziA@#pEavG$uQD?Ow{ z^wb>esZ{qp8#Mf2k#mg|FNLB_{yb<3Xd~o+YIoM@g|deIuzg-QPKzZdr|QjeJ^33y7CQyq zg(fFKuSd{NdHXYz8l15MHv=4R%mP-=lKZ;8dAncCyr(p|}sC>kECL(v08SQ-D0H-;!Ux2u>VIoWciy-+LSCCU4{T?c|nKfSknxZe;u}Fq-ec4 zoII2XnZArpMLBQJf6d!Gxkk~*+3^=eo;k95QeW$pGy+ci4PHdi9!9}H`;EC^R>PUO ztE;Q$BNPp-2qD8Xa*kBKR;D8B;h)t=X09D$+wCef0 z|3XfE^Zh~u3>tMyclxqn=~2??K>yT~KvN-Y@620C4|E$LjhPUgb$&9}bMDk~tv3p3 zt_G$ZuwCK)3&L3Rtu3!dM`U3yfaT;i{s_)&C15bao-m5No1tI%%Q;hI!oF3<3LT~0 z^?Rj)nOXT=tPixI5hGP^phu;SqF?eJ?;VK2`A={HmjEHiWi!VB2&-@3zM1DwSDw?l zSRNj`DfVL5(sf$K`(;~8KaSwm z8+ahK8IbQ5tc2jK`J}H>v-a-PgwsiV=41Arc>CP#77E`4oar46+80#L*k8qp&uKX@E4GKzuN93verahJ z6B8q-b|`4V8xNf^sps!OIQ$3pA4TFxCLL}@K`-!vSmv^uasK-1tv$No6~5VUS2Js9 zpfWd=HB)J(`A%}W9Lajk`t--)`1CTc{mlCp1@=CMdNjm86X#j1V>->70I{MxEId5d zbS3(zMfuS`Rrd2fyCHU33&* z^o8qmLeriBFVz4_oub3`sD2ta<`0SLJSpTnpu;AbK~3#ETlT|MO>5hp3s&9~?$OZD z0FYk#nrJvZsD#-|5mJtjM0m_AWw=vBtYNW zOqH#~n%O79!oo!J-tz2IU1@)yi6-*CeeXj#@NtzfKmn#0K(ybONA1v3Ocj-jUm3(5 zKR!t?dBp%tIet;u^qIfEU!;w(wXza7&F`l4@vX`sGC@Ut1n^R4*fOL=z*u|`b73Vo zwAjr6kRQH3-fgP!M-?=fYdknLng}aLN zK0e`*SDtM;5xph>q$1I4HL3AK>j!m$au9kO@81GIa>5z&E$Fj#H28E_GkyI2-QuFQ z@SjQ2q)#Hq{=y8yNPu@`fCTZd@$_jUgmEDCRR9g;d)5#2AIMaP0c6) zMZV_v-lcGt=g#IzM2u#`kxZD7abqCUZN4k94Sh9w<`%(LW2aI~!sX?uHsxTGDP{%I zfR4ub)Jd}W@Q2-1fNOCw+}q`4G%&y}2FJjcAmMU7x7bq(jUvh;A*GVWmmVD(8{2R{ zn!C>H%SrntD>GB;0f(1sXe1nrL9~9I2Dut6zPh5KBB#E6g*{?82^&<|(`{6i zT=j14IV}RY%jFLIphSB5yHT|dda(fw*?M!N*5gB3hhG0hJ9DdhRMX zU@9gJ4Q*}{qR<_nQzr>_&AJ@QbjmP#U|~?4ez)X)7i`-)tvB|u+p01dx^384X~Obx z$^$qPNU++VKsXS)J~LBEb9gva${{<6!GgvgngOz)ryxU4@=s1lnjF04Tg&;O`>`v- zO^-4Qzbygvd_*EHQOdRL6ob4G=*lde*t4JIa6Vwe zL2kvC==A6xyz|h)BzKdTYj`rBu8s*(U3K1AjhxRn%bE*AJUCLcwKVtgF`7U1lWo|Y zNts>*~)-oYy*knfIZtuCC=hQ;~R0 zS8XbUM}};5WA)tnywk0qcGC}{KJ|2F=E2#Wc`>oQqUDw4byBK^Vg`rCl-$@V50Ep! 
zA|n@lzwcL6JmihNX@w&e0j!@qdL;^KDx!Nk`I`4!Mel55qsRn7N+9;wmLEScxeENC zWtP89@dPm3@tt)?$Iv5{>gxX}RD|U*i_29kTvq(BV%l!2>Cy2Yf1)-|J_XgA(J6Cj z6}+VwaA#6dmxxinr@~PPi9?QuhX=6Ko6C9pQBs$ef3+wilpqq4lQJQwl(bZCF;BHP`c_b>3CsLq%6=cMnD?c z_fzdW^O((u%=6u4r;+ir*sy2x%IB$g9~v4OX6ZlWjVC$9p@j&h2DR$*g};UC(XG14 zOP%ldHYdL^VQN_8yEkMU=T#2)g?~O1X2C+Jd8lc6{RQQ5-uTRK&7?zqhx^<3zJ`8o z_0Hn!nydQO!;@p#&eyNf$RjjON}B6R06d&bOibN81s=Bkm#@#`&`5!25fK)aO1hm< zEAW!w3K=vN+Lclev8D6YJ)iH0tk_6O`V>5VUZL*J7&}OgSL-=oBDrSL@Yl{ToM;7{ z5T}u}443~M{ac7QPbP?O2Tw)=;R>Ck+q2~9&-8!Xd_v%uoA+-qg0s?0r;Lme&RL)H z^Vc)bFN(|c%G`=`a;@d29RULicYDhkZj@b0#`QULmfqa;QRXJ^Piqa^y{Kbi3Y#R=f7}C*69vs~?{7=v1NYIs*6ri{Yss zFZiA~pBUk2ivF5amw$KUK>pNK=a%NMrk-0{Q~lP1U-3gDQ~NE@0Y4@XCgC6{#RU5c zSyJH@XYDE^y}!olPP!l7TUGpedE$&?)TP{G?sVNz4_m1ho7#2k7t6ID zS>OFMV2?jQoL^@7m0Y+dUX)AC2Efp{rV#ZZBv?7xr6$WUY`_A;qg$bRQIx!6-9qqv z?3CA9{^;JuWN&*8fg&eVue6x@`AME_HF)3JKYTM+2q_&~;3Ed;xV0H8(Jc;ocy>SU z;XP7>smLl~b>%&!eBp7omF04NGr zGN-#?2E4AEfv=nlwVxE|WkBAb+y7ywIzg>F$`0!JO>FVqs}HSnw@b~Vjm<)|@{RHV z_=vXaJvbeCH!?!6^=&PY>RmhN6CoJH6!|wo&kx3}tPT!~&A@RO_7W6dOJy4#9<@FS zqAXpU&(ye<_|z(L$-PVns@#|*cL_ice-<6(zBaVFx>o*lZxy}&H{zoK@4?7Uugsb0 zqeRJpzl>dHux__b81jFURFc1o`B>+9Aib#Hz#1qe3(&kCy;3FN;o;KE*RHNE0@vuF z%G)QiQu{gATI-pI)z=!no zchptz0Hv802MsKPt?aL%wii^;)1xao+j#qfsD1rqUW4ycEI|!lyrg~1M|%=agqyJC zKj>`2?z19Z`0uV0>I%`8ozp*nJ1pC!!Z7-wv{%bQQ}ceEgpr2EAiZ)p-O9*!imvwKaZB(Hej5BUXu*-g z2G?$!v0}6K8L*WBuf)>^$>*euH$;SFIb0V7)uPM*Be}3Y(abhvVM$w*=9|PV`mp$YlUSanV66F%T^KOE^%f&EX>cj@xX7QfKCnhF_ zZ%oylY;o=-w%kT&KZ#i>to)j?1Ay83N0>n7Y$#1-{LDU@B z{38`Z(5P!Usox*SwSV?ES)VBVR9#v+XE9wVj#s3TuqL5bx5(Wc-+(T4z?y)YU(95O zsIf=_LasT!kHcgAa6n?F?iier(66Fk-D8v+^z9tp1C6ghM7f$_J5gEpajM^tV$7?L zTmmW(^!M2XuNh7ljqTr;PL`o=%wU<;8y*?4+FKnCQe-LJ2J%NDD|>6RWSIT7pC|PQ z=jcL0xNpOQx;jR#UP%o&X(r;sw*x2o#6-UJAH}8Fd>G`E4@(lU@ju2u@kDf`Sy;sC z&?v&>%0@6>y<1snx>;~uEmHyYsEfL7^9*Fj?0o&=TnF>OZN86RwhioO2=8g_S0&Eyb%UWayv^EVNwrtyK;TrIQc# ze~mfW-;eIkF9$HpU=0bJiJ|mllS?A$69hF=8}qti?ix>$jTD%X1`?R&KUGD^G4QVnWuOWp3rA=jcRIooBeSi;EQH_kQYh zVe8SH;$nyA$Z*;Xkj@zVJ>!$ew=czu2ILO9<`?Ac9z=}07r3K0b3AoV6yLV$} zDQhPHaQwF52&7O!T?4lcQ7B>#@O9 zU*Xx#9lrW9pZi-|&d2=6t|Zx`9nB^FH=T*KKRcdlv)M88wqyC6rhm)B?CeTD;Cth} zr_bB)s#qvy@K!C$0m){pf^}v6G#qF0UBIQRd{3?FhW};2M<<4WLuzt^6XzWVZJ*a_ylqAbq(1#a`y*SE=WTOva3- zlq5+5_GMY%kXpbp-`dKFl$IHP#{DThowXF`EDAF-7fop>_>RBaQimfw6)iHRUaax# zeN?~xUhs#WMX7Fv#xGZr{_>-aBV zeeYWLN_%<>(HfEuz1Gt*;+@u`7kAE@-|q(S<$Gs6NSAJ@1H+8tdue3n{9o`Kpr@Af z@Sdyd*NxZZmi;V-FBW0|zT~U93-w`-^}W%C<8PgO4Z~;IvZ-d4_d5Dea#i3v-BC1a zb2C#ys)zhh2^$+524-ev*7EYrG3*#4vRi^}mTK~^KE%hnY{OB1v&L*N`>QJtBoa4l z=L@(37y!db{Zg6t6SRky0|8xWdiqDvnf$!0K2LyxL3BvG{n*yk^?>5whs$XGinGM0 z-*+V$t?or!^daDRj}p|yGOtPzNa{_ztN=)*n_77~(lQM95-1{qXJ zll6VmPMWT;9xZ5?209y)I?s+_9VZ!`C}2>xg&+30gLk2Ib$Pj?D^W&N>Pb(+dHVKj z!c`2ApqSI8!x`W}_<4Q7Y7D9-5JayeF)VB!O)*?-Hz|=o8S5R@7eXw!Um*fZo|9Xu zq5Co*@8gXgovhBO6C+J6EquL+k+nZot>gIX4sz}<>cf5C5+3~Pg#a}|Nd#N zC=kQzF|<10DjS~ondkuz$2p^EZ8+1~)wL2lAR-VwVPU>YG{p8}+Y2?TfC7Y+!g4YS zstNW}wMSO~z_JW1+cNlY@Q**~IUWRf91+Q$6uw_>BFd?np{@+#=m30(@2||ujt&na zM8aSYf4pLC5KIF?FB|rBm;?x=TwGmAcf*LMEGDUMY~iCg`(GOO?O--fw!M9fe@FL( zT}YI0xlIozL4&tpPu!q4Z9VUsuC*~dJnV5+KKgXyN42j%V9h?0(ur*iw=;O!e41LB zq0T-qt4h^2Tlc?!-BIQj5n2D_N3O{&L4ZqT16C{nN^zfsvMk)MUb`Hg4Q%Xp?{dJQ zU(DYY`!|KG*BdErNhE6BbFG9DNX#nyZ1e-(*yz|^;LteoCXIOIsY=KWh3c0|XMaNf zE(I8fVTkuGj<=HxG$x%)=*@(hHh%Tb-YgE_;wYM%Xkgb=@n zPy3vmPH(QRezV+_lJWm1NzqJA*W${ozY<3$oHEZeGwv40%p(SuuRC65a|ZBB;dsya zN1m!R(;0fLWUMY97DGf=L<4vzI%)VXdZ5 z@QyGw2Qnpt&`^0o1Mph(T&t}BKY-owwt|> zyUHw!xH}V%3@mgy|9wpbT>1o)kGBB_=_<81ui@!G^k{CPo*6vqpFVTBxx9ZY1F<_PiMRbJv+1*x<>XiWJ~p(D6U|a~;k$0(#^W8e?mNz{ 
zG1{P|*S8wS2M2e$fDL4`=W&U6sASpGk5l21`)4T!;B50pLufXNpU3rv4(-ekKm95H zD$sqaopkE#hRn{5c-Su}Aleq-qkXgbW2Z+0=y4!Ky0&)mBVOZ@n!jfn4yx-{yRE7b zv!=d&E>}T~dT6K<4&RJaFZ8-~SK3W}eq!%5{mCnI>SQLESxdwJ*76rgrO@)#VTEbL zxp^rnwidhZEE@h2Q`3wffIL~~Ne>;=5>{bT;YT#=UEFR~cF~L}s9L?AFS*n8m3n&( z-hFYVEOXp1m9!LEHr@WSZ^r5{l50-)caSj@IV#9Z+h`sto#X0x_GMvfra2#&dUmdJ zaq$s5+LPUa2O3Z(3j+4{sYKew4_@^_leZy?=2{5xsatLQ zr3-$Ml~WRZj%{g*^blWD;Z@6;G<+D>13w8TmZx*~LbuCHzISxW5cTNCK5}>sa=@Mu z1pB>X$KTV#PpvgHG-CZ3O&6^GoM4DAu&CaK{;TVL#G+?rEb^uJQx_N|yv$p_QCe7h zE!h)v#Flb1dkBoTK+H>j)8iqH9Q0HW7WIf(uhSYH)gRpb=J+`ZVuvZOzZ|`13l+8# z7p!-W=vnt(5{oXqihx_UK=pHeF_&WoPGGl{hsQr>uLj2|r}wu;|NeRZ(W63OD^C=A zk&pyg1X&;gKhDW5)Y8-({`jMQ^XL=8yjCa;Fh0JCDd!vNht!f--c{m^>35|+fdY`{ zkeLb{^K*xiN%$9pSO5rp3>=dKAV(eQ)~bAa2?W|KdlJMUa+7%RD^75Xn;CYWF8%7# z%ynKE*3PY{cwX{t9|)^D^8tj`I0;)nV*=pJPkau|kfU4WDA8@lNvX0eH0)IgB8>Ch zqSH3IWtI<~+y(yowm9=XAHO-Voa7tIlnV;@eysVb)@bv|$%ZGJv}3K^?qR7t?V%5DWvwQX!x+E2g8+tip_ zKc%KsVDmr&rj;8L!jTa2L}jE=UnIViLygW8;KmkvZxsIIef8MNO4cX@Qcr~JE8F=4 zBJE0i+=ki^-VMiS8)oPHzw*xVugN$3`$LgXkW!?P?i4{%L?lK!N5`ZE84bcnag<6) z2?$6^NQpGamz+omV-gb(5s(-mj2iJf`Q1fI^={bBaF`<}nOEEO3ue7B^%XfLm3=h2c z-@m`OK+Wf;H|6CO{-aShRwIE~P|1AT2AvTN2PO}{*}Ob^=7vNXtj&m75T)vrVbwbM z&(9}BQ2SG{B0ZCm)m-{e!MT=?(Y&V4 z=#0S2Z#2yGO}1Oiz$yeda|Gy6-d5@0Bd6Qj5I9lYWey36975q-`jd&fVEF{PySrhc zY5W-jamy_s0}u$uudQUm4~vyM%Zq=?9@(>KSs^@}oEAO9LD%&ODKj(EDeo6U{qFsq z@E%`!^*sZsZFYefsn~H^z`I2OZ;{2t&R(aitc-v|GYLmAu(DLgPZlyX_nlo_dILD7 zK?KeV0_Fti%r<-7%T+|d{o30_ z=;4!V>TI^+13oU!&Q+X>vJ6IoOd|u`&uJS&_Y2puIMhPSQ6yAxlGN94-!9LAs47V% z#Lv&~D@YXn8QSsGMT8^p2<)eiYi3o}hZTT!wzVc<*OiRXQ|E9EYom*#Ap4_Eo`mYW z&xWRA;swh~QS=MHNMb_xk;1Cmf>;tTyhukqbveXa6zfh$+8AlHcCu~e}*Pdptp zPb}_E-FgEpo<2q4+=zR?PKKYJlDI>75;GjfuW%&DNnWDHu3aw%#RwM}G$PZ}D9C!G zKI_z)e;fwBG9f-*kSVUGHXxdwAsOxhmE~g{^|#^r<702ZZ1M>LnWR*)V7K=A{S|F} zP?z_u4wHKwD~CUQ;ZSdx(~Kgnf>FEp{kw>~@zNf?(i81Mde|o761bu0r1!R;*lg}H z!@!BMzNyLUcJ@#}fFFM@@+lRVOSatvw!jKEOOl3~)S(6sd>Y(Pt3n8Wimsyf?@Nkj zdw5yDCpxKaXUQ`SgjNNQp9kl}-;PtwV|>U#6j-e8%ENt@ofiT@&?C9#+{BA(PAmO#+n>bC(cEB>30k){4196p(?iFLQhMLhuFavb2bnxp}%Nwmy zUxYzaQyDfi-9S*Ds185N0IZh{%)ckuiKVKxxTho@4V>5oDxRW^5F<9T=F6*D`?V7w z9JJVc*eo`sFPQsRha*GY48v$tbPFd^=-}{XgOFyQf3p{7;;ou^^8(~Q$>lg57f}JJ zTqS|!*}(gKLVdl``pKMB+~+@5{nLq4V({k45|gI{;h>&K#iVuIGArOkuTI57w!=V9o_7)9L#C+Kf2-(3lpWt+ z&=5e9jUc}iCyjh?+_EzBwxsgoNAqVOaOUCX2iFr!8L9VumlU(9udBk90iK?#oJ!)k zYf!a3mn{tlBmf&1R|4k6cDzHa2n6j6H9GR&;>{`{j1L5-K^I&5`}+}XXPuM?_x}4~ zeD7i;4xhj;y#nTs2OWDRyb?w`Tu9DR-_qh{WMmYstdvzIn9|?zA@wDp=Wc~-)-I8# zE0~>p`bEt`f|*qTgp&aSOy}#4RNGB3MX5w3c>OJyqmmO6t$st{qytp58>rIr6sOqoKxL_xxi$w zD#`IrjXu;Bcmxx#(kZsi4+Ww|*}tzNjPsTOD?5A2&So13yyqs(e)*CUCKixi^DkV| zre$|i(zY!S8EVz)gPasG;gpN~O1=!I)>M=<*oduc8L4Op14XLteJq-m&vJ)Va zy=^9*`BkUr3kswR=>+(}f7dppnwk~cTL5MkWcus%&fJP&AYb%$F9Xk3acu67F|7Tr{oaMFCK{UitU?r-hgJ zz-=z>vMQ3MD`aJ4YOw?r`6(09#$b+e$1$#Y_9PD!GVq_q4|T&krl*&>)1J(%GlDIP zP7=DVp{WUw0-mlO9vcjTzSUO-e-P7D?z~G@A~cQ0PMo{ws%~wJ@9HhpTxaO zvp@mF-(T}_I@;obz}fjM4YCHE5PfCUWO(`=$2vp;*ylif)~PdiR;p*ue!b|BNP`Bv zc+YT6i7hG-_54LA1@+CzeEP9L;ZzsSu3Ru1p+w}}KhiZ(JPR!25DY1G}+Fi(-s1$2@NCU7JNsz^f1gnXt6 zLALeg;-_;);8-tp_w-;vtO41IXHjFjE&pLuk&N6RI!TyzK%nQ-^J=h{-#wkEMi}uk zK5Adtt#M_GS)|5A!k|SJ6?&hh9s=&`*bGi0!9lOtni=D54*GW{Rh&gZLD9cKtkJ+q zoy(s^Gu!0aZJ9XCg3(rpRoucII%a0(QxfnYTiilILYo7HxZ(!BkR~1ts^<n+a6_C4X>3{!^pn%#D59A_)?u zYdMXtza8|Tfo6SKwbsfT7?|5=2}ZKF`K`P<+X85{SwEY`hKX`v8^Zb4o5b*F?qrtO zGwM@PYvGR=pQ8DWfc^p>gwuXWpMLaG6+d9F!rN~9v2V{ zTC%LH{my>?g~OhuuL@P&Ko}ez8ahn`z^nrWpxLV?dOcFzOe@iJ9r&9Ok7G1qmzd<) zOfb`^WNs_d11{21+ z$L8#IEa(=hxp5)pP5mk++Eqs|C3$pI2H1e~)8}Nm#jvLXL9MIrrX|y$;Vl7#>G0FP 
zg^3-aX;71(&+Xciql4`6`}YpO*GP;>Diz?#9$`(yIBd4}A~7 zhvCoWOw0941c)CX2~|e$$j)wG4i{H%@9i0oe_lBI0#wEcalet_+A&li5~>1rBCvBe zCzpWKXqlpiRr42Z$U;Q`Kr(Rda3}oCB{VcN+=E3cz%1;^8sUM;NE&qN&D0dhXFsMD z{2oGj&F`-LxELqYdBS-SQZw_sn~C8*mOo7&3ZSgBU({IH*xn32F+C!Y9$@7jW-$u5 zHAW@S8rGU0FYN9Pb({jqNq8C1n(3&6ORuH*-krD7O&0SwKHZ{X(nv*nvX7N2r3>;} zs&HXP^6rMOX1XMt)(sDjD;C4uCp?y*n*Ek0l|IelAl#jvp3dLVCa$Na_vv$3xRX#Nrcus^!`Jk*e;yaBjV?=ZXh z_VW^g2_TWolQ0!l4`?|b;)ChWztnvmetmq>cOFg6Ha_hHnp{=ir6GO6of{>tG_Qn1 z8#fhiBPCupRSBh=B>ObC1&E|M1_cF4y#JF9ls__>al)V5R>gDYF5KN1;`LZgsT=25 zIn?(<&t28MC^)#d5qvSAdV!Joz1;-lt`6YtutbaS&U$>bG}%8`Y1vqXdN0=Xi=~_q z20BQs{Tlg?E4h!l$ggNeR#-8df58_#Me-5j(Mi*gf}Zffr1y}5xL8?(?!eRL!!Ka6)bp%)p>+$4eXyo54hUu2$%Hc?>bvF}t z{A}ADh6|V3Z$j3;s6Ui?%+ey+QuWG?#XhTqC7K89d~xOsxTwgC%?z+J5;kCo!7!9E zICPhYd6FgAQsrq%$DFK6f2SejYOYy>_|u~w1S>FSsCPsyFE0;KMtwQ{LKNb!S5a7h z+zl@tun%jrHFOGLNu}*hILV!_zP-K}_2V*2dVZf^|vKrAJWA) zYwhFAo+DCSYg?1!S!GY(j^bQCf$}B2csxyn{ko4$3`Hppo%&@SX1a7wr-2 z@ln;e@Uuf5vM-&xpuu(%^J~6o7`Ab5;|04y$*I>i)<_764{+*n z)349a^TIj|bc~HG9$t(UeA*%Is-25++X6FEj&z+J4_r!Iozr03FNm~d+8z9Pq^D$E zS3;)(_1jhV`6$7W!R_p!_~~4|*xj+5PqM_+Ly;^p0lS7#Bi*t3YHl)%4k2 zR==~GUchQO6(3-L+U+=9UAsH;RchI+@!c;Vcd78*CdS!xKn{`ztDs}$JeU~kEls~W z?F7+we-{!I1QTLUaN*x4Cc**Q@@}Q(?{$%aKP>W(AhA6QE-9rLR(u0D*gu{Cn{_tv zAukNoY%H9*xXqX-ogs{??2?z;4~e8e+yFM(w za2C@20s4n?&++$B8uNv~MTPG63*C@X1-RDFGGg`zla{^Twu7vVx2W*5=fR0i$z!fH zf9%?Xnr2EL;Xb?vGWK`7z%}cf_9P3aP~ie8tcOCi=IgA*@YmJ!kcgZ?P7j{Ek?;`G z9)Ygoa0%bf4KT1lCxHYUHQhs~dY5T)c_m6f!@>6g)gF>+O38-l%p=%8Px>LAqG%SJ z=$Zu`OS{zrv{4rg*)iJYVU?HT@zkcfbH{KDR=E$uz2!*!Z6Q)TRH!RGj65x1ykbm%=K|II3nn;6#AfqDv-U8uqN#F= zdwkUjs4o~>1?F4C48xqPz}1xxh?Uj#5T#6LPHg593qlI=>;1M$H4y1vEF{uARLn!X zh~PAbFqmsQRA<|l#R&^HF#vPxtx_N-`{CR5bNSzLuPALSNx1Q(1SiHi^-~1Y$NO_B z2e`XGdE({wzIrpzNa-ZKYAux4UbNWniwESt({Jp#grHoLGWZvSx%Dw{bu+n{na$wX zqH0wJFSkla2_xed)@PUF5H|;s**7oDB~kWa!PdjnC4w zxWd$zXo*YIk1iD44kXZK*|DH_85H{Z1St~k2JdPj!q2F&a=`{z6+oWqlQYnx*8&k0 z2|v*D1g>f-zSdw<@4M);g5lnnJqb@qG5Y{C{!5%24je+0`{J9}qv$$p%YIT}yJ-0T z9-+a>6|`1!Y96D$xm}}KouOk_iqwcj(3gdJ=EmxHdU^s~lgm2nsFiCdkn(~vt|#A7 zHb|T+QMEXgdG5%(7m!fDmt3qe^M!5PF|4u{HK?je_edXD>@8kgY*p|yoNMzBcFu=M zTHVIfp^7=|y0vcswPI315GMYr8PF9zIK7CWDa$F*T}Z~gfXZ4*kd11flPXD}W+F~S z*p)wDzj}Eh5S~V|nX85$u$j5JyF2}kzZDIzFsO(@WxYV$D_D#D2o)qY(2J5jCG+gCip&%NuhY9oC>+#6O|d7X1oQFWm8c-mHsZ zJjcWMqVM@q#D2bt7NKQ``|;HDdlRo6s&`D|O9?`Fs-+uC(=81gdM32kzRdBHgO^v$ zGqCr-G?D#4*-Vv!)R!W~y?zD)ayFY|a+TUxxt&^7VUt$K<1=o#G?e??X3IwTTFXGf z8nfzzL}lZg@SiJD6|Y`hnH!nOYu&$@A@(sHYFcal+0@LeB6k^CX;{qmZZMea9)IbO z_Bbqg`j7}tl4t6Xsi+7@WcrUN;rQ`JzSx6$A=hha@@$wG30haU25kS#iu+X$2Mdc1 zCmY8)Ckw}&pIB@PkPx9ztUh?gqf3PRyZ_45w7ax#2hSZ*Z9pPI|MQI!}L;Uh}hX+D*C?{226Q@C%tY3UYr_>i}t*Iwq<3>KTbK``)t z<}aYE{K%%?u^cbG`?kb2x{AHTwK|AW@z{wMwJ}Fv?pB5`|v{Sfw+)AmyrL`rKpnk>M-TXQL@eCeHWJK^YUQO zA&9Y;;p0kNt2G~djj=Cr&5AC03XOxcl=u~|W#zN8hnLu~l;p30e}K^?_AU9CG)AYt zS{iY%tg6hcDomQN!9@MlhgtclV>F<$X;D>H@MVdsw*gj}_v4kUk~Lj}GM4O15jrAW zJK`8`Qw{OuI|=Qz@0Ra)OnIw5J*iroH1y_s2J(#FoQ*9q)oyqJ&l3yx0ZD+pol0<+ zO0e-SZ*WphKq@yZOePSNz`(YuwYX))gf+`_lS=cnawm%hCyRh$d&P7oMPM!bIWWnX z;6OJ&m8}aGjNgu3rKaM1I32D@%~G#weyN?ia6gQUK`aEj{sSrc(JRe4qI=gNdk>cnjWt{0+W)Jdie<-4I)J zkZq4smuY+X4Y@9g_Cs?$?_MR@4mHl1ya9LK%OE^kT0-R z$kICBR3xAFmr7%(@wL+Uncn5vd)RDR4mWnSTV0e9FW59KX?cbkz?yMY+InI>m(CZa zjnIIQ#Ym-tU5T~`YjW&-mnUoTRxxke2Tox7uH#nS*rq1sb&>)Ur+Z|pFAa7k5M06zlkPX<*nx_$h^tWkRLc6 zzWaRW5 zZ;91u>Wl)9<&^Kb#~K?PBk;|HPdOJSN5~wNr;{|1H-6vXFF?l&lLvNN& zJbuVC_1E_V%xzLQ!LjKvH{`KYRJ=3yoW6gdvxe?CEXDLbMgBzqNu~VJPq@rC6dgn0 zur|JZN4S)^!KlF%LY{V%xWBs2XXVN_=B7SXwR|%We6q@OV%?NSI4(V>cJ8y*y~0rk zE0LjXUUHd4BWaY&YKYsq%9)brX+16 
zvinQG_@V8U>cwZ@?OG{ovhP0_4>fuFoqiA9HYL4K?>ZGAagkmvV}vRLLlfbDda|J#Nec3F?sbfco~Wyc@LUo(`Y6KHs1R;9l0z_ymgbN}>2f>57XfHCa0`dqX zJ>>szEFu4Yy_A>FE+z(DH|M%D)&uTb!0&;%pQD|hlahmv6Ziv>m6ny3kdcv)xdoSz yQ<9TblDQ=&Ev+Oi4Y5=Q#n}JX8<3ukuFgUK?>Bf;9z>k4t?mOu%_@z@k^ckrIcwAa literal 0 HcmV?d00001 diff --git a/assets/integrations/docker.png b/assets/integrations/docker.png new file mode 100644 index 0000000000000000000000000000000000000000..69fb73e628bd36b1aa8c086da6db02a2ea632d3a GIT binary patch literal 4213 zcmb7IS5OmRvkf3bN&?b5Qlx}pqyz{Z=}H8oDM;@%Ku{1AklwrWP6!~N5tQCRrASE> zhzQb2z<@LbM7aDfcjn%i`*3G=cF%r$&g{!R?0$(R2z?M8Hyr=~02vzSm;nG3e3!WD zD%GWrq=Fw`hAZw`###UXHl6;=k@_;f?rdOY3;;Y8y=24y0H^<95&?iW}xofU_Nl-R6|9_dBSD2!?-SvT8>4ZvA^e1pmvq(S;G&D`d-UrZt z=X;hT>55`Yr~;M`3O9e3c==A=fEX)i`i=J6BYh ztY5T1cs{Ungzp_usCA3-)jY6q0sMN2qj{a@-6nJD8-@F}(usA<*i{i^99PQ*)0<`5 zrp4>;Nz@JBY0!8`I+i)4032S%6jysKp`J0KQpDvcAi0e)x1Xx$169EgMOr5?xd!Cay>h|^q<%u?fxFMp_d@LZ_>!vD$uFiIGnDy*6zN@e z9X)Pa*?lDLjIM4v1c${R4LjtOOkxDU7DDKF%H&!Ub8d)v+1!_*f{_G#K7|itzU%c3H7dBmEA5mm51+=Xim-w z>68{U+}Qwy`zi|>>EP~tA`w?-^+k=JPY0uAdc@C{4yNFnnwz@HZ5jO)R9-H^KE$!Z&ZEzH;$SQ8WPnk zTJ|gr0o%C$|d|OHuSeOJ09Y%3;ST!C9P)Ea}UjB^#p4+7*c)l*1H7ojhNN65M3Oa%=uWC5L%L{s$a(HZNqEGQzo@gpYUzF1u4c~&hVf3I7@hX zrrUz`+&YLN$NA%RRI&W&;4H?+HTQMI@UD|xzthU22hunZHEN6(BiKMj3)C5sS+F9T z6!VWh3}sDC*mz_HpKC3C`sW1%e(QlroYx%{4^{K&sCI|e6olxu!&*fWVmKp~QtoWF zT3^2aoTN&do<>)K*kpFA_lh1x$)0$1TI!J1hTo8b*YmejGTlgP^=c&vo-%LeIz%gE zno%8z+*YX*bR$wFFhRi-8f%sm>O)jX4>$+DYXOaMYy*MI zK}9s?3PUIPvD_JVh@WOeLVWqB%r)Nf9U#AJ1KD2AuOa!*yI>`D$u+C&aD|n3CR*tp zg9MMSsPEePU1>5zQ(q-2@g3_z%eGiVC}UjtN7T|`zvv9zq+dleypUbL^keXnY80-& zQ;^4Y*kV=ZWZ$hMp(}tB_NJ|ypn#exWEZC!%(c_eLpHF37j6T0mC2$!O>uG3=~g?H z(Ltps$r0ocQ`5e6(_ULVLL`cxOLs#|f#9=+rA_lDB7HP_ES^a1^1P={Qz0Io@$zJN z&qm{W!S@Y7eK7AGwqjmbb7c$FJ6~M1JC0BdOD5EX^FI3I^eDFh@v5b z)YJwwYw{&9`&0_bJx*0sFP&^$le!RlIFq@xV%57-jHfv=th)CnWs3!4y;&R&@DPZl z8c4!k+(?pKIr5dAgbuzH(TFzq$qpy6zVdnB8R%+##huBnz{T18N*sh|U!C*fZ|v*I zwtFj{%Jn{kc6X-dKQfX@243t?(fQM?)G?C5QaPH_<`1N^D6Qoib+(hupZhbK@S^WV z`tWC(gY9Z^5{OzEl_fLy+jh2Mg_xf)!kB|LJfjZGuz7XOKP-SQT>c{{4B2Q`Yk?zLc^>E$ABZfy(Vpm z63i8t=k0rmUmq<_%;5}i<@xNQ1{anLm!5jNohQUU!UBfKDm7@)=dK_4t#^8Q*ET#@HyuzWp#@5}{R4GV{yE@FD z4ayge5^?=APJ+_yGG%zhXhf*+1pgPC_fJ?^M_Esrz>l=a_K$A*w)ow-s5v*n@97Q* zjs!n+x7W>0iVSva5xGKQ0E3SH_>w4$j5ywwK)U_NUWJcN2H?(B5^SY`az#Dg+9PkPoECjbtyo zVJQC1UklZ3`se>rh|ir7%^BXi2n*QwdHv~Eh$@~LSo#3t`AHt*r!96R zRN+T*$LoasscUxJz|=RdOQ(8jpFfZ)*`XsLe_Vl-@ z$};a_Bgp-nGp3nR-+I`URI7TYz$C()2YWDyx+*2$ax9_3=y` z^M<>TSSi&~r&I|ZNOr7ZNv$v;2w{aV5`%>YLtV!koeZjDnQ~IRNOt(_!TY}HAc$qQ zBy%Q~a-VRexhPycpasQi@qHq9qD-97=jInL!{bI{t_EV25G|^e8wamol+7P5-O2jc zi08tClqie07_FRE2{%do)GVkT_uPp^`YS$4N|GTa$OfT(rp*|j@}yJnUW)>(`IJIG zvN_AWKo1fp9jJOUrzd^+tk29`)MXXGTYL?Xa;F3PSA^iq2PsjeR|zMj#9k}ONfkyS z*9e380|!2JD5vGKVVoy^MD`Ys`x3I#O3x+a5Le34TB(tCp7A*tq2WIh_=%uI?ViiH zW1U}^ccdrzy9;$f2e4}Er7&1-yD)8^j(BYxTm*Bqb&n`wx(45DfR@Q0F>@phi0W1Jbh~xSwh?yV= zvBPQaI*<#7RtD~WO*3PQWc{~JB zs5QvAmUb@7j!A%*uG{zS@hF$z*CelX{^oitUOg?r8(4gL!j`zg9rE%=_;#zTYOk=6 zoT4`}2EsEjKZkTegt3XWU{5wA#~@D+{`jA1rRe+9U}sgiI0I^G_aL&W4SwB6X-qM8 zQwe=R%_^TmSulHQ`Nk!9jqHa}KwJBnna0`k-ZvOJ7RnLH+rue~SHa&cx{?Npo*mn_T? zxD{~ln~X-^W9Q(wklZ-XXrt+k$ci;Hq|T&WRMr!wW^E18)|TRGmmB#d8ycsRKpi9& z<3On^d~ThPb4QApJ{M5?dhANNZMsCs6DnXeCw(kBFfTv3MnsHwU%>jDadTs<+Qdnv z^`f z3;n+ZqVfsqmjbT;bhz*DhI--{=nBws@pp2)Zs_ai?rP@h=n@(*;(GZP000>3B6OOx H9bWtw3og)! 
literal 0 HcmV?d00001 diff --git a/assets/integrations/github.png b/assets/integrations/github.png new file mode 100644 index 0000000000000000000000000000000000000000..66c52518b716c43266f693c88480f9d4ec67fd92 GIT binary patch literal 27482 zcmX_o2Rv5q`~R^kD-kj?LK4cRkc@00**i02kBld>vPt%ctn6L328rywBeID{HvjAC z^ZosMy?Q;KbMAA_eeQ8x*ZY0FuP0PZMUMCa%>@KOh~*zht0M?D{E3a6!-o&F$KWA+ z;5tjmYo3FD-sj9i;QRS!4|JTN{RrkimJ~;d2OPZOBBSl1;b7t7ZsPO=ad&s;wzju* zHZytlgxkT%GG*fq4T4-nDXDos`5r0acERn_As zGEYiheSg(m;iOYvQBm<`p~9*Df=-}}r{_l<9i1fkM~^PA=0}*?+pE;n2y))AUA4KP zSO}kmmuP8Ilx%Ml;)Z_N&JFxBm(|#K5;#_A*`aNTcaf6_^U=qRkikZ zmFni7SxXENUF+T?8v}jP7GiE%C>O zT7pII-o2AE^lh@VkPW(j?;caTXv#%lQPF(8s;4@mlO5N`y1O@CRYxd)$dTSoIFmYa z8XiN4&R;-KUPXqD77yed|Gb=>Yhzxu7wII%SMNHUo)SKjz@IhZKqPzp1Mcg-dGkgE29Y{GDP}qD_3J^Ry|swA zz(BleyNQ~7nNG^`$jHcGJ3BkSbLX@ZTScjm8*rtXRL!y9~7-B$bi z)yYu?g{avl$%rZ3v@!pkpA_0+9?dkHoK@Ijq4MPjiCF}?><*HEip7meEs_Pba!=R zLQ_+7HeH;WC{$W~AV)Dl6Q1LcB4A4y-cERr@hyThR*1X!ME&|jVXdUpmASU|^w#Pf zM0sIttq8rldw1adqs~A@>;!%DG=f(iOaBHhceDfK0kuF}Y8iMdKG4-Eb@3?;NajD^YLPCP&Cs9(wh>o8A(Nz7B zY_0v2m*Z@I%Rtc%iWfR#==0}*Dz*xZj<0o?^8MWwL8~$vMD2c`0S@YDZ>m-e#Xa-BD)(?M;1qa*ToGlefTB3ddIi{ zMqi~!_v6ZEm|pIBrv)2Ar5zl8hIYqa6H`K+6BEyezQ^BaGEE~0C%u65Cc{2RX=`sU zZS-9~tkSEsA1mv|G3F9=SyG$+Q9JP%I<9}*Ev7*M@98Lsg(&|W%gYeCcI}#v@1MDx zTS-YtxkWn&bs^y@e!&aaxCEss7{{!ZuN=5A{tZ3#(B zGx&kRIyu|k+Di1lf@Q@;f%F=~`;{xo%a2ida&KS0^PM15f{u=E@Z@N3V)N(nL3cZ& zpC(KhH9iHRR*JFZoYzDaQk-Lcw=FD~wu>V^3%`E7+g|-$TOXJ*adb*}(y8+s>m&#p zVX$2tF8x8i!RmeG%9R63l0__DI&N<6?@hiw?jP#vRCe}^)V_+6F0sRnJ29)4TmQqB zwWj&tV0buD(Fy^tn!UaKt0tc#@6EZn+k|qLK4+^TE%&vGv_-_W_01ocnr6Lpr9^s3 zNL8a0BqW-@+71^@cZm`%ky0R%($X-^IHn~AM+a+z$treTRmG)9G49VJ7?z3sJZ@l2cq*Jh@nGzhb9ngNC;NjP8vR{eN^5fjh;o0H4B^+z z)_~*k(C9z+u#Bbs1NKu%ojHixh< zN3<(tFVO`AhvE|590k?hUO}a0j~8Z;KM5MQpoASZRRSpOCG zm8XiTJ-lXVcnB&a|54$aq5DBZo|a#~UlxPT1;InFV%%B1`{ss(+uHt)LQbqJ9fArD z`D#jY%XB}(ZoJC#$%`{=7j=9D4~3;&Z(&!uw6r5PH#e7D4$t0Vot~bSfqm-fCO;b+ zR#e#%_ZI1O21hHzaMuYvUr}+JY?TW_PZKjU4ZiKMn3SCU=#S8B9-3*iwlZ;-3z_!F z%E(k>x+qER>+5@7+`t-zB{G_G?Y zuH5mWULuyU)%4`S%qeta^U>qSm66y;%MI|3UT~y&qRjC`b6Dr)1v~_&|6+lwzP>&> ziO*6DGp#NCd_6=Tme<#(M!+wKta3>OV52Nohw%IUaS6JeU z4>3>|CsTbZD}Qa_KFc{ zi3|N|>IF@j4Vz6)f{QAt#V8O_!S&C}>{0Y$f+4OX2y*|n>6Z`!BEvx()%>s#4L)Se1{ov`KdbB)gX3U7o4y@m#e}YAAWpvj=AON==c{i?$qyL zwb>~ucJ>e2R6Dd}q3VlvsA(w>-5QJCNii*n8NK4C2uRt@BNT0?d z*_FY}Uk}l(@;-PaL`r{W|Ng^=x7xvOp>A0ZcNw013gb~R{OI(x2l`G3x2t$Mw!M7x z`LADO`!RyJd>$XKTa523?b5HdSxA635b^4k@joj#Yw|H+ijR*k%7(rPgi1@9wS^mg z@cLnMeU&SS1W%IurcQ(T?sVf3wqqtKf=G0Aby=;A-m)Do(W`&$N_jsBJM%m?CH~W~ zTArsQg8Nv=Y=Smd?kzJ}ze{wNj$x|=UBE*HQFiT}iF0wS(O2vK5QRARsw15otg>m! 
[... binary patch data omitted ...]

literal 0
HcmV?d00001

diff --git a/assets/integrations/isolation.png b/assets/integrations/isolation.png
new file mode 100644
index 0000000000000000000000000000000000000000..69fb73e628bd36b1aa8c086da6db02a2ea632d3a
GIT binary patch
literal 4213
[... binary PNG data omitted ...]

literal 0
HcmV?d00001

diff --git a/assets/integrations/pdb.png b/assets/integrations/pdb.png
new file mode 100644
index 0000000000000000000000000000000000000000..69be1cd7d051fb9ffc4a2794051450f0b5cfc802
GIT binary patch
literal 21820
[... binary PNG data omitted ...]
z3#9p(0s>+uef7N$m>(;YyhM;igR2-vy2&84??{4(X$ZG1S??fq9uG!Ed%0@^?)^-R zR#iD~+iwWS4;s-4(e@xpPP|b4-VpmOYZ*W}eaz1$Px6c$b2?*UaDciUxMAF**gk~X z-=yfmg5>nu@puRUrO32#SMI-5@mP#eAwyO;R z>}w%@51O9#*L|VOWkd(Q>nCw*CVhT>o%)AZBWk{&bp;OHU``M>yE9J=zt`eUh$nHN zV9uzis?+({?@mV)-x|l+#`b0)vZKB*R`s?*d+%)XT(-X;KZb`f!@t%EvTb~&u!n++ zkADgVi3>^FEJ7ccjaeRJXjy6LM%|Jxw#|P8gu-jr5veh1@lklW>-h0X$_MNnl__Zn z`JNPwr#xTs(KyB`s0>P{GuR8S?%9rAcTrfA*GCu5tp9?H$&;-#vgz!wpQYtBjbv#* zl+reOzd1hL51U5)Dibj=3j+y<mt#V_JG`M-guT$NjP z;1B-NwDaCjo{(L~YwIe+dK>$cfx>+jW$hJWOn9S&Gxl6=Ufx@`1KkPA+aEg!=beks z97ojdZ?8`G78sIaCin|U(e}9Aw7Ua>%Fo~Vv1$dy2l15I&Fm*>W#x58hQF7&d<(fkP!@lVnWo4A=z`kQK1w~Sv z`0PdG_N!VZMn;p3KZ%8s?)dmOC{=Zc_Bf%!@4}TFpzFuvj_o-K6kD6uAm;W(3;*Uj z*wGU`eJo~eb~jz;j}?@V_C36bbd(UX$Ucg!w$%NRW|Itd^VMH$Ajha0@eTGzMXME) zJ5y3Yb+pB1jyQ44}VF+u5#R1Yv2N@BPHl>|_4EY|(0T%7@4r=B(^ z&dg3vKGx#jeJb76zRMf;(TtzP5Gxy%XhFJMx5Hoj_~V5g0b{SMk=AM=FH3g{mw9%3 zC`ml<4QuJo>3dLxto$KRGQ6fRZKa`q!Tns48)CqedA(x+O=Z-p(xPTMsM*_O>+Jqs zeMRGWxm0uIJ3x98x{f~};=tBPhtsErmZju+SE`)gW*^lk>5834JYe<~Xr*chv|nf{ zL(D>MFkG6& znm1e@@(UJ}aqIeWWMdeDH%*~rDvKittty-ySDov=>)iy05p_ zXs*^K6OAbF2_{L#d*o_-Wn}VU4Qg^#4OT+be}~g@COWo-CKtz5gg>OZ7ysxqMuzcU z{Rx{kivl%qP9V)Y;wWV$9;DFG3}-ud-J++gg&!lGjfYF_Cx;lsayiR-<1gOr#!H$r z$ax9_bo5O4GZXmen1u|^VHV_*JWEcuDfODN-5#`DYA0tbmP&9*ntyF79S&`R?Y&;~ zbnu>G2(+DOY9GKHBM`SMn!ZP1rn)_qBwmO1U}C*Jrv0725Pgi-GgMNOqM|eyE29-K ziy2T4h$(9qytk?YX+|7g3t98`%oKZ9_^4`SC7jN z*@=VpK5I=%E>`W2Ha&gLb*h)-8M$y)OmZ13g5qmNya-e@L+i3y3Z3%m*rKqcmev+~ zwv``3t36sjGr#**&HQ0$=}BS6aO3i{ki1HoRr_G8rv zkB0&oOgCAxVjm@#?qu~89O(z78R25()$STe^i6iE82mW44`t2gNz>;Dbnug9NODaI zd09jF?>${F0)wjJuI78sh(9p!J=w!iMlRWF0Q$4emB!Kd-rOCF5MBp#44s)bodb2%tF%5TqPuXLjkAAR71&n2Z9HvX1?P=%2u4Lzv3@v~$RQx?XspZC z8$-!?D;4VY*3k-&&RSJSYWv)8YxtG#^)^k#-IK+(C#HU7pR-6H2k90WLPJBEpVrwC zE~@Hk(G+0_ozFMtr_6p|Sg-&rlJVX<$bnZJfUfp(>l_j8>3ZjA$iq*~@#y=%sEQ z88$HBl$B1qigk=_aZ~=dZHiU?64wCMUC5JM!XV_(SZK`YMTxJCvluJonoj6BjN9=q z3Yg2qnKM`x5T0Dy`^-HHgN(>b!9=38G{urX$VM9I(@DOm(|R>D(gnS?!p6St3fWax zNb6TG#F=tHt2_z4Qa4h3+4>>+I>dHL9w)?g%<3CLg6jvCoVf=O*a|#vSX$qll}#?A zqQfugTYx>{5((P)7P^u$HW8l``|maCoyN ze^k}aaw{5!mIMTbM(F-cMxFA-q%2Fbm}$;;RM%(B{QovBxj!K)K7TER3Yu=hW@fqO zW78W^#_}#CyCCK`-WFG*rwg7Tn3jmw1nVHXVa?g{Cg`rwTaip{bjt_DApLlU=$*h@ zEgNpl%N~!rpT-~mVPdzT+EU7U&U}OvLdGB+r?_~aW9B`%&<4K*wX+MWsl{@mrN2>= z@Ah;hmB*bSKznMtHTrICtR4>ly1AwutQB{wXUH7<;SmgqdNv!M$T%5F*j%JO7O_ZX zzy#S*qn|xXm8%+rxtqKr8@a~!IPSt%b9z{BZIlVJSJnj|yN12BRRLf=lh%SUjI|`Q z@`9U3`QjIe^y#SkP|7*jDx#27~ zM{Ik`-M(F6p`V)OXFp3S@LSL=n3QB-_%KnZmWj5o5awC)Gn`FLy+Yodh>Z|6OeNuB zzi*zmKDxecR#mGjk_f}Xn}S4RbvjW_ogN47=~=I(_F`D}@=8nFcGcZ-lEc91!=WpC zXiR?qsI7v5g@CCA_H#qz)I62d}hm zD1hW?=68WOuwe>Ax!(@{>un&CrBDA2lJOKK)eljv6K?O`(ZZ5OO)uFK<73DR%IOLx1WUY!I2Hx^U3_xqx} z*qB)4I>egIo*Y1m!1(6HA6P^o^Xeb&Xhi6!s*{$5B@zI)^3@r6LCK3~h95%u>8IUU zwXB82GyWT#uzySV(<8m<%x2*}?z=2QM z5Cft>g;o6Rkhgysy^!JmtEFhvPfL1#`U*`hZk@M6M z`(QtK8KE{qrnu$4ekBioMhjlO@Z}SZcqiXe@@b8^sDog%Cdb`yDk7^uL;e}3-OIl4 z=U-Z=ql4oOe6_6o{GTuXC=9Jf7~FtVVHBo6z&L-UN0l7ZG1d_dyZ&mJtgNKfHOl0u zcfD94|Fvjc2|mb_XmJtr_;KK&}q-8g*Xe!zLVBh zWl`L!=&EP1W9#R+RxQ`vnQZhTKM`QB-8{tD*!pU?igW$IMZ)jXwRDI*7n8RC_j`-D zxOmf5uH%R=!I9mBxbBmW&H&j@spI#%_bx0nnb>Ha701pGMVN^h|KnXts(?A?W{%g3 z_8hw{B@xtN*9>y`x|Q26yWX`?rOv+pQV?=D+0*?*QWEKHQ=@p|>7v zFU8Jpp~U3jGW(YdljC4tc=O!K0ribTaVh8N50@KoCM(#ib%!H&6{bKJ;RhmLTQEN0 z{8nLU>~r40GgpwEg6r~ z0_n09CIMmVrwa9geGx}-M9Dp@=cvi+VHoZ0coV>{xN zF>+I7VG`Kn2In`hU;6y468j&oCo~LjW!nwfb>QAHnJ-q0(OZW!DPSCCEA3xa*@cH( zM?UXRn%fLcF}TssEOBy20CieXSPg85t6(9a2ama#&ciM^yJ{Q_Y7bpadi`DV(pYvpy0n6<8fcbb9U9e5YIG@9-ftdFQQOqMG}G5@aA|XQlm4$F+ZjcF*-VZ$Fw(#W`CoNHv)O 
ziWd;y{Jb*)>334x>1^7YA*cUm#3ih*j<-8mf^Em2*$6r%$cHTI$y&CotwnBVP8LD( zIhog+Ou?Yzv!my{!})`0!IoF!m}$=ArKH2$bFy>8z)jAUW00_Yglef%p-1eV84Xx@SN#N%-h& z5fdLD%4wr3lrWteMWK_Lbi9I*9OK;Ari^hE=_N@k{gpRB>A7PeUqsp~D$4gD103fN zymokzvtAqSt)Z^=)^6beJ>ODQ=qrG#X-jT?6WQ6{-+JV5l=loi6oMeLw)Z0etX5$E z@u+oT$Nxqj?8Hw2-yIkA$aiP!V}6k&om}t*)yw_;ZO+QN7f_k>nJRglf`*ruz(ZW- zEkf~1P&hJAyxNga%f-SNL~Xjll3HXV=s@SPDkP1^?-!jEo{@GBwULqU9;E81AA zT&MuPbu3>*#no`^S({aR{w3$Fhpru(f{B-Bwaw$udrxhBalq&1>v16xmyP0CdF_Zw z_#iGFDDoyk9wu(LmMRGDc=K@&1aJ1LVq#-Jnss0zF_UMI6WARGyoq&auEmV(P%K1`){({c4S=sG(y=gh12Hl9vU z+G`jeBsH7-#l5?KSWG+M_c?`fI6(5(nkgQt8X1S!RM;S8(~3(B$NG$o0I+_|a$#M` z9?dU5$9Xv@Da*c2Z5EH1l<)<*tY6W6V&g5qUtU{pYXuFPyNQ3}F3zuC@5vkQr@fY+ zVx*(;EYZkSENE4rm1~)_UX3s z9Q=ln9hQ*wcy6VSaf_yYqgR3o-c$60`FF*6zRZpAX6a3K5GwUHOFW+8#Px*amUZIy z*Qwa|dNL7Cu1&U`I7~rr&HgO6`Xd%f@&!=cmVMgghOD5Glx`qE=Cn7>Hj-;AL79bn zUF~8y`0oi3x34-gwl^K6XZ6MsMwoNMwtp7RbGdjMfq+!;dFLHS>#D1%&OFP^>TdIU zpC5>(FX}MV3kzi4zu!EYGMHL*A5xSQ_bdGjn$xkgIEX+4Rio)Rym^+#NAM>nAP1}# zM8@kon1B83@4)7L({97A@gGMa6?oRI#fFOMMg8VEUYAI!WOTZLXZ2E2aEyw9`#A}p zF*v%j^`fxFK;94Eq+9Rx;CQQw>iccoRyATsQ4Ue+RAL0Pt+Pu^x^5EF9=sNO!H4xX z+lqAHuikf4$*IP}V?CFX|AoJabo5guq}%p9Y>C)+|1d~6$l&^Qy7_${)>?xTCMSgq2mx)I^FQ| zEu?=>SN%)ly=xEyAjCLhw?+o6hFSiJL;n_8)Q>}H@@4vO-aCUJ+4R;=fXWni%C92A z%)7;mWu@(2j-)V`&=vC&%$964@g9Vhp~K0FyB-4h$(p$Qa^d5kjDpI(14Lct9qPu& zAM0}yUiBXmW{32G%%ak#r3IWe^6pmrZK)e( z!KrhyeK=@P7JLuqm}e_~5vdS^CahI;gXEW5qq~N^y`@V40Dxbfu9c(vdal#ii;~#I#s@dtj@!&(xb#D7H-nC66Rp+ZsyxmyngY#ZcL1oBCAw65J z%}W{n<4$aC{@Z9chUJkb7o$VyN~;0$^nC!;lT=tS!eaaP2-`@TtStxpbKYm(upS}d z(tsjy2qHxijQa0angX%;2Fi8Z31y zb#He%@P!y}HU%;xzOPn34-`Blz@_&2R~F$DY)3SJ92tgu4&<%fr%+cGzk4Ql|`mPLcn?kS#g)K!aMpSsQjJF zD~%n5dMTbbicoZ{4|aBTfWSdLTQ6;b5;{pX>@rOvrdE^4(Wvl}1oK^3`1SRt+a>}W z{0`7W7-41n`sdcT@SAnt=-SDi{|MxR(kn(?SBP-D!sGR0_O^dFcOP9mYjJ8zPLgV+Y+Gmy zUP{>h1UeT{xHRZJnE}zS*EBS9CU@F0$mwWt{vI5z2)Yw6R>Tard98_v2n;9!I9t3OPQFWa{ z3fr&1VVmT(TMZG1<6A_Cb7%badkTorz0oulqhjiEaphdWc|uy$l@m%ILC>ej^@25z zwZuFn3W!mJIZA>_0!T^M0+}#ihQbx zgqAKTF5ha$$A?tlAdE`3x1i`Qo?z z=In|>J&}P%BA`7;!QPsuz%9%>7z_f3oyOsabu&;Dtu*Py8kj8VaQ~3#VR#nwsmL$H z9rm$YihVKx*xWgFlVqA9$n??wch{}~>fq2l7fT5_RRKXSODuoinv$%|ojq>Pz-y6v zF-atY^H!Fk{J)noj1k!;@JKmysqHgrzkX}Ceq;*+$P{{Y!8b$_ldlq;>moYKj6fvI+?A_7GIU+$i7&OVts z5$_=6_R+f%%bRY;iD8{nThJS4tGFFM@`~!z=Xtp!Cd0)qime_g8WLguwQY~h%J}d- z=i=f@;<8h)wq^uwM>+DzoWR`i{(gOBMFg<&si>$-%*-m?4)t9R=6gZ;SM402U_>D2 zyg(#kV-IPAq38!jojO}8LH%6E*eIROa28*A)7Zr-O9o&#b$Pp;Y>tJwx|LOr`twW9 zAHBwSkKOZaY(^I`5Jw$O(-r1iaGy;F>)d%~ zJh;A|o7e3?4!oG-^HPtP%FN6F@ld?uHe|ALAb!piaRD98oaw6)5kuwquY#S`AJtNU z4_zlw%h45!=a1U(qto-hV>C3DoKbuKw#R^>3Bv%78`blW{!7nwg{KE9-!T$sF| zEa;MT0GV`JW_4xqg=fw!i=meNG%EwGey;<8pGbkR?F)v zgUFE)S+cTP54U7mP3~-U^NueT8r?P@#hw55L?spPJ|~t@*qoSYaeTwnJt8^cV9dX; z;O^+?M}2lkPN2iM#tI0t8~eD?+#u}@6V_oSpg5G$mmrB~*EI;9AB#R2z&kdP2sM#gNQ zw*p+<0s`*295G+M6t=f#eY9Hq{Q36Y3Rpn`uf}X(vvqlW&Aav9#YMeXvu$f6i!HgU zp~??gzj5+~WhltG-!Jn78-f1@20QNV?q4|x$d+SfKY`WY zqqiTJAuPMR1A@h;!=e}tfK!P{S}I@Irp1=La=Jd3SmVraGK7;lDC(*fZhR?V$yIq19kL1$p4^XER%!A&NW#7Xj$QO!-3$waVHKF8~eO zQac(gDkp)tBLT1T7KdoilP6C=2RRN{CqB9!&Mzzg6UKsACD?8 zn^!^jj$BC4!;vCgR5z_wM5k1W@ zwU&Xz#>2PI@Y>CKW9!3~ZcueK`(&{eCC3uVUdt?(l?oc9dC!02uNizioFS@JV;Spy zYN~^Mbu-lxvcF5epY-{(H#++0?E8eFt_%ogqJgL*h4JSh>^8G8fRHqrD^E0AZ6R8u zUSd99&j#FO{|hgrRMAz_8$di{Ha1XB0+0UTBRD_%y!?KRjC2oI1d#b&`U7W6nSmd$ z#h>67bTY?K|Fk}beP%cYyQdOZSTs0qQ_OmsL~?rnvPS;+<}3qmax~xf<>B3xggogz zV4%vSaF66-_`!iz0>dC9aL%Topa=lIus{gi9Bfw5Wvh`N$_z*z=@B|0YK!68_-??M z9GbD_>F(8V`iCh~dWRRib!<$f>o)0%Wa?+B=8zlQ9e&Mss5_z6z$}sFFN7A&gW(c2 z43P6X#)KjqCt-aVo_01=zO6yVqm%%KmsH_ 
zs3d%j-Q}g6MVBnJbRX9vJ~V+i3JAoZ%)&XakO1MSf25W=NKiUXzgRZ@>)#-$f2dZzr#gKx&a#FMn)=|b-hC-W>%GGF_v2aWc|?;a?ya9#2VSdWl&b6 z5iX@y^&@MtAx@m6b!skJ2ibzJmz_!=67aldz%Fj48N3@3_1mma)o-@SUo=e@272%z zKex4)ifuqH!V{81RsP#guxci3#u|^I3*eZ*_E4*iOWMQ$-`?%*$zU~`v(hl#kg3=xFHPps0a3$YT0i%fCX@+VH<+y`5w44(6Z|mx(x<^k=~iQzN&T` zKGVUXleG$!3nv7|OwalWNumHk_yDev_vM0cV++BcqPk zb67*==6A5l6WM&SJxKwy9lsQRP+4KU|5(1#N(bmLo#3+0mY6xj$G|=yYP6hp*@tn> zzf8&|0)kWBzVZyYxw?vqiFr+mg=BxxvgsszSaqKYkY{klwtiK%eCk7EmSC|C zgZ@t^P`2${>|S2F*V-*H3o^$s!;54xCS8%2pBmPTUl!!0DepW7ORD|H$6MOkGI_ws z2m>c+c=A)9lOS}GOoJ9*dLy=hL{=jN-~g;V*sGzD>|wc~%t>5dIOeMcNb6>|>k^)| zp?HTamU%e=I(g^EU=f6b+Q!@Qwf7tJawr$S0y;MJcuLM87Q&B(!ALo28hSnftD1UL zoXR70V$cjv(|4echNB1$dJ8grrTn?mhVUDSvt@ra0S=|NdcXk?cw(w+ts>bEfJ_i% zsI^dLLNO5g21mTizV3DSbKWlY<+W~_<@Jn3XK?(>l<7B1$=BSwT3Z1n`|imSoo6U0 ziu$|z54XSlCim&)t9}2+1;Cy|zVu`}%%Kze+%u>QDCy-N1+%hmle#*1nI(W>V*i6n zWKS(XYmQk4sJjR0^FYXxFcRq$o5*r-f4R~I6AQe?2BdfZKEVX_FK;}Js#h%ltL1e| z{ht#xRS6MkfQ6d=#f?=~R95g+qT8%w!ng~Yrzo^aP)9vE-N+GIXl>=zx|k-TV`ir2 z4wr)>|+pN%-pSD#F=Z#NRZMIOfhr*J3a5*5BOYPmxm14RWUIdifX1;f0*8? zDH|15&hRwWT6XTbVNmjS8dPzBL3XN%AfeXE-R5-8=dimdvhwODAqhNAxf6({rDC8= zp+3i^!*1knyBC!6b7%DTUF@?`Q&qJeljm|cN?UzDADQ{b zWWR3wMK=EhI@C%mOw4r;?l5QVKN!lt(~bL)V`DFk4s+I{Yk`ifXREi5=3M zlix1yuD4-xovqEaEXY{DwaNMQ_RX9BxNM29uKMUJ)mr7O^ffhz;m?JO`?b~8e=psi zbLAk)kxoA{JY=lupGV5=bu14uv1{#uGsTfURtV${f`Cw$=D9<+UQ<|D_&7h@FXcJjd>c%1eybnjMn zzy5A1T@CF~F+#;y83hdBh2RT^j9-FW@gtb(SshpilSoU_y$LFn+gSn`#)KL9C~!;Z z294vqUTy7{czC}@b7Vc1VNt)vO>dXl(KvR_%IF?vziiHUj%$JdwP=;~te0KjmW+h- z&rM|5hwF|H!Kf30Z`6TTcuy4NOtV)VUcVYbnbyr}!(T`hvXco11i46o^8{tvT*8_u8@0 z-melOU3;7!)$8BhZs^VPG!+eWepOah-dbqn0!+Oz%ih!0eh#*LD|FDD7d7>N#Lq)Q zcJVayV*!|xR%}`T2dGoiLr?hSO+LmY7&0>*&sX}pK4CrlHSY52$^npXD*?U#(V}*_ zZMFv=5gbV z<3D-*Jv~gL-ZmtuMJqI6(0FW+-)X)l=r4kEXKwaihF=u0qXPbT80G@qe`S|9lL#lo zkz0|P(7l=TmO+De{n6_7`72sK*R*_If*VJ($y%>hJlGZ4{&_n1j~&u zW$(Jav2mrc510XiMnO*7`TVwm(lB7gr`O<|VYl=P zm6#hikzHF*ELkxDg1M2KILCVoE(C9r6$39YZ*Lu0piC`#u&>ofiXo^^`Y+g~uiTbt z8+%R_+r!a{2Q>7+8hi_!$;Umnv6ju!g^hI^@4uAf5xPHT$i^!BtzqEPHC~`Xz;6ES z@+C+#O?8Yzlv`@E>MPv%Kz~6ix<=poSp3)BxsLo~gA|xF+McPRIX*t_?Cu`>>GUyq z>8R{CIHca3(hL-OmzXK)hSlE{Wy_I0Rn_?x=j1+n46|SWVK?hzqq%2|-8*oo12RZmP>^{8c*9xWj_(t8>nlm|((}obBJRSYAuKft^2el%5LW#eB zGDab8z62cS)hjy3?oH)iw_K3=;d?F(&a?0L&%ieoCIF8?Q`u_i2S642b^M1ef>v`_`ex3)K z^XWEqx!c-ZZ*QrIiHW2Xjo2Sv(b&1YrJ8~YZHV9+5iJiIkSS@vSDQY!Q>bJ!jY>`K zl*>^vyP*0_>p@~QI0;?lPCgJab*BF#3ZzV*8Ll1{+WrOz(0%yn7dD zc+`?bzSX(P$jDHh7Yc^T^@5J$a+8yjj?RTuR({fbtKz>yP5_ejAV9srL zSJdFmCX-IjFVzA~aS3Yv-6RO>fxH@BjBCPReDj}?%N>E-Aa@8-o6PD<()Vdgl;*u>2}=(EgA_O9Les(31`RbgWb zPr;y<6u2L7Vky4Fc8f_R%&(7+`Gub#cWlz@h30RL2T~W<4Z3h{d=(&b*9?8N2)e*^ zZh^C>!@mJvF+->|2-Pa@OVmIBj@n#AorB^FDcY-eom zKF@ZuqxG!E-#?r8vEWUi!G4AXL-c)J6%#G68#6&C4lzDozKZC0D`&q0JH>>>zUsIH z$U-oqAn4-(wl^92_mbpL{IcmaC~LW~QO~;7#FIB*SUt@&&caLN7t_g%ggX97z8Xep zl5b9u=CgZlZ_KX>dm4zfXZ@OOmJE(!;twh!zPZoFp`JVZz2lME)um0SL#Wd9qy7dM zmCYqM#d`J7h0n&#Hd?(|BK($vy7ljV7eg{#a<9T&~rgaJ6Tra$*J z#{Stq`0SmA%#%p^*+G34eA1C)pD+g|X9MM^Z4tnk<{b^kc&388N9k5u;3sCo(q|l5 zBOQs#N0L|bbne}&feWCeoszemjo!^{kelR;haNMTq6h;mX+)jC6Vfsy~ycpFl zDF_*8xFI&wJAtXP&t~61cF>owX9R6}u6jw~AUHfocK#IRR#QOLS6)qB-5O@M5&2m2 zc_FEqG`Fm9rzG<@pqB~K_A9p1pc%m7wqk3)0%GQ|(|dm~{}CAxskNYH1Ix0wQ-s%; zA%^sr0!0ORFw0V^Az?kmBXU!U`C!cHZ+XGBxFCTk7)X0%JUh`Lc*c| zE%0BuY+%=JmQH;$y*NB8Z-pQray(hUESPD*RmIhN`cF@7_ zG=%e1*pw(%dhX~KJSGPds6po!1Uy)1yLfm{1rd}QT(hCV7yym$yhzI2sYMfIU9 z+xxVIiHYBFd`9y_KXNwBL|CO-yVH1Gf_ZJXNI$fA+C*R3uLiZw4fC24YFW*-7EsVd zzr>z7KR>_ea3JZ$c+BM;6zt9*Zkp zyIf@_TNhWIovOlm1m^hD&2=(q5>2DetQkSanFI?CXzE`Fi#%R3SX7*u7PdKmN2#31 
zQT!NfPh_adK#)QcYV-l-RG&WZqPyym^XTDLxYXX@vo|VFWyL;2aI1{3UB#7G!C=#c zF-yj!r0`Tl0>=A5{UV3qyZgFLyO*p6+}RQVUP`rp_^#7>9GHCqdzQSy-W%EP#J@-; zSz_|K5CBOmBuN3MiBJ)0ck~y9`G=Vas0n(|xjnFQU8n*Q_Zqz72z*d^0g zo0F*dLH;C%_f@f+G6#v>3nDskAB~ojDvrjj(cUv%bqv=N)keIxS}#btF25OD>>h zL()O7d+)w8uW;x}h}D4WPVtl^=mHHs}fQP*WQ1l9Ih@m1nBo0~e%aDjA_fUq1tKJRrmHsaGkqw_pV!+#~x^_XM2CX z9$Zn%zKz(Bc@mFT5vs`b=G-rYh)Eh6n&s5~Dw;(7kFBy2iwGjruV=Q zO%!vwxYVE5{}^8Zn&l>{)N=z|uSKhjnu}{}c@`gpg4PuViOl+deFMZ~TTrXoWD6kt zQ?=C!LVgS7z;)bFYkUX|I*e{}8t?IVMq%vsJsgxHek}c=!$HsUb9T#Ck)Bh37Ojaf zP}>+NwBo13b?{doYCX=iiQh9?x-hQHt5S4I0fJ1_(E>#7#uibPUVAsp{t&~a&k~Zr zYJdEL-k;CkuoL%A%JUs;Tz%A+NfJg%1WZ+#pBlAD*&`{cX?33z##Ecq$@|Xe~^D~gv<#D{}Ps> zrX-C~$?XSk2kgj1QHS?g=AxBD!XNJcymC3M-C`!(sRt0`V1go{0I68@k2-nQ?dT%q`Q|F!Gxcjnj#pe(Bv$DnOzx{ZzQTg zjppd+=s%FVi*RZLQM1mjPgJK!xHPeRl=>Qjtf7yIRhYfw#yXk(mvmld@;P#*_b^t$ zVrKEhnh|^%^;51~aFMw?+G(&{7=8z-Om116jfuH8zO-wFERSvj*s9ttrs_Ge2|M=ssE9o z@PUIGEOnY%8{696&Hzrc$Zjwim^}##%fu(zo+bx znVDJTcM8~pw-BQaIUb;{Xw|WsrR5baE-nxbyo-yQrJ&yDF(ai%x1$>)q?=}7!dxzp zzDLREKClM=lmPdNj=C!x=|UBAA^kerAt-sO32yjjtxsYeT?Ym#@JJP}UF+e5eZRD( zde!3ZHgng%<~%qwn*m@nc=a2$`+KgCgQi>d*9B|U?^PwGIdg{ln?UG|IgCm4KPHNk zI=*|Gb=n4AyyH#0UH`H5zZ!@$Q3fc-;n#jvwc$ijdTD6_5PSJn8m&FtduwBP6>cE2 z9WAvIBT#3{u|M^HY$Y_`A_n)7aZEH05NleNN#hrMmeT0)O~uzup%Lxxdw=zoo{BGuDSfEZ%$mo?|6v{rBf!gz!cE=~VOF!8>(t#% zQ=5A3$sbYC;ptJ@U1mWCNQIeozn<$->w8Hz{*u40$qjmWC`X_3J@0No!PIw!KU%~k zt-jM?TmOVm;Qo`3L>fxZB?Kw?`9V6`$G~O-rD~m@qAu>aeYHA>dT#s4k6|ORCKmOw6#H9bRTD&V#l;P1CkiAmPk0L-{i?ZTkMmQ6 zCM8f85#e)d&EZaczR>mR7&j*iwz@(n$J0LmAC+yjN#;q6WM3g1eTU(W{$wUsHE6-fT^_wx0IXt+44Szjr`FrIQfQ)Ur-B>NLXaOmVz7> zSKwlCau%Finh6Q|8RZ#3@Hup32O`%W`Xwp8>?qJmxDptXLC3)0JeTTL0Myirz?qfz zmUDL>Eb0dok;?U!<;^4n2c`f9UGzZNh|a4%y~C8G5q?pn!n;`op23&WwzYO75PeofY!d-@ca1I!zKoO>@h& z(MrT-js0kU4VH7m1^*a2=}l*nf`Gj5(|a0XSl*-wDUEae)s!PtcHK6=fwD94m^lkh zK>7%G_StNWv@Bo(DmI&VlJ+LzVqyd=TNpv2pSMu%67}jNLl;~mW2|_8?VylRlJNt@ zzIU-)3#Tw^F=$z&ged%b(Tn~hf(h==ovBYs`ef+zVYh~CXMu$({%FllM{6RXC%Wdec}ll=5nI+yCMQ;RgTY=)3dXiBhKxe`HyY`pZ{T|tKXtE zNf;y*8v~Cb4LDYEnjkT8NMz(3F#RH9IU@PywG`%$9wkb9?Vhd5qeq=iUKB$`0iZac zV8>F`; z?7XAHJ*$Sd?5_9sB|J7SgE4Oo7M1JY{fi5iM7w>4Dy%A(Lz_ze^HKt zjU*v_CT@(;nndaZPLZ#2!IbJRsv|;?S0V5}+L}Cea7?X$ZYQj3YFV7-I~DKmFAwo+ z$M{IN&Tl;Fx-0@|AREhRK>k>^kd<5tNVxFfR__%^c|)i}3l|ra^ITiy>Qb!km7TdN z)owOjejV**Z6p&z&wFFljfgMc3g&^n{r;Czl8LnGONh0G*F(OC-RE*wS68=JhU23y z-R{cPU%HI4$ci9t?@o?Cx1X$cMr6JQq*A~dHE0S-aWOm*jxBJWcen)Bk_Ps zp3RM#tMsloL`T`D7Q)V9-g5r5C^LiwpZW$>v;;e z4YvSMxB=1pcc5%7C35o(sM_99|tx_9b5mwC}e#HvDNGI34& zZy`B3AIDc4HrA`ZX|meM-dw}Ekvg)7k!3vU9B9YTcEVPcVTM0 zuv7|)Rm>1N1P&oa>$jzDlpD2TCX85rt8Xt3Jl+^CT% zQ459Q)%RBa9#>U*MHYOmSd+!eIH#E(jzbQI#iVRVHmOUii6?GwWkI@ zI`KmpCVEWW&l3%Q+C8$5y%m;kc22!;nLDJ4S1VsP1Z}S4c>46^8${I>iu{+VTQKKl z|19J4>w_5{x4We^`?l>@^bLjnaFm@JywKjVCEgwj3wHyQG#qy5jON?wp$~XTW9uFu zRJ6$9des*)mkYx9s1Gdu(~_@b51$LJ``kE|9wWlBtY>5Hzcc6rB5)8i2xGWe(J$Y= zSwK>RmPEmzL_ws=q?;5my`)GH9 z#G|-6n^FNG>%1&!W#RdVj-%P157X}N^u#yru?Ic&5iZ7$)7Nc`HS^;noQefN{Ks80 zJtYTNF!(;^U=JDl?#}>77d|v(_{>?yjhj`@%S$p@)FJD`hX9wQzI@jeLkS59ZS6hq zO5uZ*3gr-x3_5p);?lNkuy-rnAxo*vD6_bg2CbdmMqV5hdWwl0rWCPP)Wq@<*Q z;-^gN>grFDH5slT!9VGc%U4PKdu*#^<<|{x>;YN}J}Dg{6Vy)2*q6tBLB|e0j93+J z*2Qy1LBC!zG%!&7zJh zQaDDd*RE7i47{DSB5xwXQzf8rg1_% zNzuu$uhv{WA# z;{=o?%+@dLqW6es{XKC$lr*(Oz9)=>tCXYlQaR*5EBh}3TBaq>GNbTD%_1DdmJ`w5 zDnx;F7E_fnF^0Ion@ujbV@9mtiYU^=B=J z-*}^kdYrcSCT10dP#|}85XbJ^fF+;n52buHigR*J;8r!sR4m*b-CjEyz zSoU!hoDY#bDMc4nN~=5A@7KP%ynWwkxn1%?^0yNZe}OnAq2(;s*Xx(}vi7di%pS3X zaE~`*4=<~!%@Jj4QRB0;WYK-dagaYsP5~LLKabqBv(7}u1&(4=W0=wSGk!9EcyZ)W zf#M!5_gL1s*XQJfeLo#{VaBL!x?+WiAq%kjBc-_HrdI=qDhgw^h&p{A*N$-!bv%R5 
zXxPUHKduw_{bal!(;kp5ekk*}e6B(g-_R?rr;jiZSrrRbp15kK@#~>ZF!PJQYryMC=-$#$Lb?wpl{-z0;3#CY-kv z5SVLqCo`F9)lhYq>v1|f1&0%S`Eio3j6TN(s};_N3ty+efc}9Ecz6N77!oB%GU3ZI zi9I}DG|uW>MD3?NYlN*l8?wmFtc!flCGOxx_H^&Zm`T@kG9o=kYholwBR!}8>wqOY zC8T;IUW*=4SPxXdpWCb1sGk_#puXuzZO!2l^zk#9oJ^Dk%!y2>p>@w`tSJwRiD zaO&!{Fl^0cLF&1X;8F`s)OZiqumK;WebzW1hj1fecV%ZT+AAv~4b8oIr2YCq$W3B@ zS6lfI(xaHr(o815Ooi{sKgonG#!D!+grysV`&f`EV!3rr%$sUDL~(?+^j^s>LN`5^ z)THc5$-`%PC+ev}%V}vuD@}lBE*)W;e2@87UP&g0!f*()6W;jv8VPb{kN6YBITZ-g{?y5-C+Dh@ip2%*ejY95h5NBJ3mE(0WD<5(2&hH zJro-eOk^qNz5frydHG=(Y1W z_I;1C=^FE_f@nhu#60ugsG`PeLTIxn%&*H5-zhpfJ$K{Tz-gVq)3gX`H;9Y3^M6B+ zO8<0e-THTZ4Bbp}vx<#w{hc3=q6FKdqF|H@Ai(KUuh>z{L(@lf1 z$A$aXI8h)MSCy5i<(}lD<$4S>P%ky5k;6`Y|Nk{NfJn~DYTQWHG8ip4ohmv+k3ils zoA<0Q>W&5fbsz2b4T4ET?`s?CTJ!UKx$fE{pi}mi77@m$vHx$pZGEIzvh(uVbwgew zA00-ljSy=)J4&6E0D)H=5`>R*hx=WyW$QEZXTL>(XvNWnYCTFJ9_ZssY(rm_D1{vcFBFim>6(R{f(aAlkVQU|v31p*=R|JKvmhk?lpkQ&vNG04 z(jj$p--h3bv&c}ur`b0g+Ju+IY$NH){+%LyrDSqOMoeId7V70V9G_E!$x&Y;snkNT z35Mqs?I6oTKX}xBz_k7QA7yIfJUUopi3MS=KUO#!XRQSTsx$7S??vq|f`0QgMZd-S zlw1R{?wat2h7ZQn-iFHa(=Mt(>Qhj>c0!sJH`5Kojv6NX7U4w!LCAl@DBDwV&oiHp z$g!?7IoHyN6TU?;Lew#I9@|IF#iXrhAK`mTQR`{%3FpDn$w*~*)yd4qCyt(nmU69M zfOx@%kXlBWq5)mK=$zuFKBA7kzB;cpl2xgLt1ib@jD)cvvG0v!`LQ;9<0gtN+J@(; zGy?Q741Ka0h3Yn8SQi{Sn7&CP$fYKg77Gp!o1CEJrlVQW53kpIiQbwK#;Zpe^B1Y! zK_FXsM$9t0+=ordRgzr_#I~0-nn`PEcPcvqxsQ!5% z;2PuMH4Eojp5aNABErjUMaN0XHd5WtZuH)F?%oYE%N(K-;N}-|$S7OhU77XqHo1PE zJ1UFtw0D>fY@&u<2trt<@v${Y681%3jPKuPaH!mg`!V#R?*+|M!YrSBUTmwXI!;2w zDwy@L#aNGK>Ccu@ViN=*go6dA+8IcU3k&fgw4Q!G-kGpWBUwf7X?{aaliwooQ2Y8t zAoi)X_4j`ZR}F^nX+Tw~foQ-NEhPA5xpMFH+)PpvH`2gKSfs7uKHh4s4ft#?TL zua-X74li3~a;PxlJ20J9;n@857Wb#|KgclUtHv}-1zYtAO)f4j`JiW4?Hog9D+4(W z2p=k@ohN_x=?7VQ`Ny!2FQjw~l*4cp#{BO!Ew~Q>AL;C-oxilHbuEFL&Wf1wY|FbI^c{k%YYkVpZRMQ;;qW{xCN1Ntk#M}C$Ae^a3&*Po#rr`1 zSiai3$Wm+U8(P9^OE?cS1>PHW4}C|EF}>stMqs&C^sTm9@Coqtb@^|wM-?9V1=QDjBr39;V1 zG|`f3zlQf)$c*Q$Ef0vDRjEC95JK-X=7i=aE80b3=w=0kG- zy>dUx{=N2&<%yz4M#0PzrIUa1XA8`AMDvq|zD{Hn05viW`L2rTj$VDa=@fzXhfmTW zCVodQ|8~j}f9hjyPFK?e>0aaAg>3a|>>|GXA}ozWc1;9sL*YGikNVfFkN?%^>XGJU z2&65^ItP?herO<$AXuzyK!*tfp(#qk_9#bPL2vB~}+}=EM+*Z|NjL()m`jR=ojwAPv z<{&)$c6#M7@Bj+8ZwGyOp;i6*`}pRTx&=pRyWj53e^=_t+S=Sky}IeNA9_2H`z$AH zl*bXr#BJi$c|U(9-^*DZt{9VO$G7UX@yT9`=MeR-Gsf34m_CNY%z=Ef6J$u^zusQD zi!ni_D4me32&UGs#@)h&neM;O{ywh@FJ}FX13~$KwjE!Gb*^p-?DA*>yx-eoP(4}5 zFd*Z79)c|vGs{Ype6z(&vbOww*mX0#T%m81UIWHo#Nc4LTqt$hU%_~CLyVA>pe(wq zn=@%IJHGI+`C8lcb{v_0vE8}K%E~~#z7tY#&8NU^8*L!r&k#6*ZMYOU?hz1i8k>-k z!su7rfmgpLW8Zdbs_aRGPof2Ql95uI!I|^g>0`$E|6a9)lWG93P94H&>WmxzBAG5s>G-p)zYMB-3NdYg8OoTwMH{-j?ha>sNE9q~dLwMm2Fv>s*QV2|aG=A!*>XSx*g^f9vpl@F zj!D4Wp+C8|KpS=SP>qA*tB1jIBdAw}L&fvF)dFCa*owZ1tjT=jBliuln zOKaIA+tJ@&ZxR2X4!8;NfsbSXCj)3du*juki^Jy1Ifvy*UpE%r-QA6b1hy3(TP?BS z=S5!r!uyJd{(9sat`--cT>W?FzH>c;<(ZR%SfUZ2ru+C(kJ0YZ+eE=zJw$BLgLo>e z_FE>v`v-s6GJwB7+jMX!z3Q=TVB+JPw=aA{t2g)quKTH|rf4I6^!*bkrvVAdBOxsy zb@6;&fH*ej_)eS-HJk`bD7-*NxU_W9k01H&8|Kay`TAiw{D~MIBR6MU4cFGvM@FU`%)c6D@MAQ(>Jp6B?y zll|dtyLjI@YxbK;=Ow?URN;?o%NC=DURGw!dGH#SzGW~G_%>>E%3Iuw`cm8fT0{04 z)lXVBxFrQD_O@sa)tliu@zh-AZM8peHqNvUqdIr;SL71!9EN0HKXps#bW-H=5WUtV{*Ok{mR?y{s5^$6qk zEQm6pzQ*r_>k}pWfBmM#{Ej}&4LYw^cI2JC#c_*d#CxX}xACHx@r=3bvBp-jDL(q( zz_U3+);|pnoG5Cz4I58wk-Mv{AKF)JC;_5Z_LA2iG;MKWEzLbDzGR@CIXH=HO38Vv zm%+K(>RLvIfb5f<|BOl^nCwdfa{0Tx-wXZQR z1uWZjzG-XgnStXz2_+xb!{$}D)W5ve*ba00wxp)9QBP(!-lW=|jZcm3s-t-jAgM=~ zb=#>ivJv8n)Pd}1wGS*v)Uwmb^|e5*x3E4Bc9s&woh|>cOg3!Ldhf~hZRUuM%u`+E z;hJ774{53LzEPMR&Ux+kqeH7L$R3jq;jNxtty`RUQmj3iRry93ocg;BM6M`WQXOkR zg6u|0qaaBF=eAkjQ`^}M29f;Aea)MuqW`7nhe5xu+YY%~nhL!y54krzo3T3(W6>N( 
zAOt$c99FqJ==H-ya8VjyTt`2EO1kIlJ`f^mftIJBprFCaC6C%qH1_5dI?V+XUEsP9 zD*to_1U9*|f(&66vpr6ftcEF@bI9?-te3Td6(LfTGcCx%A)wf$==bEzNR}X;Gst1LYtH5$IeR@7Zv|FL{JTwlqrS_pJa93Grer3XQ@HwVzO%KmU%HqXLN?E9>v9!4AG(|&U`Uwqn#}4ms=gypoq7-JRIypFU zqBY@q^>CG+U-@uYJFAs@CV3E{8%5*%H}d+y@J=nWS3$ALnw%lZy*oGsYf zUa|m)dQ^IEWGXf;?n=SLp1=KgUxx2RMa9cJbZ92>L4jKH>ubBi4to-klHGrIvwCy# zel7O)_j{Fon3*-4ZZu(FUsH#0m2>JUf!l^FBvIo4=1Y~D#Xa-%NW%U}W zohU>)OkPoOn$qt!nkg5D^InUPTwOObH0=6o!l`I$o3jU-hVYb-_dRaKOG1vULq+O} ziqX8Jyqv$O44jr7$ud6Wdx5Mt3{=upbvwjgCwvK%RTcFtUM+`R@xP`49nL8DExToSic)?0 zST`zCyCgc;JNhOdu7QEkA3uK%w#_Yj4FL&`M?3w_jU4f(RX0fS8}2FK9BhD@op1kF z0!Sx0c!5gV#HC%7>2G)acMT80pZyyj-HGiN^Vv zshgW9)Kd8H9B>* zbxjNm8bL0a0PyxFrSOjstPSzg!1-|T@LUoVW$=f(M+XN7!Sz#ckvTyjX)qsPczJ+{ z{*;;d3XjJ>$I-rUXDKBch+QRv6}tafa>FVk+g44AHqVr(pK8|c1x&334%H!lz;w4i zPaUHk=)HDJQL*bQJ=URXEV$ggl7jdAwj}?>i!^6mVX9wB-4amErNvaCW!ccbDRGq8lMhuU$7rIgJM=IYqq#G&Oyj)7 zxnxRNdrrta$tYsNR4Rgybss2RemLbyqR>=EE2N2atzDK>z>% literal 0 HcmV?d00001 diff --git a/assets/integrations/service.png b/assets/integrations/service.png new file mode 100644 index 0000000000000000000000000000000000000000..fedf0d38039eeafd0db85b77aeea656ca4f23437 GIT binary patch literal 45232 zcmeEt`9GBZ7xpNkR77RpMIu{5w#iP(zOR*?B3srmDk{ zrnv&VGxhF@3-E)|S^0%B2via-%;3x2PqS^ojeq4R0W#Iz?-Mx45O`&bbzXQBU?W_Gl zoqCD(#?AXTwz`*>fh&Tv)s&3`CO2pNL)fv9ZSrd08+9?t@xQtWyzRV^Zl>k@y>2yc zCd!wWe#?4!uoukDz3@(wt%v#8cdox@pVCg>7PT-HGZo9b-wof)yLK-sgBz6Nnen6< ztWT*@*ziem9Bm~=!`<^*JlKhFw&}`nagYE1>;HKNXu{@igZ@d-+_(5{1PT*`f~-?O zPscCa_XWL+r2u79f!c5VSKV5DJ?!OwyA-!gKv+sFJBZ$m<{qe>Ui=9*Y~C95RQvYA zcOMGBO>`$*_<>Jb? znO{CbH)MRom#dTBewV(M$^9X9&XiW@pr6vCV37N@MnJ1Ll|t(6kzjj%KKr}?-fR~U zxotLYcC&7WR4?n*j86w8LPvpOtkJ@EIII(x8zAH); zdCrRU>pX{OBvvO@qt=OR_EBm%PAY`-WZs0YIpsejBEGDv1Y|ltO7&HCbd^pkw^f=8 zhO&14^s4gZ>n1?*M}%;}D5;jShgtm(l60pctCC1H-*pR;(W?R>5~rC3zH$$7!Bk)o zwoY^H3e1CyXLo$r*NrXf-+1h_Wj)OKPjqQ_1hyy3;ZXO7Vp=19{wjAE$mq6QpKR#t z?RWC3TsvF82=52c%-RBX;~IL=3L_SQn8~j5M+09+Wu!EE5*>AO8e3xp89add{rlQW zhBX>l%I4+huq4~z#oj65efBXtQuY?M%Zp!_Wl^@2ZR_FB7~UbRQHhSNv)VCd#w zBT360UOo5j2?i8nn3KdijQ3=1{#4=5GN{he#fo}QLw|>LA{sr3y6($*3diFyhs5nm zFzhMj$lTqBu^*1tzv{p*N zH@&bg$s$w|&3-)&Rmwu25h=P-6z29JCnz!PdTY_FUZ=)4(rJ46hPDD3M}{*twt8kM z(28}n?SE6A!DaaA1hDt4MbGh3YH4QXHmnANf&I4cL>|2fescIvk>I48DVvCcZl)|<53>H@B3n)o0v&us z#T^#*-dxSV=FOn&*41|A+-R{tyQRsv(7M-SlaH$(ANsKN3O``_%NTjDl&$EIh6EpD zib1`EZnC{jiC21%y-tS2ZwRx})g3L7$fP;gOCp zn>{>#m(KALsPFra*G8spIGpSH_iu^c4i)h(0%;S;8{3mu?~0Sbgtjl1KlRx4nol-R zn?7k-J*dMD_rSxWU)Qk+x9H=#L2(s&3JS__*wt`Y0!rHQ-_AYPkzmmmA)BkA*{6M# z7-?f;)3JL!i8s0(_WE2DLNjq1(nmLfqqL$}a_byuSU_Q3>Wmg?<%SxDbALpeg;M%a z@!F2>^NbWd3r31Cn4}l|V#KA16+G!oror;(Pwx$mf#n#!_}dIR9c%J<0 zzkSSt-haQeU%zE53YE=5%T!laZ%-YaM|*}W(?TaFho_~gzWjylhUUBq=e~4_hK6JD zI{V@^{{6d|^B#5oK6ZUkB0V~E$CQ8zD&VJ*SU)B?*dwf0`WS3c>WIu9_XiZV9-3HX zh#?SbL(tIPT2i;QbVKd>_-;Y1(&;~3oZ`V#G%lF1Brh*_Zt$@+;BvQGutva7N(#zK zrGOZjH`hr&@yODXdOdQ`$vrb`0~ybjoWR5V6&q_?)AzYv>46aCN;+NR3fEfS+346nFz>i&>0y+^EZmI0>@H3V-(uism?f(Swud^c`xjroyXhuUa^-DMEymGeEv;_4^p-sW`O ze9pD?^}xe_X`ZMfvn+DkC!jK9nBb$|II;6M6zfZxj&p|tA7H_#(7`Ad!ynwH2N5Bq zlJRr{9_e6ml#5i56F&naZcz^5(hCY>7*0a3W}#PorXCfImMTu!25#t)37%YU<-De0 zx8x$O#FYmr)9`YKaZ7C#j1NlC%L3t=f%7~qT6!!qI~uSuSy?6|EY{HCHJuMzET-?B zy99ccUR^UoAYwzr$dg5w?_4B@UZb*U|EaeeXwRAS^{Lyl*RQgDK8X4g18|`qLt9RH zI})?AH-)>Kw|~d|1@Cs-*;YB6^i)6g^8<54YKTc_j1B)+`VR#9$AJQ_Y#FSO@oZ+y z_k2w?*#8X_=cz=P_>K8~7vcZ?n1fZLgHfyF8e2F${ZsLWZ-2-ClQ{4pd6MVJC8U4- zU`VKm-=TV>!=7p>uG9!0%3fsx0(EruOqN!j%e5xF^zNbR_j-dV}kjI zPxL@;2q=cc$OIg7k(b1hiRWeKrxEr0t5ES&9TMG0g9LZ)$|aEaNpW>eC!sM#UT(6v 
zi9FV11tv6y;!1MA&}rj2Gan~B;XQ}|mdJ8PA%@?EkRA6obX8Bue>Xj%gRWyCl1?60 z`NQZ2=)bDn{gZP{=JxiP?M&r9%I;+R1$6}Fmmw7zWdMcxy8iKKBS~#sFU~Ps#NEKy zeWvr*QmhTrUg`I$n!>il?ORFIJO8L|7Zt(>Cf|`~zWJPb^ z4?h3S{c#kTc7F zhB~xOf^I(umj(Uxm6dip-IuL|=k}M*P{oJ$rU-@jvkJ_!ra6H*!WGVilC86!5g{)U z4@*LDac3022&=^ZvgC`a_aO4SQxA7mb>*EIm(H42AvtkBLq9a5E$uL?^~fK%ehFP= zv^PvbQuh^|iS7rmGrOj)@`vdzFCFDqv&=?2yEmciO{-HnJF7AFS8R18K6w-nm@{U} zQVmejYo6;+wmNb#WYr)?ru;f3ll-xdD$2C=)x{&hPEAdb*4v%40uR`rC++ssvmv=; zA)&%-3xz))Kpzc0b-Zg)<)egs;I42YULl^CHJ8uFv_A(YY6(C~Z<|(0{>ERe5;ML| zGMC(IY^<0lu1Kw_NR5$PpCw%OoQT;A)H?~$wN)w$Q4$l&GE&MiQ%W%sO*8q85B8bZ ze5#z*@3b5osI7s*3ng_Qd}c(1NDHI)PorMyP3SoN>wcWlFN9z2B!d1~A*=;Pm{dNx z`D)aM=$=@6w1RPgC#Ze#-5jN~?%TI%DpGauC+Yukos_16!MdAIuwA{Y$-RzeQ9PV* zL3T>s(%0qY$`2p0M)$<330T)Zf#sTum_7|C_vSND&n>^i8Jy=LB7lw6Irf)>A%K^i zUe}NU5lUB%IKRm6?sLTU#A;k=wG+u?rnx)hLWT^n887Ur*}X^pi3Tj?5kYKC)Ici&X-{yid8U0qdJnQN2r_U*>zn!aXmn%vaX*vL%2v0j{^ zqGC^;QBU6RvjU@M1%|-uX9S{rlCE%8uTWO+gDm{RQlI+S)-krl$bp@gEtdtHuJ_J? z`9f0?ZhNJp;u%b&jt0(Lq<#nR76nLVt4S8A0WZjBYWUSeN5^O1lM#&-h9+Z=;l*%6 zlNYfM5O5O{BLw^>t9s7Rhn>e#G5tAV!%6)PoNrt`NVBU@fJ|ErGyzKO z;jNu6*{VU#_mcs_9lAmw{o1expLM&tZQ!7a43X)ALboy>4NV2Vx}unHKu}mZ3*u=E zu@#Kt!ValY`z?$cJkFBMUir=F<|>yBJCe>13OWsXbQ&ys8YJv(u`Ec>eVs`Vq+_X7 z&UjVa!&A%>itc(Er_EbhvFANDI_5qw9r$xZ*6fDQs_Sg)=bz9HUXw9QkKb94b&i30 zjzK*PF`I3RZ~VNWa7x~8@3P`f>v#Kh z#S=Hl$D;*{GUSDVfhYFe!*gFdUdu4sKX3}26_@NtSUfe?fsR=M+OJAeZ{hADWH>N3I!di4z7S`JOfy^mqRoYakAbs zU#Gtc%P<&7v6?vg#jc2&|Dtjh!L;6cULA1a-EZ?pKiS0OL0Pe(qe=CH2Z$vk>>PCx zApPHbNmAE7gsg9-*=c4+>aHY^p;@OVszIymT2OWhhvIWh8lJFUYHgk7O`N~+Egf1F zbH@V@qX+7Das19<6dqfj!svL4GbZ-Nx|{pPbjhR~V;4VVft{+SZJ$xB0_< z!R6)seDMw@ka)eGf@C5?x8&o2tg;hlk>QW((S99?`4=1AZ&Ro5EMfr%fFnqFNB;me zZY`P;+<53P(hS{=3ndRdj8jS+*jw33MCR#Cq8vGZ$EU&gDCU+Xs~af@Vv7-<9I z!@5c`z2Fv3lL8+hi2SaDlB1>Ijg8IG%&jsPJd9(q)~5l+sg(Eqw?MUYma$wKhxA(t z5WV??(po=rZco&;e%EwF$H{)R!^yLfi9gEj(&@SR!f0cuNtx_sC~00txI5MXLUxf$ zMlF7J&N}JfBCizC`TM;;Z96aetZOJ34y3Bey#1*53hmv8!(7mH&y+XFj=~h9L;$Te2X~Bc6C!SZ-8> zq=_lkW!b{rcYum%J%?bMO%;6?kyG((*8yObmu(`&xdYMrwr)=*4#t|vQ0VE7Dq(se zcVsm}i;oVSsKNDJ9e9Q>zfU(W&S2YE_R4Mi2Rhc~h`8F)j7R?4cs1Uk=Te`J_Qio% zlzT=vSX;~fG+Ali@E&!ZV;c|@eUmEy;qN-7OgAYN1o}_s20x4+>t|wXQDowdUH|x8 z3>&d8Hki09{Fof`$RS230Gh2W|K43b7>PukOwQWOosMdPtD2kN4<3JUx}`$HegDIb zM5d!3wMzF~$o|}_op<|LXnAAff?4dM)IimjO?yq4OJ_9NeQF==3OukNKeTB6$U4L9 zEo~#zakxsw+nJ&s_fgZd3QC#!>epWyTnIyBv ziV}3lVH~~612BV%9f^&2M1aEf?A{I|Q)aotIyq9q9-sUz9Rr4Y)?1_Zziq4!m73WO zV>-FUj+nBrEi!Y)xFXC5(h~&w@lg41%55K;{#1E0cZ}y~JyVk=zJ3p4+4!dZ_3rUg zbeAuXNm_dTtIdkO=nV4VXYetxg^l@$%<`$NGeEGA5m-FU5JV0WkC->Dl?3X-3Dm9_ zMf#m}s{fXl4)$YrZt<;2Ce?E&&dkUK9ml`-3|6<|glLH=HK}K0aP%zi_W=swH#Pe8 ztiPPm8e_7aDADE>IU7GBb=8~Y2V*kQJzh2^nKCn)KYJW2>F~3tt?1^HN^nzILxFqM zyg8%AJs@#F$~3_3L|9LhK?&5)(G6=rYzp)$0i-W|0Nmwe z`+?|rKc)Ys4nIIEzl~wgLq8AM{Klx23im~W;_iys@yIZw9ud{|G6_B1_MN0tJJdNg zZW@B4ll-LeSF5d@InjjITDPV6Drb8*on~TDG2FNL?7z0vEXcs`W|$xYS8^(h$4sN6 z(ElF7Hx7unN)*|1Zj?YK+Ep#@YK~2h@=pc+QS517Ja^R405JC!&k zcA=2#Oixe0*sL#Ro#+28fUZLAz|Py#z!UHXI6V(+v1?zUy1Hhqz94fnmoQs0Gi7V= z{CTAU+TMd21j=2*TWSd~SibAvwFx9r`_8=J8XHb_evqJ}ySibqt|JFo)sqGvex4)u zkrlr6I$dK)RM(@SpyZ+9TIBRToDd_LY#nl)jF%akni4yFi@hCn@UJ)(1N6m|8cgWU z*S-MRmj)mKixU;-DTT6HqIKt3>%C~du^TcQ8+6l!j?SfqjxS;xLqaWK0BS1+?v&J$ z7W6`KkH8bP!PtM1>*4g5Xr6J0Tt5g2ndd@D;E6J!YjF#af6>gTdqs3yFiTzO^=@iB zHkUAy%OTeE%lLtM8zqRlaMiMN=;Nci>ruE;te*_=8>V<}Twc?GHAqzfJvBD%yMR|Z z+)dk7Iy(B$7?H&&IqYV^_xa{WtuAruJhiqjIOIwe%6}>~#Rk$p_jfT;k2!UG^fPR> zt=C~VXcHBDkY#rkwS7$TKUiTa6P~NhxdQ^xe64o#^u*3)$RF%gq6_m+^4#2T##Uxx zof(scgI?(!6?=QMp66@*skKMVHTzpKgtH*RyWND1w0XB*2};zLXq@zq2U(t{JR!fw zZ84=c?M^qtI$qkkaqAfJsSFW| 
zMZq7&f#IdFn;(Io9qF<}EOx@y+e^~?Hg|aL_4bSc<%4%<&G-&Yf{RFv>+(kB26{GN zVEfBSqeQ3aI7!Rj#(f(6z?rqOqZiQ*-1K$w^*l}8pI5Ftv@QgQV123lowkZ} zEvX8R7MAFh3?CAeJXhWRkr6AzvvT);P4nmZHWtwkYe?Id*LLS$%81@CATwFAut)yG zbmLwQ5WBLi*?doOFZM8te0-;=+&^ybL?Jj31QKVYz2QDKF_u45)>zDx6==DfeCKEF zyX?g7ilVDL30)RGOd{4HL(68HC_;_Z&wQ`s>rU*|3@ZB5ywmgcYwYyS#NRK5pyy?H z2onDLh%Uz7O^_TGD=hK{fIMhDk7q!Ft@VNO1n^an{28E+# z=o!Y-cx;rEAn`lMkPgGl!qS8cP9dFmk_>AhXU%>~JIXA*)pWpc;aa@$gM2Judk7)# zf1fqhmDH+kG{pAWTBf4WV<0_0r4dgI-bO8-3FdjdV96H@2YnQ}$<=nV0l9Zd_qTax zxeSU#)&Q%LFi8sh1GFkhy6*dJbQ@gc1@uAR_kwS_`OF?b$@xlqi*J1?k}+t0y&ROkJ%Y~Vs2icm%__9PMK)^?_-9ez061pQYmq*YO$ZadZV+;ejZ0T8~?x|biO zyJ!9ztSfB@(N$e!S+xn+J)+(|Hb8X9tfyZ@qHlOyV04Nad0oEtTxBc$6b#6jS)-R9 zYn0(gF%kI$bYt4%d;qDss%p{C{LD!l{N`Q{?hMa|L-k;N|Q zGSKZ)pMi(H)%lA0I-saEEijChwngqOBgajF4Gn*Vv_VOeKXv-AH)|QJ?5*dTaAmy<6 zMVsea&%9B7SR&IyThs#DM#dYH$C0;Kh^SPph2WYG(9p9KHxIW3_!ZDE%ILddSy;BS zqm5WdaJ9>XcfZx2cZN_$sk`#dTc@!D0QCtXmB`rkGTl{Cg?;mE(NNI3B5>} z!?d-sl5Fct_~eOmhaRfW5OTTT1r5y;)A>jf^%+h3YrtO9vYn3$yyJMiS8GBaaSVw8l_Fs5ay!9ESO zx0;3B*Yo^-2q`rNiq;#0k%@_Ho3cj!PCwt3pOtrF+Xa{XpTLalOpKbAif^!G1s`WN zOh#R2_$EzB-}*(v&JCO3(9_e)N8YiUJJE-rzdz8CyGHH|xc-$&zvdSHwC07Z^Zyumfx zAk)X<+8Rg>CK43s(whS&qMF~&Xq$Ur4~{H7g69QKr_jXmqs{RSldFJkX|n``X%o`t zi+6qcfMu<1cST9!kc)5oH6umBpaHmW%Pt$iNt=41C}3qVDLE)W>I zmtG|KWxoj~37+m{k&BMDW?viw6W`BY;;nUo`oRZe0&uD$&)@GACYHRd#Y%8H=)qNjn?L`0#x+ITG0MfiVEcFA0YN`kL)rgB5Ji zt^YJOs$=4+`zSvJrTWd=?=*MbQ8F#F;W{e;pC{RgA?}k|x+#O8EcU*~|4~4e zX|u4jR<-Z+C_rJ_!V$yk-+&aeirrsvok&G2%j*8hEoZj}hOW!c2O8EC#b%(LZ(l54 zXT9Fe8}`lNI6lH|7w*3{_SQf2m)CY9fGEaPK`&vsfVguyX&0(&#Tn=#`Mnb$aJ@aA8Tx>1>^|XMwlVBWu*Gb70TVDjJIG>(xv5g_EZog;hM=`n$tvtX??qG z>|+4g4M~(ImspX{z{wly^Bb2yAR|le47o9Q8=&+|-e5DC8)x3!Vq?_iPwGWm*3PF= zZ=Vl!-$M1na-G_4-|qf#4w9VygJ{9epyK?d?5#?zDnd@M?47POd%pl48;3tmK<%}I zjn0aUL~7-`HwUl0>~qCC_Q~I9XJ)Zql7+E%vK)^eKxUcw9v#dPYYS#y`4MXsch1-g zJpIpju8zmxhq=z>K`*D{AQ)J%OIh{N%;k5*$%n)bokOIl3KV#!iP&uaub-fIRy#-V zoFh;xZQBKQ9}#cfKy9_yiEmm7f*h5RG!AdXexRequWz5urjXVMQ z_a+p96i@gx7eRYJ#z-(MTsq|XO>VxU5UpYI*&bK~wbBt6qIfsDS!V5WFloL2bb*Uh z3gddxngPDb1JqQB`)aZEGdpFk)UZEm=J1Bqj>mtFfP=|NPK z@!BM+T(j`DkeO4|dCP~tgyq0YKsuWv%@~V^$O*x;!4BywY3?p&JY>%};#JrO8!0aCh zuk~a+dBR*IR6Cw}Sr%qH(_}Pve%34yLTUW1q4YwL<7Zfh%yEhECXBRdK*g?nrUX$Q zaVwnekH53?Pxp$I=URKra{Cz|VrH%i%@Mqmvbc`f;}Y7tm%WhyNtNCwE(CH)H2vnH@=9kuaKMtw$ApqtowBbf<}_ZD|kufr*J3 zq5}Txj)@7pu-G_I{WE|w$yx%y9|Ws*i_ma^^b_MwAWfJA7vtRk>qrN~?g~kunJ@$O$x?S|z-Uq3SjUj*3i5?P%!T0Vf1c5aM z$zMmwe;I*$Hn1q#_{j>;@FFOd$=?Ok_iY@4?`d-0H_@V;N`NU#~=2qaRuYt-x2ek;}oF zQbFjB!G_50b;|fbs`w$`K0ynt_UX;%1)ib5JapSUyMLI6#eEOjn5RC(+Yu`aXJ)Jw z`QEW>0^8&0=m>|Gn!d2faC37rHZ{mLSI`{h(R6P#L_m^sVPx$QyhqN2)<bVs-%gXs*>O*g1>?oj*A#3Fo8F&Eh8WjYK zRO5{jWOz_ks4tny9uR6Ur@+`KPr)8~_7-^h?lbjimmr+)`4T#kol6w=K9SFBx8xwA zz{IeU7tl=%A zwVxlnv}k=xQjdSB&iW>R0}tw~&2~}qmcZUT<4=<80HhY(fnPa7DQMhy%O}h7Q<02v zU)M0k;TOD_TBF705R4ut^dAbNTs;|J+K|Hp)bGX1U4wCFSQCzEoP_wnvqWNtzsE$K zZ+p)iAmG^QYuZEX=;j~=9^~!XZ@LA~Pu1IG~V86#~J%tM>Yr6+0+QNQn8= zmIlWKH!(Hf>GZnt{rur!%5rR8&5`t>ufOX^dkVapW_qWD{xKkzZ!s|9hbrT&&m7H@5w39`-PXi*);)WHy3)E);tP$D4qpX zD%-~Z0}?pA$N(Vn4;~1!*iwOhNT3Z8wA3!P;(H3=COUCxdH){NL-(gO>!~mXx5(BT_(;&o6bEGoqnA$tJf(ve@Pqu6A&Zb&F9=p<3g?u2s8zYcKv(W zTG2=uuOALHQTBC_s|8dE&F395VwQhghRD(##*`)@Sqh=nz2GI|2FUUSSpt{A^z2BHf%Zst(Q1sRJVP*5foZe8*oPq<5ZP34g zsjSU))*rNzncZuYalf`N>nd~4+gIUP-ZNu1s&NawrkN*|9bT+&?S#cvUL@|FG;W{F z(;X~4(n~Rh=16D&HOB3SyqVwOVH4X^3XnLhTCTxZe`;<-y$?{UEDU{^WAlz+Hx&c0 z9CE(@9zKB~!rTOs?h7~UiILas#NwCeiiR|Tl~ctT=!2eqyvogJ@gAGJ1U&(h&i3kQ z?}ko#rEk0(ZbU#D-mh7a4<2X8dNn#E0zI3N4W!9!W8?O7{s4YJdqI6{V^+5>pFQD8 z_zDp7bB0u|*Zdcgv7QaL5_y6`J%sTvx-2by5)l7S7cdXek7XVoX7yz!q1UNW`47Tw 
zb18M4qO#}Jm~4Yw+gsJ2m-=(~d(j)rb`>^M8WE;XAzO2`wfu(pUhF2urf%EkCqQ7Y zXS^KPt^kEi^jOxHUX1+uGr-NYGI^8{tZdp7A-ZwX{&7e6se-4-r>C>PV9?{#4!U>Z z7vx;p`Oh{!VU~uEGM?p*hK76br&Z{<-O1LRjh%tP?uUPzNg-PSyMcHl;E)94Y+gKr<#Kr=~i%Jsyuw=|fPcb_sa{@l=D&8`xZ}RxD40#kyI&U3v zlesZR+Zts~881Y~qzabKm6~n=(o~g;eB21%*hsr9&uo9NqzcYF1hPorCprz=z)Qn+ z6>zOIaZ6|bFy{6{#uKBbFO&FFmmNz4pJpTFw9GwCmB zXFsXFlBOT(rIgKzOEGR?j}`GF45)pA0Uq0 z0n&0sN*a#*?S#QY4$m?DKr_Op98u!p_jc zt9HTU?w6csD?qQ0u}!aZHqAnIREH~|>79en}=7G^v*_)F_)@7TcK z*28w^Obo4_X9E6ki^qJ^EvKWXu(YUQfe%cqtp4OyUw%Y2B%U1}rUFX+_EN&|GMoFd zu+3R^nj735Cz>L0M_DJWXH~IjXVY{mXnENpP*5Y+px^b4v7^bIeMJz6*|Yq03=p}% zR(hFw^ttmN9vrM?H{gu{b(7!d9EZd__^xGrU<4-664@AGQcP;$H!gMQ)Zgr*A#+Z@ zBxarDlLKveZ#9d5y~7rhpj(|j`pKW9weA~)v7s0;*Ezg#13=|ppD954VS7>60Jyp@ zN4u)as$F)zR6k1{xOH@Zg5Qx-4W}nW!{W%}ky$frKK+#$XXN<(f z=r@T@=@(`tsq(jzzTkMe>N79rA15YH^Nr|ga+0Y&oBpLchj|Haxs zM>Lt-&l`Sy0!bES+ZW?mJg7u`#tyk$VmcXDsjCmRal!9#`Ky`9&d!~v)ehzh-V8_n z94iGZVkhra)HfsTJ1=knQP5zeArN*i@I`+;lWoYN_~Q(z(*;#Z#!Fp!)Cs%AA&&g( ziuk5Iw@!&M<>HVrU|S0~L-oJVH9mIVuG@=>vLHkO9q6NqH5{9lcv^&fFsjTsGeiZL z6>lIKql#-CUf)T+!(_Wla_>3ZW=i>j3Cg9SYo(!ZF#Oo%^1PBod!^V9#|Ug=qg}_h zJ5v;ckvjZIo9_Tc_V&h{gkpluz)zJUp#QHrjO_e|UcSWq?)9BW zjTG{E8FA%gV}{Y8%90M~tZuL^o}jWlj~+FjK`|OR3GE;FL(n6$98%{W2_}_-_rU>G;zsoGjf%2f{&6iKIdas z;aTVbb$l{n0Ljk~S$cf|G_0sNd1~&7q$76wvYTK>?^~E^^j-qJWaS4s4jUuBN3;=7 zt+X#RWHW#K^*5od;{x}dP;MxoWXZ% zk4s2~i+B?Q6Jng&M)&vjyah5YgTyn`b?5R9_V)HH4F334)w=fb(FrpR(pE=0OazMp zvtGpMUcY(UHW0xzVcwHFh&bY=pc#LXBhw_v6j6a1tJG7<9vFZP*6x{$bmRfYJ)Bf- zjmror%F#xst}5!GJp-QsmPt46#}SLN)jlgyqDFY>YkXT#O4b}SKtNEz41o4{NyNO$ z#e$#Vi4g|=LxwMxQ8Ug3D=a3251d!}08uSl*p)503JPQ4=8Qu)-M!u}xnYy&KL^|L zj>&zE6SF`dBTj|E2fKR4aR&R^n=oBa>c@W)D^B5nrsWj~QEjVlX_R|ag-cHHJ>I&U z?b#0i&I9L4;G~NaX5YuraqY`%7Hu2FnV{v9RbP1ZllWJ&!1D8P15e64ejq)0Uwsl+ z0SVprjakJNJno?tZ7Qw-+^;_u;#%(~KXaX;1;(T%_Re4Y&FPni5*zX8*>j z8JpOYxOADdOC}y|kC*aM)Ad|^840!5@g76^Az~L}s#q~DV$VK-7_`zj5#=|oH(S=Y z`Q0O!4;noA;EU|K5y&t>yZ} zm|3+OYMY8NNrIn*zHt2AiT*?=&%dUzELpz?L*v z<2R`#t>1P%0Jw-YEiYhp_*xn**1Y$oQ+BMK9=}2m>Y#(|6HOcoQ3|pr)H@QCRypP-f0^R9Uf0Yr+(7~2|e)D;)^>=;t!r}XDR`d58=0V zuSJhb#KL_2d?WVrSaKZ#p9tFl79+zU7vt`|y}<#hUH+!S4xF2-TVjk*oj@goLLfm) zia)7){C@VQFlCUC+?$xuDT!e;i#EI{tMaekOuhQs8fU;wS!Fv_ZB+C|;M>EA{}J!u z9AJQ69@X>giLUH(b-wlSY*o2G758KjVXHvEnNY&=Tbli?q}+#a5QB?sAl6DD z^=s$ra=AMv&pz14=&~#3{9f!-39bNo)9fwcA571_f~5lJ*OGd!!97*vkC+7Cy}w?- z2ue1dK3VH~M&)+;{>pVDHdvJek21gw)|J)F?4b50k`^^pg|l?p9bUeG2d_H)%{52& z#~p=@IlSM>ewe$Pn3)y0mo=FJrRMSs0FmDu64qn^szb9ZF2X02)Vn@D-`3+KsXk{5 zl)cbay`6T>c;lwT3U`(pTJd&v5ARI9l!>U#b3Ry z<2enF9s2mN;N|qyv6#XCbRep#dB{?a$Su#Q&m);pL> z%*$1pPtux;5T(dx{xg0loFO~P)ipIemTZ&DS1CZu*!_m;sLsWxj@Qo$MzZ00F?j<= z?;p!#e8W$@E?9rS5dF~o%^UXV%aYTf-0ugQx8mF8O>{1G6~0=u;j#b54Gc5Bw?9C* zV-Q`O`Lm$me8x^#xM&#j;Q;w-L_qqQOU_WCw8wOz3Bq(8i7Oo;pa_bJglQi)cia}{ z-d6yZ04MHmbCKUVavVr|LdjUdCu)G5EY*3GSL^x?4KJG{T1t^J0hcwE6gG)+lar&~dk$-tzAnlFf>R~x1AmgXO~`J$Wpij#X28PLnZVVIpxt(+ zv6-2NwO!QGz~{jMy=1#o^aRCKdb!s0%T0ED(JZsZ@l{v*9AJ>#q(4j%i_*H44SV2I?bsAE%_}G)l!adJYPp=9u6ip{L$%@LrNqiW27&+oPF! 
znN{g2Kz$E}T%@WH)|^iQ^iQnc>vP@@DF3DQ;_X&F>nc6b3-gVV{!+)+EScPZrPG-5Gl1T>Wld<%kZreCBJR zzkD1>Il0l<$p;oz)9&qwISDTICML=9&vuyc-i^Rqmszc&n!(ft(ss>WXQ%-mc+#lL zE`Jxr=p3_fPwL!_zZADoTsbhzMc(DYiIEn>4u;}FjUPXLd=LRVjn4tRw|dmg_H1nh zVH^)h5ykwu_j=l5hOh+T&Q(x4K zyQ{d~swLT>T@3a&>i0>LsiiBr`XxBw=PP>3_^Z^g-H-tw?-~+Zs!-$aok~os+!09m z1{MF-oT2-|PjBSH;m@!({u$D5C-Vd|15|2fgAhksQ@{bn{*z0fa2O65rIDL`of}xu zI>$$oztLf)iHmZ{!e(Wu2>-J1$3Q0K-uZHmL!bHM<8u1F((vG;C#%Wb_gU50F8sB7 zCn0OPrXrlHCPah2)JK{!b0L+e)3LG*8R))DwNwAk4}(`hPhFz;>~G(g|N13Fsf>~0 zAWRE5kXOBbSHxUFrC&=zN;iM@vf+Un6+jdan;B$-kV+3FFY8Pu6o?_VeM#0~qFltf zPQUrcz4FjWki)^k5W9Mn40I9TivA3(#&HoWAT3*toGC(|m6_pmlEvNnM zVH$Q3DQ2u!xv9P+-IC${=8z&{V~b}l(0PDL1+XEJtHR@jeGHKHF%peA;DLR=qX}pC z_NY6^4LHKLlf!>4L6g(K*}0&&I{X$0gg&9DOuTSLk4)t6!Qr#n=IZH}v9YEu$4|IE zxvM|t<_V)@(bA>82TT@a=NHGZUZ`Ykpzx&DLtEUkwhxEF7- zGQKFjyo#39)oGV@p?B|SJ5dF z5&hSJW#qo|Wv@?o7yC~pQ8Kk^a%$=nUf=32?OrnNlD86bulK>h0Io~b?vK;H@};h? zZzw+4yIhWDwtubMcztkUsPv5hBOK#4I(7Lq4flsHNFeSv{6km$58}wjai?pVV0gXH zY7YStqy`w*IG^$+Xsy9<)&hu97z_lPda!^5+F5DSSVJ+GshqNoida<(v$kdE3cPMn zHqyU6HFCBN==<07Ydi}!#T98JC?I=)4*Pbx&H!OrGF*vPLNwq}R%tSxrvCmiNuv~? zkq$qbCosmJvw?hoXjq8IZ6es!j5mXa^{uSzO8lkS@>YSiBp~gvs!G?>Hp`m5zHl6J zxt;v|-Ie&+{52;t&@RNT$k^>=YD+42s?G)kADnf8XwI2|8nx5j(ZF%eY~bj!{qGc*IjYPWTj@_5o1In4CDojs20?Z{agCax z_U;f#crpr8Y?13C#fr>NqX5l2mTcLC352(mH8eC_v2CQ~4p)C16h~cl6ZVlao>xk0 zpyZuREpq&+dyIjZa{+*2bqr~XeaNe8_13y^ibp>(0UMM}=*cp2w}Y+PbwprnbhPhB z{OI_2q22LkyTLnV*4gsgb*gmW^FOmdV1B3pS{?^)4#98xV56Cto;<@3n=7+SPuVk+ zuDds?{&^N^)M|K>+x~?lmj_|L1?-G)cN1pE7A3qfDCs)N-xH^z7w%5|torBF@BRA? z)Y6T;Ta+N?dUYI8_RqdVv=giMoY`EX>bWC(H(yWd?6Qc{^F9opty0yNs z_xK2NvyaFfcAHZ+B4k#NKXDnx_@C$+2Jkmoq4;Ig1UrO2Ec>64JCX$THs-%@Ve~@x z7;S>q^Z%D6v1~HHWG_gA-8nF@UKDlwy2P&87Km%kABD4HW9`oz0SPTI^!n6uYoMe} z_yRf8TB!j$6pwwKhVP`l2-Yte4DGm|)_*^FGmzy&KFH0A z-h+Ri=r)d~Jh4N$n#+Nl$^cK6D)1DV6)y>&n7W+ES;ZDKKV~I$x2U8D5v%*_J$uP* zk$PK(OIq_cI=wgq@2aJBJ3Z>jdz3A&ov%<;01Z_)GD_NRBG@YK_dN4ij?u?tWhla6iid0O z&-43-dtUc-&iU-~dB2aQwvl^=UF~lr#9C=;Y7V{#0pZHkE*yCxbE0bq(fRQVsn0*d z(%s$tBLIbYiKFt}16?u};y9M@lKSdGP2>k?neO0snNule{qhQ>?#v|6uhZpVD9WsF z5L}`J$bi)IOs|6NYfI0R^!4L6s^%_8o$e0S{MqjOm*b=b^QyB%#_gMhw3^l&&IbQ; z&YkagAT4D$$2^& zst~2vu50$T`*BRSKR1S&;@jQ8%vIUy?Ce{W(=R?(*iGgH^VX;!p^J#k_Y?&df%VoG z78V(HRWtaBK7WtKYu!sB?ATn{8-1kB9?kp^8`Y+bFBW#GgsQNlI-vw>y?Rl?&RbX4 z{UPo1n{R)sD)FrtMXHl9Jx)lz@_PsNV+yXkd5h=3RBZ!6p2Wk8H4op-@UZ)-4E&yJ zciF5vcnV2e&1?|YZe+NZ0z8i27iVds78Vg%{q$n>6JlOg3qEfQcpprDIz2gn)`Jaw zRAXGPlZofbbdD=a&E|=$WMqw6iwi#9n|_h1$A^?y=3O=8A#-aLutDQsSebVyVgb|@ zZ+jQz^v%tFISq}E=OAJY8ae_dLfX*hbWZ{j9Ze%6>GgI|pTAz-i#L+{W3-L3KX26B zLCoj{M(M88^Qbr%VKjSU)RHHbIkt6(!0QBD=o$O?)7W(Ojo)0Anq*-7{ooN%Z2GUp8l;i>95#=aSw))`e=n1X%+oDZf~>MnQ`;~D@r2>h z$64UQPd4l}tWk|AOC)A;qPTZf>0L~{Wg?c!RoG^UvoDSjK)PQI`>BSbZ{89_n(Q3a zI#%0c{e2N2&V;vEF}2_s&LEY8n1AF)En*Eb5zVn4$%B2*u`-57X9{~RYa{;>JlBR3 z!#WVR)nC8p#Gb=O&RmCpl2KfkuDV3xyX1p;rS#Trg-UfvhV(FYjH;eKy%tO4GXsu5 z2C}c6o^>}h1Fm7nq_M?)?%utNBVRgo_V-}z72}_jk$ip8b@|fk*RMM-#AvUAs1D1M zN_|8|I8kZ;BuxVKvy6;fWXxGFX_G5ZaXGzMM`72E4E>Urn$@cpJ*vm@-Z^yb?ZjVd z13xe6PhPwRPUz)R!ez<*JsRJz79t;DG*zfPU-Vm!o&OPX16AUmizO!w=iEFDYS%vNSJ9efLSmyKo{HsrOirpk?QysKCGI5g_(d>O+81e4`9V}gd>d&4-g zEAwBYyGXGx>fCST4zV9fC&!k5!6h4YNvFtXY&{3n#RpNVanlsx90u)TzBY?L7d&uO zvi7lvR#vG@&cS9o&>{88MKOhsHIk{gW_K9b3oWxE_~HP;rZSLacVm;^-d!R$l9 zh>95(bvx7X+|v1bb@D~nUs3FrgTsW?K6s-cX2iS1`n7G&0mv4DOyl1cd&?I|@_Aw5 zYCKO0XOta9d-4|L}6PjQy%`+8d6fK1Aumy?>d(TJDwgT!h+yok<(-iRrP1zW}Vi`md7Fz(2*5n6k@n+OF6S!BVv1u%l8gS^N4DEi2VY zF~6RUGN0dm%8g|nR4AZdqg@#1$53%%b@umE!fh)B8QAIWy|Xguz3h7z<)G1D`~2=c z2KPZTO|>1{iiBOUQ}BDgut8cs8XiGvP^P?>3wM&10{8;R&~mLVhVDs-GqQ$JZF?u9 
zuu+`VdX2{~Hj^}4)o_Xh+XOD|^w2qL^yRoF&iHWC59L?!prPnGIH|r zh7_p`KZbkFv-$z3|IEYK6Oc0t} z`AA^CSgB@h5%Pc~vJX<{tbZZdBd_AcuJ_Gfv7n&K6VbbbGah)q%^}gx>6nZ=h!wkI z-rv2Qof5qnVozpudA&(ra4V#ibn4NW?)6qpFbd(e z_qz%5C@g+f_?9ZWOTQx33ZgFyv5$K0bD>cFA72{qj*8;u9>xb(*$ucD%KwBwbn=Z8 zOBe|g`s9U5fN`kw$~j<+VuQ$D^@^z7P`k`u2Sew6#iy2*##xjo<85nSe(6!2iO&~n zcpR^g?b-P=7$0(Gzk&9^;w!Jj>+<&!t1N|BbT$6CG+x(v&(_>b3GUfHB?t-FJh{%tH=21j-WjPlLm@%k+xs*` zjMv(#9fgS!9A|TGBEn*e@OKzLxXIcjaXoA0SeoY|t7v3>lQcuP^PC-W^rLshy53yHs9Ir1s*0%>R+IXm4< zz9^s1w$cJ%BgDQ<*j8Fi*Ly?i0R5xVsFb0nsYTKo`ghx<`>r;eh+};$Qo1pp<`jbc zzRqOf#&ZT7-=XVJ8>rps86*+?k%Bm8WXKoWM}~(p+8KQekeuzsc62fnHYi|Mql`jD zC)qYQ>6JBp6fv@W5yQbt9y*TVm6|49x~@Yg;;aIpZ{OIHdLWY@Xa^j~=gbNQW>~!qBN(_{QTbVDuVq@M&Q1SY}3Q$zUtD8gZRGpg9G6iv)S# z949!H<866|-_4cZ7MxlZ$@``UDT7%hw;R#ZO};ES@hYHP3%2z1%Adwa z?-z%!1aTGIH#3_Hq3$uy*My21kQ1hXb2%BW^Q?s4T4>eQF#_l|SF>_4QOkwe8!Z<> z+HlFt=BYn5e4+HF#4TsO@R*@@2LT;Gey=b3h{@bGl~hQ>LM`b~lo5|!yvi=l+SEnlNK zvCPSX?b{>g#4hTa-&uQv)p=06mH4XOG={QPjg($}lP-KXS9{Hj@#K3WUHJQf^q=lY z7l8bL@pM_)<8UTm57dM7M>0SNQpgAwoRlGOVr-n1(TW1HvnPj3u1ODxn7%mCjaEfk z%0*qXqjF+h^Zd3@k}woOKne}^i*8=Xnw5_l<@)>k(@=nLjY9Ua*TEXTJfd$uJYnCE zZzIx42Zr32QdKP1W^KaNUybsJB`4Cw9V;9qs5u%>*WTffpt1=b_!tcoGjiW?Ub>z; zag!`S!trtI!ePj-3h~Zg?#eQ=v9lX_$D_ZS&UmUaQDLjW8@ra+t^i1xx8_%=9)6f{ zCP$yM7Qw@FYj;?vv1__S@97!9UrgaacqP0X>e+gqpXVdFtD!?HrCy2o*O1k)jg^&t zq;i0yJ-w1UAvHBMI_gx=DTHnU_$ZO-g}|C+RifO3g+Dpi{I~E(APraBmJI)ig=&m2E%>fk z^zeIc=_5iUw;HI#e*0re*5Gv|t2K!f(~VQvZ+eO>7j3^6$s*BfLG>ank8X*18~xIk zR|I=V9RjLD{otDfidd)st&aH`!t}h#<05Xv&*}negVDP=l@#e)3T7VKX0T<^b)G)z zQm9h7Y3n&}m~Z;HIe|fuW!PsNlt*$Fh=OmrZajpv*BKCEnnyUe5xK}51oY6z>$jr!!RP5=FpFuRMT-EC zg-PQk;mNEdU{hyMNGYx_G>}&InFNq8lp2nQK1pXFiK4_@$c@K8f*wHaxUhCR2x2B) zBzRg~U5zS#2Q!6&PH?FxY8uhN`kP!_opcd26lmHjSN4z+aj}k#5`8?_sL!sO=q)kF z`3|(0sjeXkbuPC#=sG@>2Oz8#2wQu2soFO)FUBievxwh=Q^I36Yom5YO=_zV$782k zRCf$o8&FA)Y#;2nc|KBj%mD37f^jof5(NZhyRk0j8k^{Fs3r78-;>Tpta4%9{s+KB zK`9@4(C(|wP@*l1uqH2H;ZhK>2#KqH2n)MlJ5{UeI_+JV@kEvA-JtjGVghx7txvdSU7Wbd{7|#q#Q2ux61DGx`^#`y&?4B!R`aHI>Jm?xMiZapWp+04Sz~r zKsk81dMQZ*L?T3>YX^h?Hsrpk2$qcCmZHjHxZgh(>0H_wFE#_<_Y#BDpRMMnc6RRs z{I9xzk>!1NYZ*7<+3RrE5AiXnlcw|SBDqkp(Qut||HR-vf3fapF&;6T#%+=hqGbq~ zn28t~b-)8UIhAgF-jqh71jJpHjB(57!!Hvu-Qd8OJ-4*7>L0u(laDMOBIY9TF)c5k zqw~I}FN*yBT*lG`^-bIzq#0!PlH;57<2Cq^;5t* zfAynYMPn24Q-%wVto8x<(0rt_^T~Q8#7k3AOYYpcAPsMP<38}!)s-hvMiFUPwuOui4yI3jc3lgQ`ICz5NxL{ZtC-#bwL$Z!popBB&9DbU z=G>E>Z~VwsW^h*^mE6K01Gq(u2$9IJ{g5%xZ?f~ZEkh)A94&K1K`-_UowOt{1UPjJ zirk=1*TDJR_om1Dco9iS40TYx2VnvHY-1kfLn^{@1Lmxv%(^fA1Y(17ggcU;8*XIM z?4BAgsul6=7UYS)JwSwTV5@L4>ukLXxz6C{*K+e|V$aD~T{jMnDc8 zn-OZwjA^SvF%*>&*4)yE^Y7(veK>N@;3_F$>0$ALyBO6IJoFkKJm0@g5AEi39-e1h ze_`t##qT0)@XUqooIEKZJVRltaL(R<|5ceI>Tqxi zJ=e%xFq73x$dvJ3&hoTTVSdLQ@jx{znep@P3>@ZBRRy`Q^oB@Tfvsr}7c=%jIw<=K z@ux_XApKp@z^`{u>`e#W`3BYMqLa=P9A;5<+uyjxODxWFU^nK4F|}&FFMN ztbtn@a=;~1)eXK;W&m@0jY4N#kwlx{-}vs6=JXjDtLi-xWqKvlwMksLHJF3A<6T)2 z5ku;iFj$l^OoO_@^LILa-~yR!gbm0*shdHmQ}_0H>Xo3E9mScAYkJq72f6_-RsLY2 zWgxzCs?aJ1Ocy5mLHK=L8uq!7GC*WM31*_3a#E@2s0EQZhK@Uxb{E`pvsAtswUGQvkE84Wy$QG6PN@+f zfFFz!wB;Gak3fHK0ZB1Fcc%*VI8~oqMS%LJzOxr1g2GH~!LU*Ffj)N&X4=koNH z4cQ#@L7?~}vGXeNHOQRMS#uCL$GN_Wdu2!54O4EMvnF;ePJDUD;kC*CEP>nhf0w`r z#CpVEc?o{BacC6LY{j9 zj>qfI;01n7$^@Po{2N_Z*Um{XDYlR{!MdQc{i!bnF+|LgckE6hv(IkigZH34m1!3P zPr!X5f8)HVUuqW3o@{WN6~WWYTL03JFDw|pl9ln<_{3r%FkzVNP{z75E{PZSk@+pe zql!k0y@t=G=FQ#bU&O#8R2*1@OO)}t0f<^82-7*CGtWygA@5`o!~OV8nQ}1dUg;Ph^o`nS*Hm|tJ=hkF&=C9VQ`6_jf2-?a1QHUDdU)NS07 zHYVVm#+Efc33Mhzbb~+s!gtXmTDL)Rs%FO1I2dWzOJU@y@`pu=S|$%2T7j!$eCI|e z+!p3I6a{mPLivJ$OHGZvwG5Ak93W@2UDJ%@3D-i@%=_BM`~F}F1r7U~@f`u~gd)86 
z$Cz<y8*2)N0j$9BGG;s5d;m|0;$Yz91la!;1`=Fks=3M-e^2Hu=K@tlW7D2CwQsG^+nf06Q2(UXf}2iuPuQ zk{^6bL#GYz20@EpY^5^U?83z89nwRjy5BazRBPv~u~@@;hsI z!8qqm6_XZrS|CWQ_uP7Npph7>JYwbJwh7bH*2IjIc_?|HOfhehbg{G$al|y&itlI%Mph;YY|ccu~q@iFXWT7q|m_2mIx) z4IG5yVk~G?F8;@+|BpF-DV+yt9_s#hIn$o830#rsN+WLAlsH=m1FS^cz;#{>ZOFe1 z_CF7v3h0Z&>0~U^hYzh*sW;PsP(Y9uG4K_NYOp2AMGw=QGc!IrUT8rM zW~}k;=rgTWTU=8@0c4^({vgY`iY7y6ew^%o4o`01xOu;sGK^VAQM9%xJ~?#-`K3FQ zPl9uri9Zz)?qVX6kx%9M$6s?Xwn`%~mzbcp1=%+Ccq%4>{a$1j0~h?K)VSp7Z+hVI zOwM#14#Z-Hr998K)qMsz5Ghl&CHa(=EB3UiNdPyuZ=OFIA=g3;IWo(VstAx2d{=5$ zLX>QfLORY*+V=lg`Rn`SiEsm1bNm+}k}!)deYcP3(lA~^x$oqqb7)ePi}?lL+pJaj z;iiD2LbnNzQZ8%eoYFs6fVqYg+lxngF#lr?YW_cSgSVB>H(mLj>1Amo5aOwa?SK&S zUkzGE2_(p2cRVB@>y2GH$)4Q_hzXgm;pG6X7USzB0VKw#civ0SAx>=iVc=C->izpY z85kn|*A{e6?w?{bX7da0nw*@s8TT7_xu?ApM zQ0HSAcm!Nr?!H1VpltLU ziIWH)1!6&kUGM##_nN2CJu;r&A_@uk1t6;7@zc@evaVyY>)U;esDfYqbp>X(74KR1 znZ(7%!S?PW10HaNOm0z~34I?*-B%C3fMZIy*9<>NXir8oe>n^P{1-BAU= z(Ayp?J(WOn6&T)5rD6=h8L{d^ntG1z;W?wv(}RID9noBzu+UM(XCil`D+R#GKpn);*3z3_Sk7 zJ{?nE{7R&%UNkcuS%?SB9M(8-a&|PY6jATLJzqQ7-Z?5Omt`YM4_tFpM@G-)^Buh{ zUwrZ)R?y#=Wbj?vtiHAP+7ixZ3c3wMMuB4q8($SZ35LlE-EalWuGPr1A>Y>!MhV#V zv_~>#(#sbG*)-*)A%m$n z?!K(s`W}_jIG_l|YGw@IMFU=s8@L^IYzu*j;N*Jvzx&=RzaspjER_H(Jrro!=vLk@ zgQ-HP;zDlK&6w$PPQ`)Qk4P0>($HxpQC-OJkB9i}()jXAgXYkGS15O(+nazZnAcok z{I~F{)}l{~tiXmbr;h1ad-G9sDJ?67bxy)HFau!w5XIg+sh;{}U_dCKb6psrA2kW`9=crt8ei=(C6+lzTl^b)9FU!8HgXJCu+GY zO5kTv1e?k2LH+t~S5DGmcpOohX9{2@QC)#@_6C)5ds<@t#-URp!DsC7qF-Sj4i>VK z=tIO~eW#n}{Q%G6nk0IVEKY9RBcJVGyhEssO_YY8?$31zmf8B*$Z`#1ZK@ML%A5XcXjKBRKw++3YGJA6tooxU8#*$MdA`N}&-U_Xxcu0CO$8Fw zo!?t>kr^?WqxPzDK%ElBi0(|^L;@RH@<0`)!GQl8J^<5b1buBPd>*I<|Ei<+sDur^ zKu}_X;Y{13Vb#?`GWWOaWY+mX`n^No?ct`}oT=Kfm)Y%4@>rP01)iEX?4bSFou90TOIH9JQAn zdNgGJ+Ux>6MY2!AA}?4pH1(Y-;~cSusyYQ=;IEq1|L44sFI<*2$uoijSVCwFUQtF$ zd%=+=-n8<}2RNTnwzdqM=Y-iK&Kv77xCKqgUV=pt_4O;gz~8o;!J+R=^4L3>fegE2 zw+_2;8hj6$M_ybQqRg~m_~rY}4+zN7mVe=4xpth6lRi&Ot1u@np1951)v>1-IBfyQwou{(><_3*IRuqF4fbYoK zGFz@Rz7j|XUz=9hE9Z%K?=V>pLPdwuOl6x-I4Fw-G@<>iy}@^@6`}gpJii zd=qC7yNArPbLZ~Ri6kEc+uLgidQ-_?Yd{8=g_rr$&2V(3ld+YtZu5i&iq9`04YW+( z@sbJT>h0)okP{>DV?eGf>FZZ`c{%aStOi9HFj2%ZpxVXVHBy0Rj z_=tz>je>ay5u4AvB-EnMwIJ^0LR;K3HMym~uuUt+X@jkYbtfuBDxWlPz>-BaLgRj) z@ZQ=PC8DRkpsErS$7g-xgtP z=~gz-Hi`ds7YUKC8Zo_oPVV93u6_7hUpM}sHiZ6&?LKGdOOPEuz<6~oNUohs`u-p) zxKiWJgsdYW6Z8~%pI~smbS{-jvjWkv#86 zpy?(-3$DP{b!VM2}D=0}%6P@PzOw0XqOq`=z>=_!v#z z`0nr{Tk6YT@JX27=PjMQ;CDj*cy{4S8WpVS014v^-Al?CfiG_x+ou1QQ7h%B8XC10x{()}t>7v0zF;-BYby)d8!wvfMUr1a#VKqk)9 z8778xYM3oDIXLHTRL>DeNdKk^#+rIl_n#h$V*J)$Yp`CU4ZbiAqIbJvN2-^qjsXub zhAR)h9Z2yRIy2kb+LNkuU~Nc7i|dd3=N9oZ56dLY}~SCbPh^M8{L1fSMVB}0MD z=fr*b9zkU4q>2LcB{_2h{ZmQ}>e!#5N-U17k!W^nw)8pzjTV{o6@7SqDi|oV?q=}NR7`g~yaA!|m z=u0GCd(IdUMPCoWmH;{aQ8Kk5(JK1LzEUlziA@#pEavG$uQD?Ow{ z^wb>esZ{qp8#Mf2k#mg|FNLB_{yb<3Xd~o+YIoM@g|deIuzg-QPKzZdr|QjeJ^33y7CQyq zg(fFKuSd{NdHXYz8l15MHv=4R%mP-=lKZ;8dAncCyr(p|}sC>kECL(v08SQ-D0H-;!Ux2u>VIoWciy-+LSCCU4{T?c|nKfSknxZe;u}Fq-ec4 zoII2XnZArpMLBQJf6d!Gxkk~*+3^=eo;k95QeW$pGy+ci4PHdi9!9}H`;EC^R>PUO ztE;Q$BNPp-2qD8Xa*kBKR;D8B;h)t=X09D$+wCef0 z|3XfE^Zh~u3>tMyclxqn=~2??K>yT~KvN-Y@620C4|E$LjhPUgb$&9}bMDk~tv3p3 zt_G$ZuwCK)3&L3Rtu3!dM`U3yfaT;i{s_)&C15bao-m5No1tI%%Q;hI!oF3<3LT~0 z^?Rj)nOXT=tPixI5hGP^phu;SqF?eJ?;VK2`A={HmjEHiWi!VB2&-@3zM1DwSDw?l zSRNj`DfVL5(sf$K`(;~8KaSwm z8+ahK8IbQ5tc2jK`J}H>v-a-PgwsiV=41Arc>CP#77E`4oar46+80#L*k8qp&uKX@E4GKzuN93verahJ z6B8q-b|`4V8xNf^sps!OIQ$3pA4TFxCLL}@K`-!vSmv^uasK-1tv$No6~5VUS2Js9 zpfWd=HB)J(`A%}W9Lajk`t--)`1CTc{mlCp1@=CMdNjm86X#j1V>->70I{MxEId5d zbS3(zMfuS`Rrd2fyCHU33&* 
z^o8qmLeriBFVz4_oub3`sD2ta<`0SLJSpTnpu;AbK~3#ETlT|MO>5hp3s&9~?$OZD z0FYk#nrJvZsD#-|5mJtjM0m_AWw=vBtYNW zOqH#~n%O79!oo!J-tz2IU1@)yi6-*CeeXj#@NtzfKmn#0K(ybONA1v3Ocj-jUm3(5 zKR!t?dBp%tIet;u^qIfEU!;w(wXza7&F`l4@vX`sGC@Ut1n^R4*fOL=z*u|`b73Vo zwAjr6kRQH3-fgP!M-?=fYdknLng}aLN zK0e`*SDtM;5xph>q$1I4HL3AK>j!m$au9kO@81GIa>5z&E$Fj#H28E_GkyI2-QuFQ z@SjQ2q)#Hq{=y8yNPu@`fCTZd@$_jUgmEDCRR9g;d)5#2AIMaP0c6) zMZV_v-lcGt=g#IzM2u#`kxZD7abqCUZN4k94Sh9w<`%(LW2aI~!sX?uHsxTGDP{%I zfR4ub)Jd}W@Q2-1fNOCw+}q`4G%&y}2FJjcAmMU7x7bq(jUvh;A*GVWmmVD(8{2R{ zn!C>H%SrntD>GB;0f(1sXe1nrL9~9I2Dut6zPh5KBB#E6g*{?82^&<|(`{6i zT=j14IV}RY%jFLIphSB5yHT|dda(fw*?M!N*5gB3hhG0hJ9DdhRMX zU@9gJ4Q*}{qR<_nQzr>_&AJ@QbjmP#U|~?4ez)X)7i`-)tvB|u+p01dx^384X~Obx z$^$qPNU++VKsXS)J~LBEb9gva${{<6!GgvgngOz)ryxU4@=s1lnjF04Tg&;O`>`v- zO^-4Qzbygvd_*EHQOdRL6ob4G=*lde*t4JIa6Vwe zL2kvC==A6xyz|h)BzKdTYj`rBu8s*(U3K1AjhxRn%bE*AJUCLcwKVtgF`7U1lWo|Y zNts>*~)-oYy*knfIZtuCC=hQ;~R0 zS8XbUM}};5WA)tnywk0qcGC}{KJ|2F=E2#Wc`>oQqUDw4byBK^Vg`rCl-$@V50Ep! zA|n@lzwcL6JmihNX@w&e0j!@qdL;^KDx!Nk`I`4!Mel55qsRn7N+9;wmLEScxeENC zWtP89@dPm3@tt)?$Iv5{>gxX}RD|U*i_29kTvq(BV%l!2>Cy2Yf1)-|J_XgA(J6Cj z6}+VwaA#6dmxxinr@~PPi9?QuhX=6Ko6C9pQBs$ef3+wilpqq4lQJQwl(bZCF;BHP`c_b>3CsLq%6=cMnD?c z_fzdW^O((u%=6u4r;+ir*sy2x%IB$g9~v4OX6ZlWjVC$9p@j&h2DR$*g};UC(XG14 zOP%ldHYdL^VQN_8yEkMU=T#2)g?~O1X2C+Jd8lc6{RQQ5-uTRK&7?zqhx^<3zJ`8o z_0Hn!nydQO!;@p#&eyNf$RjjON}B6R06d&bOibN81s=Bkm#@#`&`5!25fK)aO1hm< zEAW!w3K=vN+Lclev8D6YJ)iH0tk_6O`V>5VUZL*J7&}OgSL-=oBDrSL@Yl{ToM;7{ z5T}u}443~M{ac7QPbP?O2Tw)=;R>Ck+q2~9&-8!Xd_v%uoA+-qg0s?0r;Lme&RL)H z^Vc)bFN(|c%G`=`a;@d29RULicYDhkZj@b0#`QULmfqa;QRXJ^Piqa^y{Kbi3Y#R=f7}C*69vs~?{7=v1NYIs*6ri{Yss zFZiA~pBUk2ivF5amw$KUK>pNK=a%NMrk-0{Q~lP1U-3gDQ~NE@0Y4@XCgC6{#RU5c zSyJH@XYDE^y}!olPP!l7TUGpedE$&?)TP{G?sVNz4_m1ho7#2k7t6ID zS>OFMV2?jQoL^@7m0Y+dUX)AC2Efp{rV#ZZBv?7xr6$WUY`_A;qg$bRQIx!6-9qqv z?3CA9{^;JuWN&*8fg&eVue6x@`AME_HF)3JKYTM+2q_&~;3Ed;xV0H8(Jc;ocy>SU z;XP7>smLl~b>%&!eBp7omF04NGr zGN-#?2E4AEfv=nlwVxE|WkBAb+y7ywIzg>F$`0!JO>FVqs}HSnw@b~Vjm<)|@{RHV z_=vXaJvbeCH!?!6^=&PY>RmhN6CoJH6!|wo&kx3}tPT!~&A@RO_7W6dOJy4#9<@FS zqAXpU&(ye<_|z(L$-PVns@#|*cL_ice-<6(zBaVFx>o*lZxy}&H{zoK@4?7Uugsb0 zqeRJpzl>dHux__b81jFURFc1o`B>+9Aib#Hz#1qe3(&kCy;3FN;o;KE*RHNE0@vuF z%G)QiQu{gATI-pI)z=!no zchptz0Hv802MsKPt?aL%wii^;)1xao+j#qfsD1rqUW4ycEI|!lyrg~1M|%=agqyJC zKj>`2?z19Z`0uV0>I%`8ozp*nJ1pC!!Z7-wv{%bQQ}ceEgpr2EAiZ)p-O9*!imvwKaZB(Hej5BUXu*-g z2G?$!v0}6K8L*WBuf)>^$>*euH$;SFIb0V7)uPM*Be}3Y(abhvVM$w*=9|PV`mp$YlUSanV66F%T^KOE^%f&EX>cj@xX7QfKCnhF_ zZ%oylY;o=-w%kT&KZ#i>to)j?1Ay83N0>n7Y$#1-{LDU@B z{38`Z(5P!Usox*SwSV?ES)VBVR9#v+XE9wVj#s3TuqL5bx5(Wc-+(T4z?y)YU(95O zsIf=_LasT!kHcgAa6n?F?iier(66Fk-D8v+^z9tp1C6ghM7f$_J5gEpajM^tV$7?L zTmmW(^!M2XuNh7ljqTr;PL`o=%wU<;8y*?4+FKnCQe-LJ2J%NDD|>6RWSIT7pC|PQ z=jcL0xNpOQx;jR#UP%o&X(r;sw*x2o#6-UJAH}8Fd>G`E4@(lU@ju2u@kDf`Sy;sC z&?v&>%0@6>y<1snx>;~uEmHyYsEfL7^9*Fj?0o&=TnF>OZN86RwhioO2=8g_S0&Eyb%UWayv^EVNwrtyK;TrIQc# ze~mfW-;eIkF9$HpU=0bJiJ|mllS?A$69hF=8}qti?ix>$jTD%X1`?R&KUGD^G4QVnWuOWp3rA=jcRIooBeSi;EQH_kQYh zVe8SH;$nyA$Z*;Xkj@zVJ>!$ew=czu2ILO9<`?Ac9z=}07r3K0b3AoV6yLV$} zDQhPHaQwF52&7O!T?4lcQ7B>#@O9 zU*Xx#9lrW9pZi-|&d2=6t|Zx`9nB^FH=T*KKRcdlv)M88wqyC6rhm)B?CeTD;Cth} zr_bB)s#qvy@K!C$0m){pf^}v6G#qF0UBIQRd{3?FhW};2M<<4WLuzt^6XzWVZJ*a_ylqAbq(1#a`y*SE=WTOva3- zlq5+5_GMY%kXpbp-`dKFl$IHP#{DThowXF`EDAF-7fop>_>RBaQimfw6)iHRUaax# zeN?~xUhs#WMX7Fv#xGZr{_>-aBV zeeYWLN_%<>(HfEuz1Gt*;+@u`7kAE@-|q(S<$Gs6NSAJ@1H+8tdue3n{9o`Kpr@Af z@Sdyd*NxZZmi;V-FBW0|zT~U93-w`-^}W%C<8PgO4Z~;IvZ-d4_d5Dea#i3v-BC1a zb2C#ys)zhh2^$+524-ev*7EYrG3*#4vRi^}mTK~^KE%hnY{OB1v&L*N`>QJtBoa4l 
z=L@(37y!db{Zg6t6SRky0|8xWdiqDvnf$!0K2LyxL3BvG{n*yk^?>5whs$XGinGM0 z-*+V$t?or!^daDRj}p|yGOtPzNa{_ztN=)*n_77~(lQM95-1{qXJ zll6VmPMWT;9xZ5?209y)I?s+_9VZ!`C}2>xg&+30gLk2Ib$Pj?D^W&N>Pb(+dHVKj z!c`2ApqSI8!x`W}_<4Q7Y7D9-5JayeF)VB!O)*?-Hz|=o8S5R@7eXw!Um*fZo|9Xu zq5Co*@8gXgovhBO6C+J6EquL+k+nZot>gIX4sz}<>cf5C5+3~Pg#a}|Nd#N zC=kQzF|<10DjS~ondkuz$2p^EZ8+1~)wL2lAR-VwVPU>YG{p8}+Y2?TfC7Y+!g4YS zstNW}wMSO~z_JW1+cNlY@Q**~IUWRf91+Q$6uw_>BFd?np{@+#=m30(@2||ujt&na zM8aSYf4pLC5KIF?FB|rBm;?x=TwGmAcf*LMEGDUMY~iCg`(GOO?O--fw!M9fe@FL( zT}YI0xlIozL4&tpPu!q4Z9VUsuC*~dJnV5+KKgXyN42j%V9h?0(ur*iw=;O!e41LB zq0T-qt4h^2Tlc?!-BIQj5n2D_N3O{&L4ZqT16C{nN^zfsvMk)MUb`Hg4Q%Xp?{dJQ zU(DYY`!|KG*BdErNhE6BbFG9DNX#nyZ1e-(*yz|^;LteoCXIOIsY=KWh3c0|XMaNf zE(I8fVTkuGj<=HxG$x%)=*@(hHh%Tb-YgE_;wYM%Xkgb=@n zPy3vmPH(QRezV+_lJWm1NzqJA*W${ozY<3$oHEZeGwv40%p(SuuRC65a|ZBB;dsya zN1m!R(;0fLWUMY97DGf=L<4vzI%)VXdZ5 z@QyGw2Qnpt&`^0o1Mph(T&t}BKY-owwt|> zyUHw!xH}V%3@mgy|9wpbT>1o)kGBB_=_<81ui@!G^k{CPo*6vqpFVTBxx9ZY1F<_PiMRbJv+1*x<>XiWJ~p(D6U|a~;k$0(#^W8e?mNz{ zG1{P|*S8wS2M2e$fDL4`=W&U6sASpGk5l21`)4T!;B50pLufXNpU3rv4(-ekKm95H zD$sqaopkE#hRn{5c-Su}Aleq-qkXgbW2Z+0=y4!Ky0&)mBVOZ@n!jfn4yx-{yRE7b zv!=d&E>}T~dT6K<4&RJaFZ8-~SK3W}eq!%5{mCnI>SQLESxdwJ*76rgrO@)#VTEbL zxp^rnwidhZEE@h2Q`3wffIL~~Ne>;=5>{bT;YT#=UEFR~cF~L}s9L?AFS*n8m3n&( z-hFYVEOXp1m9!LEHr@WSZ^r5{l50-)caSj@IV#9Z+h`sto#X0x_GMvfra2#&dUmdJ zaq$s5+LPUa2O3Z(3j+4{sYKew4_@^_leZy?=2{5xsatLQ zr3-$Ml~WRZj%{g*^blWD;Z@6;G<+D>13w8TmZx*~LbuCHzISxW5cTNCK5}>sa=@Mu z1pB>X$KTV#PpvgHG-CZ3O&6^GoM4DAu&CaK{;TVL#G+?rEb^uJQx_N|yv$p_QCe7h zE!h)v#Flb1dkBoTK+H>j)8iqH9Q0HW7WIf(uhSYH)gRpb=J+`ZVuvZOzZ|`13l+8# z7p!-W=vnt(5{oXqihx_UK=pHeF_&WoPGGl{hsQr>uLj2|r}wu;|NeRZ(W63OD^C=A zk&pyg1X&;gKhDW5)Y8-({`jMQ^XL=8yjCa;Fh0JCDd!vNht!f--c{m^>35|+fdY`{ zkeLb{^K*xiN%$9pSO5rp3>=dKAV(eQ)~bAa2?W|KdlJMUa+7%RD^75Xn;CYWF8%7# z%ynKE*3PY{cwX{t9|)^D^8tj`I0;)nV*=pJPkau|kfU4WDA8@lNvX0eH0)IgB8>Ch zqSH3IWtI<~+y(yowm9=XAHO-Voa7tIlnV;@eysVb)@bv|$%ZGJv}3K^?qR7t?V%5DWvwQX!x+E2g8+tip_ zKc%KsVDmr&rj;8L!jTa2L}jE=UnIViLygW8;KmkvZxsIIef8MNO4cX@Qcr~JE8F=4 zBJE0i+=ki^-VMiS8)oPHzw*xVugN$3`$LgXkW!?P?i4{%L?lK!N5`ZE84bcnag<6) z2?$6^NQpGamz+omV-gb(5s(-mj2iJf`Q1fI^={bBaF`<}nOEEO3ue7B^%XfLm3=h2c z-@m`OK+Wf;H|6CO{-aShRwIE~P|1AT2AvTN2PO}{*}Ob^=7vNXtj&m75T)vrVbwbM z&(9}BQ2SG{B0ZCm)m-{e!MT=?(Y&V4 z=#0S2Z#2yGO}1Oiz$yeda|Gy6-d5@0Bd6Qj5I9lYWey36975q-`jd&fVEF{PySrhc zY5W-jamy_s0}u$uudQUm4~vyM%Zq=?9@(>KSs^@}oEAO9LD%&ODKj(EDeo6U{qFsq z@E%`!^*sZsZFYefsn~H^z`I2OZ;{2t&R(aitc-v|GYLmAu(DLgPZlyX_nlo_dILD7 zK?KeV0_Fti%r<-7%T+|d{o30_ z=;4!V>TI^+13oU!&Q+X>vJ6IoOd|u`&uJS&_Y2puIMhPSQ6yAxlGN94-!9LAs47V% z#Lv&~D@YXn8QSsGMT8^p2<)eiYi3o}hZTT!wzVc<*OiRXQ|E9EYom*#Ap4_Eo`mYW z&xWRA;swh~QS=MHNMb_xk;1Cmf>;tTyhukqbveXa6zfh$+8AlHcCu~e}*Pdptp zPb}_E-FgEpo<2q4+=zR?PKKYJlDI>75;GjfuW%&DNnWDHu3aw%#RwM}G$PZ}D9C!G zKI_z)e;fwBG9f-*kSVUGHXxdwAsOxhmE~g{^|#^r<702ZZ1M>LnWR*)V7K=A{S|F} zP?z_u4wHKwD~CUQ;ZSdx(~Kgnf>FEp{kw>~@zNf?(i81Mde|o761bu0r1!R;*lg}H z!@!BMzNyLUcJ@#}fFFM@@+lRVOSatvw!jKEOOl3~)S(6sd>Y(Pt3n8Wimsyf?@Nkj zdw5yDCpxKaXUQ`SgjNNQp9kl}-;PtwV|>U#6j-e8%ENt@ofiT@&?C9#+{BA(PAmO#+n>bC(cEB>30k){4196p(?iFLQhMLhuFavb2bnxp}%Nwmy zUxYzaQyDfi-9S*Ds185N0IZh{%)ckuiKVKxxTho@4V>5oDxRW^5F<9T=F6*D`?V7w z9JJVc*eo`sFPQsRha*GY48v$tbPFd^=-}{XgOFyQf3p{7;;ou^^8(~Q$>lg57f}JJ zTqS|!*}(gKLVdl``pKMB+~+@5{nLq4V({k45|gI{;h>&K#iVuIGArOkuTI57w!=V9o_7)9L#C+Kf2-(3lpWt+ z&=5e9jUc}iCyjh?+_EzBwxsgoNAqVOaOUCX2iFr!8L9VumlU(9udBk90iK?#oJ!)k zYf!a3mn{tlBmf&1R|4k6cDzHa2n6j6H9GR&;>{`{j1L5-K^I&5`}+}XXPuM?_x}4~ zeD7i;4xhj;y#nTs2OWDRyb?w`Tu9DR-_qh{WMmYstdvzIn9|?zA@wDp=Wc~-)-I8# zE0~>p`bEt`f|*qTgp&aSOy}#4RNGB3MX5w3c>OJyqmmO6t$st{qytp58>rIr6sOqoKxL_xxi$w 
zD#`IrjXu;Bcmxx#(kZsi4+Ww|*}tzNjPsTOD?5A2&So13yyqs(e)*CUCKixi^DkV| zre$|i(zY!S8EVz)gPasG;gpN~O1=!I)>M=<*oduc8L4Op14XLteJq-m&vJ)Va zy=^9*`BkUr3kswR=>+(}f7dppnwk~cTL5MkWcus%&fJP&AYb%$F9Xk3acu67F|7Tr{oaMFCK{UitU?r-hgJ zz-=z>vMQ3MD`aJ4YOw?r`6(09#$b+e$1$#Y_9PD!GVq_q4|T&krl*&>)1J(%GlDIP zP7=DVp{WUw0-mlO9vcjTzSUO-e-P7D?z~G@A~cQ0PMo{ws%~wJ@9HhpTxaO zvp@mF-(T}_I@;obz}fjM4YCHE5PfCUWO(`=$2vp;*ylif)~PdiR;p*ue!b|BNP`Bv zc+YT6i7hG-_54LA1@+CzeEP9L;ZzsSu3Ru1p+w}}KhiZ(JPR!25DY1G}+Fi(-s1$2@NCU7JNsz^f1gnXt6 zLALeg;-_;);8-tp_w-;vtO41IXHjFjE&pLuk&N6RI!TyzK%nQ-^J=h{-#wkEMi}uk zK5Adtt#M_GS)|5A!k|SJ6?&hh9s=&`*bGi0!9lOtni=D54*GW{Rh&gZLD9cKtkJ+q zoy(s^Gu!0aZJ9XCg3(rpRoucII%a0(QxfnYTiilILYo7HxZ(!BkR~1ts^<n+a6_C4X>3{!^pn%#D59A_)?u zYdMXtza8|Tfo6SKwbsfT7?|5=2}ZKF`K`P<+X85{SwEY`hKX`v8^Zb4o5b*F?qrtO zGwM@PYvGR=pQ8DWfc^p>gwuXWpMLaG6+d9F!rN~9v2V{ zTC%LH{my>?g~OhuuL@P&Ko}ez8ahn`z^nrWpxLV?dOcFzOe@iJ9r&9Ok7G1qmzd<) zOfb`^WNs_d11{21+ z$L8#IEa(=hxp5)pP5mk++Eqs|C3$pI2H1e~)8}Nm#jvLXL9MIrrX|y$;Vl7#>G0FP zg^3-aX;71(&+Xciql4`6`}YpO*GP;>Diz?#9$`(yIBd4}A~7 zhvCoWOw0941c)CX2~|e$$j)wG4i{H%@9i0oe_lBI0#wEcalet_+A&li5~>1rBCvBe zCzpWKXqlpiRr42Z$U;Q`Kr(Rda3}oCB{VcN+=E3cz%1;^8sUM;NE&qN&D0dhXFsMD z{2oGj&F`-LxELqYdBS-SQZw_sn~C8*mOo7&3ZSgBU({IH*xn32F+C!Y9$@7jW-$u5 zHAW@S8rGU0FYN9Pb({jqNq8C1n(3&6ORuH*-krD7O&0SwKHZ{X(nv*nvX7N2r3>;} zs&HXP^6rMOX1XMt)(sDjD;C4uCp?y*n*Ek0l|IelAl#jvp3dLVCa$Na_vv$3xRX#Nrcus^!`Jk*e;yaBjV?=ZXh z_VW^g2_TWolQ0!l4`?|b;)ChWztnvmetmq>cOFg6Ha_hHnp{=ir6GO6of{>tG_Qn1 z8#fhiBPCupRSBh=B>ObC1&E|M1_cF4y#JF9ls__>al)V5R>gDYF5KN1;`LZgsT=25 zIn?(<&t28MC^)#d5qvSAdV!Joz1;-lt`6YtutbaS&U$>bG}%8`Y1vqXdN0=Xi=~_q z20BQs{Tlg?E4h!l$ggNeR#-8df58_#Me-5j(Mi*gf}Zffr1y}5xL8?(?!eRL!!Ka6)bp%)p>+$4eXyo54hUu2$%Hc?>bvF}t z{A}ADh6|V3Z$j3;s6Ui?%+ey+QuWG?#XhTqC7K89d~xOsxTwgC%?z+J5;kCo!7!9E zICPhYd6FgAQsrq%$DFK6f2SejYOYy>_|u~w1S>FSsCPsyFE0;KMtwQ{LKNb!S5a7h z+zl@tun%jrHFOGLNu}*hILV!_zP-K}_2V*2dVZf^|vKrAJWA) zYwhFAo+DCSYg?1!S!GY(j^bQCf$}B2csxyn{ko4$3`Hppo%&@SX1a7wr-2 z@ln;e@Uuf5vM-&xpuu(%^J~6o7`Ab5;|04y$*I>i)<_764{+*n z)349a^TIj|bc~HG9$t(UeA*%Is-25++X6FEj&z+J4_r!Iozr03FNm~d+8z9Pq^D$E zS3;)(_1jhV`6$7W!R_p!_~~4|*xj+5PqM_+Ly;^p0lS7#Bi*t3YHl)%4k2 zR==~GUchQO6(3-L+U+=9UAsH;RchI+@!c;Vcd78*CdS!xKn{`ztDs}$JeU~kEls~W z?F7+we-{!I1QTLUaN*x4Cc**Q@@}Q(?{$%aKP>W(AhA6QE-9rLR(u0D*gu{Cn{_tv zAukNoY%H9*xXqX-ogs{??2?z;4~e8e+yFM(w za2C@20s4n?&++$B8uNv~MTPG63*C@X1-RDFGGg`zla{^Twu7vVx2W*5=fR0i$z!fH zf9%?Xnr2EL;Xb?vGWK`7z%}cf_9P3aP~ie8tcOCi=IgA*@YmJ!kcgZ?P7j{Ek?;`G z9)Ygoa0%bf4KT1lCxHYUHQhs~dY5T)c_m6f!@>6g)gF>+O38-l%p=%8Px>LAqG%SJ z=$Zu`OS{zrv{4rg*)iJYVU?HT@zkcfbH{KDR=E$uz2!*!Z6Q)TRH!RGj65x1ykbm%=K|II3nn;6#AfqDv-U8uqN#F= zdwkUjs4o~>1?F4C48xqPz}1xxh?Uj#5T#6LPHg593qlI=>;1M$H4y1vEF{uARLn!X zh~PAbFqmsQRA<|l#R&^HF#vPxtx_N-`{CR5bNSzLuPALSNx1Q(1SiHi^-~1Y$NO_B z2e`XGdE({wzIrpzNa-ZKYAux4UbNWniwESt({Jp#grHoLGWZvSx%Dw{bu+n{na$wX zqH0wJFSkla2_xed)@PUF5H|;s**7oDB~kWa!PdjnC4w zxWd$zXo*YIk1iD44kXZK*|DH_85H{Z1St~k2JdPj!q2F&a=`{z6+oWqlQYnx*8&k0 z2|v*D1g>f-zSdw<@4M);g5lnnJqb@qG5Y{C{!5%24je+0`{J9}qv$$p%YIT}yJ-0T z9-+a>6|`1!Y96D$xm}}KouOk_iqwcj(3gdJ=EmxHdU^s~lgm2nsFiCdkn(~vt|#A7 zHb|T+QMEXgdG5%(7m!fDmt3qe^M!5PF|4u{HK?je_edXD>@8kgY*p|yoNMzBcFu=M zTHVIfp^7=|y0vcswPI315GMYr8PF9zIK7CWDa$F*T}Z~gfXZ4*kd11flPXD}W+F~S z*p)wDzj}Eh5S~V|nX85$u$j5JyF2}kzZDIzFsO(@WxYV$D_D#D2o)qY(2J5jCG+gCip&%NuhY9oC>+#6O|d7X1oQFWm8c-mHsZ zJjcWMqVM@q#D2bt7NKQ``|;HDdlRo6s&`D|O9?`Fs-+uC(=81gdM32kzRdBHgO^v$ zGqCr-G?D#4*-Vv!)R!W~y?zD)ayFY|a+TUxxt&^7VUt$K<1=o#G?e??X3IwTTFXGf z8nfzzL}lZg@SiJD6|Y`hnH!nOYu&$@A@(sHYFcal+0@LeB6k^CX;{qmZZMea9)IbO z_Bbqg`j7}tl4t6Xsi+7@WcrUN;rQ`JzSx6$A=hha@@$wG30haU25kS#iu+X$2Mdc1 
zCmY8)Ckw}&pIB@PkPx9ztUh?gqf3PRyZ_45w7ax#2hSZ*Z9pPI|MQI!}L;Uh}hX+D*C?{226Q@C%tY3UYr_>i}t*Iwq<3>KTbK``)t z<}aYE{K%%?u^cbG`?kb2x{AHTwK|AW@z{wMwJ}Fv?pB5`|v{Sfw+)AmyrL`rKpnk>M-TXQL@eCeHWJK^YUQO zA&9Y;;p0kNt2G~djj=Cr&5AC03XOxcl=u~|W#zN8hnLu~l;p30e}K^?_AU9CG)AYt zS{iY%tg6hcDomQN!9@MlhgtclV>F<$X;D>H@MVdsw*gj}_v4kUk~Lj}GM4O15jrAW zJK`8`Qw{OuI|=Qz@0Ra)OnIw5J*iroH1y_s2J(#FoQ*9q)oyqJ&l3yx0ZD+pol0<+ zO0e-SZ*WphKq@yZOePSNz`(YuwYX))gf+`_lS=cnawm%hCyRh$d&P7oMPM!bIWWnX z;6OJ&m8}aGjNgu3rKaM1I32D@%~G#weyN?ia6gQUK`aEj{sSrc(JRe4qI=gNdk>cnjWt{0+W)Jdie<-4I)J zkZq4smuY+X4Y@9g_Cs?$?_MR@4mHl1ya9LK%OE^kT0-R z$kICBR3xAFmr7%(@wL+Uncn5vd)RDR4mWnSTV0e9FW59KX?cbkz?yMY+InI>m(CZa zjnIIQ#Ym-tU5T~`YjW&-mnUoTRxxke2Tox7uH#nS*rq1sb&>)Ur+Z|pFAa7k5M06zlkPX<*nx_$h^tWkRLc6 zzWaRW5 zZ;91u>Wl)9<&^Kb#~K?PBk;|HPdOJSN5~wNr;{|1H-6vXFF?l&lLvNN& zJbuVC_1E_V%xzLQ!LjKvH{`KYRJ=3yoW6gdvxe?CEXDLbMgBzqNu~VJPq@rC6dgn0 zur|JZN4S)^!KlF%LY{V%xWBs2XXVN_=B7SXwR|%We6q@OV%?NSI4(V>cJ8y*y~0rk zE0LjXUUHd4BWaY&YKYsq%9)brX+16 zvinQG_@V8U>cwZ@?OG{ovhP0_4>fuFoqiA9HYL4K?>ZGAagkmvV}vRLLlfbDda|J#Nec3F?sbfco~Wyc@LUo(`Y6KHs1R;9l0z_ymgbN}>2f>57XfHCa0`dqX zJ>>szEFu4Yy_A>FE+z(DH|M%D)&uTb!0&;%pQD|hlahmv6Ziv>m6ny3kdcv)xdoSz yQ<9TblDQ=&Ev+Oi4Y5=Q#n}JX8<3ukuFgUK?>Bf;9z>k4t?mOu%_@z@k^ckrIcwAa literal 0 HcmV?d00001 diff --git a/build.rs b/build.rs index a2a37f162..bd1ac2696 100644 --- a/build.rs +++ b/build.rs @@ -1,3 +1,50 @@ +use std::env; +use std::fs::{self, read_dir, File}; +use std::io::Write; + + fn main() -> shadow_rs::SdResult<()> { + let assets_dir = "assets/integrations"; + let out_dir = env::var("OUT_DIR").unwrap(); + let output_file_path = format!("{}/available_icons.rs", out_dir); + let mut output_file = File::create(&output_file_path).expect("Failed to create output file"); + + writeln!( + output_file, + "use std::collections::HashMap;\n\npub fn get_available_icons() -> HashMap<&'static str, &'static [u8]> {{\n let mut icons = HashMap::new();" + ) + .expect("Failed to write to output file"); + + for entry in read_dir(assets_dir).expect("Failed to read assets directory") { + let entry = entry.expect("Failed to read directory entry"); + let path = entry.path(); + + if path.extension().and_then(|ext| ext.to_str()) == Some("png") { + let image_data = fs::read(&path).expect("Failed to read image file"); + let file_stem = path + .file_stem() + .and_then(|stem| stem.to_str()) + .expect("Failed to get file stem"); + + let constant_name = format!("{}_ICON_BYTES", file_stem.to_uppercase()); + + writeln!( + output_file, + " pub const {}: &[u8] = &{:?};", + constant_name, image_data + ) + .expect("Failed to write constant definition"); + + writeln!( + output_file, + " icons.insert(\"{}.png\", {});", + file_stem, constant_name + ) + .expect("Failed to write HashMap entry"); + } + } + + writeln!(output_file, " icons\n}}").expect("Failed to write closing brace"); + shadow_rs::new() -} \ No newline at end of file +} diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index 74257ddf2..3e86857ae 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -41,7 +41,7 @@ use crate::http::routers::v1::system_prompt::handle_v1_system_prompt; use crate::http::routers::v1::vecdb::{handle_v1_vecdb_search, handle_v1_vecdb_status}; #[cfg(feature="vecdb")] use crate::http::routers::v1::handlers_memdb::{handle_mem_query, handle_mem_add, handle_mem_erase, handle_mem_update_used, handle_mem_block_until_vectorized, handle_mem_list}; -use crate::http::routers::v1::v1_integrations::{handle_v1_integration_get, handle_v1_integration_save, handle_v1_integrations}; +use 
crate::http::routers::v1::v1_integrations::{handle_v1_integration_get, handle_v1_integration_icon, handle_v1_integration_save, handle_v1_integrations}; use crate::http::utils::telemetry_wrapper; pub mod code_completion; @@ -124,6 +124,7 @@ pub fn make_v1_router() -> Router { .route("/integrations", telemetry_get!(handle_v1_integrations)) .route("/integration-get", telemetry_post!(handle_v1_integration_get)) .route("/integration-save", telemetry_post!(handle_v1_integration_save)) + .route("/integration-icon/:icon_name", get(handle_v1_integration_icon)) .route("/docker-container-list", telemetry_post!(handle_v1_docker_container_list)) .route("/docker-container-action", telemetry_post!(handle_v1_docker_container_action)) diff --git a/src/http/routers/v1/v1_integrations.rs b/src/http/routers/v1/v1_integrations.rs index 87eb5a9d2..2f65f4688 100644 --- a/src/http/routers/v1/v1_integrations.rs +++ b/src/http/routers/v1/v1_integrations.rs @@ -3,24 +3,19 @@ use axum::Extension; use axum::http::{Response, StatusCode}; use hyper::Body; use serde::Deserialize; -// use url::Url; -// #[allow(deprecated)] -// use base64::encode; -// use indexmap::IndexMap; use tokio::sync::RwLock as ARwLock; +use axum::extract::Path; use crate::custom_error::ScratchError; use crate::global_context::GlobalContext; -// use crate::integrations::{get_empty_integrations, get_integration_path}; -// use crate::yaml_configs::create_configs::{integrations_enabled_cfg, read_yaml_into_value, write_yaml_value}; pub async fn handle_v1_integrations( Extension(gcx): Extension>>, _: hyper::body::Bytes, ) -> axum::response::Result, ScratchError> { - let with_icons = crate::integrations::setting_up_integrations::integrations_all_with_icons(gcx.clone()).await; - let payload = serde_json::to_string_pretty(&with_icons).map_err(|e| { + let integrations = crate::integrations::setting_up_integrations::integrations_all(gcx.clone()).await; + let payload = serde_json::to_string_pretty(&integrations).map_err(|e| { ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to serialize payload: {}", e)) })?; Ok(Response::builder() @@ -82,51 +77,26 @@ pub async fn handle_v1_integration_save( .unwrap()) } +mod generated { + include!(concat!(env!("OUT_DIR"), "/available_icons.rs")); +} -// async fn get_image_base64( -// cache_dir: &PathBuf, -// icon_name: &str, -// icon_url: &str, -// ) -> Result { -// let assets_path = cache_dir.join("assets/integrations"); - -// // Parse the URL to get the file extension -// let url = Url::parse(icon_url).map_err(|e| e.to_string())?; -// let extension = url -// .path_segments() -// .and_then(|segments| segments.last()) -// .and_then(|name| name.split('.').last()) -// .unwrap_or("png"); // Default to "png" if no extension is found - -// let file_path = assets_path.join(format!("{}.{}", icon_name, extension)); - -// // Check if the file already exists -// if file_path.exists() { -// info!("Using image from cache: {}", file_path.display()); -// let mut file = fs::File::open(&file_path).map_err(|e| e.to_string())?; -// let mut buffer = Vec::new(); -// file.read_to_end(&mut buffer).map_err(|e| e.to_string())?; -// #[allow(deprecated)] -// let b64_image = encode(&buffer); -// let image_str = format!("data:{};base64,{}", extension, b64_image); -// return Ok(image_str); -// } - -// // Create the cache directory if it doesn't exist -// async_fs::create_dir_all(&assets_path).await.map_err(|e| e.to_string())?; - -// // Download the image -// info!("Downloading image from {}", icon_url); -// let client = Client::new(); 
-// let response = client.get(icon_url).send().await.map_err(|e| e.to_string())?; -// let bytes = response.bytes().await.map_err(|e| e.to_string())?; - -// // Save the image to the cache directory -// async_fs::write(&file_path, &bytes).await.map_err(|e| e.to_string())?; - -// // Return the base64 string -// #[allow(deprecated)] -// let b64_image = encode(&bytes); -// let image_str = format!("data:{};base64,{}", extension, b64_image); -// Ok(image_str) -// } +pub async fn handle_v1_integration_icon( + Path(icon_name): Path, +) -> axum::response::Result, ScratchError> { + let icons = generated::get_available_icons(); + let sanitized_icon_name = icon_name + .split('/').last() + .map(|x| x.replace("_TEMPLATE", "")).ok_or( + ScratchError::new(StatusCode::BAD_REQUEST, "invalid file name".to_string()) + )?; + if let Some(icon_bytes) = icons.get(sanitized_icon_name.as_str()) { + return Ok(Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "image/png") + .header("Content-Disposition", "inline") + .body(Body::from(*icon_bytes)) + .unwrap()); + } + Err(ScratchError::new(StatusCode::NOT_FOUND, "icon not found".to_string())) +} diff --git a/src/integrations/config_chat.rs b/src/integrations/config_chat.rs index 32592ddc3..d2ed6b855 100644 --- a/src/integrations/config_chat.rs +++ b/src/integrations/config_chat.rs @@ -19,7 +19,7 @@ pub async fn mix_config_messages( tracing::info!("post.integr_config_path {:?}", chat_meta.current_config_file); let mut context_file_vec = Vec::new(); - let all_integrations = crate::integrations::setting_up_integrations::integrations_all_with_icons(gcx.clone()).await; + let all_integrations = crate::integrations::setting_up_integrations::integrations_all(gcx.clone()).await; for ig in all_integrations.integrations { if !ig.integr_config_exists { continue; diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index e73a8e23e..039621902 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -26,6 +26,7 @@ pub struct IntegrationRecord { pub integr_name: String, pub integr_config_path: String, pub integr_config_exists: bool, + pub icon_path: String, pub on_your_laptop: bool, pub when_isolated: bool, #[serde(skip_serializing)] @@ -33,7 +34,7 @@ pub struct IntegrationRecord { } #[derive(Serialize, Default)] -pub struct IntegrationWithIconResult { +pub struct IntegrationResult { pub integrations: Vec, pub error_log: Vec, } @@ -94,6 +95,7 @@ pub fn read_integrations_d( let mut rec: IntegrationRecord = Default::default(); rec.project_path = project_path.clone(); rec.integr_name = integr_name.clone(); + rec.icon_path = format!("/integration-icon/{integr_name}.png"); rec.integr_config_path = path_str.clone(); rec.integr_config_exists = path.exists(); if rec.integr_config_exists { @@ -142,6 +144,7 @@ pub fn read_integrations_d( let mut rec: IntegrationRecord = Default::default(); rec.integr_config_path = integrations_yaml_path.clone(); rec.integr_name = key_str.to_string(); + rec.icon_path = format!("/integration-icon/{key_str}.png"); rec.integr_config_exists = true; rec.config_unparsed = serde_json::to_value(value.clone()).unwrap(); result.push(rec); @@ -150,6 +153,7 @@ pub fn read_integrations_d( let mut rec: IntegrationRecord = Default::default(); rec.integr_config_path = integrations_yaml_path.clone(); rec.integr_name = key_str.to_string(); + rec.icon_path = format!("/integration-icon/{key_str}.png"); rec.integr_config_exists = true; rec.config_unparsed = 
serde_json::to_value(value.clone()).unwrap(); result.push(rec); @@ -288,18 +292,16 @@ pub fn split_path_into_project_and_integration(cfg_path: &PathBuf) -> Result<(St } } -pub async fn integrations_all_with_icons( +pub async fn integrations_all( gcx: Arc>, -) -> IntegrationWithIconResult { +) -> IntegrationResult { let (config_dirs, global_config_dir) = get_config_dirs(gcx.clone()).await; let lst: Vec<&str> = crate::integrations::integrations_list(); let mut error_log: Vec = Vec::new(); let integrations_yaml_path = get_integrations_yaml_path(gcx.clone()).await; let vars_for_replacements = get_vars_for_replacements(gcx.clone()).await; let integrations = read_integrations_d(&config_dirs, &global_config_dir, &integrations_yaml_path, &vars_for_replacements, &lst, &mut error_log); - - // rec.integr_icon = crate::integrations::icon_from_name(integr_name); - IntegrationWithIconResult { + IntegrationResult { integrations, error_log, } From 7e019edd6162ca32ab167ecdda32d6b0557528e7 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Mon, 9 Dec 2024 16:59:29 +0100 Subject: [PATCH 109/185] integrations-filtered/:integr_name --- src/http/routers/v1.rs | 8 ++-- src/http/routers/v1/v1_integrations.rs | 50 +++++++++++++++++++++ src/integrations/setting_up_integrations.rs | 2 +- 3 files changed, 56 insertions(+), 4 deletions(-) diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index 3e86857ae..d754ed8c2 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -41,7 +41,7 @@ use crate::http::routers::v1::system_prompt::handle_v1_system_prompt; use crate::http::routers::v1::vecdb::{handle_v1_vecdb_search, handle_v1_vecdb_status}; #[cfg(feature="vecdb")] use crate::http::routers::v1::handlers_memdb::{handle_mem_query, handle_mem_add, handle_mem_erase, handle_mem_update_used, handle_mem_block_until_vectorized, handle_mem_list}; -use crate::http::routers::v1::v1_integrations::{handle_v1_integration_get, handle_v1_integration_icon, handle_v1_integration_save, handle_v1_integrations}; +use crate::http::routers::v1::v1_integrations::{handle_v1_integration_get, handle_v1_integration_icon, handle_v1_integration_save, handle_v1_integrations, handle_v1_integrations_filtered}; use crate::http::utils::telemetry_wrapper; pub mod code_completion; @@ -122,6 +122,7 @@ pub fn make_v1_router() -> Router { .route("/fullpath", telemetry_post!(handle_v1_fullpath)) .route("/integrations", telemetry_get!(handle_v1_integrations)) + .route("/integrations-filtered/:integr_name", get(handle_v1_integrations_filtered)) .route("/integration-get", telemetry_post!(handle_v1_integration_get)) .route("/integration-save", telemetry_post!(handle_v1_integration_save)) .route("/integration-icon/:icon_name", get(handle_v1_integration_icon)) @@ -132,16 +133,17 @@ pub fn make_v1_router() -> Router { .route("/patch-single-file-from-ticket", telemetry_post!(handle_v1_patch_single_file_from_ticket)) .route("/patch-apply-all", telemetry_post!(handle_v1_patch_apply_all)) + .route("/links", telemetry_post!(handle_v1_links)) + // experimental .route("/get-dashboard-plots", telemetry_get!(get_dashboard_plots)) .route("/code-completion-prompt", telemetry_post!(handle_v1_code_completion_prompt)) .route("/commit-message-from-diff", telemetry_post!(handle_v1_commit_message_from_diff)) + // to remove .route("/subchat", telemetry_post!(handle_v1_subchat)) .route("/subchat-single", telemetry_post!(handle_v1_subchat_single)) - - .route("/links", telemetry_post!(handle_v1_links)) ; #[cfg(feature="vecdb")] diff --git 
a/src/http/routers/v1/v1_integrations.rs b/src/http/routers/v1/v1_integrations.rs index 2f65f4688..f1fbffe3e 100644 --- a/src/http/routers/v1/v1_integrations.rs +++ b/src/http/routers/v1/v1_integrations.rs @@ -4,6 +4,7 @@ use axum::http::{Response, StatusCode}; use hyper::Body; use serde::Deserialize; use tokio::sync::RwLock as ARwLock; +use regex::Regex; use axum::extract::Path; use crate::custom_error::ScratchError; @@ -25,6 +26,55 @@ pub async fn handle_v1_integrations( .unwrap()) } +pub async fn handle_v1_integrations_filtered( + Extension(gcx): Extension>>, + Path(integr_name): Path, +) -> axum::response::Result, ScratchError> { + let integrations_result: crate::integrations::setting_up_integrations::IntegrationResult = crate::integrations::setting_up_integrations::integrations_all(gcx.clone()).await; + let mut filtered_integrations = Vec::new(); + + for integration in &integrations_result.integrations { + let pattern = integration.integr_name.replace("_TEMPLATE", "_.*"); + match Regex::new(&pattern) { + Ok(re) => { + if re.is_match(&integr_name) { + let mut integration_copy = integration.clone(); + integration_copy.integr_name = integr_name.clone(); + if let Some(pos) = integration.integr_config_path.rfind(&integration.integr_name) { + let (start, end) = integration.integr_config_path.split_at(pos); + integration_copy.integr_config_path = format!("{}{}{}", start, integr_name, &end[integration.integr_name.len()..]); + } + if integration.integr_name.find("_TEMPLATE").is_some() { + let config_path_exists = integrations_result.integrations.iter().any(|existing_integration| { + existing_integration.integr_config_path == integration_copy.integr_config_path + }); + if config_path_exists { + continue; + } + } + filtered_integrations.push(integration_copy); + } + } + Err(e) => { + return Err(ScratchError::new(StatusCode::BAD_REQUEST, format!("Invalid regex pattern: {}", e))); + } + } + } + + let payload = serde_json::to_string_pretty(&crate::integrations::setting_up_integrations::IntegrationResult { + integrations: filtered_integrations, + error_log: integrations_result.error_log, + }).map_err(|e| { + ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to serialize payload: {}", e)) + })?; + + Ok(Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "application/json") + .body(Body::from(payload)) + .unwrap()) +} + #[derive(Deserialize)] struct IntegrationGetPost { pub integr_config_path: String, diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 039621902..f118cf1a4 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -20,7 +20,7 @@ pub struct YamlError { pub error_msg: String, } -#[derive(Serialize, Default, Debug)] +#[derive(Serialize, Default, Debug, Clone)] pub struct IntegrationRecord { pub project_path: String, pub integr_name: String, From a1c9f2bf57aa35670b7e34005f33e34b32bf2623 Mon Sep 17 00:00:00 2001 From: Nick Frolov Date: Mon, 9 Dec 2024 17:36:08 +0100 Subject: [PATCH 110/185] docker integration - reorder fields and hide docker extra fields --- src/integrations/docker/integr_docker.rs | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index b2485b0ac..4fa46cd9c 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -266,6 +266,10 @@ fn 
command_append_label_if_creates_resource(command_args: &mut Vec, labe pub const DOCKER_INTEGRATION_SCHEMA: &str = r#" fields: + docker_cli_path: + f_type: string_long + f_desc: "Path to the Docker CLI executable." + f_default: "docker" label: f_type: string_short f_desc: "Label for the Docker container." @@ -274,31 +278,33 @@ fields: f_type: string_long f_desc: "The address to connect to the Docker daemon; specify only if not using the default." f_default: "" - docker_cli_path: - f_type: string_long - f_desc: "Path to the Docker CLI executable." - f_default: "docker" + f_extra: true remote_docker: f_type: bool f_desc: "Use SSH to connect to remote Docker." + f_extra: true ssh_host: f_type: string_long f_desc: "SSH host to connect to remote Docker." f_label: "SSH Host" + f_extra: true ssh_user: f_type: string_short f_desc: "SSH user to connect to remote Docker." f_default: "root" f_label: "SSH User" + f_extra: true ssh_port: f_type: string_short f_desc: "The SSH port to connect to remote Docker." f_default: "22" f_label: "SSH Port" + f_extra: true ssh_identity_file: f_type: string_long f_desc: "Path to the SSH identity file to connect to remote Docker." f_label: "SSH Identity File" + f_extra: true available: on_your_laptop_possible: true when_isolated_possible: false From 1c0f6ec9933b60c81858a1af65bb5af7fb843b85 Mon Sep 17 00:00:00 2001 From: Nick Frolov Date: Mon, 9 Dec 2024 18:01:32 +0100 Subject: [PATCH 111/185] integration command line - add filter and timeout to extra fields --- src/integrations/integr_cmdline.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index db3ccebe9..883a417bd 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -247,7 +247,7 @@ fields: f_placeholder: "echo Hello World" command_workdir: f_type: string_long - f_desc: "The working directory for the command." + f_desc: "The working directory for the command. If empty then workspace directory will be used." f_placeholder: "/path/to/workdir" description: f_type: string_long @@ -259,10 +259,12 @@ fields: f_type: string_short f_desc: "The command must immediately return the results, it can't be interactive. If the command runs for too long, it will be terminated and stderr/stdout collected will be presented to the model." f_default: "10" + f_extra: true output_filter: f_type: "output_filter" f_desc: "The output from the command can be long or even quasi-infinite. This section allows to set limits, prioritize top or bottom, or use regexp to show the model the relevant part." f_placeholder: "filter" + f_extra: true description: | There you can adapt any command line tool for use by AI model. You can give the model instructions why to call it, which parameters to provide, set a timeout and restrict the output. If you want a tool that runs in the background such as a web server, use service_* instead. From 84c3beb5199681cd693d9de4a360a02bc7108be8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 10 Dec 2024 06:08:39 +0100 Subject: [PATCH 112/185] Experimental integrations (#469) * feat: allow integrations to be marked as experimental * Revert "feat: allow integrations to be marked as experimental" This reverts commit 010f079dc8832e3ed9bd60d155401d53831615c2. 
* fix: add experimental integrations based on cmdline param from lsp --- src/http/routers/v1/links.rs | 2 +- src/integrations/mod.rs | 13 +++++++++---- src/integrations/project_summary_chat.rs | 3 ++- src/integrations/running_integrations.rs | 8 ++++---- src/integrations/setting_up_integrations.rs | 5 +++-- 5 files changed, 19 insertions(+), 12 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 4f98e2500..1308240f0 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -63,7 +63,7 @@ pub async fn handle_v1_links( .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; let mut links = Vec::new(); tracing::info!("for links, post.meta.chat_mode == {:?}", post.meta.chat_mode); - let (integrations_map, integration_yaml_errors) = crate::integrations::running_integrations::load_integrations(gcx.clone(), "".to_string(), true).await; + let (integrations_map, integration_yaml_errors) = crate::integrations::running_integrations::load_integrations(gcx.clone(), "".to_string(), gcx.read().await.cmdline.experimental).await; if post.messages.is_empty() { let (already_exists, summary_path_option) = crate::scratchpads::chat_utils_prompts::dig_for_project_summarization_file(gcx.clone()).await; diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index ba01f4a0f..bcf6dd3cf 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -65,8 +65,8 @@ pub fn icon_from_name(_n: &str) -> String return "".to_string(); } -pub fn integrations_list() -> Vec<&'static str> { - vec![ +pub fn integrations_list(allow_experimental: bool) -> Vec<&'static str> { + let mut integrations = vec![ // "github", // "gitlab", // "pdb", @@ -76,8 +76,13 @@ pub fn integrations_list() -> Vec<&'static str> { "service_TEMPLATE", // "chrome", "docker", - "isolation" - ] + ]; + if allow_experimental { + integrations.extend(vec![ + "isolation", + ]); + } + integrations } pub fn go_to_configuration_message(integration_name: &str) -> String { diff --git a/src/integrations/project_summary_chat.rs b/src/integrations/project_summary_chat.rs index 38d0f18a7..b9392712f 100644 --- a/src/integrations/project_summary_chat.rs +++ b/src/integrations/project_summary_chat.rs @@ -28,7 +28,8 @@ pub async fn mix_project_summary_messages( } }; - let available_integrations: Vec<&str> = crate::integrations::integrations_list(); + let allow_experimental = gcx.read().await.cmdline.experimental; + let available_integrations: Vec<&str> = crate::integrations::integrations_list(allow_experimental); let mut available_integrations_text = String::new(); for integration in available_integrations.iter() { available_integrations_text.push_str(&format!("- {}\n", integration)) diff --git a/src/integrations/running_integrations.rs b/src/integrations/running_integrations.rs index 3514f35c5..25f624727 100644 --- a/src/integrations/running_integrations.rs +++ b/src/integrations/running_integrations.rs @@ -11,9 +11,9 @@ use crate::integrations::integr_abstract::IntegrationTrait; pub async fn load_integration_tools( gcx: Arc>, _current_project: String, - _allow_experimental: bool, + allow_experimental: bool, ) -> IndexMap>>> { - let (integraions_map, _yaml_errors) = load_integrations(gcx.clone(), _current_project, _allow_experimental).await; + let (integraions_map, _yaml_errors) = load_integrations(gcx.clone(), _current_project, allow_experimental).await; let mut tools = IndexMap::new(); for (name, integr) in integraions_map { if 
integr.can_upgrade_to_tool() { @@ -26,14 +26,14 @@ pub async fn load_integration_tools( pub async fn load_integrations( gcx: Arc>, _current_project: String, - _allow_experimental: bool, + allow_experimental: bool, ) -> (IndexMap>, Vec) { // XXX filter _workspace_folders_arc that fit _current_project let (config_dirs, global_config_dir) = crate::integrations::setting_up_integrations::get_config_dirs(gcx.clone()).await; let integrations_yaml_path = crate::integrations::setting_up_integrations::get_integrations_yaml_path(gcx.clone()).await; let mut error_log: Vec = Vec::new(); - let lst: Vec<&str> = crate::integrations::integrations_list(); + let lst: Vec<&str> = crate::integrations::integrations_list(allow_experimental); let vars_for_replacements = crate::integrations::setting_up_integrations::get_vars_for_replacements(gcx.clone()).await; let records = crate::integrations::setting_up_integrations::read_integrations_d(&config_dirs, &global_config_dir, &integrations_yaml_path, &vars_for_replacements, &lst, &mut error_log); diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index f118cf1a4..67251797f 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -296,7 +296,8 @@ pub async fn integrations_all( gcx: Arc>, ) -> IntegrationResult { let (config_dirs, global_config_dir) = get_config_dirs(gcx.clone()).await; - let lst: Vec<&str> = crate::integrations::integrations_list(); + let allow_experimental = gcx.read().await.cmdline.experimental; + let lst: Vec<&str> = crate::integrations::integrations_list(allow_experimental); let mut error_log: Vec = Vec::new(); let integrations_yaml_path = get_integrations_yaml_path(gcx.clone()).await; let vars_for_replacements = get_vars_for_replacements(gcx.clone()).await; @@ -429,7 +430,7 @@ mod tests { #[tokio::test] async fn test_integration_schemas() { - let integrations = crate::integrations::integrations_list(); + let integrations = crate::integrations::integrations_list(true); for name in integrations { let integration_box = crate::integrations::integration_from_name(name).unwrap(); let schema_json = { From f556123baea3ada4b2c3edcbcca06a061a919f66 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Mon, 9 Dec 2024 17:07:31 +0100 Subject: [PATCH 113/185] feat: move serializers to utils and add a new serializer for optional numbers --- src/integrations/docker/integr_docker.rs | 11 +----- src/integrations/docker/integr_isolation.rs | 16 +-------- src/integrations/integr_cmdline.rs | 6 ++-- src/integrations/mod.rs | 1 + src/integrations/utils.rs | 40 +++++++++++++++++++++ 5 files changed, 46 insertions(+), 28 deletions(-) create mode 100644 src/integrations/utils.rs diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index 4fa46cd9c..fe7e87d9a 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -12,6 +12,7 @@ use crate::global_context::GlobalContext; use crate::integrations::integr_abstract::IntegrationTrait; use crate::tools::tools_description::Tool; use crate::integrations::docker::docker_ssh_tunnel_utils::{SshConfig, forward_remote_docker_if_needed}; +use crate::integrations::utils::{serialize_num_to_str, deserialize_str_to_num}; #[derive(Clone, Serialize, Deserialize, Default, Debug)] pub struct SettingsDocker { @@ -26,16 +27,6 @@ pub struct SettingsDocker { pub ssh_identity_file: String, } -pub fn serialize_num_to_str(num: &T, 
serializer: S) -> Result { - serializer.serialize_str(&num.to_string()) -} -pub fn deserialize_str_to_num<'de, T, D>(deserializer: D) -> Result -where - T: std::str::FromStr, T::Err: std::fmt::Display, D: serde::Deserializer<'de>, -{ - String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom) -} - impl SettingsDocker { pub fn get_ssh_config(&self) -> Option { if self.remote_docker { diff --git a/src/integrations/docker/integr_isolation.rs b/src/integrations/docker/integr_isolation.rs index 7e3e464c5..27491e5a7 100644 --- a/src/integrations/docker/integr_isolation.rs +++ b/src/integrations/docker/integr_isolation.rs @@ -1,7 +1,7 @@ use serde::{Serialize, Deserialize}; use serde_json::Value; -use crate::integrations::docker::integr_docker::{serialize_num_to_str, deserialize_str_to_num}; +use crate::integrations::utils::{serialize_num_to_str, deserialize_str_to_num, serialize_ports, deserialize_ports}; use crate::integrations::docker::docker_container_manager::Port; use crate::integrations::integr_abstract::IntegrationTrait; use crate::tools::tools_description::Tool; @@ -17,20 +17,6 @@ pub struct SettingsIsolation { pub keep_containers_alive_for_x_minutes: u64, } -fn serialize_ports(ports: &Vec, serializer: S) -> Result { - let ports_str = ports.iter().map(|port| format!("{}:{}", port.published, port.target)) - .collect::>().join(","); - serializer.serialize_str(&ports_str) -} -fn deserialize_ports<'de, D: serde::Deserializer<'de>>(deserializer: D) -> Result, D::Error> { - let ports_str = String::deserialize(deserializer)?; - ports_str.split(',').filter(|s| !s.is_empty()).map(|port_str| { - let (published, target) = port_str.split_once(':') - .ok_or_else(|| serde::de::Error::custom("expected format 'published:target'"))?; - Ok(Port { published: published.to_string(), target: target.to_string() }) - }).collect() -} - #[derive(Clone, Default, Debug)] pub struct IntegrationIsolation { pub settings_isolation: SettingsIsolation, diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 883a417bd..efe5f415e 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -13,7 +13,7 @@ use crate::tools::tools_description::{ToolParam, Tool, ToolDesc}; use crate::call_validation::{ChatMessage, ChatContent, ContextEnum}; use crate::postprocessing::pp_command_output::{CmdlineOutputFilter, output_mini_postprocessing}; use crate::integrations::integr_abstract::IntegrationTrait; - +use crate::integrations::utils::{serialize_num_to_str, deserialize_str_to_num, serialize_opt_num_to_str, deserialize_str_to_opt_num}; #[derive(Deserialize, Serialize, Clone, Default)] pub struct CmdlineToolConfig { @@ -31,9 +31,9 @@ pub struct CmdlineToolConfig { pub output_filter: CmdlineOutputFilter, // background - #[serde(default)] + #[serde(default, serialize_with = "serialize_opt_num_to_str", deserialize_with = "deserialize_str_to_opt_num")] pub startup_wait_port: Option, - #[serde(default = "_default_startup_wait")] + #[serde(default = "_default_startup_wait", serialize_with = "serialize_num_to_str", deserialize_with = "deserialize_str_to_num")] pub startup_wait: u64, #[serde(default)] pub startup_wait_keyword: Option, diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index bcf6dd3cf..17fed0430 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -25,6 +25,7 @@ pub mod project_summary_chat; pub mod yaml_schema; pub mod setting_up_integrations; pub mod running_integrations; +pub mod utils; use 
integr_abstract::IntegrationTrait; diff --git a/src/integrations/utils.rs b/src/integrations/utils.rs new file mode 100644 index 000000000..a04a19b0b --- /dev/null +++ b/src/integrations/utils.rs @@ -0,0 +1,40 @@ +use std::fmt::Display; + +use serde::{Deserialize, Serializer, Deserializer}; + +use crate::integrations::docker::docker_container_manager::Port; + +pub fn serialize_opt_num_to_str(value: &Option, serializer: S) -> Result { + serializer.serialize_str(&value.as_ref().map_or_else(String::new, |v| v.to_string())) +} +pub fn deserialize_str_to_opt_num<'de, T, D>(deserializer: D) -> Result, D::Error> +where + T: std::str::FromStr, T::Err: Display, D: Deserializer<'de>, +{ + Option::::deserialize(deserializer)?.filter(|s| !s.is_empty()) + .map_or(Ok(None), |s| s.parse::().map(Some).map_err(serde::de::Error::custom)) +} + +pub fn serialize_num_to_str(num: &T, serializer: S) -> Result { + serializer.serialize_str(&num.to_string()) +} +pub fn deserialize_str_to_num<'de, T, D>(deserializer: D) -> Result +where + T: std::str::FromStr, T::Err: Display, D: Deserializer<'de>, +{ + String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom) +} + +pub fn serialize_ports(ports: &Vec, serializer: S) -> Result { + let ports_str = ports.iter().map(|port| format!("{}:{}", port.published, port.target)) + .collect::>().join(","); + serializer.serialize_str(&ports_str) +} +pub fn deserialize_ports<'de, D: Deserializer<'de>>(deserializer: D) -> Result, D::Error> { + let ports_str = String::deserialize(deserializer)?; + ports_str.split(',').filter(|s| !s.is_empty()).map(|port_str| { + let (published, target) = port_str.split_once(':') + .ok_or_else(|| serde::de::Error::custom("expected format 'published:target'"))?; + Ok(Port { published: published.to_string(), target: target.to_string() }) + }).collect() +} \ No newline at end of file From 6b0e53c266b3cd4b8949f431828c7f40cc508ebe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Mon, 9 Dec 2024 17:24:10 +0100 Subject: [PATCH 114/185] fix: detailed description for parameters --- src/integrations/integr_cmdline.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index efe5f415e..5686ef211 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -254,7 +254,7 @@ fields: f_desc: "The model will see this description, why the model should call this?" parameters: f_type: "tool_parameters" - f_desc: "The model will fill in those parameters." + f_desc: "Enter a JSON array of parameters. Each parameter is a JSON object with a name, description (to guide the model), and an optional type (e.g. boolean, default: string). The model will fill these parameters to call the command." timeout: f_type: string_short f_desc: "The command must immediately return the results, it can't be interactive. If the command runs for too long, it will be terminated and stderr/stdout collected will be presented to the model." 
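
To illustrate how the serializer helpers moved into src/integrations/utils.rs in the commit above are meant to be wired up, here is a minimal standalone sketch (assuming only serde and serde_json as dependencies; `ExampleTimeouts` and the round-trip in `main` are illustrative and not part of the patch, and the helper bodies are simplified copies of the ones in utils.rs):

```rust
use std::fmt::Display;
use serde::{Deserialize, Deserializer, Serialize, Serializer};

// Simplified copies of the helpers in src/integrations/utils.rs.
fn serialize_num_to_str<T: Display, S: Serializer>(num: &T, serializer: S) -> Result<S::Ok, S::Error> {
    serializer.serialize_str(&num.to_string())
}

fn deserialize_str_to_num<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
    T: std::str::FromStr,
    T::Err: Display,
    D: Deserializer<'de>,
{
    String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom)
}

#[derive(Serialize, Deserialize, Debug)]
struct ExampleTimeouts {
    // Appears as the string "10" in the config file, but is a u64 in Rust.
    #[serde(serialize_with = "serialize_num_to_str", deserialize_with = "deserialize_str_to_num")]
    startup_wait: u64,
}

fn main() {
    let t: ExampleTimeouts = serde_json::from_str(r#"{"startup_wait": "10"}"#).unwrap();
    assert_eq!(t.startup_wait, 10);
    assert_eq!(serde_json::to_string(&t).unwrap(), r#"{"startup_wait":"10"}"#);
}
```

This mirrors how `startup_wait` and `startup_wait_port` are annotated in `CmdlineToolConfig`, keeping numeric settings as strings on the YAML side while staying numeric in Rust.
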
From 7e9acb221e11b4c176ab52530dc4f0fce5c2af21 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Mon, 9 Dec 2024 13:52:13 +0100 Subject: [PATCH 115/185] fix: to pathbuf normalize did not canonicalize unix paths --- src/files_correction.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/files_correction.rs b/src/files_correction.rs index ddb4be2a6..15e76d3d3 100644 --- a/src/files_correction.rs +++ b/src/files_correction.rs @@ -143,7 +143,7 @@ pub fn to_pathbuf_normalize(path: &String) -> PathBuf { if cfg!(target_os = "windows") { PathBuf::from(winpath_normalize(path)) } else { - PathBuf::from(path) + PathBuf::from(canonical_path(path)) } } From 300020165985607836eb68f6a3ea4ce4757252f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Mon, 9 Dec 2024 14:48:06 +0100 Subject: [PATCH 116/185] fix: make search tool search for dirs if search for files fails --- src/at_commands/at_file.rs | 3 ++- src/tools/tool_search.rs | 37 +++++++++++++++++++++---------------- 2 files changed, 23 insertions(+), 17 deletions(-) diff --git a/src/at_commands/at_file.rs b/src/at_commands/at_file.rs index 0201438f7..98a196551 100644 --- a/src/at_commands/at_file.rs +++ b/src/at_commands/at_file.rs @@ -154,7 +154,8 @@ pub async fn return_one_candidate_or_a_good_error( } } if f_path.is_relative() { - let projpath_options = project_paths.iter().map(|x|x.join(&f_path)).filter(|x|x.is_file()).collect::>(); + let projpath_options = project_paths.iter().map(|x| x.join(&f_path)) + .filter(|x| if dirs { x.is_dir() } else { x.is_file() }).collect::>(); if projpath_options.len() > 1 { let projpath_options_str = projpath_options.iter().map(|x|x.to_string_lossy().to_string()).collect::>().join("\n"); return Err(format!("The path {:?} is ambiguous. Adding project path, it might be:\n{:?}\nAlso, there are similar filepaths:\n{}", f_path, projpath_options_str, similar_paths_str)); diff --git a/src/tools/tool_search.rs b/src/tools/tool_search.rs index 30a681bd9..803db0b30 100644 --- a/src/tools/tool_search.rs +++ b/src/tools/tool_search.rs @@ -24,36 +24,41 @@ async fn execute_att_search( ) -> Result, String> { let gcx = ccx.lock().await.global_context.clone(); if scope == "workspace" { - return Ok(execute_at_search(ccx.clone(), &query, None).await?) + return execute_at_search(ccx.clone(), &query, None).await } + let scope_is_dir = scope.ends_with('/') || scope.ends_with('\\'); - // XXX doesn't work for dirs - // "/Users/user/code/refact-lsp" does not exist. There are paths with similar names however:" - // maybe use this: - // let dir_candidates = correct_to_nearest_dir_path(gcx.clone(), &path, false, 10).await; - - let filter = if scope.ends_with('/') { - let dir = return_one_candidate_or_a_good_error( + let filter = if scope_is_dir { + return_one_candidate_or_a_good_error( gcx.clone(), scope, &correct_to_nearest_dir_path(gcx.clone(), scope, false, 10).await, &get_project_dirs(gcx.clone()).await, - true - ).await?; - format!("(scope LIKE '{}%')", dir) + true, + ).await.map(|dir| format!("(scope LIKE '{}%')", dir))? 
} else { - let file_path = return_one_candidate_or_a_good_error( + match return_one_candidate_or_a_good_error( gcx.clone(), scope, &file_repair_candidates(gcx.clone(), scope, 10, false).await, &get_project_dirs(gcx.clone()).await, - false - ).await?; - format!("(scope = \"{}\")", file_path) + false, + ).await { + Ok(file) => format!("(scope = \"{}\")", file), + Err(file_err) => { + return_one_candidate_or_a_good_error( + gcx.clone(), + scope, + &correct_to_nearest_dir_path(gcx.clone(), scope, false, 10).await, + &get_project_dirs(gcx.clone()).await, + true, + ).await.map(|dir| format!("(scope LIKE '{}%')", dir)).map_err(|_| file_err)? + }, + } }; info!("att-search: filter: {:?}", filter); - Ok(execute_at_search(ccx.clone(), &query, Some(filter)).await?) + execute_at_search(ccx.clone(), &query, Some(filter)).await } #[async_trait] From 86292e912a111b477ec7e76136f94e0da0b23923 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Tue, 10 Dec 2024 06:28:19 +0100 Subject: [PATCH 117/185] fix schema test --- src/integrations/docker/integr_docker.rs | 1 - src/integrations/docker/integr_isolation.rs | 3 +-- src/integrations/integr_chrome.rs | 6 +----- src/integrations/integr_cmdline_service.rs | 1 - src/integrations/setting_up_integrations.rs | 1 + 5 files changed, 3 insertions(+), 9 deletions(-) diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index fe7e87d9a..71e7bc46f 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -268,7 +268,6 @@ fields: docker_daemon_address: f_type: string_long f_desc: "The address to connect to the Docker daemon; specify only if not using the default." - f_default: "" f_extra: true remote_docker: f_type: bool diff --git a/src/integrations/docker/integr_isolation.rs b/src/integrations/docker/integr_isolation.rs index 27491e5a7..d0715bd7f 100644 --- a/src/integrations/docker/integr_isolation.rs +++ b/src/integrations/docker/integr_isolation.rs @@ -81,5 +81,4 @@ fields: available: on_your_laptop_possible: true when_isolated_possible: false -smartlinks: [] -"#; \ No newline at end of file +"#; diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index e01a38430..04add2a80 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -314,7 +314,7 @@ async fn setup_chrome_session( headless: args.headless.parse::().unwrap_or(true), ..Default::default() }; - + setup_log.push("Started new chrome process.".to_string()); Browser::new(launch_options).map_err(|e| e.to_string()) }?; @@ -983,21 +983,17 @@ fields: chrome_path: f_type: string_long f_desc: "Path to Google Chrome or Chromium binary. If empty, it searches for Google Chrome in your system" - f_placeholder: "" window_width: f_type: string_short f_desc: "Width of the browser window." - f_default: "" f_extra: true window_height: f_type: string_short f_desc: "Height of the browser window." - f_default: "" f_extra: true idle_browser_timeout: f_type: string_short f_desc: "Idle timeout for the browser in seconds." - f_default: "" f_extra: true headless: f_type: string_short diff --git a/src/integrations/integr_cmdline_service.rs b/src/integrations/integr_cmdline_service.rs index c2dece2f2..660d89e8b 100644 --- a/src/integrations/integr_cmdline_service.rs +++ b/src/integrations/integr_cmdline_service.rs @@ -334,7 +334,6 @@ fields: description: f_type: string_long f_desc: "The model will see this description, why the model should call this?" 
- f_placeholder: "" parameters: f_type: "tool_parameters" f_desc: "The model will fill in those parameters." diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 67251797f..23141e0f0 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -420,6 +420,7 @@ pub async fn integration_config_save( Ok(()) } + #[cfg(test)] mod tests { // use super::*; From dc1d0819e5c93d465f37ffd78d2ad6f318f7440d Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Tue, 10 Dec 2024 07:20:54 +0100 Subject: [PATCH 118/185] warnings --- src/scratchpad_abstract.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/scratchpad_abstract.rs b/src/scratchpad_abstract.rs index 446afff34..1ec48da77 100644 --- a/src/scratchpad_abstract.rs +++ b/src/scratchpad_abstract.rs @@ -98,8 +98,8 @@ pub trait ScratchpadAbstract: Send { fn response_message_n_choices( &mut self, - choices: Vec, // XXX replace with Value - finish_reasons: Vec, + _choices: Vec, // XXX replace with Value + _finish_reasons: Vec, ) -> Result { Err("not implemented".to_string()) } From d889d5aef88262154218ed5b6366df633d69c591 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Tue, 10 Dec 2024 08:12:42 +0100 Subject: [PATCH 119/185] Fix 534 --- src/integrations/integr_cmdline.rs | 24 +++++++++++++++++++--- src/integrations/integr_cmdline_service.rs | 3 ++- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 5686ef211..d761dd487 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -1,3 +1,4 @@ +use std::path::PathBuf; use std::collections::HashMap; use std::sync::Arc; use std::process::Stdio; @@ -15,6 +16,7 @@ use crate::postprocessing::pp_command_output::{CmdlineOutputFilter, output_mini_ use crate::integrations::integr_abstract::IntegrationTrait; use crate::integrations::utils::{serialize_num_to_str, deserialize_str_to_num, serialize_opt_num_to_str, deserialize_str_to_opt_num}; + #[derive(Deserialize, Serialize, Clone, Default)] pub struct CmdlineToolConfig { pub command: String, @@ -102,6 +104,9 @@ pub fn format_output(stdout_out: &str, stderr_out: &str) -> String { if !stderr_out.is_empty() { out.push_str(&format!("STDERR\n```\n{}```\n\n", stderr_out)); } + if stdout_out.is_empty() && stderr_out.is_empty() { + out.push_str(&format!("Nothing in STDOUT/STDERR\n\n")); + } } out } @@ -110,6 +115,7 @@ pub fn create_command_from_string( cmd_string: &str, command_workdir: &String, env_variables: &HashMap, + project_dirs: Vec, ) -> Result { let command_args = shell_words::split(cmd_string) .map_err(|e| format!("Failed to parse command: {}", e))?; @@ -120,7 +126,17 @@ pub fn create_command_from_string( if command_args.len() > 1 { cmd.args(&command_args[1..]); } - cmd.current_dir(command_workdir); + + if command_workdir.is_empty() { + if let Some(first_project_dir) = project_dirs.first() { + cmd.current_dir(first_project_dir); + } else { + tracing::warn!("no working directory, using whatever directory this binary is run :/"); + } + } else { + cmd.current_dir(command_workdir); + } + for (key, value) in env_variables { cmd.env(key, value); } @@ -132,11 +148,12 @@ pub async fn execute_blocking_command( cfg: &CmdlineToolConfig, command_workdir: &String, env_variables: &HashMap, + project_dirs: Vec, ) -> Result { info!("EXEC workdir {}:\n{:?}", command_workdir, command); let command_future = async { - let mut cmd = 
create_command_from_string(command, command_workdir, env_variables)?; + let mut cmd = create_command_from_string(command, command_workdir, env_variables, project_dirs)?; let t0 = tokio::time::Instant::now(); let result = cmd .stdout(Stdio::piped()) @@ -206,8 +223,9 @@ impl Tool for ToolCmdline { let command = replace_args(self.cfg.command.as_str(), &args_str); let workdir = replace_args(self.cfg.command_workdir.as_str(), &args_str); let env_variables = crate::integrations::setting_up_integrations::get_vars_for_replacements(gcx.clone()).await; + let project_dirs = crate::files_correction::get_project_dirs(gcx.clone()).await; - let tool_ouput = execute_blocking_command(&command, &self.cfg, &workdir, &env_variables).await?; + let tool_ouput = execute_blocking_command(&command, &self.cfg, &workdir, &env_variables, project_dirs).await?; let result = vec![ContextEnum::ChatMessage(ChatMessage { role: "tool".to_string(), diff --git a/src/integrations/integr_cmdline_service.rs b/src/integrations/integr_cmdline_service.rs index 660d89e8b..11c9287c1 100644 --- a/src/integrations/integr_cmdline_service.rs +++ b/src/integrations/integr_cmdline_service.rs @@ -155,8 +155,9 @@ async fn execute_background_command( } tracing::info!("SERVICE START workdir {}:\n{:?}", cmdline_workdir, command_str); actions_log.push_str(&format!("Starting service with the following command line:\n{}\n", command_str)); + let project_dirs = crate::files_correction::get_project_dirs(gcx.clone()).await; - let mut command = create_command_from_string(&command_str, cmdline_workdir, env_variables)?; + let mut command = create_command_from_string(&command_str, cmdline_workdir, env_variables, project_dirs)?; command.stdout(Stdio::piped()); command.stderr(Stdio::piped()); let mut command_wrap = TokioCommandWrap::from(command); From 2ddbf7523d064d5b84dcf35c5c164dfbce2ce98f Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Tue, 10 Dec 2024 08:36:09 +0100 Subject: [PATCH 120/185] fix 'parameters' description --- src/integrations/integr_cmdline.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index d761dd487..f770cecfd 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -272,7 +272,7 @@ fields: f_desc: "The model will see this description, why the model should call this?" parameters: f_type: "tool_parameters" - f_desc: "Enter a JSON array of parameters. Each parameter is a JSON object with a name, description (to guide the model), and an optional type (e.g. boolean, default: string). The model will fill these parameters to call the command." + f_desc: "Enter a JSON array of parameters. Each parameter is a JSON object with a name, description (to guide the model). The model will fill these parameters to call the command." timeout: f_type: string_short f_desc: "The command must immediately return the results, it can't be interactive. If the command runs for too long, it will be terminated and stderr/stdout collected will be presented to the model." 
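
The working-directory fallback introduced in the "Fix 534" commit above can be summarized with the following standalone sketch (assumption: `pick_workdir` is an illustrative helper name, the real logic lives inline in `create_command_from_string`):

```rust
use std::path::PathBuf;

// If the integration config leaves command_workdir empty, fall back to the first
// project directory; otherwise use the configured path verbatim.
fn pick_workdir(command_workdir: &str, project_dirs: &[PathBuf]) -> Option<PathBuf> {
    if command_workdir.is_empty() {
        project_dirs.first().cloned()
    } else {
        Some(PathBuf::from(command_workdir))
    }
}

fn main() {
    let dirs = vec![PathBuf::from("/home/user/project")];
    assert_eq!(pick_workdir("", &dirs), Some(PathBuf::from("/home/user/project")));
    assert_eq!(pick_workdir("/tmp/build", &dirs), Some(PathBuf::from("/tmp/build")));
    // With no project directories at all, the command keeps the process's own cwd
    // (the patch only logs a warning in that case).
    assert_eq!(pick_workdir("", &[]), None);
}
```

The same commit also makes `format_output` report "Nothing in STDOUT/STDERR" when a command produces no output, so the model gets an explicit signal instead of an empty result.
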
From 33e02857d840c1dc05097a50c6d519ad7f6e7828 Mon Sep 17 00:00:00 2001 From: Dimitry Ageev Date: Tue, 10 Dec 2024 15:27:49 +0100 Subject: [PATCH 121/185] Chrome improvements 09 12 24 (#473) * add filtering for styles command * scroll_to element command * html for given element, no filter yet * 3000 symbols budget for html output * remote object formatting for eval command * new settings for chrome: add scale factor; similar for mobile and new device type tablet * remove unneded comments --- src/integrations/integr_chrome.rs | 359 ++++++++++++++++++++++++------ 1 file changed, 293 insertions(+), 66 deletions(-) diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 04add2a80..b5110b50f 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -19,9 +19,8 @@ use crate::integrations::integr_abstract::IntegrationTrait; use tokio::time::sleep; use chrono::DateTime; -use reqwest::Client; use std::path::PathBuf; -use headless_chrome::{Browser, LaunchOptions, Tab as HeadlessTab}; +use headless_chrome::{Browser, Element, LaunchOptions, Tab as HeadlessTab}; use headless_chrome::browser::tab::point::Point; use headless_chrome::protocol::cdp::Page; use headless_chrome::protocol::cdp::Emulation; @@ -32,6 +31,7 @@ use serde::{Deserialize, Serialize}; use base64::Engine; use std::io::Cursor; +use headless_chrome::protocol::cdp::Runtime::RemoteObject; use image::imageops::FilterType; use image::{ImageFormat, ImageReader}; @@ -39,14 +39,31 @@ use image::{ImageFormat, ImageReader}; #[derive(Clone, Serialize, Deserialize, Debug, Default)] pub struct SettingsChrome { pub chrome_path: String, - #[serde(default )] + #[serde(default)] + pub idle_browser_timeout: String, + #[serde(default)] + pub headless: String, + // desktop + #[serde(default)] pub window_width: String, #[serde(default)] pub window_height: String, #[serde(default)] - pub idle_browser_timeout: String, + pub scale_factor: String, #[serde(default)] - pub headless: String, + // mobile + pub mobile_window_width: String, + #[serde(default)] + pub mobile_window_height: String, + #[serde(default)] + pub mobile_scale_factor: String, + // tablet + #[serde(default)] + pub tablet_window_width: String, + #[serde(default)] + pub tablet_window_height: String, + #[serde(default)] + pub tablet_scale_factor: String, } #[derive(Debug, Default)] @@ -59,6 +76,7 @@ pub struct ToolChrome { enum DeviceType { DESKTOP, MOBILE, + TABLET, } impl std::fmt::Display for DeviceType { @@ -66,6 +84,7 @@ impl std::fmt::Display for DeviceType { match self { DeviceType::DESKTOP => write!(f, "desktop"), DeviceType::MOBILE => write!(f, "mobile"), + DeviceType::TABLET => write!(f, "tablet"), } } } @@ -196,7 +215,7 @@ impl Tool for ToolChrome { break } }; - match chrome_command_exec(&parsed_command, command_session.clone()).await { + match chrome_command_exec(&parsed_command, command_session.clone(), &self.settings_chrome).await { Ok((execute_log, command_multimodal_els)) => { tool_log.extend(execute_log); mutlimodal_els.extend(command_multimodal_els); @@ -227,16 +246,17 @@ impl Tool for ToolChrome { fn tool_description(&self) -> ToolDesc { let mut supported_commands = vec![ - "open_tab ", + "open_tab ", "navigate_to ", + "scroll_to ", "screenshot ", - // "html ", + "html ", "reload ", "press_key_at ", "type_text_at ", "tab_log ", "eval ", - "styles ", + "styles ", "click_at_element ", ]; if self.supports_clicks { @@ -301,8 +321,6 @@ async fn setup_chrome_session( setup_log.push("Connect to existing web 
socket.".to_string()); Browser::connect_with_timeout(debug_ws_url, idle_browser_timeout).map_err(|e| e.to_string()) } else { - - // let path = PathBuf::from(args.chrome_path.clone()); let mut path: Option = None; if !args.chrome_path.is_empty() { path = Some(PathBuf::from(args.chrome_path.clone())); @@ -314,7 +332,7 @@ async fn setup_chrome_session( headless: args.headless.parse::().unwrap_or(true), ..Default::default() }; - + setup_log.push("Started new chrome process.".to_string()); Browser::new(launch_options).map_err(|e| e.to_string()) }?; @@ -366,10 +384,118 @@ async fn screenshot_jpeg_base64( MultimodalElement::new("image/jpeg".to_string(), base64::prelude::BASE64_STANDARD.encode(data)) } +fn get_inner_html( + element: &Element, +) -> Result { + let func = r" + function() { + function wrap_html(text, depth) { + return ' '.repeat(depth) + text + '\n'; + } + + function budget_html(el, max_depth, symbols_budget) { + let innerHtml = ''; + let elements = [el] + for (let depth = 0; depth < max_depth; depth++) { + let expanded_html = ''; + let expanded_elements = []; + elements.forEach(el => { + if (typeof el === 'string') { + expanded_html += el; + expanded_elements.push(el); + } else { + if (el.innerHTML.length > 0) { + let tagHtml = el.outerHTML.split(el.innerHTML); + const tag_open = wrap_html(tagHtml[0], depth); + expanded_html += tag_open; + expanded_elements.push(tag_open); + const children = Array.from(el.children); + if (children.length > 0) { + expanded_html += wrap_html('...', depth + 1) + Array.from(el.children).forEach(child => { + expanded_elements.push(child); + }); + } else if (el.innerText.length > 0) { + const tag_text = wrap_html(el.innerText, depth + 1); + expanded_html += tag_text; + expanded_elements.push(tag_text); + } + if (tagHtml.length > 1) { + const tag_close = wrap_html(tagHtml[1], depth); + expanded_html += tag_close + expanded_elements.push(tag_close); + } + } else { + const tag = wrap_html(el.outerHTML, depth); + expanded_html += tag; + expanded_elements.push(tag); + } + } + }); + if (expanded_html.length > symbols_budget) { + break; + } + if (expanded_html.length === innerHtml.length) { + break; + } + innerHtml = expanded_html; + elements = expanded_elements; + } + return innerHtml; + } + return budget_html(this, 100, 3000); + }"; + let result = element.call_js_fn(func, vec![], false).map_err(|e| e.to_string())?; + Ok(result.value.unwrap().to_string()) +} + +fn format_remote_object( + remote_object: &RemoteObject, +) -> String { + let mut result = vec![]; + if let Some(subtype) = remote_object.subtype.clone() { + result.push(format!("subtype {:?}", subtype)); + } + if let Some(class_name) = remote_object.class_name.clone() { + result.push(format!("class_name {:?}", class_name)); + } + if let Some(value) = remote_object.value.clone() { + result.push(format!("value {:?}", value)); + } + if let Some(unserializable_value) = remote_object.unserializable_value.clone() { + result.push(format!("unserializable_value {:?}", unserializable_value)); + } + if let Some(description) = remote_object.description.clone() { + result.push(format!("description {:?}", description)); + } + if let Some(preview) = remote_object.preview.clone() { + result.push(format!("preview {:?}", preview)); + } + if let Some(custom_preview) = remote_object.custom_preview.clone() { + result.push(format!("custom_preview {:?}", custom_preview)); + } + format!("result: {}", result.join(", ")) +} + +fn set_device_metrics_method( + width: u32, + height: u32, + device_scale_factor: f64, + mobile: bool, 
+) -> Emulation::SetDeviceMetricsOverride { + Emulation::SetDeviceMetricsOverride { + width, height, device_scale_factor, mobile, + scale: None, screen_width: None, screen_height: None, + position_x: None, position_y: None, dont_set_visible_size: None, + screen_orientation: None, viewport: None, display_feature: None, + } +} + async fn session_open_tab( chrome_session: &mut ChromeSession, tab_id: &String, device: &DeviceType, + settings_chrome: &SettingsChrome, ) -> Result { match chrome_session.tabs.get(tab_id) { Some(tab) => { @@ -378,28 +504,42 @@ async fn session_open_tab( }, None => { let headless_tab = chrome_session.browser.new_tab().map_err(|e| e.to_string())?; - match device { + let method = match device { + DeviceType::DESKTOP => { + let (width, height) = match (settings_chrome.window_width.parse::(), settings_chrome.window_height.parse::()) { + (Ok(width), Ok(height)) => (width, height), + _ => (800, 600), + }; + let scale_factor = match settings_chrome.scale_factor.parse::() { + Ok(scale_factor) => scale_factor, + _ => 0.0, + }; + set_device_metrics_method(width, height, scale_factor, false) + }, DeviceType::MOBILE => { - headless_tab.call_method(Emulation::SetDeviceMetricsOverride { - width: 375, - height: 812, - device_scale_factor: 0.0, - mobile: true, - scale: None, - screen_width: None, - screen_height: None, - position_x: None, - position_y: None, - dont_set_visible_size: None, - screen_orientation: None, - viewport: None, - display_feature: None, - }).map_err(|e| e.to_string())?; + let (width, height) = match (settings_chrome.mobile_window_width.parse::(), settings_chrome.mobile_window_height.parse::()) { + (Ok(width), Ok(height)) => (width, height), + _ => (400, 800), + }; + let scale_factor = match settings_chrome.mobile_scale_factor.parse::() { + Ok(scale_factor) => scale_factor, + _ => 0.0, + }; + set_device_metrics_method(width, height, scale_factor, true) }, - DeviceType::DESKTOP => { - headless_tab.call_method(Emulation::ClearDeviceMetricsOverride(None)).map_err(|e| e.to_string())?; - } - } + DeviceType::TABLET => { + let (width, height) = match (settings_chrome.tablet_window_width.parse::(), settings_chrome.tablet_window_height.parse::()) { + (Ok(width), Ok(height)) => (width, height), + _ => (600, 800), + }; + let scale_factor = match settings_chrome.tablet_scale_factor.parse::() { + Ok(scale_factor) => scale_factor, + _ => 0.0, + }; + set_device_metrics_method(width, height, scale_factor, true) + }, + }; + headless_tab.call_method(method).map_err(|e| e.to_string())?; let tab = Arc::new(AMutex::new(ChromeTab::new(headless_tab, device, tab_id))); let tab_lock = tab.lock().await; let tab_log = Arc::clone(&tab_lock.tab_log); @@ -437,8 +577,9 @@ async fn session_get_tab_arc( enum Command { OpenTab(OpenTabArgs), NavigateTo(NavigateToArgs), + ScrollTo(TabElementArgs), Screenshot(TabArgs), - Html(TabArgs), + Html(TabElementArgs), Reload(TabArgs), ClickAtPoint(ClickAtPointArgs), ClickAtElement(TabElementArgs), @@ -446,12 +587,13 @@ enum Command { PressKeyAt(PressKeyAtArgs), TabLog(TabArgs), Eval(EvalArgs), - Styles(TabElementArgs), + Styles(StylesArgs), } async fn chrome_command_exec( cmd: &Command, chrome_session: Arc>>, + settings_chrome: &SettingsChrome, ) -> Result<(Vec, Vec), String> { let mut tool_log = vec![]; let mut multimodal_els = vec![]; @@ -461,7 +603,7 @@ async fn chrome_command_exec( let log = { let mut chrome_session_locked = chrome_session.lock().await; let chrome_session = chrome_session_locked.as_any_mut().downcast_mut::().ok_or("Failed to downcast to 
ChromeSession")?; - session_open_tab(chrome_session, &args.tab_id, &args.device).await? + session_open_tab(chrome_session, &args.tab_id, &args.device, &settings_chrome).await? }; tool_log.push(log); }, @@ -488,6 +630,29 @@ async fn chrome_command_exec( }; tool_log.push(log); }, + Command::ScrollTo(args) => { + let tab: Arc> = { + let mut chrome_session_locked = chrome_session.lock().await; + let chrome_session = chrome_session_locked.as_any_mut().downcast_mut::().ok_or("Failed to downcast to ChromeSession")?; + session_get_tab_arc(chrome_session, &args.tab_id).await? + }; + let log = { + let tab_lock = tab.lock().await; + match { + let element = tab_lock.headless_tab.find_element(&args.selector).map_err(|e| e.to_string())?; + element.scroll_into_view().map_err(|e| e.to_string())?; + Ok::<(), String>(()) + } { + Ok(_) => { + format!("scroll_to `{}` successful: {}.", args.selector, tab_lock.state_string()) + }, + Err(e) => { + format!("scroll_to `{}` failed: {}.", args.selector, e.to_string()) + }, + } + }; + tool_log.push(log); + }, Command::Screenshot(args) => { let tab = { let mut chrome_session_locked = chrome_session.lock().await; @@ -511,7 +676,6 @@ async fn chrome_command_exec( tool_log.push(log); }, Command::Html(args) => { - // NOTE: removed from commands list, please rewrite me... let tab = { let mut chrome_session_locked = chrome_session.lock().await; let chrome_session = chrome_session_locked.as_any_mut().downcast_mut::().ok_or("Failed to downcast to ChromeSession")?; @@ -519,24 +683,25 @@ async fn chrome_command_exec( }; let log = { let tab_lock = tab.lock().await; - let url = tab_lock.headless_tab.get_url(); match { - let client = Client::builder() - .build() - .map_err(|e| e.to_string())?; - let response = client.get(url.clone()).send().await.map_err(|e| e.to_string())?; - if response.status().is_success() { - let html = response.text().await.map_err(|e| e.to_string())?; - Ok(html) + let elements = tab_lock.headless_tab.find_elements(&args.selector).map_err(|e| e.to_string())?; + if elements.len() == 0 { + Err("No elements found".to_string()) } else { - Err(format!("status: {}", response.status())) + let mut elements_log = vec![]; + let first_element = elements.first().unwrap(); + elements_log.push(get_inner_html(first_element)?); + if elements.len() > 2 { + elements_log.push(format!("\n\nShown html for first of {} elements", elements.len())); + } + Ok::(elements_log.join("\n")) } } { Ok(html) => { - format!("innerHtml of {}:\n\n{}", tab_lock.state_string(), html) + format!("html of `{}`:\n\n{}", args.selector, html) }, Err(e) => { - format!("can't fetch innerHtml of {}: {}", tab_lock.state_string(), e.to_string()) + format!("can't fetch html of `{}`: {}", args.selector, e.to_string()) }, } }; @@ -684,8 +849,8 @@ async fn chrome_command_exec( let log = { let tab_lock = tab.lock().await; match tab_lock.headless_tab.evaluate(args.expression.as_str(), false) { - Ok(result) => { - format!("eval result at {}: {:?}", tab_lock.state_string(), result) + Ok(remote_object) => { + format_remote_object(&remote_object) }, Err(e) => { format!("eval failed at {}: {}", tab_lock.state_string(), e.to_string()) @@ -707,15 +872,26 @@ async fn chrome_command_exec( tab_lock.headless_tab.call_method(CSSEnable(None)).map_err(|e| e.to_string())?; let element = tab_lock.headless_tab.find_element(&args.selector).map_err(|e| e.to_string())?; let computed_styles = element.get_computed_styles().map_err(|e| e.to_string())?; - Ok::(computed_styles.iter() + let mut styles_filtered = computed_styles.iter() + 
.filter(|s| s.name.contains(args.property_filter.as_str())) .map(|s| format!("{}: {}", s.name, s.value)) - .collect::>().join("\n")) + .collect::>(); + let max_lines_output = 30; + if styles_filtered.len() > max_lines_output { + let skipped_message = format!("Skipped {} properties. Specify filter if you need to see more.", styles_filtered.len() - max_lines_output); + styles_filtered = styles_filtered[..max_lines_output].to_vec(); + styles_filtered.push(skipped_message) + } + if styles_filtered.is_empty() { + styles_filtered.push("No properties for given filter.".to_string()); + } + Ok::(styles_filtered.join("\n")) } { Ok(styles_str) => { - format!("styles for element `{}` at {}:\n{}", args.selector, tab_lock.state_string(), styles_str) + format!("Style properties for element `{}` at {}:\n{}", args.selector, tab_lock.state_string(), styles_str) }, Err(e) => { - format!("styles get failed at {}: {}", tab_lock.state_string(), e.to_string()) + format!("Styles get failed at {}: {}", tab_lock.state_string(), e.to_string()) }, } }; @@ -796,6 +972,13 @@ struct TabElementArgs { selector: String, } +#[derive(Debug)] +struct StylesArgs { + tab_id: String, + selector: String, + property_filter: String, +} + fn parse_single_command(command: &String) -> Result { let args = shell_words::split(&command).map_err(|e| e.to_string())?; if args.is_empty() { @@ -811,7 +994,8 @@ fn parse_single_command(command: &String) -> Result { let device = match device_str.as_str() { "desktop" => DeviceType::DESKTOP, "mobile" => DeviceType::MOBILE, - _ => return Err(format!("unknown device type: {}. Should be either `desktop` or `mobile`.", parsed_args[0])) + "tablet" => DeviceType::TABLET, + _ => return Err(format!("unknown device type: {}. Should be `desktop`, `mobile` or `tablet`.", parsed_args[0])) }; Ok(Command::OpenTab(OpenTabArgs { device: device.clone(), @@ -819,7 +1003,7 @@ fn parse_single_command(command: &String) -> Result { })) }, _ => { - Err("Missing one or several arguments `tab_id`, ``".to_string()) + Err("Missing one or several arguments `tab_id`, ``".to_string()) } } }, @@ -836,6 +1020,19 @@ fn parse_single_command(command: &String) -> Result { } } }, + "scroll_to" => { + match parsed_args.as_slice() { + [tab_id, selector] => { + Ok(Command::ScrollTo(TabElementArgs { + selector: selector.clone(), + tab_id: tab_id.clone(), + })) + }, + _ => { + Err("Missing one or several arguments `tab_id`, `selector`".to_string()) + } + } + }, "screenshot" => { match parsed_args.as_slice() { [tab_id] => { @@ -850,13 +1047,14 @@ fn parse_single_command(command: &String) -> Result { }, "html" => { match parsed_args.as_slice() { - [tab_id] => { - Ok(Command::Html(TabArgs { + [tab_id, selector] => { + Ok(Command::Html(TabElementArgs { + selector: selector.clone(), tab_id: tab_id.clone(), })) }, _ => { - Err("Missing one or several arguments `tab_id`".to_string()) + Err("Missing one or several arguments `tab_id`, `selector`".to_string()) } } }, @@ -963,10 +1161,11 @@ fn parse_single_command(command: &String) -> Result { }, "styles" => { match parsed_args.as_slice() { - [tab_id, selector] => { - Ok(Command::Styles(TabElementArgs { + [tab_id, selector, property_filter] => { + Ok(Command::Styles(StylesArgs { selector: selector.clone(), tab_id: tab_id.clone(), + property_filter: property_filter.clone(), })) }, _ => { @@ -983,6 +1182,15 @@ fields: chrome_path: f_type: string_long f_desc: "Path to Google Chrome or Chromium binary. 
If empty, it searches for Google Chrome in your system" + idle_browser_timeout: + f_type: string_short + f_desc: "Idle timeout for the browser in seconds." + f_extra: true + headless: + f_type: string_short + f_desc: "Run Chrome in headless mode." + f_default: "true" + f_extra: true window_width: f_type: string_short f_desc: "Width of the browser window." @@ -991,14 +1199,33 @@ fields: f_type: string_short f_desc: "Height of the browser window." f_extra: true - idle_browser_timeout: + window_scale: f_type: string_short - f_desc: "Idle timeout for the browser in seconds." + f_desc: "Scale factor of the browser window." f_extra: true - headless: + mobile_window_width: f_type: string_short - f_desc: "Run Chrome in headless mode." - f_default: "true" + f_desc: "Width of the browser window in mobile mode." + f_extra: true + mobile_window_height: + f_type: string_short + f_desc: "Height of the browser window in mobile mode." + f_extra: true + mobile_window_scale: + f_type: string_short + f_desc: "Scale factor of the browser window in mobile mode." + f_extra: true + tablet_window_width: + f_type: string_short + f_desc: "Width of the browser window in tablet mode." + f_extra: true + tablet_window_height: + f_type: string_short + f_desc: "Height of the browser window in tablet mode." + f_extra: true + tablet_window_scale: + f_type: string_short + f_desc: "Scale factor of the browser window in tablet mode." f_extra: true available: on_your_laptop_possible: true @@ -1037,4 +1264,4 @@ docker: - role: "user" content: | šŸ”§ Your job is to modify chrome config in the current file to connect through websockets to the container, use docker tool to inspect the container if needed. Current config file: %CURRENT_CONFIG%. -"#; \ No newline at end of file +"#; From 006562edcdaffe53c121203f62e6a67c226f07d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Mon, 9 Dec 2024 17:59:28 +0100 Subject: [PATCH 122/185] feat: add return button --- src/http/routers/v1/links.rs | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 1308240f0..2533cc8a1 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -115,14 +115,24 @@ pub async fn handle_v1_links( } } - if post.meta.chat_mode == ChatMode::CONFIGURE && !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { + if post.meta.chat_mode == ChatMode::CONFIGURE { links.push(Link { - action: LinkAction::PatchAll, - text: "Save and return".to_string(), + action: LinkAction::Goto, + text: "Return".to_string(), goto: Some("SETTINGS:DEFAULT".to_string()), current_config_file: None, link_tooltip: format!(""), }); + + if !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { + links.push(Link { + action: LinkAction::PatchAll, + text: "Save and return".to_string(), + goto: Some("SETTINGS:DEFAULT".to_string()), + current_config_file: None, + link_tooltip: format!(""), + }); + } } // if post.meta.chat_mode == ChatMode::AGENT { From 16197a393ea6692ef67ea076b3a7d1bebb0b6fda Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Mon, 9 Dec 2024 23:38:29 +0100 Subject: [PATCH 123/185] feat: new format of commit link, one link per project with changes --- src/git.rs | 64 ++++++++++++++++---- src/http/routers/v1.rs | 4 +- src/http/routers/v1/git.rs | 110 +++++++++++++++++------------------ src/http/routers/v1/links.rs | 86 ++++++++++++++++++--------- 4 files changed, 167 
insertions(+), 97 deletions(-) diff --git a/src/git.rs b/src/git.rs index fd4167a48..2398a02f3 100644 --- a/src/git.rs +++ b/src/git.rs @@ -1,7 +1,30 @@ use std::path::PathBuf; +use serde::{Serialize, Deserialize}; use tracing::error; use git2::{Branch, BranchType, DiffOptions, IndexAddOption, Oid, Repository, Signature, Status, StatusOptions}; +#[derive(Serialize, Deserialize, Debug)] +pub struct FileChange { + pub path: String, + pub status: FileChangeStatus, +} + +#[derive(Serialize, Deserialize, Debug)] +pub enum FileChangeStatus { + ADDED, + MODIFIED, + DELETED, +} +impl FileChangeStatus { + pub fn initial(&self) -> char { + match self { + FileChangeStatus::ADDED => 'A', + FileChangeStatus::MODIFIED => 'M', + FileChangeStatus::DELETED => 'D', + } + } +} + pub fn git_ls_files(repository_path: &PathBuf) -> Option> { let repository = Repository::open(repository_path) .map_err(|e| error!("Failed to open repository: {}", e)).ok()?; @@ -65,24 +88,36 @@ pub fn stage_all_changes(repository: &Repository) -> Result<(), String> { /// Returns: /// /// A tuple containing the number of new files, modified files, and deleted files. -pub fn count_file_changes(repository: &Repository, include_unstaged: bool) -> Result<(usize, usize, usize), String> { - let (mut new_files, mut modified_files, mut deleted_files) = (0, 0, 0); +pub fn get_file_changes(repository: &Repository, include_unstaged: bool) -> Result, String> { + let mut result = Vec::new(); let statuses = repository.statuses(None) .map_err(|e| format!("Failed to get statuses: {}", e))?; for entry in statuses.iter() { let status = entry.status(); - if status.contains(Status::INDEX_NEW) { new_files += 1; } - if status.contains(Status::INDEX_MODIFIED) { modified_files += 1;} - if status.contains(Status::INDEX_DELETED) { deleted_files += 1; } + if status.contains(Status::INDEX_NEW) { + result.push(FileChange {status: FileChangeStatus::ADDED, path: entry.path().unwrap().to_string()}) + } + if status.contains(Status::INDEX_MODIFIED) { + result.push(FileChange {status: FileChangeStatus::MODIFIED, path: entry.path().unwrap().to_string()}) + } + if status.contains(Status::INDEX_DELETED) { + result.push(FileChange {status: FileChangeStatus::DELETED, path: entry.path().unwrap().to_string()}) + } if include_unstaged { - if status.contains(Status::WT_NEW) { new_files += 1; } - if status.contains(Status::WT_MODIFIED) { modified_files += 1;} - if status.contains(Status::WT_DELETED) { deleted_files += 1; } + if status.contains(Status::WT_NEW) { + result.push(FileChange {status: FileChangeStatus::ADDED, path: entry.path().unwrap().to_string()}) + } + if status.contains(Status::WT_MODIFIED) { + result.push(FileChange {status: FileChangeStatus::MODIFIED, path: entry.path().unwrap().to_string()}) + } + if status.contains(Status::WT_DELETED) { + result.push(FileChange {status: FileChangeStatus::DELETED, path: entry.path().unwrap().to_string()}) + } } } - Ok((new_files, modified_files, deleted_files)) + Ok(result) } pub fn commit(repository: &Repository, branch: &Branch, message: &str, author_name: &str, author_email: &str) -> Result { @@ -113,15 +148,20 @@ pub fn commit(repository: &Repository, branch: &Branch, message: &str, author_na } /// Similar to `git diff`, but including untracked files. 
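// Hedged usage sketch (not part of the patch): the Vec<FileChange> returned by
// get_file_changes() can be rendered into the compact "A path" / "M path" lines
// that the links handler later builds for tooltips; `changes` is assumed to come
// from get_file_changes(&repository, true).
fn summarize_file_changes(changes: &[FileChange]) -> String {
    changes.iter()
        .map(|c| format!("{} {}", c.status.initial(), c.path))
        .collect::<Vec<String>>()
        .join("\n")
}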
-pub fn git_diff_from_all_changes(repository: &Repository) -> Result { +pub fn git_diff(repository: &Repository, file_changes: &Vec) -> Result { let mut diff_options = DiffOptions::new(); diff_options.include_untracked(true); diff_options.recurse_untracked_dirs(true); + for file_change in file_changes { + diff_options.pathspec(&file_change.path); + } // Create a new temporary tree, with all changes staged let mut index = repository.index().map_err(|e| format!("Failed to get repository index: {}", e))?; - index.add_all(["*"].iter(), IndexAddOption::DEFAULT, None) - .map_err(|e| format!("Failed to add files to index: {}", e))?; + for file_change in file_changes { + index.add_path(std::path::Path::new(&file_change.path)) + .map_err(|e| format!("Failed to add file to index: {}", e))?; + } let oid = index.write_tree().map_err(|e| format!("Failed to write tree: {}", e))?; let new_tree = repository.find_tree(oid).map_err(|e| format!("Failed to find tree: {}", e))?; diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index d754ed8c2..09fd3a42e 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -22,7 +22,7 @@ use crate::http::routers::v1::chat::{handle_v1_chat, handle_v1_chat_completions} use crate::http::routers::v1::chat_based_handlers::handle_v1_commit_message_from_diff; use crate::http::routers::v1::dashboard::get_dashboard_plots; use crate::http::routers::v1::docker::{handle_v1_docker_container_action, handle_v1_docker_container_list}; -use crate::http::routers::v1::git::handle_v1_git_stage_and_commit; +// use crate::http::routers::v1::git::handle_v1_git_stage_and_commit; use crate::http::routers::v1::graceful_shutdown::handle_v1_graceful_shutdown; use crate::http::routers::v1::snippet_accepted::handle_v1_snippet_accepted; use crate::http::routers::v1::telemetry_network::handle_v1_telemetry_network; @@ -112,7 +112,7 @@ pub fn make_v1_router() -> Router { .route("/sync-files-extract-tar", telemetry_post!(handle_v1_sync_files_extract_tar)) - .route("/git-stage-and-commit", telemetry_post!(handle_v1_git_stage_and_commit)) + // .route("/git-stage-and-commit", telemetry_post!(handle_v1_git_stage_and_commit)) .route("/system-prompt", telemetry_post!(handle_v1_system_prompt)) // because it works remotely diff --git a/src/http/routers/v1/git.rs b/src/http/routers/v1/git.rs index aae94f1b7..5adfb5a26 100644 --- a/src/http/routers/v1/git.rs +++ b/src/http/routers/v1/git.rs @@ -1,64 +1,64 @@ -use std::sync::Arc; -use axum::Extension; -use axum::http::{Response, StatusCode}; -use git2::Repository; -use hyper::Body; -use serde::{Deserialize, Serialize}; -use tokio::sync::RwLock as ARwLock; -use url::Url; +// use std::sync::Arc; +// use axum::Extension; +// use axum::http::{Response, StatusCode}; +// use git2::Repository; +// use hyper::Body; +// use serde::{Deserialize, Serialize}; +// use tokio::sync::RwLock as ARwLock; +// use url::Url; -use crate::custom_error::ScratchError; -use crate::git::{commit, count_file_changes, create_or_checkout_to_branch, stage_all_changes}; -use crate::global_context::GlobalContext; +// use crate::custom_error::ScratchError; +// use crate::git::{commit, create_or_checkout_to_branch, stage_all_changes}; +// use crate::global_context::GlobalContext; -#[derive(Serialize, Deserialize, Clone, Debug)] -pub struct GitStageAndCommitPost { - chat_id: String, - repository_path: Url, -} +// #[derive(Serialize, Deserialize, Clone, Debug)] +// pub struct GitStageAndCommitPost { +// chat_id: String, +// repository_path: Url, +// } -pub async fn 
handle_v1_git_stage_and_commit( - Extension(_gcx): Extension>>, - body_bytes: hyper::body::Bytes, -) -> Result, ScratchError> { - let post = serde_json::from_slice::(&body_bytes) - .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; +// pub async fn handle_v1_git_stage_and_commit( +// Extension(_gcx): Extension>>, +// body_bytes: hyper::body::Bytes, +// ) -> Result, ScratchError> { +// let post = serde_json::from_slice::(&body_bytes) +// .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; - let repo_path = crate::files_correction::canonical_path( - &post.repository_path.to_file_path().unwrap_or_default().to_string_lossy().to_string()); - let repository = Repository::open(&repo_path) - .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Could not open repository: {}", e)))?; +// let repo_path = crate::files_correction::canonical_path( +// &post.repository_path.to_file_path().unwrap_or_default().to_string_lossy().to_string()); +// let repository = Repository::open(&repo_path) +// .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Could not open repository: {}", e)))?; - let branch_name = format!("refact-{}", post.chat_id); - let branch = create_or_checkout_to_branch(&repository, &branch_name) - .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; +// let branch_name = format!("refact-{}", post.chat_id); +// let branch = create_or_checkout_to_branch(&repository, &branch_name) +// .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; - stage_all_changes(&repository) - .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; +// stage_all_changes(&repository) +// .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; - let (new_files, modified_files, deleted_files) = count_file_changes(&repository, false) - .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; +// let (new_files, modified_files, deleted_files) = count_file_changes(&repository, false) +// .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; - let commit_oid = if new_files + modified_files + deleted_files != 0 { - Some(commit( - &repository, - &branch, - &format!("Refact agent commit in chat {} at {}", post.chat_id, chrono::Utc::now().format("%Y-%m-%d %H:%M:%S")), - "Refact Agent", - "agent@refact.ai", - ).map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?) - } else { - None - }; +// let commit_oid = if new_files + modified_files + deleted_files != 0 { +// Some(commit( +// &repository, +// &branch, +// &format!("Refact agent commit in chat {} at {}", post.chat_id, chrono::Utc::now().format("%Y-%m-%d %H:%M:%S")), +// "Refact Agent", +// "agent@refact.ai", +// ).map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?) 
+// } else { +// None +// }; - Ok(Response::builder() - .status(StatusCode::OK) - .header("Content-Type", "application/json") - .body(Body::from(serde_json::json!({ - "commit_oid": commit_oid.map(|x| x.to_string()), - "new_files": new_files, - "modified_files": modified_files, - "deleted_files": deleted_files, - }).to_string())) - .unwrap()) -} \ No newline at end of file +// Ok(Response::builder() +// .status(StatusCode::OK) +// .header("Content-Type", "application/json") +// .body(Body::from(serde_json::json!({ +// "commit_oid": commit_oid.map(|x| x.to_string()), +// "new_files": new_files, +// "modified_files": modified_files, +// "deleted_files": deleted_files, +// }).to_string())) +// .unwrap()) +// } \ No newline at end of file diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 2533cc8a1..b2c20c334 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -3,9 +3,10 @@ use std::fs; use axum::Extension; use axum::http::{Response, StatusCode}; use hyper::Body; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Serialize, Serializer}; use tokio::sync::RwLock as ARwLock; use tracing::error; +use url::Url; use crate::agentic::generate_commit_message::generate_commit_message_by_diff; use crate::call_validation::{ChatMessage, ChatMeta, ChatMode}; @@ -14,6 +15,7 @@ use crate::global_context::GlobalContext; use crate::integrations::go_to_configuration_message; use crate::tools::tool_patch_aux::tickets_parsing::get_tickets_from_messages; use crate::agentic::generate_follow_up_message::generate_follow_up_message; +use crate::git::FileChange; #[derive(Deserialize, Clone, Debug)] pub struct LinksPost { @@ -32,7 +34,7 @@ enum LinkAction { SummarizeProject, } -#[derive(Serialize, Deserialize, Debug)] +#[derive(Serialize, Debug)] pub struct Link { // XXX rename: // link_action @@ -44,15 +46,28 @@ pub struct Link { #[serde(skip_serializing_if = "Option::is_none")] goto: Option, #[serde(skip_serializing_if = "Option::is_none")] - // projects: Option>, current_config_file: Option, // XXX rename link_tooltip: String, + link_payload: Option, +} + +#[derive(Debug)] +pub enum LinkPayload { + CommitPayload(CommitInfo), +} +impl Serialize for LinkPayload { + fn serialize(&self, serializer: S) -> Result { + match self { + LinkPayload::CommitPayload(commit_payload) => commit_payload.serialize(serializer), + } + } } #[derive(Serialize, Deserialize, Debug)] -pub struct ProjectCommit { - path: String, +pub struct CommitInfo { + project_path: Url, commit_message: String, + file_changes: Vec, } pub async fn handle_v1_links( @@ -75,6 +90,7 @@ pub async fn handle_v1_links( goto: None, current_config_file: summary_path_option, link_tooltip: format!("Project summary is a starting point for Refact Agent."), + link_payload: None, }); } else { // exists @@ -94,6 +110,7 @@ pub async fn handle_v1_links( goto: Some(format!("SETTINGS:{igname}")), current_config_file: None, link_tooltip: format!(""), + link_payload: None, }); } else { tracing::info!("tool {} present => happy", igname); @@ -122,6 +139,7 @@ pub async fn handle_v1_links( goto: Some("SETTINGS:DEFAULT".to_string()), current_config_file: None, link_tooltip: format!(""), + link_payload: None, }); if !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { @@ -131,23 +149,32 @@ pub async fn handle_v1_links( goto: Some("SETTINGS:DEFAULT".to_string()), current_config_file: None, link_tooltip: format!(""), + link_payload: None, }); } } - // if post.meta.chat_mode == ChatMode::AGENT { - // 
let (project_commits, files_changed) = generate_commit_messages_with_current_changes(gcx.clone()).await; - // if !project_commits.is_empty() { - // links.push(Link { - // action: LinkAction::Commit, - // text: format!("Commit {files_changed} files"), - // goto: None, - // // projects: Some(project_commits), - // current_config_file: None, - // link_tooltip: format!(""), - // }); - // } - // } + if post.meta.chat_mode == ChatMode::AGENT { + for commit in get_commit_information_from_current_changes(gcx.clone()).await { + let project_name = commit.project_path.to_file_path().ok() + .and_then(|path| path.file_name().map(|name| name.to_string_lossy().into_owned())) + .unwrap_or_else(|| "".to_string()); + let tooltip_message = format!( + "git commmit -m \"{}{}\"\n{}", + commit.commit_message.lines().next().unwrap_or(""), + if commit.commit_message.lines().count() > 1 { "..." } else { "" }, + commit.file_changes.iter().map(|f| format!("{} {}", f.status.initial(), f.path)).collect::>().join("\n"), + ); + links.push(Link { + action: LinkAction::Commit, + text: format!("Commit {} files in `{}`", commit.file_changes.len(), project_name), + goto: Some("LINKS_AGAIN".to_string()), + current_config_file: None, + link_tooltip: tooltip_message, + link_payload: Some(LinkPayload::CommitPayload(commit)), + }); + } + } if post.meta.chat_mode == ChatMode::AGENT { for failed_integr_name in failed_integration_names_after_last_user_message(&post.messages) { @@ -157,6 +184,7 @@ pub async fn handle_v1_links( goto: Some(format!("SETTINGS:{failed_integr_name}")), current_config_file: None, link_tooltip: format!(""), + link_payload: None, }) } } @@ -168,6 +196,7 @@ pub async fn handle_v1_links( goto: Some(format!("SETTINGS:{}", e.integr_config_path)), current_config_file: None, link_tooltip: format!("Error at line {}: {}", e.error_line, e.error_msg), + link_payload: None, }); } @@ -183,6 +212,7 @@ pub async fn handle_v1_links( goto: None, current_config_file: None, link_tooltip: format!(""), + link_payload: None, }); } } @@ -196,23 +226,23 @@ pub async fn handle_v1_links( .unwrap()) } -async fn generate_commit_messages_with_current_changes(gcx: Arc>) -> (Vec, usize) { - let mut project_commits = Vec::new(); - let mut total_file_changes = 0; +async fn get_commit_information_from_current_changes(gcx: Arc>) -> Vec { + let mut commits = Vec::new(); for project_path in crate::files_correction::get_project_dirs(gcx.clone()).await { let repository = match git2::Repository::open(&project_path) { Ok(repo) => repo, Err(e) => { error!("{}", e); continue; } }; + tracing::info!("repository opened"); - let (added, modified, deleted) = match crate::git::count_file_changes(&repository, true) { - Ok((0, 0, 0)) => { continue; } + let file_changes = match crate::git::get_file_changes(&repository, true) { + Ok(changes) if changes.is_empty() => { continue; } Ok(changes) => changes, Err(e) => { error!("{}", e); continue; } }; - let diff = match crate::git::git_diff_from_all_changes(&repository) { + let diff = match crate::git::git_diff(&repository, &file_changes) { Ok(d) if d.is_empty() => { continue; } Ok(d) => d, Err(e) => { error!("{}", e); continue; } @@ -223,14 +253,14 @@ async fn generate_commit_messages_with_current_changes(gcx: Arc { error!("{}", e); continue; } }; - project_commits.push(ProjectCommit { - path: project_path.to_string_lossy().to_string(), + commits.push(CommitInfo { + project_path: Url::from_file_path(&project_path).ok().unwrap_or_else(|| Url::parse("file:///").unwrap()), commit_message: commit_msg, + file_changes, 
}); - total_file_changes += added + modified + deleted; } - (project_commits, total_file_changes) + commits } fn failed_integration_names_after_last_user_message(messages: &Vec) -> Vec { From 2d85e78d045abcdacbffa3de8f3093225a176bf1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 10 Dec 2024 11:30:00 +0100 Subject: [PATCH 124/185] feat: stage changes based on file changes and get configured author email and name --- src/git.rs | 86 +++++++++++++++++++++++++++++++++--------------------- 1 file changed, 52 insertions(+), 34 deletions(-) diff --git a/src/git.rs b/src/git.rs index 2398a02f3..af52a84ab 100644 --- a/src/git.rs +++ b/src/git.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; use serde::{Serialize, Deserialize}; use tracing::error; -use git2::{Branch, BranchType, DiffOptions, IndexAddOption, Oid, Repository, Signature, Status, StatusOptions}; +use git2::{Branch, DiffOptions, Oid, Repository, Signature, Status, StatusOptions}; #[derive(Serialize, Deserialize, Debug)] pub struct FileChange { @@ -51,43 +51,52 @@ pub fn git_ls_files(repository_path: &PathBuf) -> Option> { } /// Similar to git checkout -b -pub fn create_or_checkout_to_branch<'repo>(repository: &'repo Repository, branch_name: &str) -> Result, String> { - let branch = match repository.find_branch(branch_name, BranchType::Local) { - Ok(branch) => branch, - Err(_) => { - let head_commit = repository.head() - .and_then(|h| h.peel_to_commit()) - .map_err(|e| format!("Failed to get HEAD commit: {}", e))?; - repository.branch(branch_name, &head_commit, false) - .map_err(|e| format!("Failed to create branch: {}", e))? - } - }; - - // Checkout to the branch - let object = repository.revparse_single(&("refs/heads/".to_owned() + branch_name)) - .map_err(|e| format!("Failed to revparse single: {}", e))?; - repository.checkout_tree(&object, None) - .map_err(|e| format!("Failed to checkout tree: {}", e))?; - repository.set_head(&format!("refs/heads/{}", branch_name)) - .map_err(|e| format!("Failed to set head: {}", e))?; - - Ok(branch) -} - -/// Similar to git add . -pub fn stage_all_changes(repository: &Repository) -> Result<(), String> { +// pub fn create_or_checkout_to_branch<'repo>(repository: &'repo Repository, branch_name: &str) -> Result, String> { +// let branch = match repository.find_branch(branch_name, git2::BranchType::Local) { +// Ok(branch) => branch, +// Err(_) => { +// let head_commit = repository.head() +// .and_then(|h| h.peel_to_commit()) +// .map_err(|e| format!("Failed to get HEAD commit: {}", e))?; +// repository.branch(branch_name, &head_commit, false) +// .map_err(|e| format!("Failed to create branch: {}", e))? 
+// } +// }; + +// // Checkout to the branch +// let object = repository.revparse_single(&("refs/heads/".to_owned() + branch_name)) +// .map_err(|e| format!("Failed to revparse single: {}", e))?; +// repository.checkout_tree(&object, None) +// .map_err(|e| format!("Failed to checkout tree: {}", e))?; +// repository.set_head(&format!("refs/heads/{}", branch_name)) +// .map_err(|e| format!("Failed to set head: {}", e))?; + +// Ok(branch) +// } + +pub fn stage_changes(repository: &Repository, file_changes: &Vec) -> Result<(), String> { let mut index = repository.index() .map_err(|e| format!("Failed to get index: {}", e))?; - index.add_all(["*"].iter(), IndexAddOption::DEFAULT, None) - .map_err(|e| format!("Failed to add files to index: {}", e))?; + + for file_change in file_changes { + match file_change.status { + FileChangeStatus::ADDED | FileChangeStatus::MODIFIED => { + index.add_path(std::path::Path::new(&file_change.path)) + .map_err(|e| format!("Failed to add file to index: {}", e))?; + }, + FileChangeStatus::DELETED => { + index.remove_path(std::path::Path::new(&file_change.path)) + .map_err(|e| format!("Failed to remove file from index: {}", e))?; + }, + } + } + index.write() .map_err(|e| format!("Failed to write index: {}", e))?; - Ok(()) + + Ok(()) } -/// Returns: -/// -/// A tuple containing the number of new files, modified files, and deleted files. pub fn get_file_changes(repository: &Repository, include_unstaged: bool) -> Result, String> { let mut result = Vec::new(); @@ -120,6 +129,15 @@ pub fn get_file_changes(repository: &Repository, include_unstaged: bool) -> Resu Ok(result) } +pub fn get_configured_author_email_and_name(repository: &Repository) -> Result<(String, String), String> { + let config = repository.config().map_err(|e| format!("Failed to get repository config: {}", e))?; + let author_email = config.get_string("user.email") + .map_err(|e| format!("Failed to get author email: {}", e))?; + let author_name = config.get_string("user.name") + .map_err(|e| format!("Failed to get author name: {}", e))?; + Ok((author_email, author_name)) +} + pub fn commit(repository: &Repository, branch: &Branch, message: &str, author_name: &str, author_email: &str) -> Result { let mut index = repository.index() @@ -139,7 +157,7 @@ pub fn commit(repository: &Repository, branch: &Branch, message: &str, author_na repository.find_commit(target) .map_err(|e| format!("Failed to find branch commit: {}", e))? } else { - return Err("No parent commits found (initial commit is not supported)".to_string()); + return Err("No parent commits found".to_string()); }; repository.commit( @@ -147,7 +165,7 @@ pub fn commit(repository: &Repository, branch: &Branch, message: &str, author_na ).map_err(|e| format!("Failed to create commit: {}", e)) } -/// Similar to `git diff`, but including untracked files. +/// Similar to `git diff`, from specified file changes. 
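// Hedged sketch of how the helpers above are meant to compose — stage the reported
// changes, read the configured author, then commit on the current HEAD branch —
// mirroring the handler added in the next patch; `repository`, `changes` and `msg`
// are placeholders.
fn stage_and_commit_sketch(repository: &git2::Repository, changes: &Vec<FileChange>, msg: &str) -> Result<git2::Oid, String> {
    stage_changes(repository, changes)?;
    let (author_email, author_name) = get_configured_author_email_and_name(repository)?;
    let branch = repository.head().map(git2::Branch::wrap)
        .map_err(|e| format!("Failed to get current branch: {}", e))?;
    commit(repository, &branch, msg, &author_name, &author_email)
}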
pub fn git_diff(repository: &Repository, file_changes: &Vec) -> Result { let mut diff_options = DiffOptions::new(); diff_options.include_untracked(true); From 058eb92af92faa5d941eafef2924bfd3ae0b1767 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 10 Dec 2024 11:30:44 +0100 Subject: [PATCH 125/185] feat: add commit handler --- src/http/routers/v1.rs | 4 +- src/http/routers/v1/git.rs | 148 ++++++++++++++++++++++------------- src/http/routers/v1/links.rs | 14 +--- 3 files changed, 99 insertions(+), 67 deletions(-) diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index 09fd3a42e..8784e76be 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -22,7 +22,7 @@ use crate::http::routers::v1::chat::{handle_v1_chat, handle_v1_chat_completions} use crate::http::routers::v1::chat_based_handlers::handle_v1_commit_message_from_diff; use crate::http::routers::v1::dashboard::get_dashboard_plots; use crate::http::routers::v1::docker::{handle_v1_docker_container_action, handle_v1_docker_container_list}; -// use crate::http::routers::v1::git::handle_v1_git_stage_and_commit; +use crate::http::routers::v1::git::handle_v1_git_commit; use crate::http::routers::v1::graceful_shutdown::handle_v1_graceful_shutdown; use crate::http::routers::v1::snippet_accepted::handle_v1_snippet_accepted; use crate::http::routers::v1::telemetry_network::handle_v1_telemetry_network; @@ -112,7 +112,7 @@ pub fn make_v1_router() -> Router { .route("/sync-files-extract-tar", telemetry_post!(handle_v1_sync_files_extract_tar)) - // .route("/git-stage-and-commit", telemetry_post!(handle_v1_git_stage_and_commit)) + .route("/git-commit", telemetry_post!(handle_v1_git_commit)) .route("/system-prompt", telemetry_post!(handle_v1_system_prompt)) // because it works remotely diff --git a/src/http/routers/v1/git.rs b/src/http/routers/v1/git.rs index 5adfb5a26..f888e28a3 100644 --- a/src/http/routers/v1/git.rs +++ b/src/http/routers/v1/git.rs @@ -1,64 +1,102 @@ -// use std::sync::Arc; -// use axum::Extension; -// use axum::http::{Response, StatusCode}; -// use git2::Repository; -// use hyper::Body; -// use serde::{Deserialize, Serialize}; -// use tokio::sync::RwLock as ARwLock; -// use url::Url; +use std::sync::Arc; +use axum::Extension; +use axum::http::{Response, StatusCode}; +use git2::Repository; +use hyper::Body; +use serde::{Deserialize, Serialize}; +use tokio::sync::RwLock as ARwLock; +use url::Url; -// use crate::custom_error::ScratchError; -// use crate::git::{commit, create_or_checkout_to_branch, stage_all_changes}; -// use crate::global_context::GlobalContext; +use crate::custom_error::ScratchError; +use crate::git::{FileChange, stage_changes, get_configured_author_email_and_name}; +use crate::global_context::GlobalContext; -// #[derive(Serialize, Deserialize, Clone, Debug)] -// pub struct GitStageAndCommitPost { -// chat_id: String, -// repository_path: Url, -// } +#[derive(Serialize, Deserialize, Debug)] +pub struct GitCommitPost { + pub commits: Vec, +} -// pub async fn handle_v1_git_stage_and_commit( -// Extension(_gcx): Extension>>, -// body_bytes: hyper::body::Bytes, -// ) -> Result, ScratchError> { -// let post = serde_json::from_slice::(&body_bytes) -// .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; +#[derive(Serialize, Deserialize, Debug)] +pub struct CommitInfo { + pub project_path: Url, + pub commit_message: String, + pub file_changes: Vec, +} -// let repo_path = crate::files_correction::canonical_path( -// 
&post.repository_path.to_file_path().unwrap_or_default().to_string_lossy().to_string()); -// let repository = Repository::open(&repo_path) -// .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Could not open repository: {}", e)))?; +#[derive(Serialize, Deserialize, Debug)] +pub struct GitError { + pub error_message: String, + pub project_name: String, + pub project_path: Url, +} -// let branch_name = format!("refact-{}", post.chat_id); -// let branch = create_or_checkout_to_branch(&repository, &branch_name) -// .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; +pub async fn handle_v1_git_commit( + Extension(_gcx): Extension>>, + body_bytes: hyper::body::Bytes, +) -> Result, ScratchError> { + let post = serde_json::from_slice::(&body_bytes) + .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; -// stage_all_changes(&repository) -// .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; + let mut error_log = Vec::new(); + let mut commits_applied = Vec::new(); -// let (new_files, modified_files, deleted_files) = count_file_changes(&repository, false) -// .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?; + for commit in post.commits { + let repo_path = crate::files_correction::to_pathbuf_normalize( + &commit.project_path.to_file_path().unwrap_or_default().display().to_string()); -// let commit_oid = if new_files + modified_files + deleted_files != 0 { -// Some(commit( -// &repository, -// &branch, -// &format!("Refact agent commit in chat {} at {}", post.chat_id, chrono::Utc::now().format("%Y-%m-%d %H:%M:%S")), -// "Refact Agent", -// "agent@refact.ai", -// ).map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, e))?) 
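// Hedged sketch (not from the patch) of the JSON body the new POST /v1/git-commit
// route expects: field names follow GitCommitPost / CommitInfo / FileChange above,
// while the path, message and file name here are made-up placeholders.
fn example_git_commit_payload() -> serde_json::Value {
    serde_json::json!({
        "commits": [{
            "project_path": "file:///home/user/myproject",
            "commit_message": "Describe the change",
            "file_changes": [
                { "path": "src/lib.rs", "status": "MODIFIED" }
            ]
        }]
    })
}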
-// } else { -// None -// }; + let project_name = commit.project_path.to_file_path().ok() + .and_then(|path| path.file_name().map(|name| name.to_string_lossy().into_owned())) + .unwrap_or_else(|| "".to_string()); + + let git_error = |msg: String| -> GitError { + GitError { + error_message: msg, + project_name: project_name.clone(), + project_path: commit.project_path.clone(), + } + }; + + let repository = match Repository::open(&repo_path) { + Ok(repo) => repo, + Err(e) => { error_log.push(git_error(format!("Failed to open repo: {}", e))); continue; } + }; + + if let Err(stage_err) = stage_changes(&repository, &commit.file_changes) { + error_log.push(git_error(stage_err)); + continue; + } + + let (author_email, author_name) = match get_configured_author_email_and_name(&repository) { + Ok(email_and_name) => email_and_name, + Err(err) => { + error_log.push(git_error(err)); + continue; + } + }; + + let branch = match repository.head().map(|reference| git2::Branch::wrap(reference)) { + Ok(branch) => branch, + Err(e) => { error_log.push(git_error(format!("Failed to get current branch: {}", e))); continue; } + }; + + let commit_oid = match crate::git::commit(&repository, &branch, &commit.commit_message, &author_name, &author_email) { + Ok(oid) => oid, + Err(e) => { error_log.push(git_error(e)); continue; } + }; + + commits_applied.push(serde_json::json!({ + "project_name": project_name, + "project_path": commit.project_path.to_string(), + "commit_oid": commit_oid.to_string(), + })); + } -// Ok(Response::builder() -// .status(StatusCode::OK) -// .header("Content-Type", "application/json") -// .body(Body::from(serde_json::json!({ -// "commit_oid": commit_oid.map(|x| x.to_string()), -// "new_files": new_files, -// "modified_files": modified_files, -// "deleted_files": deleted_files, -// }).to_string())) -// .unwrap()) -// } \ No newline at end of file + Ok(Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "application/json") + .body(Body::from(serde_json::to_string(&serde_json::json!({ + "commits_applied": commits_applied, + "error_log": error_log, + })).unwrap())) + .unwrap()) +} \ No newline at end of file diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index b2c20c334..665be55d9 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -15,7 +15,7 @@ use crate::global_context::GlobalContext; use crate::integrations::go_to_configuration_message; use crate::tools::tool_patch_aux::tickets_parsing::get_tickets_from_messages; use crate::agentic::generate_follow_up_message::generate_follow_up_message; -use crate::git::FileChange; +use crate::http::routers::v1::git::{CommitInfo, GitCommitPost}; #[derive(Deserialize, Clone, Debug)] pub struct LinksPost { @@ -53,22 +53,16 @@ pub struct Link { #[derive(Debug)] pub enum LinkPayload { - CommitPayload(CommitInfo), + CommitPayload(GitCommitPost), } impl Serialize for LinkPayload { fn serialize(&self, serializer: S) -> Result { match self { - LinkPayload::CommitPayload(commit_payload) => commit_payload.serialize(serializer), + LinkPayload::CommitPayload(post) => post.serialize(serializer), } } } -#[derive(Serialize, Deserialize, Debug)] -pub struct CommitInfo { - project_path: Url, - commit_message: String, - file_changes: Vec, -} pub async fn handle_v1_links( Extension(gcx): Extension>>, @@ -171,7 +165,7 @@ pub async fn handle_v1_links( goto: Some("LINKS_AGAIN".to_string()), current_config_file: None, link_tooltip: tooltip_message, - link_payload: Some(LinkPayload::CommitPayload(commit)), + 
link_payload: Some(LinkPayload::CommitPayload(GitCommitPost { commits: vec![commit] })), }); } } From c5e8bc0f9dbf96a2831b654355a6ce2010853168 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 10 Dec 2024 12:35:43 +0100 Subject: [PATCH 126/185] feat: add uncommited warning message --- src/http/routers/v1/links.rs | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 665be55d9..874a0eaeb 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -71,6 +71,7 @@ pub async fn handle_v1_links( let post = serde_json::from_slice::(&body_bytes) .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; let mut links = Vec::new(); + let mut uncommited_changes_warning = String::new(); tracing::info!("for links, post.meta.chat_mode == {:?}", post.meta.chat_mode); let (integrations_map, integration_yaml_errors) = crate::integrations::running_integrations::load_integrations(gcx.clone(), "".to_string(), gcx.read().await.cmdline.experimental).await; @@ -149,6 +150,7 @@ pub async fn handle_v1_links( } if post.meta.chat_mode == ChatMode::AGENT { + let mut project_changes = Vec::new(); for commit in get_commit_information_from_current_changes(gcx.clone()).await { let project_name = commit.project_path.to_file_path().ok() .and_then(|path| path.file_name().map(|name| name.to_string_lossy().into_owned())) @@ -159,6 +161,11 @@ pub async fn handle_v1_links( if commit.commit_message.lines().count() > 1 { "..." } else { "" }, commit.file_changes.iter().map(|f| format!("{} {}", f.status.initial(), f.path)).collect::>().join("\n"), ); + project_changes.push(format!( + "In project {project_name}: {}{}", + commit.file_changes.iter().take(3).map(|f| format!("{} {}", f.status.initial(), f.path)).collect::>().join(", "), + if commit.file_changes.len() > 3 { ", ..." 
} else { "" }, + )); links.push(Link { action: LinkAction::Commit, text: format!("Commit {} files in `{}`", commit.file_changes.len(), project_name), @@ -168,6 +175,13 @@ pub async fn handle_v1_links( link_payload: Some(LinkPayload::CommitPayload(GitCommitPost { commits: vec![commit] })), }); } + if !project_changes.is_empty() { + if project_changes.len() > 4 { + project_changes.truncate(4); + project_changes.push("...".to_string()); + } + uncommited_changes_warning = format!("You have uncommitted changes, which may cause issues when rolling back agent changes:\n{}", project_changes.join("\n")); + } } if post.meta.chat_mode == ChatMode::AGENT { @@ -216,8 +230,10 @@ pub async fn handle_v1_links( Ok(Response::builder() .status(StatusCode::OK) .header("Content-Type", "application/json") - .body(Body::from(serde_json::to_string_pretty(&serde_json::json!({"links": links})).unwrap())) - .unwrap()) + .body(Body::from(serde_json::to_string_pretty(&serde_json::json!({ + "links": links, + "uncommited_changes_warning": uncommited_changes_warning, + })).unwrap())).unwrap()) } async fn get_commit_information_from_current_changes(gcx: Arc>) -> Vec { @@ -228,7 +244,6 @@ async fn get_commit_information_from_current_changes(gcx: Arc repo, Err(e) => { error!("{}", e); continue; } }; - tracing::info!("repository opened"); let file_changes = match crate::git::get_file_changes(&repository, true) { Ok(changes) if changes.is_empty() => { continue; } From ff2e0e0da56eda95af574858598f4c3236c3351c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 10 Dec 2024 13:34:04 +0100 Subject: [PATCH 127/185] fix: limit git diff size + move git commit info code to src/git.rs --- src/git.rs | 72 +++++++++++++++++++++++++++++++++--- src/http/routers/v1/git.rs | 9 +---- src/http/routers/v1/links.rs | 42 +-------------------- 3 files changed, 70 insertions(+), 53 deletions(-) diff --git a/src/git.rs b/src/git.rs index af52a84ab..b6b98a7ac 100644 --- a/src/git.rs +++ b/src/git.rs @@ -1,15 +1,28 @@ +use std::sync::Arc; +use tokio::sync::RwLock as ARwLock; use std::path::PathBuf; +use url::Url; use serde::{Serialize, Deserialize}; use tracing::error; use git2::{Branch, DiffOptions, Oid, Repository, Signature, Status, StatusOptions}; +use crate::global_context::GlobalContext; +use crate::agentic::generate_commit_message::generate_commit_message_by_diff; + #[derive(Serialize, Deserialize, Debug)] +pub struct CommitInfo { + pub project_path: Url, + pub commit_message: String, + pub file_changes: Vec, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] pub struct FileChange { pub path: String, pub status: FileChangeStatus, } -#[derive(Serialize, Deserialize, Debug)] +#[derive(Serialize, Deserialize, Debug, Clone)] pub enum FileChangeStatus { ADDED, MODIFIED, @@ -166,7 +179,7 @@ pub fn commit(repository: &Repository, branch: &Branch, message: &str, author_na } /// Similar to `git diff`, from specified file changes. 
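// Hedged usage sketch of the `max_size` cap introduced just below: callers pass the
// limit explicitly, and 4096 here matches the MAX_DIFF_SIZE constant used later in
// this patch; `repository` and `file_changes` are placeholders.
fn capped_diff_sketch(repository: &git2::Repository, file_changes: &Vec<FileChange>) -> Result<String, String> {
    let diff = git_diff(repository, file_changes, 4096)?;
    // The returned patch text is expected to stay within the cap.
    debug_assert!(diff.len() <= 4096);
    Ok(diff)
}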
-pub fn git_diff(repository: &Repository, file_changes: &Vec) -> Result { +pub fn git_diff(repository: &Repository, file_changes: &Vec, max_size: usize) -> Result { let mut diff_options = DiffOptions::new(); diff_options.include_untracked(true); diff_options.recurse_untracked_dirs(true); @@ -174,9 +187,14 @@ pub fn git_diff(repository: &Repository, file_changes: &Vec) -> Resu diff_options.pathspec(&file_change.path); } + let mut sorted_file_changes = file_changes.clone(); + sorted_file_changes.sort_by_key(|fc| { + std::fs::metadata(&fc.path).map(|meta| meta.len()).unwrap_or(0) + }); + // Create a new temporary tree, with all changes staged let mut index = repository.index().map_err(|e| format!("Failed to get repository index: {}", e))?; - for file_change in file_changes { + for file_change in &sorted_file_changes { index.add_path(std::path::Path::new(&file_change.path)) .map_err(|e| format!("Failed to add file to index: {}", e))?; } @@ -191,10 +209,54 @@ pub fn git_diff(repository: &Repository, file_changes: &Vec) -> Resu let mut diff_str = String::new(); diff.print(git2::DiffFormat::Patch, |_, _, line| { - diff_str.push(line.origin()); - diff_str.push_str(std::str::from_utf8(line.content()).unwrap_or("")); + let line_content = std::str::from_utf8(line.content()).unwrap_or(""); + if diff_str.len() + line_content.len() < max_size { + diff_str.push(line.origin()); + diff_str.push_str(line_content); + if diff_str.len() > max_size { + diff_str.truncate(max_size - 4); + diff_str.push_str("...\n"); + } + } true }).map_err(|e| format!("Failed to print diff: {}", e))?; Ok(diff_str) } + +pub async fn get_commit_information_from_current_changes(gcx: Arc>) -> Vec { + const MAX_DIFF_SIZE: usize = 4096; + let mut commits = Vec::new(); + + for project_path in crate::files_correction::get_project_dirs(gcx.clone()).await { + let repository = match git2::Repository::open(&project_path) { + Ok(repo) => repo, + Err(e) => { error!("{}", e); continue; } + }; + + let file_changes = match crate::git::get_file_changes(&repository, true) { + Ok(changes) if changes.is_empty() => { continue; } + Ok(changes) => changes, + Err(e) => { error!("{}", e); continue; } + }; + + let diff = match git_diff(&repository, &file_changes, MAX_DIFF_SIZE) { + Ok(d) if d.is_empty() => { continue; } + Ok(d) => d, + Err(e) => { error!("{}", e); continue; } + }; + + let commit_msg = match generate_commit_message_by_diff(gcx.clone(), &diff, &None).await { + Ok(msg) => msg, + Err(e) => { error!("{}", e); continue; } + }; + + commits.push(CommitInfo { + project_path: Url::from_file_path(&project_path).ok().unwrap_or_else(|| Url::parse("file:///").unwrap()), + commit_message: commit_msg, + file_changes, + }); + } + + commits +} diff --git a/src/http/routers/v1/git.rs b/src/http/routers/v1/git.rs index f888e28a3..496e69b3e 100644 --- a/src/http/routers/v1/git.rs +++ b/src/http/routers/v1/git.rs @@ -8,7 +8,7 @@ use tokio::sync::RwLock as ARwLock; use url::Url; use crate::custom_error::ScratchError; -use crate::git::{FileChange, stage_changes, get_configured_author_email_and_name}; +use crate::git::{CommitInfo, stage_changes, get_configured_author_email_and_name}; use crate::global_context::GlobalContext; #[derive(Serialize, Deserialize, Debug)] @@ -16,13 +16,6 @@ pub struct GitCommitPost { pub commits: Vec, } -#[derive(Serialize, Deserialize, Debug)] -pub struct CommitInfo { - pub project_path: Url, - pub commit_message: String, - pub file_changes: Vec, -} - #[derive(Serialize, Deserialize, Debug)] pub struct GitError { pub error_message: 
String, diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 874a0eaeb..4f3576456 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -5,17 +5,15 @@ use axum::http::{Response, StatusCode}; use hyper::Body; use serde::{Deserialize, Serialize, Serializer}; use tokio::sync::RwLock as ARwLock; -use tracing::error; -use url::Url; -use crate::agentic::generate_commit_message::generate_commit_message_by_diff; use crate::call_validation::{ChatMessage, ChatMeta, ChatMode}; use crate::custom_error::ScratchError; use crate::global_context::GlobalContext; use crate::integrations::go_to_configuration_message; use crate::tools::tool_patch_aux::tickets_parsing::get_tickets_from_messages; use crate::agentic::generate_follow_up_message::generate_follow_up_message; -use crate::http::routers::v1::git::{CommitInfo, GitCommitPost}; +use crate::git::get_commit_information_from_current_changes; +use crate::http::routers::v1::git::GitCommitPost; #[derive(Deserialize, Clone, Debug)] pub struct LinksPost { @@ -236,42 +234,6 @@ pub async fn handle_v1_links( })).unwrap())).unwrap()) } -async fn get_commit_information_from_current_changes(gcx: Arc>) -> Vec { - let mut commits = Vec::new(); - - for project_path in crate::files_correction::get_project_dirs(gcx.clone()).await { - let repository = match git2::Repository::open(&project_path) { - Ok(repo) => repo, - Err(e) => { error!("{}", e); continue; } - }; - - let file_changes = match crate::git::get_file_changes(&repository, true) { - Ok(changes) if changes.is_empty() => { continue; } - Ok(changes) => changes, - Err(e) => { error!("{}", e); continue; } - }; - - let diff = match crate::git::git_diff(&repository, &file_changes) { - Ok(d) if d.is_empty() => { continue; } - Ok(d) => d, - Err(e) => { error!("{}", e); continue; } - }; - - let commit_msg = match generate_commit_message_by_diff(gcx.clone(), &diff, &None).await { - Ok(msg) => msg, - Err(e) => { error!("{}", e); continue; } - }; - - commits.push(CommitInfo { - project_path: Url::from_file_path(&project_path).ok().unwrap_or_else(|| Url::parse("file:///").unwrap()), - commit_message: commit_msg, - file_changes, - }); - } - - commits -} - fn failed_integration_names_after_last_user_message(messages: &Vec) -> Vec { let last_user_msg_index = messages.iter().rposition(|m| m.role == "user").unwrap_or(0); let tool_calls = messages[last_user_msg_index..].iter().filter(|m| m.role == "assistant") From b1ed957de27ce16d356b9bdde1ab7061c5ceb01a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 10 Dec 2024 19:26:30 +0100 Subject: [PATCH 128/185] fix: extra m in 'commit' and no uncommited warning if there are messages --- src/http/routers/v1/links.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 4f3576456..647fa4036 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -154,7 +154,7 @@ pub async fn handle_v1_links( .and_then(|path| path.file_name().map(|name| name.to_string_lossy().into_owned())) .unwrap_or_else(|| "".to_string()); let tooltip_message = format!( - "git commmit -m \"{}{}\"\n{}", + "git commit -m \"{}{}\"\n{}", commit.commit_message.lines().next().unwrap_or(""), if commit.commit_message.lines().count() > 1 { "..." 
} else { "" }, commit.file_changes.iter().map(|f| format!("{} {}", f.status.initial(), f.path)).collect::>().join("\n"), @@ -173,7 +173,7 @@ pub async fn handle_v1_links( link_payload: Some(LinkPayload::CommitPayload(GitCommitPost { commits: vec![commit] })), }); } - if !project_changes.is_empty() { + if !project_changes.is_empty() && post.messages.is_empty() { if project_changes.len() > 4 { project_changes.truncate(4); project_changes.push("...".to_string()); From 93ae64094170cc7f6f65b66b94d1b0b57a4c7161 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 11 Dec 2024 05:23:32 +0100 Subject: [PATCH 129/185] uncommited_changes_warning wording --- src/http/routers/v1/links.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 647fa4036..ef92ecec2 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -134,7 +134,7 @@ pub async fn handle_v1_links( link_tooltip: format!(""), link_payload: None, }); - + if !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { links.push(Link { action: LinkAction::PatchAll, @@ -154,8 +154,8 @@ pub async fn handle_v1_links( .and_then(|path| path.file_name().map(|name| name.to_string_lossy().into_owned())) .unwrap_or_else(|| "".to_string()); let tooltip_message = format!( - "git commit -m \"{}{}\"\n{}", - commit.commit_message.lines().next().unwrap_or(""), + "git commit -m \"{}{}\"\n{}", + commit.commit_message.lines().next().unwrap_or(""), if commit.commit_message.lines().count() > 1 { "..." } else { "" }, commit.file_changes.iter().map(|f| format!("{} {}", f.status.initial(), f.path)).collect::>().join("\n"), ); @@ -178,7 +178,7 @@ pub async fn handle_v1_links( project_changes.truncate(4); project_changes.push("...".to_string()); } - uncommited_changes_warning = format!("You have uncommitted changes, which may cause issues when rolling back agent changes:\n{}", project_changes.join("\n")); + uncommited_changes_warning = format!("You have uncommitted changes:\n```\n{}\n```\nāš ļø You might have a problem rolling back agent's changes.", project_changes.join("\n")); } } @@ -229,7 +229,7 @@ pub async fn handle_v1_links( .status(StatusCode::OK) .header("Content-Type", "application/json") .body(Body::from(serde_json::to_string_pretty(&serde_json::json!({ - "links": links, + "links": links, "uncommited_changes_warning": uncommited_changes_warning, })).unwrap())).unwrap()) } From 27ce283ad69906f6208f2c6c2aa2bcd1efd23f6a Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 11 Dec 2024 08:04:16 +0100 Subject: [PATCH 130/185] output filter: fix stupid bug --- src/postprocessing/pp_command_output.rs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/postprocessing/pp_command_output.rs b/src/postprocessing/pp_command_output.rs index 24e52df72..61ae47a08 100644 --- a/src/postprocessing/pp_command_output.rs +++ b/src/postprocessing/pp_command_output.rs @@ -63,7 +63,7 @@ pub fn output_mini_postprocessing(filter: &CmdlineOutputFilter, output: &str) -> if filter.valuable_top_or_bottom == "bottom" { for i in 0..lines.len() { - ratings[i] += 0.9 * (i as f64) / lines.len() as f64; + ratings[i] += 0.9 * ((i + 1) as f64) / lines.len() as f64; } } else { for i in 0..lines.len() { @@ -174,6 +174,14 @@ line6 remove_from_output: "".to_string(), }, output_to_filter); assert_eq!(result, "...2 lines skipped...\nline3\nline4\nline5\n...1 lines skipped...\n"); + + let result = output_mini_postprocessing(&CmdlineOutputFilter { 
+ limit_lines: 100, + limit_chars: 8000, + valuable_top_or_bottom: "bottom".to_string(), + ..Default::default() + }, output_to_filter); + assert_eq!(result, "line1\nline2\nline3\nline4\nline5\nline6\n"); } } From f389a98795bafc89a93ced8b852ca17353f53ad6 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 11 Dec 2024 08:34:16 +0100 Subject: [PATCH 131/185] repair confirmation for integr_cmdline --- src/integrations/integr_cmdline.rs | 61 +++++++++++++++++++----------- 1 file changed, 38 insertions(+), 23 deletions(-) diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index f770cecfd..10bb684c6 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -190,6 +190,32 @@ pub async fn execute_blocking_command( } } +fn parse_command_args(args: &HashMap, cfg: &CmdlineToolConfig) -> Result<(String, String), String> +{ + let mut args_str: HashMap = HashMap::new(); + let valid_params: Vec = cfg.parameters.iter().map(|p| p.name.clone()).collect(); + + for (k, v) in args.iter() { + if !valid_params.contains(k) { + return Err(format!("Unexpected argument `{}`", k)); + } + match v { + serde_json::Value::String(s) => { args_str.insert(k.clone(), s.clone()); }, + _ => return Err(format!("argument `{}` is not a string: {:?}", k, v)), + } + } + + for param in &cfg.parameters { + if cfg.parameters_required.as_ref().map_or(false, |req| req.contains(¶m.name)) && !args_str.contains_key(¶m.name) { + return Err(format!("Missing required argument `{}`", param.name)); + } + } + + let command = replace_args(cfg.command.as_str(), &args_str); + let workdir = replace_args(cfg.command_workdir.as_str(), &args_str); + Ok((command, workdir)) +} + #[async_trait] impl Tool for ToolCmdline { fn as_any(&self) -> &dyn std::any::Any { self } @@ -200,36 +226,17 @@ impl Tool for ToolCmdline { tool_call_id: &String, args: &HashMap, ) -> Result<(bool, Vec), String> { - let gcx = ccx.lock().await.global_context.clone(); - let mut args_str: HashMap = HashMap::new(); - let valid_params: Vec = self.cfg.parameters.iter().map(|p| p.name.clone()).collect(); + let (command, workdir) = parse_command_args(args, &self.cfg)?; - for (k, v) in args.iter() { - if !valid_params.contains(k) { - return Err(format!("Unexpected argument `{}`", k)); - } - match v { - serde_json::Value::String(s) => { args_str.insert(k.clone(), s.clone()); }, - _ => return Err(format!("argument `{}` is not a string: {:?}", k, v)), - } - } - - for param in &self.cfg.parameters { - if self.cfg.parameters_required.as_ref().map_or(false, |req| req.contains(¶m.name)) && !args_str.contains_key(¶m.name) { - return Err(format!("Missing required argument `{}`", param.name)); - } - } - - let command = replace_args(self.cfg.command.as_str(), &args_str); - let workdir = replace_args(self.cfg.command_workdir.as_str(), &args_str); + let gcx = ccx.lock().await.global_context.clone(); let env_variables = crate::integrations::setting_up_integrations::get_vars_for_replacements(gcx.clone()).await; let project_dirs = crate::files_correction::get_project_dirs(gcx.clone()).await; - let tool_ouput = execute_blocking_command(&command, &self.cfg, &workdir, &env_variables, project_dirs).await?; + let tool_output = execute_blocking_command(&command, &self.cfg, &workdir, &env_variables, project_dirs).await?; let result = vec![ContextEnum::ChatMessage(ChatMessage { role: "tool".to_string(), - content: ChatContent::SimpleText(tool_ouput), + content: ChatContent::SimpleText(tool_output), tool_calls: None, tool_call_id: 
tool_call_id.clone(), ..Default::default() @@ -255,6 +262,14 @@ impl Tool for ToolCmdline { parameters_required, } } + + fn command_to_match_against_confirm_deny( + &self, + args: &HashMap, + ) -> Result { + let (command, _workdir) = parse_command_args(args, &self.cfg)?; + return Ok(command); + } } pub const CMDLINE_INTEGRATION_SCHEMA: &str = r#" From 3ff0d983cca2e31a2526898c786007b17039dc4b Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 11 Dec 2024 10:01:10 +0100 Subject: [PATCH 132/185] IntegrationCommon struct --- src/integrations/docker/integr_docker.rs | 17 ++- src/integrations/docker/integr_isolation.rs | 17 ++- src/integrations/integr_abstract.rs | 25 +++- src/integrations/integr_chrome.rs | 19 ++- src/integrations/integr_cmdline.rs | 17 ++- src/integrations/integr_cmdline_service.rs | 15 +- src/integrations/integr_postgres.rs | 15 +- src/integrations/mod.rs | 158 +------------------- src/tools/tools_execute.rs | 8 +- 9 files changed, 117 insertions(+), 174 deletions(-) diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index 71e7bc46f..586b386c6 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -9,7 +9,7 @@ use serde_json::Value; use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ChatContent, ChatMessage, ContextEnum}; use crate::global_context::GlobalContext; -use crate::integrations::integr_abstract::IntegrationTrait; +use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon}; use crate::tools::tools_description::Tool; use crate::integrations::docker::docker_ssh_tunnel_utils::{SshConfig, forward_remote_docker_if_needed}; use crate::integrations::utils::{serialize_num_to_str, deserialize_str_to_num}; @@ -43,8 +43,9 @@ impl SettingsDocker { } } -#[derive(Clone, Default, Debug)] +#[derive(Clone, Default)] pub struct ToolDocker { + pub common: IntegrationCommon, pub settings_docker: SettingsDocker, } @@ -62,6 +63,13 @@ impl IntegrationTrait for ToolDocker { return Err(e.to_string()); } } + match serde_json::from_value::(value.clone()) { + Ok(x) => self.common = x, + Err(e) => { + tracing::error!("Failed to apply common settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } Ok(()) } @@ -69,8 +77,13 @@ impl IntegrationTrait for ToolDocker { serde_json::to_value(&self.settings_docker).unwrap() } + fn integr_common(&self) -> IntegrationCommon { + self.common.clone() + } + fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { Box::new(ToolDocker { + common: self.common.clone(), settings_docker: self.settings_docker.clone() }) as Box } diff --git a/src/integrations/docker/integr_isolation.rs b/src/integrations/docker/integr_isolation.rs index d0715bd7f..5f7fd7478 100644 --- a/src/integrations/docker/integr_isolation.rs +++ b/src/integrations/docker/integr_isolation.rs @@ -3,9 +3,10 @@ use serde_json::Value; use crate::integrations::utils::{serialize_num_to_str, deserialize_str_to_num, serialize_ports, deserialize_ports}; use crate::integrations::docker::docker_container_manager::Port; -use crate::integrations::integr_abstract::IntegrationTrait; +use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon}; use crate::tools::tools_description::Tool; + #[derive(Clone, Serialize, Deserialize, Default, Debug)] pub struct SettingsIsolation { pub container_workspace_folder: String, @@ -17,8 +18,9 @@ pub struct SettingsIsolation { pub keep_containers_alive_for_x_minutes: u64, } -#[derive(Clone, 
Default, Debug)] +#[derive(Clone, Default)] pub struct IntegrationIsolation { + pub common: IntegrationCommon, pub settings_isolation: SettingsIsolation, } @@ -36,6 +38,13 @@ impl IntegrationTrait for IntegrationIsolation { return Err(e.to_string()); } } + match serde_json::from_value::(value.clone()) { + Ok(x) => self.common = x, + Err(e) => { + tracing::error!("Failed to apply common settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } Ok(()) } @@ -43,6 +52,10 @@ impl IntegrationTrait for IntegrationIsolation { serde_json::to_value(&self.settings_isolation).unwrap() } + fn integr_common(&self) -> IntegrationCommon { + self.common.clone() + } + fn can_upgrade_to_tool(&self) -> bool { false } fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { diff --git a/src/integrations/integr_abstract.rs b/src/integrations/integr_abstract.rs index 9472d133e..a48a64f4d 100644 --- a/src/integrations/integr_abstract.rs +++ b/src/integrations/integr_abstract.rs @@ -1,8 +1,31 @@ +use serde::Deserialize; +use serde::Serialize; + + pub trait IntegrationTrait: Send + Sync { fn as_any(&self) -> &dyn std::any::Any; fn integr_schema(&self) -> &str; fn integr_settings_apply(&mut self, value: &serde_json::Value) -> Result<(), String>; fn integr_settings_as_json(&self) -> serde_json::Value; + fn integr_common(&self) -> IntegrationCommon; fn can_upgrade_to_tool(&self) -> bool { true } - fn integr_upgrade_to_tool(&self, integr_name: &str) -> Box; // integr_name is sometimes different, "cmdline_compile_by_project" != "cmdline" + fn integr_upgrade_to_tool(&self, integr_name: &str) -> Box; // integr_name is sometimes different, "cmdline_compile_my_project" != "cmdline" +} + +#[derive(Deserialize, Serialize, Clone, Default)] +pub struct IntegrationAvailable { + pub on_your_laptop: bool, + pub when_isolated: bool, +} + +#[derive(Deserialize, Serialize, Clone, Default)] +pub struct IntegrationConfirmation { + pub ask_user: Vec, + pub deny: Vec, +} + +#[derive(Deserialize, Serialize, Clone, Default)] +pub struct IntegrationCommon { + pub available: IntegrationConfirmation, + pub confirmation: IntegrationAvailable, } diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index b5110b50f..7e76bcfa7 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -15,7 +15,7 @@ use crate::call_validation::{ChatContent, ChatMessage}; use crate::scratchpads::multimodality::MultimodalElement; use crate::postprocessing::pp_command_output::{CmdlineOutputFilter, output_mini_postprocessing}; use crate::tools::tools_description::{Tool, ToolDesc, ToolParam}; -use crate::integrations::integr_abstract::IntegrationTrait; +use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon}; use tokio::time::sleep; use chrono::DateTime; @@ -66,8 +66,9 @@ pub struct SettingsChrome { pub tablet_scale_factor: String, } -#[derive(Debug, Default)] +#[derive(Default)] pub struct ToolChrome { + pub common: IntegrationCommon, pub settings_chrome: SettingsChrome, pub supports_clicks: bool, } @@ -155,6 +156,13 @@ impl IntegrationTrait for ToolChrome { return Err(e.to_string()); } } + match serde_json::from_value::(value.clone()) { + Ok(x) => self.common = x, + Err(e) => { + tracing::error!("Failed to apply common settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } Ok(()) } @@ -162,8 +170,13 @@ impl IntegrationTrait for ToolChrome { serde_json::to_value(&self.settings_chrome).unwrap() } + fn integr_common(&self) -> IntegrationCommon { + 
self.common.clone() + } + fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { Box::new(ToolChrome { + common: self.common.clone(), settings_chrome: self.settings_chrome.clone(), supports_clicks: false, }) as Box @@ -332,7 +345,7 @@ async fn setup_chrome_session( headless: args.headless.parse::().unwrap_or(true), ..Default::default() }; - + setup_log.push("Started new chrome process.".to_string()); Browser::new(launch_options).map_err(|e| e.to_string()) }?; diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 10bb684c6..13528853c 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -13,7 +13,7 @@ use crate::at_commands::at_commands::AtCommandsContext; use crate::tools::tools_description::{ToolParam, Tool, ToolDesc}; use crate::call_validation::{ChatMessage, ChatContent, ContextEnum}; use crate::postprocessing::pp_command_output::{CmdlineOutputFilter, output_mini_postprocessing}; -use crate::integrations::integr_abstract::IntegrationTrait; +use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon}; use crate::integrations::utils::{serialize_num_to_str, deserialize_str_to_num, serialize_opt_num_to_str, deserialize_str_to_opt_num}; @@ -47,7 +47,7 @@ fn _default_startup_wait() -> u64 { #[derive(Default)] pub struct ToolCmdline { - // is_service: bool, + pub common: IntegrationCommon, pub name: String, pub cfg: CmdlineToolConfig, } @@ -63,6 +63,13 @@ impl IntegrationTrait for ToolCmdline { return Err(e.to_string()); } } + match serde_json::from_value::(value.clone()) { + Ok(x) => self.common = x, + Err(e) => { + tracing::error!("Failed to apply common settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } Ok(()) } @@ -70,9 +77,13 @@ impl IntegrationTrait for ToolCmdline { serde_json::to_value(&self.cfg).unwrap() } + fn integr_common(&self) -> IntegrationCommon { + self.common.clone() + } + fn integr_upgrade_to_tool(&self, integr_name: &str) -> Box { Box::new(ToolCmdline { - // is_service: self.is_service, + common: self.common.clone(), name: integr_name.to_string(), cfg: self.cfg.clone(), }) as Box diff --git a/src/integrations/integr_cmdline_service.rs b/src/integrations/integr_cmdline_service.rs index 11c9287c1..18c231038 100644 --- a/src/integrations/integr_cmdline_service.rs +++ b/src/integrations/integr_cmdline_service.rs @@ -15,7 +15,7 @@ use crate::global_context::GlobalContext; use crate::integrations::process_io_utils::{blocking_read_until_token_or_timeout, is_someone_listening_on_that_tcp_port}; use crate::integrations::sessions::IntegrationSession; use crate::postprocessing::pp_command_output::output_mini_postprocessing; -use crate::integrations::integr_abstract::IntegrationTrait; +use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon}; use crate::integrations::integr_cmdline::*; @@ -23,6 +23,7 @@ const REALLY_HORRIBLE_ROUNDTRIP: u64 = 3000; // 3000 should be a really bad pi #[derive(Default)] pub struct ToolService { + pub common: IntegrationCommon, pub name: String, pub cfg: CmdlineToolConfig, } @@ -38,6 +39,13 @@ impl IntegrationTrait for ToolService { return Err(e.to_string()); } } + match serde_json::from_value::(value.clone()) { + Ok(x) => self.common = x, + Err(e) => { + tracing::error!("Failed to apply common settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } Ok(()) } @@ -45,8 +53,13 @@ impl IntegrationTrait for ToolService { serde_json::to_value(&self.cfg).unwrap() } + fn integr_common(&self) -> IntegrationCommon 
{ + self.common.clone() + } + fn integr_upgrade_to_tool(&self, integr_name: &str) -> Box { Box::new(ToolService { + common: self.common.clone(), name: integr_name.to_string(), cfg: self.cfg.clone(), }) as Box diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index 7fa12bc2e..e22006ff3 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -10,7 +10,7 @@ use std::collections::HashMap; use std::sync::Arc; use tokio::process::Command; use tokio::sync::Mutex as AMutex; -use crate::integrations::integr_abstract::IntegrationTrait; +use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon}; #[derive(Clone, Serialize, Deserialize, Debug, Default)] @@ -26,6 +26,7 @@ pub struct SettingsPostgres { #[derive(Default)] pub struct ToolPostgres { + pub common: IntegrationCommon, pub settings_postgres: SettingsPostgres, } @@ -40,6 +41,13 @@ impl IntegrationTrait for ToolPostgres { return Err(e.to_string()); } } + match serde_json::from_value::(value.clone()) { + Ok(x) => self.common = x, + Err(e) => { + tracing::error!("Failed to apply common settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } Ok(()) } @@ -47,8 +55,13 @@ impl IntegrationTrait for ToolPostgres { serde_json::to_value(&self.settings_postgres).unwrap() } + fn integr_common(&self) -> IntegrationCommon { + self.common.clone() + } + fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { Box::new(ToolPostgres { + common: self.common.clone(), settings_postgres: self.settings_postgres.clone() }) as Box } diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index 17fed0430..dcf0d77a5 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -27,7 +27,7 @@ pub mod setting_up_integrations; pub mod running_integrations; pub mod utils; -use integr_abstract::IntegrationTrait; +use integr_abstract::{IntegrationTrait, IntegrationCommon}; pub fn integration_from_name(n: &str) -> Result, String> @@ -91,162 +91,6 @@ pub fn go_to_configuration_message(integration_name: &str) -> String { } - -// pub fn get_integration_path(cache_dir: &PathBuf, name: &str) -> PathBuf { -// cache_dir.join("integrations.d").join(format!("{}.yaml", name)) -// } - - -// pub async fn validate_integration_value(name: &str, value: serde_yaml::Value) -> Result { -// let integrations = get_empty_integrations(); -// match integrations.get(name) { -// Some(i) => { -// let j_value: serde_json::Value = i.integr_yaml2json(&value)?; -// let yaml_value: serde_yaml::Value = serde_yaml::to_value(&j_value).map_err(|e| e.to_string())?; -// Ok(yaml_value) -// }, -// None => Err(format!("Integration {} is not defined", name)) -// } -// } - -// pub async fn load_integration_tools( -// gcx: Arc>, -// ) -> IndexMap>>> { -// let paths = integrations_paths(gcx.clone()).await; -// let integrations_yaml_value = { -// let cache_dir = gcx.read().await.cache_dir.clone(); -// let yaml_path = cache_dir.join("integrations.yaml"); -// read_yaml_into_value(&yaml_path).await? 
-// }; -// let cache_dir = gcx.read().await.cache_dir.clone(); -// // let enabled_path = cache_dir.join("integrations-enabled.yaml"); -// // let enabled = match integrations_enabled_cfg(&enabled_path).await { -// // serde_yaml::Value::Mapping(map) => map.into_iter().filter_map(|(k, v)| { -// // if let (serde_yaml::Value::String(key), serde_yaml::Value::Bool(value)) = (k, v) { -// // Some((key, value)) -// // } else { -// // None -// // } -// // }).collect::>(), -// // _ => std::collections::HashMap::new(), -// // }; - -// let integrations = get_integrations(gcx.clone()).await?; - -// let mut tools = IndexMap::new(); -// for (i_name, i) in integrations.iter() { -// // if !enabled.get(i_name).unwrap_or(&false) { -// // info!("Integration {} is disabled", i_name); -// // continue; -// // } -// let tool = i.integr_upgrade_to_tool(); -// tools.insert(i_name.clone(), Arc::new(AMutex::new(tool))); -// } -// Ok(tools) -// } - -// pub async fn json_for_integration( -// yaml_path: &PathBuf, -// value_from_integrations: Option<&serde_yaml::Value>, -// integration: &Box, -// ) -> Result { -// let tool_name = integration.integr_name().clone(); - -// let value = if yaml_path.exists() { -// match read_yaml_into_value(yaml_path).await { -// Ok(value) => integration.integr_yaml2json(&value).unwrap_or_else(|e| { -// let e = format!("Problem converting integration to JSON: {}", e); -// json!({"detail": e.to_string()}) -// }), -// Err(e) => { -// let e = format!("Problem reading YAML from {}: {}", yaml_path.display(), e); -// json!({"detail": e.to_string()}) -// } -// } -// } else { -// json!({"detail": format!("Cannot read {}. Probably, file does not exist", yaml_path.display())}) -// }; - -// let value_from_integrations = value_from_integrations.map_or(json!({"detail": format!("tool {tool_name} is not defined in integrations.yaml")}), |value| { -// integration.integr_yaml2json(value).unwrap_or_else(|e| { -// let e = format!("Problem converting integration to JSON: {}", e); -// json!({"detail": e.to_string()}) -// }) -// }); - -// match (value.get("detail"), value_from_integrations.get("detail")) { -// (None, None) => { -// Err(format!("Tool {tool_name} exists in both {tool_name}.yaml and integrations.yaml. 
Consider removing one of them.")) -// }, -// (Some(_), None) => { -// Ok(value_from_integrations) -// }, -// (None, Some(_)) => { -// Ok(value) -// } -// (Some(_), Some(_)) => { -// Ok(value) -// } -// } - -// Ok(()) -// } - -// async fn load_tool_from_yaml( -// yaml_path: Option<&PathBuf>, -// tool_constructor: fn(&serde_yaml::Value) -> Result, -// value_from_integrations: Option<&serde_yaml::Value>, -// enabled: Option<&bool>, -// integrations: &mut IndexMap>>>, -// ) -> Result<(), String> { -// let yaml_path = yaml_path.as_ref().expect("No yaml path"); -// let tool_name = yaml_path.file_stem().expect("No file name").to_str().expect("No file name").to_string(); -// if !enabled.unwrap_or(&false) { -// tracing::info!("Integration {} is disabled", tool_name); -// return Ok(()); -// } -// let tool = if yaml_path.exists() { -// match read_yaml_into_value(yaml_path).await { -// Ok(value) => { -// match tool_constructor(&value) { -// Ok(tool) => { -// // integrations.insert(tool_name, Arc::new(AMutex::new(Box::new(tool) as Box))); -// Some(tool) -// } -// Err(e) => { -// tracing::warn!("Problem in {}: {}", yaml_path.display(), e); -// None -// } -// } -// } -// Err(e) => { -// tracing::warn!("Problem reading {:?}: {}", yaml_path, e); -// None -// } -// } -// } else { -// None -// }; - -// let tool_from_integrations = value_from_integrations -// .and_then(|value| match tool_constructor(&value) { -// Ok(tool) => Some(tool), -// Err(_) => None -// }); - -// match (tool, tool_from_integrations) { -// (Some(_), Some(_)) => { -// return Err(format!("Tool {tool_name} exists in both {tool_name}.yaml and integrations.yaml. Consider removing one of them.")); -// }, -// (Some(tool), None) | (None, Some(tool)) => { -// integrations.insert(tool_name.clone(), Arc::new(AMutex::new(Box::new(tool) as Box))); -// }, -// _ => {} -// } - -// Ok(()) -// } - pub const INTEGRATIONS_DEFAULT_YAML: &str = r#"# This file is used to configure integrations in Refact Agent. # If there is a syntax error in this file, no integrations will work. 
# diff --git a/src/tools/tools_execute.rs b/src/tools/tools_execute.rs index a62f6d77e..9c041fc09 100644 --- a/src/tools/tools_execute.rs +++ b/src/tools/tools_execute.rs @@ -97,7 +97,7 @@ pub async fn run_tools_remotely( pub async fn run_tools_locally( ccx: Arc>, - at_tools: IndexMap>>>, + tools: IndexMap>>>, tokenizer: Arc>, maxgen: usize, original_messages: &Vec, @@ -105,7 +105,7 @@ pub async fn run_tools_locally( style: &Option, ) -> Result<(Vec, bool), String> { let (new_messages, tools_runned) = run_tools( // todo: fix typo "runned" - ccx, at_tools, tokenizer, maxgen, original_messages, style + ccx, tools, tokenizer, maxgen, original_messages, style ).await?; let mut all_messages = original_messages.to_vec(); @@ -119,7 +119,7 @@ pub async fn run_tools_locally( pub async fn run_tools( ccx: Arc>, - at_tools: IndexMap>>>, + tools: IndexMap>>>, tokenizer: Arc>, maxgen: usize, original_messages: &Vec, @@ -152,7 +152,7 @@ pub async fn run_tools( let mut confirmation_rules = None; for t_call in last_msg_tool_calls { - let cmd = match at_tools.get(&t_call.function.name) { + let cmd = match tools.get(&t_call.function.name) { Some(cmd) => cmd.clone(), None => { let tool_failed_message = tool_answer( From 722b9ec23e28a424a460b69457a944510aa0f4ca Mon Sep 17 00:00:00 2001 From: JegernOUTT Date: Tue, 10 Dec 2024 17:59:40 +1030 Subject: [PATCH 133/185] feat: Add optional memid parameter to permdb_add and related functions - Updated `permdb_add` function in `MemoriesDatabase` to accept an optional `memid` parameter. - Modified `handle_mem_add` to pass `None` for the new `memid` parameter. - Refactored `try_to_download_trajectories` to log more detailed information and save the last download time. - Added constants for trajectory status filename and update interval. - Ensured `memories_add` and related functions handle the optional `memid` parameter correctly. - Included a one-time trajectory download attempt in `vecdb_background_reload`. - Improved logging for trajectory download process. 
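For illustration, call sites for the new signature might look like this (a sketch only, not part of the diff; `vec_db` and `payload` are placeholder bindings, the goal/project strings and the explicit memid value are made up, and the signatures assumed are the ones introduced below):

    // reuse a known id, e.g. when mirroring a record downloaded from the server
    let memid = memories_add(vec_db.clone(), "trajectory", "fix flaky test", "my_project", payload, Some("0a1b2c3d4e".to_string())).await?;

    // or pass None so permdb_add() keeps generating a fresh 10-hex-char id, as before
    let memid = memories_add(vec_db.clone(), "note", "remember build flags", "my_project", payload, None).await?;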
--- src/http/routers/v1/handlers_memdb.rs | 3 +- src/knowledge.rs | 9 +- src/main.rs | 2 +- src/trajectories.rs | 118 ++++++++++++++++++++++++++ src/vecdb/vdb_highlev.rs | 14 ++- 5 files changed, 137 insertions(+), 9 deletions(-) create mode 100644 src/trajectories.rs diff --git a/src/http/routers/v1/handlers_memdb.rs b/src/http/routers/v1/handlers_memdb.rs index a3df4d648..4d4c5a4e7 100644 --- a/src/http/routers/v1/handlers_memdb.rs +++ b/src/http/routers/v1/handlers_memdb.rs @@ -53,7 +53,8 @@ pub async fn handle_mem_add( &post.mem_type, &post.goal, &post.project, - &post.payload + &post.payload, + None ).await.map_err(|e| { ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("{}", e)) })?; diff --git a/src/knowledge.rs b/src/knowledge.rs index 8a83aa8c6..2838f7843 100644 --- a/src/knowledge.rs +++ b/src/knowledge.rs @@ -124,7 +124,7 @@ impl MemoriesDatabase { Ok(()) } - pub fn permdb_add(&self, mem_type: &str, goal: &str, project: &str, payload: &str) -> Result { + pub fn permdb_add(&self, mem_type: &str, goal: &str, project: &str, payload: &str, memid: Option) -> Result { fn generate_memid() -> String { rand::thread_rng() .sample_iter(&rand::distributions::Uniform::new(0, 16)) @@ -132,14 +132,13 @@ impl MemoriesDatabase { .map(|x| format!("{:x}", x)) .collect() } - + let memid_str = memid.unwrap_or(generate_memid()); let conn = self.conn.lock(); - let memid = generate_memid(); conn.execute( "INSERT INTO memories (memid, m_type, m_goal, m_project, m_payload) VALUES (?1, ?2, ?3, ?4, ?5)", - params![memid, mem_type, goal, project, payload], + params![memid_str, mem_type, goal, project, payload], ).map_err(|e| e.to_string())?; - Ok(memid) + Ok(memid_str) } pub fn permdb_erase(&self, memid: &str) -> Result { diff --git a/src/main.rs b/src/main.rs index b30a74b90..807f6dbb6 100644 --- a/src/main.rs +++ b/src/main.rs @@ -66,7 +66,7 @@ mod privacy; mod privacy_compiled_in; mod git; mod agentic; - +mod trajectories; #[tokio::main] async fn main() { diff --git a/src/trajectories.rs b/src/trajectories.rs new file mode 100644 index 000000000..30ecc81f6 --- /dev/null +++ b/src/trajectories.rs @@ -0,0 +1,118 @@ +use std::collections::HashSet; +use crate::global_context::GlobalContext; +use crate::vecdb::vdb_highlev::{memories_add, memories_block_until_vectorized, memories_select_all}; +use serde_json::Value; +use std::sync::Arc; +use tokio::sync::RwLock as ARwLock; +use tracing::info; +use chrono::{NaiveDateTime, Utc}; + +static URL: &str = "https://www.smallcloud.ai/v1/trajectory-get-all"; +static TRAJECTORIES_STATUS_FILENAME: &str = "trajectories_last_update"; +static TRAJECTORIES_UPDATE_EACH_N_DAYS: i64 = 7; + + +async fn save_last_download_time(gcx: Arc>) -> Result<(), String> { + let cache_dir = gcx.read().await.cache_dir.clone(); + let now = Utc::now().naive_utc(); + let now_str = now.format("%Y-%m-%d %H:%M:%S").to_string(); + let file_path = cache_dir.join(TRAJECTORIES_STATUS_FILENAME); + tokio::fs::write(file_path, now_str).await.map_err(|x| x.to_string()) +} + +async fn is_time_to_download_trajectories(gcx: Arc>) -> Result { + let cache_dir = gcx.read().await.cache_dir.clone(); + let file_path = cache_dir.join(TRAJECTORIES_STATUS_FILENAME); + let last_download_time = match tokio::fs::read_to_string(file_path).await { + Ok(time_str) => { + NaiveDateTime::parse_from_str(&time_str, "%Y-%m-%d %H:%M:%S") + .map_err(|x| x.to_string())? 
+ } + Err(_) => { + return Ok(true); + } + }; + let now = Utc::now().naive_utc(); + let duration_since_last_download = now.signed_duration_since(last_download_time); + Ok(duration_since_last_download.num_days() >= TRAJECTORIES_UPDATE_EACH_N_DAYS) +} + +pub async fn try_to_download_trajectories(gcx: Arc>) -> Result<(), String> { + if !is_time_to_download_trajectories(gcx.clone()).await? { + return Ok(()); + } + + let (vec_db, api_key) = { + let gcx_locked = gcx.read().await; + ( + gcx_locked.vec_db.clone(), + gcx_locked.cmdline.api_key.clone(), + ) + }; + if vec_db.lock().await.is_none() { + info!("VecDb is not initialized"); + return Ok(()); + } + memories_block_until_vectorized(vec_db.clone(), 20_000).await?; + + info!("starting to download trajectories..."); + let client = reqwest::Client::new(); + let response = client + .get(URL) + .header("Authorization", format!("Bearer {}", api_key)) + .send() + .await + .map_err(|err| err.to_string())?; + let response_json: Value = response.json().await.map_err(|err| err.to_string())?; + + if response_json["retcode"] != "OK" { + info!("failed to download trajectories: {:?}", response_json); + return Ok(()); + } + + let trajectories = response_json["data"].as_array().unwrap(); + let existing_trajectories = memories_select_all(vec_db.clone()) + .await? + .iter() + .map(|x| x.memid.clone()) + .collect::>(); + for trajectory in trajectories { + let m_memid = trajectory["memid"].as_str().ok_or("Failed to get memid")?; + if existing_trajectories.contains(m_memid) { + info!("trajectory {} already exists in the vecdb", m_memid); + continue; + } + + let m_type = trajectory["kind"].as_str().unwrap_or("unknown"); + let m_goal = trajectory["goal"].as_str().unwrap_or("unknown"); + let m_project = trajectory["framework"].as_str().unwrap_or("unknown"); + let m_payload = trajectory["payload"].as_str().unwrap_or(""); + if m_payload.is_empty() { + info!("empty or no payload for the trajectory: {}, skipping it", m_memid); + continue; + } + match memories_add( + vec_db.clone(), + m_type, + m_goal, + m_project, + m_payload, + Some(m_memid.to_string()), + ).await { + Ok(memid) => info!("Memory added with ID: {}", memid), + Err(err) => info!("Failed to add memory: {}", err), + } + info!( + "downloaded trajectory: memid={}, type={}, goal={}, project={}, payload={}", + m_memid, + m_type, + m_goal, + m_project, + crate::nicer_logs::first_n_chars(&m_payload.to_string(), 100) + ); + } + + info!("finished downloading trajectories"); + save_last_download_time(gcx.clone()).await?; + Ok(()) +} diff --git a/src/vecdb/vdb_highlev.rs b/src/vecdb/vdb_highlev.rs index d3019e484..aa437dfe0 100644 --- a/src/vecdb/vdb_highlev.rs +++ b/src/vecdb/vdb_highlev.rs @@ -11,6 +11,7 @@ use crate::fetch_embedding; use crate::files_in_workspace::Document; use crate::global_context::{CommandLine, GlobalContext}; use crate::knowledge::{lance_search, MemoriesDatabase}; +use crate::trajectories::try_to_download_trajectories; use crate::vecdb::vdb_cache::VecDBCache; use crate::vecdb::vdb_lance::VecDBHandler; use crate::vecdb::vdb_structs::{MemoRecord, MemoSearchResult, SearchResult, VecDbStatus, VecdbConstants, VecdbSearch}; @@ -193,6 +194,7 @@ pub async fn vecdb_background_reload( return; } + let trajectories_updated_once: bool = false; let mut background_tasks = BackgroundTasksHolder::new(vec![]); loop { let (need_reload, consts) = do_i_need_to_reload_vecdb(gcx.clone()).await; @@ -216,6 +218,14 @@ pub async fn vecdb_background_reload( } } } + if !trajectories_updated_once { + match 
try_to_download_trajectories(gcx.clone()).await { + Ok(_) => {} + Err(err) => { + error!("trajectories download failed: {}", err); + } + }; + } tokio::time::sleep(tokio::time::Duration::from_secs(60)).await; } } @@ -275,6 +285,7 @@ pub async fn memories_add( m_goal: &str, m_project: &str, m_payload: &str, // TODO: upgrade to serde_json::Value + m_memid: Option ) -> Result { let (memdb, vectorizer_service) = { let vec_db_guard = vec_db.lock().await; @@ -284,7 +295,7 @@ pub async fn memories_add( let memid = { let mut memdb_locked = memdb.lock().await; - let x = memdb_locked.permdb_add(m_type, m_goal, m_project, m_payload)?; + let x = memdb_locked.permdb_add(m_type, m_goal, m_project, m_payload, m_memid)?; memdb_locked.dirty_memids.push(x.clone()); x }; @@ -292,7 +303,6 @@ pub async fn memories_add( Ok(memid) } - pub async fn memories_block_until_vectorized_from_vectorizer( vectorizer_service: Arc>, max_blocking_time_ms: usize From 4211a604950fd660944e3d959c7197bf3c10a2f7 Mon Sep 17 00:00:00 2001 From: JegernOUTT Date: Tue, 10 Dec 2024 18:05:21 +1030 Subject: [PATCH 134/185] logging messages updated --- src/trajectories.rs | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/src/trajectories.rs b/src/trajectories.rs index 30ecc81f6..aedd3c0d2 100644 --- a/src/trajectories.rs +++ b/src/trajectories.rs @@ -50,8 +50,7 @@ pub async fn try_to_download_trajectories(gcx: Arc>) -> R ) }; if vec_db.lock().await.is_none() { - info!("VecDb is not initialized"); - return Ok(()); + return Err("vecdb is not initialized".to_string()); } memories_block_until_vectorized(vec_db.clone(), 20_000).await?; @@ -64,10 +63,8 @@ pub async fn try_to_download_trajectories(gcx: Arc>) -> R .await .map_err(|err| err.to_string())?; let response_json: Value = response.json().await.map_err(|err| err.to_string())?; - if response_json["retcode"] != "OK" { - info!("failed to download trajectories: {:?}", response_json); - return Ok(()); + return Err(format!("failed to download trajectories: {:?}", response_json)); } let trajectories = response_json["data"].as_array().unwrap(); @@ -77,7 +74,7 @@ pub async fn try_to_download_trajectories(gcx: Arc>) -> R .map(|x| x.memid.clone()) .collect::>(); for trajectory in trajectories { - let m_memid = trajectory["memid"].as_str().ok_or("Failed to get memid")?; + let m_memid = trajectory["memid"].as_str().ok_or("the trajectory doesn't have memid field")?; if existing_trajectories.contains(m_memid) { info!("trajectory {} already exists in the vecdb", m_memid); continue; @@ -99,8 +96,8 @@ pub async fn try_to_download_trajectories(gcx: Arc>) -> R m_payload, Some(m_memid.to_string()), ).await { - Ok(memid) => info!("Memory added with ID: {}", memid), - Err(err) => info!("Failed to add memory: {}", err), + Ok(memid) => info!("memory added with ID: {}", memid), + Err(err) => info!("failed to add memory: {}", err), } info!( "downloaded trajectory: memid={}, type={}, goal={}, project={}, payload={}", From e08de54455c7de428900d22a2b351441dd34e364 Mon Sep 17 00:00:00 2001 From: JegernOUTT Date: Tue, 10 Dec 2024 18:11:34 +1030 Subject: [PATCH 135/185] set trajectories_updated_once = true --- src/vecdb/vdb_highlev.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/vecdb/vdb_highlev.rs b/src/vecdb/vdb_highlev.rs index aa437dfe0..f47a8d0f5 100644 --- a/src/vecdb/vdb_highlev.rs +++ b/src/vecdb/vdb_highlev.rs @@ -194,7 +194,7 @@ pub async fn vecdb_background_reload( return; } - let trajectories_updated_once: bool = false; + let mut 
trajectories_updated_once: bool = false; let mut background_tasks = BackgroundTasksHolder::new(vec![]); loop { let (need_reload, consts) = do_i_need_to_reload_vecdb(gcx.clone()).await; @@ -220,11 +220,12 @@ pub async fn vecdb_background_reload( } if !trajectories_updated_once { match try_to_download_trajectories(gcx.clone()).await { - Ok(_) => {} + Ok(_) => { } Err(err) => { error!("trajectories download failed: {}", err); } }; + trajectories_updated_once = true; } tokio::time::sleep(tokio::time::Duration::from_secs(60)).await; } From fccf9138f802e4a2d0fdf47f28187ed87ca90dbe Mon Sep 17 00:00:00 2001 From: JegernOUTT Date: Tue, 10 Dec 2024 21:25:28 +1030 Subject: [PATCH 136/185] removing trajectories instead of updating them --- .../refact/chat_client.py | 5 +- src/http/routers/v1/handlers_memdb.rs | 5 +- src/knowledge.rs | 84 ++++++++++++++----- src/trajectories.rs | 36 ++++---- src/vecdb/vdb_highlev.rs | 8 +- src/vecdb/vdb_structs.rs | 1 + 6 files changed, 90 insertions(+), 49 deletions(-) diff --git a/python_binding_and_cmdline/refact/chat_client.py b/python_binding_and_cmdline/refact/chat_client.py index 357b2b2f9..1be3e4693 100644 --- a/python_binding_and_cmdline/refact/chat_client.py +++ b/python_binding_and_cmdline/refact/chat_client.py @@ -400,13 +400,14 @@ async def diff_apply( return await _better_response_json(response) -async def mem_add(base_url: str, mem_type: str, goal: str, project: str, payload: str) -> Dict[str, Any]: +async def mem_add(base_url: str, mem_type: str, goal: str, project: str, payload: str, origin: str = "local-committed") -> Dict[str, Any]: url = f"{base_url}/mem-add" data = { "mem_type": mem_type, "goal": goal, "project": project, - "payload": payload + "payload": payload, + "origin": origin, } async with aiohttp.ClientSession() as session: async with session.post(url, json=data) as response: diff --git a/src/http/routers/v1/handlers_memdb.rs b/src/http/routers/v1/handlers_memdb.rs index 4d4c5a4e7..e443de789 100644 --- a/src/http/routers/v1/handlers_memdb.rs +++ b/src/http/routers/v1/handlers_memdb.rs @@ -15,7 +15,8 @@ struct MemAddRequest { mem_type: String, goal: String, project: String, - payload: String, // TODO: upgrade to serde_json::Value + payload: String, + origin: String, // TODO: upgrade to serde_json::Value } #[derive(Deserialize)] @@ -54,7 +55,7 @@ pub async fn handle_mem_add( &post.goal, &post.project, &post.payload, - None + &post.origin ).await.map_err(|e| { ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("{}", e)) })?; diff --git a/src/knowledge.rs b/src/knowledge.rs index 2838f7843..380f09a25 100644 --- a/src/knowledge.rs +++ b/src/knowledge.rs @@ -44,12 +44,17 @@ fn map_row_to_memo_record(row: &rusqlite::Row) -> rusqlite::Result { m_goal: row.get(2)?, m_project: row.get(3)?, m_payload: row.get(4)?, - mstat_correct: row.get(5)?, - mstat_relevant: row.get(6)?, - mstat_times_used: row.get(7)?, + m_origin: row.get(5)?, + mstat_correct: row.get(6)?, + mstat_relevant: row.get(7)?, + mstat_times_used: row.get(8)?, }) } +fn fields_ordered() -> String { + "memid,m_type,m_goal,m_project,m_payload,m_origin,mstat_correct,mstat_relevant,mstat_times_used".to_string() +} + impl MemoriesDatabase { pub async fn init( cache_dir: &PathBuf, @@ -100,9 +105,28 @@ impl MemoriesDatabase { dirty_everything: true, }; db._permdb_create_table(reset_memory)?; + db._migrate_add_m_origin()?; Ok(db) } + fn _migrate_add_m_origin(&self) -> Result<(), String> { + let conn = self.conn.lock(); + let mut stmt = conn.prepare("PRAGMA 
table_info(memories)").map_err(|e| e.to_string())?; + let column_exists = stmt.query_map([], |row| { + let column_name: String = row.get(1)?; + Ok(column_name) + }) + .map_err(|e| e.to_string())? + .filter_map(|result| result.ok()) + .any(|column_name| column_name == "m_origin"); + + if !column_exists { + conn.execute("ALTER TABLE memories ADD COLUMN m_origin TEXT NOT NULL DEFAULT 'refact-standard'", []) + .map_err(|e| e.to_string())?; + } + Ok(()) + } + fn _permdb_create_table(&self, reset_memory: bool) -> Result<(), String> { let conn = self.conn.lock(); if reset_memory { @@ -115,6 +139,7 @@ impl MemoriesDatabase { m_goal TEXT NOT NULL, m_project TEXT NOT NULL, m_payload TEXT NOT NULL, + m_origin TEXT NOT NULL, mstat_correct REAL NOT NULL DEFAULT 0, mstat_relevant REAL NOT NULL DEFAULT 0, mstat_times_used INTEGER NOT NULL DEFAULT 0 @@ -124,7 +149,7 @@ impl MemoriesDatabase { Ok(()) } - pub fn permdb_add(&self, mem_type: &str, goal: &str, project: &str, payload: &str, memid: Option) -> Result { + pub fn permdb_add(&self, mem_type: &str, goal: &str, project: &str, payload: &str, m_origin: &str) -> Result { fn generate_memid() -> String { rand::thread_rng() .sample_iter(&rand::distributions::Uniform::new(0, 16)) @@ -132,21 +157,32 @@ impl MemoriesDatabase { .map(|x| format!("{:x}", x)) .collect() } - let memid_str = memid.unwrap_or(generate_memid()); + let conn = self.conn.lock(); + let memid = generate_memid(); conn.execute( - "INSERT INTO memories (memid, m_type, m_goal, m_project, m_payload) VALUES (?1, ?2, ?3, ?4, ?5)", - params![memid_str, mem_type, goal, project, payload], + "INSERT INTO memories (memid, m_type, m_goal, m_project, m_payload, m_origin) VALUES (?1, ?2, ?3, ?4, ?5, ?6)", + params![memid, mem_type, goal, project, payload, m_origin], ).map_err(|e| e.to_string())?; - Ok(memid_str) + Ok(memid) } - pub fn permdb_erase(&self, memid: &str) -> Result { - let conn = self.conn.lock(); - let affected_rows = conn.execute( - "DELETE FROM memories WHERE memid = ?1", - params![memid], - ).map_err(|e| e.to_string())?; + pub async fn permdb_erase(&mut self, memid: &str) -> Result { + let affected_rows = { + let conn = self.conn.lock(); + conn.execute( + "DELETE FROM memories WHERE memid = ?1", + params![memid], + ).map_err(|e| e.to_string())? 
+ }; + + match self.memories_table.delete(&format!("memid IN ('{memid}')")).await { + Ok(_) => {} + Err(err) => { + tracing::error!("Error deleting from vecdb: {:?}", err); + } + } + Ok(affected_rows) } @@ -163,7 +199,7 @@ impl MemoriesDatabase { pub fn permdb_print_everything(&self) -> Result { let mut table_contents = String::new(); let conn = self.conn.lock(); - let mut stmt = conn.prepare("SELECT * FROM memories") + let mut stmt = conn.prepare( &format!("SELECT {} FROM memories", fields_ordered())) .map_err(|e| e.to_string())?; let rows = stmt.query_map([], |row| { Ok(( @@ -172,17 +208,18 @@ impl MemoriesDatabase { row.get::<_, String>(2)?, row.get::<_, String>(3)?, row.get::<_, String>(4)?, - row.get::<_, f64>(5)?, + row.get::<_, String>(5)?, row.get::<_, f64>(6)?, - row.get::<_, i32>(7)?, + row.get::<_, f64>(7)?, + row.get::<_, i32>(8)?, )) }).map_err(|e| e.to_string())?; for row in rows { - let (memid, m_type, m_goal, m_project, m_payload, mstat_correct, mstat_relevant, mstat_times_used) = row.map_err(|e| e.to_string())?; + let (memid, m_type, m_goal, m_project, m_payload, m_origin, mstat_correct, mstat_relevant, mstat_times_used) = row.map_err(|e| e.to_string())?; table_contents.push_str(&format!( - "memid={}, type={}, goal: {:?}, project: {:?}, payload: {:?}, correct={}, relevant={}, times_used={}\n", - memid, m_type, m_goal, m_project, m_payload, mstat_correct, mstat_relevant, mstat_times_used + "memid={}, type={}, goal: {:?}, project: {:?}, payload: {:?}, m_origin: {:?}, correct={}, relevant={}, times_used={}\n", + memid, m_type, m_goal, m_project, m_payload, m_origin, mstat_correct, mstat_relevant, mstat_times_used )); } Ok(table_contents) @@ -191,8 +228,8 @@ impl MemoriesDatabase { pub async fn permdb_select_all(&self, filter: Option<&str>) -> Result, String> { let conn = self.conn.lock(); let query = match filter { - Some(f) => format!("SELECT * FROM memories WHERE {}", f), - None => "SELECT * FROM memories".to_string(), + Some(f) => format!("SELECT {} FROM memories WHERE {f}", fields_ordered()), + None => format!("SELECT {} FROM memories", fields_ordered()), }; let mut stmt = conn.prepare(&query).map_err(|e| e.to_string())?; @@ -207,7 +244,7 @@ impl MemoriesDatabase { let memids: Vec = input_records.iter().map(|record| record.memid.clone()).collect(); let placeholders = memids.iter().map(|_| "?").collect::>().join(","); - let query = format!("SELECT * FROM memories WHERE memid IN ({})", placeholders); + let query = format!("SELECT {} FROM memories WHERE memid IN ({})", fields_ordered(), placeholders); let params = rusqlite::params_from_iter(memids.iter()); let mut statement = conn.prepare(&query).map_err(|e| e.to_string())?; @@ -225,6 +262,7 @@ impl MemoriesDatabase { record.m_goal = db_record.m_goal.clone(); record.m_project = db_record.m_project.clone(); record.m_payload = db_record.m_payload.clone(); + record.m_origin = db_record.m_origin.clone(); record.mstat_correct = db_record.mstat_correct; record.mstat_relevant = db_record.mstat_relevant; record.mstat_times_used = db_record.mstat_times_used; diff --git a/src/trajectories.rs b/src/trajectories.rs index aedd3c0d2..0ba2d7b02 100644 --- a/src/trajectories.rs +++ b/src/trajectories.rs @@ -1,9 +1,8 @@ -use std::collections::HashSet; use crate::global_context::GlobalContext; -use crate::vecdb::vdb_highlev::{memories_add, memories_block_until_vectorized, memories_select_all}; +use crate::vecdb::vdb_highlev::{memories_add, memories_block_until_vectorized, memories_erase, memories_select_all, VecDb}; use serde_json::Value; use 
std::sync::Arc; -use tokio::sync::RwLock as ARwLock; +use tokio::sync::{RwLock as ARwLock, Mutex as AMutex}; use tracing::info; use chrono::{NaiveDateTime, Utc}; @@ -37,6 +36,17 @@ async fn is_time_to_download_trajectories(gcx: Arc>) -> R Ok(duration_since_last_download.num_days() >= TRAJECTORIES_UPDATE_EACH_N_DAYS) } +async fn remove_legacy_trajectories(vecdb: Arc>>) -> Result<(), String> { + for memo in memories_select_all(vecdb.clone()) + .await? + .iter() + .filter(|x| x.m_origin == "refact-standard") { + memories_erase(vecdb.clone(), &memo.memid).await?; + info!("removed legacy trajectory: {}", memo.memid); + } + Ok(()) +} + pub async fn try_to_download_trajectories(gcx: Arc>) -> Result<(), String> { if !is_time_to_download_trajectories(gcx.clone()).await? { return Ok(()); @@ -68,24 +78,15 @@ pub async fn try_to_download_trajectories(gcx: Arc>) -> R } let trajectories = response_json["data"].as_array().unwrap(); - let existing_trajectories = memories_select_all(vec_db.clone()) - .await? - .iter() - .map(|x| x.memid.clone()) - .collect::>(); + remove_legacy_trajectories(vec_db.clone()).await?; for trajectory in trajectories { - let m_memid = trajectory["memid"].as_str().ok_or("the trajectory doesn't have memid field")?; - if existing_trajectories.contains(m_memid) { - info!("trajectory {} already exists in the vecdb", m_memid); - continue; - } - let m_type = trajectory["kind"].as_str().unwrap_or("unknown"); let m_goal = trajectory["goal"].as_str().unwrap_or("unknown"); let m_project = trajectory["framework"].as_str().unwrap_or("unknown"); let m_payload = trajectory["payload"].as_str().unwrap_or(""); + let m_origin = trajectory["origin"].as_str().unwrap_or("refact-standard"); if m_payload.is_empty() { - info!("empty or no payload for the trajectory: {}, skipping it", m_memid); + info!("empty or no payload for the trajectory, skipping it"); continue; } match memories_add( @@ -94,14 +95,13 @@ pub async fn try_to_download_trajectories(gcx: Arc>) -> R m_goal, m_project, m_payload, - Some(m_memid.to_string()), + m_origin, ).await { Ok(memid) => info!("memory added with ID: {}", memid), Err(err) => info!("failed to add memory: {}", err), } info!( - "downloaded trajectory: memid={}, type={}, goal={}, project={}, payload={}", - m_memid, + "downloaded trajectory: type={}, goal={}, project={}, payload={}", m_type, m_goal, m_project, diff --git a/src/vecdb/vdb_highlev.rs b/src/vecdb/vdb_highlev.rs index f47a8d0f5..8c917699d 100644 --- a/src/vecdb/vdb_highlev.rs +++ b/src/vecdb/vdb_highlev.rs @@ -286,7 +286,7 @@ pub async fn memories_add( m_goal: &str, m_project: &str, m_payload: &str, // TODO: upgrade to serde_json::Value - m_memid: Option + m_origin: &str ) -> Result { let (memdb, vectorizer_service) = { let vec_db_guard = vec_db.lock().await; @@ -296,7 +296,7 @@ pub async fn memories_add( let memid = { let mut memdb_locked = memdb.lock().await; - let x = memdb_locked.permdb_add(m_type, m_goal, m_project, m_payload, m_memid)?; + let x = memdb_locked.permdb_add(m_type, m_goal, m_project, m_payload, m_origin)?; memdb_locked.dirty_memids.push(x.clone()); x }; @@ -405,8 +405,8 @@ pub async fn memories_erase( vec_db.memdb.clone() }; - let memdb_locked = memdb.lock().await; - let erased_cnt = memdb_locked.permdb_erase(memid)?; + let mut memdb_locked = memdb.lock().await; + let erased_cnt = memdb_locked.permdb_erase(memid).await?; Ok(erased_cnt) } diff --git a/src/vecdb/vdb_structs.rs b/src/vecdb/vdb_structs.rs index 70ae1d680..7551ceef3 100644 --- a/src/vecdb/vdb_structs.rs +++ 
b/src/vecdb/vdb_structs.rs @@ -90,6 +90,7 @@ pub struct MemoRecord { pub m_goal: String, pub m_project: String, pub m_payload: String, + pub m_origin: String, pub mstat_correct: f64, pub mstat_relevant: f64, pub mstat_times_used: i32, From 033d7872773e2e623f6705869c3745f5b6e2aac5 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 11 Dec 2024 11:20:43 +0100 Subject: [PATCH 137/185] confirmation fixes --- src/integrations/integr_abstract.rs | 10 ++++++++++ src/integrations/integr_cmdline.rs | 3 +++ 2 files changed, 13 insertions(+) diff --git a/src/integrations/integr_abstract.rs b/src/integrations/integr_abstract.rs index a48a64f4d..446eb1b39 100644 --- a/src/integrations/integr_abstract.rs +++ b/src/integrations/integr_abstract.rs @@ -14,18 +14,28 @@ pub trait IntegrationTrait: Send + Sync { #[derive(Deserialize, Serialize, Clone, Default)] pub struct IntegrationAvailable { + #[serde(default = "default_true")] pub on_your_laptop: bool, + #[serde(default = "default_true")] pub when_isolated: bool, } +fn default_true() -> bool { + true +} + #[derive(Deserialize, Serialize, Clone, Default)] pub struct IntegrationConfirmation { + #[serde(default)] pub ask_user: Vec, + #[serde(default)] pub deny: Vec, } #[derive(Deserialize, Serialize, Clone, Default)] pub struct IntegrationCommon { + #[serde(default)] pub available: IntegrationConfirmation, + #[serde(default)] pub confirmation: IntegrationAvailable, } diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 13528853c..92736e603 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -315,4 +315,7 @@ description: | available: on_your_laptop_possible: true when_isolated_possible: true +confirmation: + ask_user_default: ["*"] + deny_user_default: ["sudo*"] "#; From f338233e0651dffb8f716d5fe2d46a1fee8aedca Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Wed, 11 Dec 2024 11:26:35 +0100 Subject: [PATCH 138/185] don't serialize parameters_required --- src/integrations/integr_cmdline.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 92736e603..05d2695f4 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -24,6 +24,8 @@ pub struct CmdlineToolConfig { pub description: String, pub parameters: Vec, + + #[serde(skip_serializing_if = "Option::is_none")] pub parameters_required: Option>, // blocking From e31d9e9cf4f668b0db1d40310cc993ff74f639fb Mon Sep 17 00:00:00 2001 From: Sergey Vakhreev Date: Wed, 11 Dec 2024 21:24:31 +1030 Subject: [PATCH 139/185] Moving to config dir (#476) * initial moving to config_dir * use config_path for the memdb * use print! instead of info! 
* fix for the mixing cache_dir and config_dir * fix for the mixing cache_dir and config_dir * migrate_to_config_folder * remove extra imports --- src/global_context.rs | 22 ++++++++++++++++ src/http/routers/v1/customization.rs | 4 +-- .../docker/docker_container_manager.rs | 6 ++--- src/knowledge.rs | 4 +-- src/main.rs | 6 +++++ src/privacy.rs | 2 +- src/tools/tools_description.rs | 14 +++++----- src/vecdb/vdb_highlev.rs | 16 +++++++----- src/yaml_configs/create_configs.rs | 26 +++++++++---------- src/yaml_configs/customization_loader.rs | 4 +-- 10 files changed, 67 insertions(+), 37 deletions(-) diff --git a/src/global_context.rs b/src/global_context.rs index bac4a37cc..025d9eb8b 100644 --- a/src/global_context.rs +++ b/src/global_context.rs @@ -1,6 +1,7 @@ use std::collections::hash_map::DefaultHasher; use std::collections::HashMap; use std::hash::Hasher; +use std::io; use std::path::PathBuf; use std::sync::Arc; use std::sync::atomic::{AtomicBool, Ordering}; @@ -168,6 +169,27 @@ pub type SharedGlobalContext = Arc>; // TODO: remove thi const CAPS_RELOAD_BACKOFF: u64 = 60; // seconds const CAPS_BACKGROUND_RELOAD: u64 = 3600; // seconds + +pub async fn migrate_to_config_folder( + config_dir: &PathBuf, + cache_dir: &PathBuf +) -> io::Result<()> { + let mut entries = tokio::fs::read_dir(cache_dir).await?; + while let Some(entry) = entries.next_entry().await? { + let path = entry.path(); + let file_name = path.file_name().unwrap().to_string_lossy().into_owned(); + let file_type = entry.file_type().await?; + let is_yaml_cfg = file_type.is_file() && path.extension().and_then(|e| e.to_str()) == Some("yaml"); + if is_yaml_cfg { + let new_path = config_dir.join(&file_name); + tokio::fs::rename(&path, &new_path).await?; + print!("migrated {:?} to {:?}", path, new_path); + } + } + + Ok(()) +} + pub async fn try_load_caps_quickly_if_not_present( gcx: Arc>, max_age_seconds: u64, diff --git a/src/http/routers/v1/customization.rs b/src/http/routers/v1/customization.rs index d36adca97..b06afcc97 100644 --- a/src/http/routers/v1/customization.rs +++ b/src/http/routers/v1/customization.rs @@ -15,10 +15,10 @@ pub async fn handle_v1_config_path( Extension(global_context): Extension>>, _body_bytes: hyper::body::Bytes, ) -> Result, ScratchError> { - let cache_dir = global_context.read().await.cache_dir.clone(); + let config_dir = global_context.read().await.config_dir.clone(); Ok(Response::builder() .status(StatusCode::OK) - .body(Body::from(cache_dir.to_str().unwrap().to_string())) + .body(Body::from(config_dir.to_str().unwrap().to_string())) .unwrap()) } diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index 1d9cc2d69..1bfb7d55d 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -252,9 +252,9 @@ async fn docker_container_sync_yaml_configs( container_id: &str, gcx: Arc>, ) -> Result<(), String> { - let (cache_dir, config_dir) = { + let config_dir = { let gcx_locked = gcx.read().await; - (gcx_locked.cache_dir.clone(), gcx_locked.config_dir.clone()) + gcx_locked.config_dir.clone() }; let container_home_dir = docker_container_get_home_dir(&docker, &container_id, gcx.clone()).await?; @@ -274,7 +274,7 @@ async fn docker_container_sync_yaml_configs( let local_path = match *file { "integrations.yaml" if !remote_integrations_path.is_empty() => remote_integrations_path.clone(), // "competency.yaml" if !competency_path.is_empty() => competency_path.clone(), - _ => 
cache_dir.join(file).to_string_lossy().to_string(), + _ => config_dir.join(file).to_string_lossy().to_string(), }; let container_path = format!("{container_id}:{container_home_dir}/.cache/refact/{file}"); docker.command_execute(&format!("container cp {local_path} {container_path}"), gcx.clone(), true, true).await?; diff --git a/src/knowledge.rs b/src/knowledge.rs index 380f09a25..5a75e92fa 100644 --- a/src/knowledge.rs +++ b/src/knowledge.rs @@ -57,13 +57,13 @@ fn fields_ordered() -> String { impl MemoriesDatabase { pub async fn init( - cache_dir: &PathBuf, + config_dir: &PathBuf, // vecdb_cache: Arc>, constants: &VecdbConstants, reset_memory: bool, ) -> Result { // SQLite database for memories, permanent on disk - let dbpath = cache_dir.join("memories.sqlite"); + let dbpath = config_dir.join("memories.sqlite"); let cache_database = Connection::open_with_flags( dbpath, rusqlite::OpenFlags::SQLITE_OPEN_READ_WRITE diff --git a/src/main.rs b/src/main.rs index 807f6dbb6..b11156d4f 100644 --- a/src/main.rs +++ b/src/main.rs @@ -75,6 +75,12 @@ async fn main() { let home_dir = home::home_dir().ok_or(()).expect("failed to find home dir"); let cache_dir = home_dir.join(".cache/refact"); let config_dir = home_dir.join(".config/refact"); + match global_context::migrate_to_config_folder(&config_dir, &cache_dir).await { + Ok(_) => {} + Err(err) => { + print!("failed to migrate to config folder, exiting: {:?}", err); + } + } let (gcx, ask_shutdown_receiver, shutdown_flag, cmdline) = global_context::create_global_context(cache_dir.clone(), config_dir.clone()).await; let mut writer_is_stderr = false; let (logs_writer, _guard) = if cmdline.logs_stderr { diff --git a/src/privacy.rs b/src/privacy.rs index 3a1f2db11..9373d20b9 100644 --- a/src/privacy.rs +++ b/src/privacy.rs @@ -76,7 +76,7 @@ pub async fn load_privacy_if_needed(gcx: Arc>) -> Arc Result { - let yaml_path = cache_dir.join("integrations.yaml"); +pub async fn read_integrations_yaml(config_dir: &PathBuf) -> Result { + let yaml_path = config_dir.join("integrations.yaml"); let file = std::fs::File::open(&yaml_path).map_err( |e| format!("Failed to open {}: {}", yaml_path.display(), e) @@ -71,16 +71,16 @@ pub async fn tools_merged_and_filtered( gcx: Arc>, _supports_clicks: bool, // XXX ) -> Result>>>, String> { - let (ast_on, vecdb_on, allow_experimental, cache_dir) = { + let (ast_on, vecdb_on, allow_experimental, config_dir) = { let gcx_locked = gcx.read().await; #[cfg(feature="vecdb")] let vecdb_on = gcx_locked.vec_db.lock().await.is_some(); #[cfg(not(feature="vecdb"))] let vecdb_on = false; - (gcx_locked.ast_service.is_some(), vecdb_on, gcx_locked.cmdline.experimental, gcx_locked.cache_dir.clone()) + (gcx_locked.ast_service.is_some(), vecdb_on, gcx_locked.cmdline.experimental, gcx_locked.config_dir.clone()) }; - let integrations_value = match read_integrations_yaml(&cache_dir).await { + let integrations_value = match read_integrations_yaml(&config_dir).await { Ok(value) => value, Err(e) => return Err(format!("Problem in integrations.yaml: {}", e)), }; @@ -181,8 +181,8 @@ pub async fn commands_require_confirmation_rules_from_integrations_yaml(gcx: Arc { // XXX // let integrations_value = read_integrations_yaml(gcx.clone()).await?; - let cache_dir = gcx.read().await.cache_dir.clone(); - let integrations_value = read_integrations_yaml(&cache_dir).await?; + let config_dir = gcx.read().await.config_dir.clone(); + let integrations_value = read_integrations_yaml(&config_dir).await?; serde_yaml::from_value::(integrations_value) .map_err(|e| format!("Failed 
to parse CommandsRequireConfirmationConfig: {}", e)) diff --git a/src/vecdb/vdb_highlev.rs b/src/vecdb/vdb_highlev.rs index 8c917699d..363adbbea 100644 --- a/src/vecdb/vdb_highlev.rs +++ b/src/vecdb/vdb_highlev.rs @@ -63,18 +63,19 @@ async fn _create_vecdb( return Err(err.message); } - let (cache_dir, cmdline) = { + let (cache_dir, config_dir, cmdline) = { let gcx_locked = gcx.read().await; - (gcx_locked.cache_dir.clone(), gcx_locked.cmdline.clone()) + (gcx_locked.cache_dir.clone(), gcx_locked.config_dir.clone(), gcx_locked.cmdline.clone()) }; let api_key = api_key.unwrap(); - let base_dir: PathBuf = match cmdline.vecdb_force_path.as_str() { - "" => cache_dir, - path => PathBuf::from(path), + let (base_dir_cache, base_dir_config) = match cmdline.vecdb_force_path.as_str() { + "" => (cache_dir, config_dir), + path => (PathBuf::from(path), PathBuf::from(path)), }; let vec_db_mb = match VecDb::init( - &base_dir, + &base_dir_cache, + &base_dir_config, cmdline.clone(), constants, &api_key @@ -234,6 +235,7 @@ pub async fn vecdb_background_reload( impl VecDb { pub async fn init( cache_dir: &PathBuf, + config_dir: &PathBuf, cmdline: CommandLine, constants: VecdbConstants, api_key: &String @@ -242,7 +244,7 @@ impl VecDb { let cache = VecDBCache::init(cache_dir, &constants.embedding_model, constants.embedding_size).await?; let vecdb_handler = Arc::new(AMutex::new(handler)); let vecdb_cache = Arc::new(AMutex::new(cache)); - let memdb = Arc::new(AMutex::new(MemoriesDatabase::init(cache_dir, &constants, cmdline.reset_memory).await?)); + let memdb = Arc::new(AMutex::new(MemoriesDatabase::init(config_dir, &constants, cmdline.reset_memory).await?)); let vectorizer_service = Arc::new(AMutex::new(FileVectorizerService::new( vecdb_handler.clone(), diff --git a/src/yaml_configs/create_configs.rs b/src/yaml_configs/create_configs.rs index 1fc5f86a2..658166bbd 100644 --- a/src/yaml_configs/create_configs.rs +++ b/src/yaml_configs/create_configs.rs @@ -15,7 +15,7 @@ const DEFAULT_CHECKSUM_FILE: &str = "default-checksums.yaml"; pub async fn yaml_configs_try_create_all(gcx: Arc>) -> String { let mut results = Vec::new(); - let cache_dir = gcx.read().await.cache_dir.clone(); + let config_dir = gcx.read().await.config_dir.clone(); let files = vec![ ("bring-your-own-key.yaml", crate::caps::BRING_YOUR_OWN_KEY_SAMPLE), @@ -25,7 +25,7 @@ pub async fn yaml_configs_try_create_all(gcx: Arc>) -> St ]; for (file_name, content) in files { - let file_path = cache_dir.join(file_name); + let file_path = config_dir.join(file_name); if let Err(e) = _yaml_file_exists_or_create(gcx.clone(), &file_path, content).await { tracing::warn!("{}", e); results.push(format!("Error processing {:?}: {}", file_path, e)); @@ -34,17 +34,17 @@ pub async fn yaml_configs_try_create_all(gcx: Arc>) -> St } } - let integrations_d = cache_dir.join("integrations.d"); + let integrations_d = config_dir.join("integrations.d"); if let Err(e) = tokio::fs::create_dir_all(&integrations_d).await { tracing::warn!("Failed to create directory {:?}: {}", integrations_d, e); results.push(format!("Error creating directory {:?}: {}", integrations_d, e)); } - // let integrations_enabled = cache_dir.join("integrations-enabled.yaml"); + // let integrations_enabled = config_dir.join("integrations-enabled.yaml"); // let integrations = get_empty_integrations(); // for (file_name, content) in integrations.iter().map(|(k, v)| (k.clone(), v.integr_settings_default())) { - // let file_path = get_integration_path(&cache_dir, &file_name); + // let file_path = 
get_integration_path(&config_dir, &file_name); // if let Err(e) = _yaml_file_exists_or_create(gcx.clone(), &file_path, &content).await { // tracing::warn!("{}", e); // results.push(format!("Error processing {:?}: {}", file_path, e)); @@ -73,11 +73,11 @@ async fn _yaml_file_exists_or_create( the_default: &str ) -> Result { - let cache_dir = gcx.read().await.cache_dir.clone(); + let config_dir = gcx.read().await.config_dir.clone(); let config_path_str = config_path.to_string_lossy().to_string(); let config_name = config_path.file_name().ok_or_else(|| format!("{} is not a file", config_path.display()))?.to_string_lossy().to_string(); - let checksums_dict = read_checksums(&cache_dir).await?; + let checksums_dict = read_checksums(&config_dir).await?; if config_path.exists() { let existing_content = tokio::fs::read_to_string(&config_path).await @@ -102,7 +102,7 @@ async fn _yaml_file_exists_or_create( tracing::info!("created {}", config_path.display()); let new_checksum = calculate_checksum(the_default); - update_checksum(&cache_dir, config_name.to_string(), &new_checksum).await?; + update_checksum(&config_dir, config_name.to_string(), &new_checksum).await?; Ok(config_path_str) } @@ -113,8 +113,8 @@ fn calculate_checksum(content: &str) -> String { format!("{:x}", hasher.finalize()) } -async fn read_checksums(cache_dir: &Path) -> Result, String> { - let checksum_path = cache_dir.join(DEFAULT_CHECKSUM_FILE); +async fn read_checksums(config_dir: &Path) -> Result, String> { + let checksum_path = config_dir.join(DEFAULT_CHECKSUM_FILE); if checksum_path.exists() { let content = tokio::fs::read_to_string(&checksum_path).await .map_err(|e| format!("failed to read {}: {}", DEFAULT_CHECKSUM_FILE, e))?; @@ -126,9 +126,9 @@ async fn read_checksums(cache_dir: &Path) -> Result, Str } } -async fn update_checksum(cache_dir: &Path, config_name: String, checksum: &str) -> Result<(), String> { - let checksum_path = cache_dir.join(DEFAULT_CHECKSUM_FILE); - let mut checksums = read_checksums(&cache_dir).await?; +async fn update_checksum(config_dir: &Path, config_name: String, checksum: &str) -> Result<(), String> { + let checksum_path = config_dir.join(DEFAULT_CHECKSUM_FILE); + let mut checksums = read_checksums(&config_dir).await?; checksums.insert(config_name.to_string(), checksum.to_string()); let content = format!( "# This file allows to determine whether a config file still has the default text, so we can upgrade it.\n#\n{}", diff --git a/src/yaml_configs/customization_loader.rs b/src/yaml_configs/customization_loader.rs index 4f62c8c59..06c508e0c 100644 --- a/src/yaml_configs/customization_loader.rs +++ b/src/yaml_configs/customization_loader.rs @@ -196,8 +196,8 @@ pub async fn load_customization( }; // let competency_path = gcx.read().await.cmdline.competency.clone(); - let cache_dir = gcx.read().await.cache_dir.clone(); - let customization_yaml_path = cache_dir.join("customization.yaml"); + let config_dir = gcx.read().await.config_dir.clone(); + let customization_yaml_path = config_dir.join("customization.yaml"); let user_config_text = std::fs::read_to_string(&customization_yaml_path).map_err(|e| format!("Failed to read file: {}", e))?; From aacf6896da1d7609328381c30f942c703a89ea08 Mon Sep 17 00:00:00 2001 From: Nick Frolov Date: Wed, 11 Dec 2024 15:57:14 +0100 Subject: [PATCH 140/185] add handler delete integration v1/integration-delete?integration_path= --- src/http/routers/v1.rs | 3 ++- src/http/routers/v1/v1_integrations.rs | 29 ++++++++++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) 
diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index 8784e76be..a7be893dd 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -41,7 +41,7 @@ use crate::http::routers::v1::system_prompt::handle_v1_system_prompt; use crate::http::routers::v1::vecdb::{handle_v1_vecdb_search, handle_v1_vecdb_status}; #[cfg(feature="vecdb")] use crate::http::routers::v1::handlers_memdb::{handle_mem_query, handle_mem_add, handle_mem_erase, handle_mem_update_used, handle_mem_block_until_vectorized, handle_mem_list}; -use crate::http::routers::v1::v1_integrations::{handle_v1_integration_get, handle_v1_integration_icon, handle_v1_integration_save, handle_v1_integrations, handle_v1_integrations_filtered}; +use crate::http::routers::v1::v1_integrations::{handle_v1_integration_get, handle_v1_integration_icon, handle_v1_integration_save, handle_v1_integration_delete, handle_v1_integrations, handle_v1_integrations_filtered}; use crate::http::utils::telemetry_wrapper; pub mod code_completion; @@ -125,6 +125,7 @@ pub fn make_v1_router() -> Router { .route("/integrations-filtered/:integr_name", get(handle_v1_integrations_filtered)) .route("/integration-get", telemetry_post!(handle_v1_integration_get)) .route("/integration-save", telemetry_post!(handle_v1_integration_save)) + .route("/integration-delete", get(handle_v1_integration_delete)) .route("/integration-icon/:icon_name", get(handle_v1_integration_icon)) .route("/docker-container-list", telemetry_post!(handle_v1_docker_container_list)) diff --git a/src/http/routers/v1/v1_integrations.rs b/src/http/routers/v1/v1_integrations.rs index f1fbffe3e..35ce6462d 100644 --- a/src/http/routers/v1/v1_integrations.rs +++ b/src/http/routers/v1/v1_integrations.rs @@ -6,6 +6,8 @@ use serde::Deserialize; use tokio::sync::RwLock as ARwLock; use regex::Regex; use axum::extract::Path; +use axum::extract::Query; + use crate::custom_error::ScratchError; use crate::global_context::GlobalContext; @@ -150,3 +152,30 @@ pub async fn handle_v1_integration_icon( } Err(ScratchError::new(StatusCode::NOT_FOUND, "icon not found".to_string())) } +// Define a structure to match query parameters +#[derive(Deserialize)] +pub struct HTTPIntegrationDeleteQueryParams { + integration_path: String, // Optional field for flexibility +} + +pub async fn handle_v1_integration_delete( + Query(params): Query, +) -> axum::response::Result, ScratchError> { + + let integration_path = params.integration_path; + log::info!("Deleting integration path: {}", integration_path); + + // If file path exists, delete it + if !std::path::Path::new(&integration_path).exists() { + return Err(ScratchError::new(StatusCode::NOT_FOUND, "integration_path not found".to_string())); + } + + std::fs::remove_file(&integration_path).map_err(|e| { + ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to delete file: {}", e)) + })?; + + Ok(Response::builder() + .status(StatusCode::OK) + .body(Body::from(format!("File {} deleted ", integration_path))) + .unwrap()) +} From 756101a42d9f3c20754521ffd0a37e2f8b97ddac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Wed, 11 Dec 2024 14:00:50 +0100 Subject: [PATCH 141/185] fix: extra .refact in read integrations d for cmdline and service --- src/integrations/setting_up_integrations.rs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 23141e0f0..8203d8f9f 100644 --- 
a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -82,8 +82,15 @@ pub fn read_integrations_d( continue; } let file_name_str_no_yaml = file_name_str.trim_end_matches(".yaml").to_string(); + let (_integr_name, project_path) = match split_path_into_project_and_integration(&entry.path()) { + Ok(x) => x, + Err(e) => { + tracing::error!("error deriving project path: {}", e); + continue; + } + }; if file_name_str.starts_with("cmdline_") || file_name_str.starts_with("service_") { - files_to_read.push((entry.path().to_string_lossy().to_string(), file_name_str_no_yaml.to_string(), project_config_dir.clone())); + files_to_read.push((entry.path().to_string_lossy().to_string(), file_name_str_no_yaml.to_string(), project_path)); } } } From 8291c1317556b1b7d68a012eee89638ef791b3fa Mon Sep 17 00:00:00 2001 From: mitya Date: Wed, 11 Dec 2024 18:23:00 +0100 Subject: [PATCH 142/185] add supports_agent field in caps --- src/caps.rs | 2 ++ src/known_models.rs | 3 +++ 2 files changed, 5 insertions(+) diff --git a/src/caps.rs b/src/caps.rs index 98b2bb60b..c1dda4718 100644 --- a/src/caps.rs +++ b/src/caps.rs @@ -36,6 +36,8 @@ pub struct ModelRecord { pub supports_multimodality: bool, #[serde(default)] pub supports_clicks: bool, + #[serde(default)] + pub supports_agent: bool, } #[derive(Debug, Deserialize)] diff --git a/src/known_models.rs b/src/known_models.rs index 0a7530499..f9a781a13 100644 --- a/src/known_models.rs +++ b/src/known_models.rs @@ -253,6 +253,7 @@ pub const KNOWN_MODELS: &str = r####" "n_ctx": 128000, "supports_tools": true, "supports_multimodality": true, + "supports_agent": true, "supports_scratchpads": { "PASSTHROUGH": { } @@ -299,6 +300,7 @@ pub const KNOWN_MODELS: &str = r####" "n_ctx": 16384, "supports_tools": true, "supports_multimodality": true, + "supports_agent": true, "supports_scratchpads": { "PASSTHROUGH": {} }, @@ -311,6 +313,7 @@ pub const KNOWN_MODELS: &str = r####" "supports_tools": true, "supports_multimodality": true, "supports_clicks": true, + "supports_agent": true, "supports_scratchpads": { "PASSTHROUGH": {} } From e5cd00d06c2d95f67ee2bd09c635646f8e44366c Mon Sep 17 00:00:00 2001 From: Kirill Starkov Date: Thu, 12 Dec 2024 10:59:52 +0800 Subject: [PATCH 143/185] add basic chat telemetry (#475) * 0.10.5 * add basic chat telemetry --------- Co-authored-by: Oleg Klimov --- Cargo.toml | 2 +- src/http/routers/v1.rs | 3 +++ src/http/routers/v1/telemetry_chat.rs | 22 ++++++++++++++++ src/telemetry/basic_chat.rs | 37 +++++++++++++++++++++++++++ src/telemetry/basic_transmit.rs | 8 +++--- src/telemetry/mod.rs | 1 + src/telemetry/telemetry_structs.rs | 19 ++++++++++++++ 7 files changed, 88 insertions(+), 4 deletions(-) create mode 100644 src/http/routers/v1/telemetry_chat.rs create mode 100644 src/telemetry/basic_chat.rs diff --git a/Cargo.toml b/Cargo.toml index 752ef5366..b84f116c6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,7 +6,7 @@ lto = true [package] name = "refact-lsp" -version = "0.10.4" +version = "0.10.5" edition = "2021" build = "build.rs" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index a7be893dd..1cca1e7fa 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -26,6 +26,7 @@ use crate::http::routers::v1::git::handle_v1_git_commit; use crate::http::routers::v1::graceful_shutdown::handle_v1_graceful_shutdown; use crate::http::routers::v1::snippet_accepted::handle_v1_snippet_accepted; use 
crate::http::routers::v1::telemetry_network::handle_v1_telemetry_network; +use crate::http::routers::v1::telemetry_chat::handle_v1_telemetry_chat; use crate::http::routers::v1::links::handle_v1_links; use crate::http::routers::v1::lsp_like_handlers::{handle_v1_lsp_did_change, handle_v1_lsp_add_folder, handle_v1_lsp_initialize, handle_v1_lsp_remove_folder, handle_v1_set_active_document}; use crate::http::routers::v1::status::handle_v1_rag_status; @@ -48,6 +49,7 @@ pub mod code_completion; pub mod code_lens; pub mod chat; pub mod telemetry_network; +pub mod telemetry_chat; pub mod snippet_accepted; pub mod caps; mod docker; @@ -87,6 +89,7 @@ pub fn make_v1_router() -> Router { .route("/chat/completions", telemetry_post!(handle_v1_chat_completions)) // standard .route("/telemetry-network", telemetry_post!(handle_v1_telemetry_network)) + .route("/telemetry-chat", telemetry_post!(handle_v1_telemetry_chat)) .route("/snippet-accepted", telemetry_post!(handle_v1_snippet_accepted)) .route("/caps", telemetry_get!(handle_v1_caps)) diff --git a/src/http/routers/v1/telemetry_chat.rs b/src/http/routers/v1/telemetry_chat.rs new file mode 100644 index 000000000..53f1aace7 --- /dev/null +++ b/src/http/routers/v1/telemetry_chat.rs @@ -0,0 +1,22 @@ +use axum::Extension; +use axum::response::Result; +use hyper::{Body, Response, StatusCode}; +use serde_json::json; + +use crate::telemetry::telemetry_structs; +use crate::custom_error::ScratchError; +use crate::global_context::SharedGlobalContext; + +pub async fn handle_v1_telemetry_chat( + Extension(global_context): Extension, + body_bytes: hyper::body::Bytes, +) -> Result, ScratchError> { + let post = serde_json::from_slice::(&body_bytes).map_err(|e| { + ScratchError::new(StatusCode::BAD_REQUEST, format!("JSON problem: {}", e)) + })?; + global_context.write().await.telemetry.write().unwrap().tele_chat.push(post); + Ok(Response::builder() + .status(StatusCode::OK) + .body(Body::from(json!({"success": 1}).to_string())) + .unwrap()) +} diff --git a/src/telemetry/basic_chat.rs b/src/telemetry/basic_chat.rs new file mode 100644 index 000000000..bad650090 --- /dev/null +++ b/src/telemetry/basic_chat.rs @@ -0,0 +1,37 @@ +use std::sync::Arc; +use std::collections::HashMap; +use serde_json::json; + +use tokio::sync::RwLock as ARwLock; + +use crate::global_context; +use crate::telemetry::utils::compress_tele_records_to_file; + +pub async fn compress_basic_chat_telemetry_to_file( + cx: Arc>, +) { + let mut key2cnt = HashMap::new(); + let mut key2dict = HashMap::new(); + + for rec in cx.read().await.telemetry.read().unwrap().tele_chat.iter() { + let key = format!("{}/{}/{}", rec.scope, rec.success, rec.error_message); + if !key2dict.contains_key(&key) { + key2dict.insert(key.clone(), serde_json::to_value(rec).unwrap()); + key2cnt.insert(key.clone(), 0); + } + key2cnt.insert(key.clone(), key2cnt[&key] + 1); + } + + let mut records = vec![]; + for (key, cnt) in key2cnt.iter() { + let mut json_dict = key2dict[key.as_str()].clone(); + json_dict["counter"] = json!(cnt); + records.push(json_dict); + } + match compress_tele_records_to_file(cx.clone(), records, "chat".to_string(), "chat".to_string()).await { + Ok(_) => { + cx.write().await.telemetry.write().unwrap().tele_chat.clear(); + }, + Err(_) => {} + }; +} \ No newline at end of file diff --git a/src/telemetry/basic_transmit.rs b/src/telemetry/basic_transmit.rs index 777ff101d..8a0c87486 100644 --- a/src/telemetry/basic_transmit.rs +++ b/src/telemetry/basic_transmit.rs @@ -7,7 +7,7 @@ use tokio::sync::RwLock as ARwLock; use 
crate::caps::CodeAssistantCaps; use crate::global_context::{GlobalContext, try_load_caps_quickly_if_not_present}; -use crate::telemetry::basic_network; +use crate::telemetry::{basic_chat, basic_network}; use crate::telemetry::basic_robot_human; use crate::telemetry::basic_comp_counters; use crate::telemetry::utils::{sorted_json_files, read_file, cleanup_old_files, telemetry_storage_dirs}; @@ -47,6 +47,8 @@ pub async fn send_telemetry_data( Ok(()) } +const TELEMETRY_FILES_SUFFIXES: [&str; 4] = ["-chat.json", "-net.json", "-rh.json", "-comp.json"]; + pub async fn send_telemetry_files_to_mothership( dir_compressed: PathBuf, dir_sent: PathBuf, @@ -69,8 +71,7 @@ pub async fn send_telemetry_files_to_mothership( let contents = contents_maybe.unwrap(); let path_str = path.to_str().unwrap(); let filename = path.file_name().unwrap().to_str().unwrap(); - if filename.starts_with(&file_prefix) && - (path_str.ends_with("-net.json") || path_str.ends_with("-rh.json") || path_str.ends_with("-comp.json")) { + if filename.starts_with(&file_prefix) && TELEMETRY_FILES_SUFFIXES.iter().any(|s| path_str.ends_with(s)) { info!("sending telemetry file\n{}\nto url\n{}", path.to_str().unwrap(), telemetry_basic_dest); let resp = send_telemetry_data(contents, &telemetry_basic_dest, &api_key, gcx.clone()).await; @@ -97,6 +98,7 @@ pub async fn basic_telemetry_compress( ) { info!("basic telemetry compression starts"); basic_network::compress_basic_telemetry_to_file(global_context.clone()).await; + basic_chat::compress_basic_chat_telemetry_to_file(global_context.clone()).await; basic_robot_human::tele_robot_human_compress_to_file(global_context.clone()).await; basic_comp_counters::compress_tele_completion_to_file(global_context.clone()).await; } diff --git a/src/telemetry/mod.rs b/src/telemetry/mod.rs index 1cb0e2902..de46a985b 100644 --- a/src/telemetry/mod.rs +++ b/src/telemetry/mod.rs @@ -6,3 +6,4 @@ pub mod snippets_transmit; mod basic_robot_human; mod basic_comp_counters; mod basic_network; +mod basic_chat; diff --git a/src/telemetry/telemetry_structs.rs b/src/telemetry/telemetry_structs.rs index 4cbfc0df4..672e4981f 100644 --- a/src/telemetry/telemetry_structs.rs +++ b/src/telemetry/telemetry_structs.rs @@ -15,6 +15,7 @@ pub struct Storage { pub tele_snippet_next_id: u64, pub snippet_data_accumulators: Vec, pub last_seen_file_texts: HashMap, + pub tele_chat: Vec, } impl Storage { @@ -27,6 +28,7 @@ impl Storage { tele_snippet_next_id: 100, snippet_data_accumulators: Vec::new(), last_seen_file_texts: HashMap::new(), + tele_chat: Vec::new(), } } } @@ -141,3 +143,20 @@ impl TeleCompletionAccum { } } } + +#[derive(Debug, Serialize, Deserialize, Clone, Default)] +pub struct TelemetryChat { + pub scope: String, // in relation to what + pub success: bool, + pub error_message: String, // empty if no error +} + +impl TelemetryChat { + pub fn new(scope: String, success: bool, error_message: String) -> Self { + Self { + scope, + success, + error_message, + } + } +} From cf4e47e1def8747f26e0dcfd04d71105641d4b10 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Thu, 12 Dec 2024 04:01:39 +0100 Subject: [PATCH 144/185] fix: make filenames searchable in vecdb (#484) --- src/vecdb/vdb_thread.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/vecdb/vdb_thread.rs b/src/vecdb/vdb_thread.rs index 12647ff76..7153c2ba5 100644 --- a/src/vecdb/vdb_thread.rs +++ b/src/vecdb/vdb_thread.rs @@ -356,6 +356,18 @@ async fn vectorize_thread( vec![] }); + // Adding the filename so it can also be searched 
+ if let Some(filename) = doc.doc_path.file_name().map(|f| f.to_string_lossy().to_string()) { + splits.push(crate::vecdb::vdb_structs::SplitResult { + file_path: doc.doc_path.clone(), + window_text: filename.clone(), + window_text_hash: crate::ast::chunk_utils::official_text_hashing_function(&filename), + start_line: 0, + end_line: if let Some(text) = doc.doc_text { text.lines().count() as u64 - 1 } else { 0 }, + symbol_path: "".to_string(), + }); + } + if DEBUG_WRITE_VECDB_FILES { let path_vecdb = doc.doc_path.with_extension("vecdb"); if let Ok(mut file) = std::fs::File::create(path_vecdb) { From 4fec63fca67fc72140500a0f0622934d2d714d10 Mon Sep 17 00:00:00 2001 From: Dimitry Ageev Date: Thu, 12 Dec 2024 04:37:20 +0100 Subject: [PATCH 145/185] move match confirm/deny/pass logic into tool itself (#483) --- src/http/routers/v1/at_tools.rs | 61 ++++++++++++++------------------- src/tools/tools_description.rs | 51 +++++++++++++++++++++++++++ 2 files changed, 77 insertions(+), 35 deletions(-) diff --git a/src/http/routers/v1/at_tools.rs b/src/http/routers/v1/at_tools.rs index c52422f1a..04948f5f5 100644 --- a/src/http/routers/v1/at_tools.rs +++ b/src/http/routers/v1/at_tools.rs @@ -12,10 +12,10 @@ use crate::at_commands::at_commands::AtCommandsContext; use crate::cached_tokenizers; use crate::call_validation::{ChatMessage, ChatToolCall, PostprocessSettings, SubchatParameters}; use crate::http::routers::v1::chat::CHAT_TOP_N; -use crate::tools::tools_description::{commands_require_confirmation_rules_from_integrations_yaml, tool_description_list_from_yaml, tools_merged_and_filtered}; +use crate::tools::tools_description::{commands_require_confirmation_rules_from_integrations_yaml, tool_description_list_from_yaml, tools_merged_and_filtered, MatchConfirmDenyResult}; use crate::custom_error::ScratchError; use crate::global_context::{try_load_caps_quickly_if_not_present, GlobalContext}; -use crate::tools::tools_execute::{command_should_be_confirmed_by_user, command_should_be_denied, run_tools}; +use crate::tools::tools_execute::run_tools; #[derive(Serialize, Deserialize, Clone)] @@ -111,7 +111,9 @@ pub async fn handle_v1_tools_check_if_confirmation_needed( }; let mut result_messages = vec![]; - let mut confirmation_rules = None; + let confirmation_rules = Some(commands_require_confirmation_rules_from_integrations_yaml(gcx.clone()).await.map_err(|e| { + ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error loading generic tool config: {}", e)) + })?); for tool_call in &post.tool_calls { let tool = match all_tools.get(&tool_call.function.name) { Some(x) => x, @@ -127,42 +129,31 @@ pub async fn handle_v1_tools_check_if_confirmation_needed( } }; - let command_to_match = { + let result = { let tool_locked = tool.lock().await; - tool_locked.command_to_match_against_confirm_deny(&args) + tool_locked.match_against_confirm_deny(&args, &confirmation_rules) }.map_err(|e| { - ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("Error getting tool command to match: {}", e)) + ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, e) })?; - if !command_to_match.is_empty() { - if confirmation_rules.is_none() { - confirmation_rules = Some(commands_require_confirmation_rules_from_integrations_yaml(gcx.clone()).await.map_err(|e| { - ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error loading generic tool config: {}", e)) - })?); - } - - if let Some(rules) = &confirmation_rules { - let (is_denied, deny_rule) = command_should_be_denied(&command_to_match, &rules.commands_deny); - if 
is_denied { - result_messages.push(PauseReason { - reason_type: PauseReasonType::Denial, - command: command_to_match.clone(), - rule: deny_rule.clone(), - tool_call_id: tool_call.id.clone(), - }); - continue; - } - let (needs_confirmation, confirmation_rule) = command_should_be_confirmed_by_user(&command_to_match, &rules.commands_need_confirmation); - if needs_confirmation { - result_messages.push(PauseReason { - reason_type: PauseReasonType::Confirmation, - command: command_to_match.clone(), - rule: confirmation_rule.clone(), - tool_call_id: tool_call.id.clone(), - }); - continue; - } - } + match result.result { + MatchConfirmDenyResult::DENY => { + result_messages.push(PauseReason { + reason_type: PauseReasonType::Denial, + command: result.command.clone(), + rule: result.rule.clone(), + tool_call_id: tool_call.id.clone(), + }); + }, + MatchConfirmDenyResult::CONFIRMATION => { + result_messages.push(PauseReason { + reason_type: PauseReasonType::Confirmation, + command: result.command.clone(), + rule: result.rule.clone(), + tool_call_id: tool_call.id.clone(), + }); + }, + _ => {}, } } diff --git a/src/tools/tools_description.rs b/src/tools/tools_description.rs index c8b54bc64..ed1d2ad5e 100644 --- a/src/tools/tools_description.rs +++ b/src/tools/tools_description.rs @@ -11,6 +11,7 @@ use tokio::sync::Mutex as AMutex; use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ChatUsage, ContextEnum}; use crate::global_context::GlobalContext; +use crate::tools::tools_execute::{command_should_be_confirmed_by_user, command_should_be_denied}; // use crate::integrations::docker::integr_docker::ToolDocker; @@ -20,6 +21,20 @@ pub struct CommandsRequireConfirmationConfig { pub commands_deny: Vec, } +#[derive(Clone, Debug)] +pub enum MatchConfirmDenyResult { + PASS, + CONFIRMATION, + DENY, +} + +#[derive(Clone, Debug)] +pub struct MatchConfirmDeny { + pub result: MatchConfirmDenyResult, + pub command: String, + pub rule: String, +} + #[async_trait] pub trait Tool: Send + Sync { fn as_any(&self) -> &dyn std::any::Any; @@ -31,6 +46,42 @@ pub trait Tool: Send + Sync { args: &HashMap ) -> Result<(bool, Vec), String>; + fn match_against_confirm_deny( + &self, + args: &HashMap, + confirmation_rules: &Option, + ) -> Result { + let command_to_match = self.command_to_match_against_confirm_deny(&args).map_err(|e| { + format!("Error getting tool command to match: {}", e) + })?; + + if !command_to_match.is_empty() { + if let Some(rules) = &confirmation_rules { + let (is_denied, deny_rule) = command_should_be_denied(&command_to_match, &rules.commands_deny); + if is_denied { + return Ok(MatchConfirmDeny { + result: MatchConfirmDenyResult::DENY, + command: command_to_match.clone(), + rule: deny_rule.clone(), + }); + } + let (needs_confirmation, confirmation_rule) = command_should_be_confirmed_by_user(&command_to_match, &rules.commands_need_confirmation); + if needs_confirmation { + return Ok(MatchConfirmDeny { + result: MatchConfirmDenyResult::CONFIRMATION, + command: command_to_match.clone(), + rule: confirmation_rule.clone(), + }); + } + } + } + Ok(MatchConfirmDeny { + result: MatchConfirmDenyResult::PASS, + command: command_to_match.clone(), + rule: "".to_string(), + }) + } + fn command_to_match_against_confirm_deny( &self, _args: &HashMap, From 091538825dca425e38f3421c2c560134bac9afed Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 12 Dec 2024 04:40:04 +0100 Subject: [PATCH 146/185] fixes (migration to .config later in main) --- src/caps.rs | 10 +++++----- src/global_context.rs 
| 4 ++-- .../docker/docker_container_manager.rs | 1 + src/main.rs | 20 +++++++------------ 4 files changed, 15 insertions(+), 20 deletions(-) diff --git a/src/caps.rs b/src/caps.rs index c1dda4718..b2b0a6eb6 100644 --- a/src/caps.rs +++ b/src/caps.rs @@ -235,7 +235,7 @@ macro_rules! get_api_key_macro { match std::env::var(env_var_name) { Ok(env_value) => env_value, Err(e) => { - error!("tried to read API key from env var {}, but failed: {}\nTry editing ~/.cache/refact/bring-your-own-key.yaml", env_var_name, e); + error!("tried to read API key from env var {}, but failed: {}\nTry editing ~/.config/refact/bring-your-own-key.yaml", env_var_name, e); cx_locked.cmdline.api_key.clone() } } @@ -257,7 +257,7 @@ pub async fn get_api_key( match std::env::var(env_var_name) { Ok(env_value) => env_value, Err(e) => { - error!("tried to read API key from env var {}, but failed: {}\nTry editing ~/.cache/refact/bring-your-own-key.yaml", env_var_name, e); + error!("tried to read API key from env var {}, but failed: {}\nTry editing ~/.config/refact/bring-your-own-key.yaml", env_var_name, e); gcx_locked.cmdline.api_key.clone() } } @@ -291,11 +291,11 @@ async fn load_caps_buf_from_file( ) -> Result<(String, String), String> { let mut caps_url = cmdline.address_url.clone(); if caps_url.is_empty() { - let cache_dir = { + let config_dir = { let gcx_locked = gcx.read().await; - gcx_locked.cache_dir.clone() + gcx_locked.config_dir.clone() }; - let caps_path = PathBuf::from(cache_dir).join("bring-your-own-key.yaml"); + let caps_path = PathBuf::from(config_dir).join("bring-your-own-key.yaml"); caps_url = caps_path.to_string_lossy().into_owned(); // info!("will use {} as the caps file", caps_url); } diff --git a/src/global_context.rs b/src/global_context.rs index 025d9eb8b..9515f7060 100644 --- a/src/global_context.rs +++ b/src/global_context.rs @@ -183,10 +183,10 @@ pub async fn migrate_to_config_folder( if is_yaml_cfg { let new_path = config_dir.join(&file_name); tokio::fs::rename(&path, &new_path).await?; - print!("migrated {:?} to {:?}", path, new_path); + tracing::info!("migrated {:?} to {:?}", path, new_path); } } - + Ok(()) } diff --git a/src/integrations/docker/docker_container_manager.rs b/src/integrations/docker/docker_container_manager.rs index 1bfb7d55d..8b548153b 100644 --- a/src/integrations/docker/docker_container_manager.rs +++ b/src/integrations/docker/docker_container_manager.rs @@ -262,6 +262,7 @@ async fn docker_container_sync_yaml_configs( let temp_dir = tempfile::Builder::new().tempdir() .map_err(|e| format!("Error creating temporary directory: {}", e))?; let temp_dir_path = temp_dir.path().to_string_lossy().to_string(); + // XXX now we're using .config docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.cache/"), gcx.clone(), true, true).await?; docker.command_execute(&format!("container cp {temp_dir_path} {container_id}:{container_home_dir}/.cache/refact"), gcx.clone(), true, true).await?; diff --git a/src/main.rs b/src/main.rs index b11156d4f..cf8cd3071 100644 --- a/src/main.rs +++ b/src/main.rs @@ -75,12 +75,6 @@ async fn main() { let home_dir = home::home_dir().ok_or(()).expect("failed to find home dir"); let cache_dir = home_dir.join(".cache/refact"); let config_dir = home_dir.join(".config/refact"); - match global_context::migrate_to_config_folder(&config_dir, &cache_dir).await { - Ok(_) => {} - Err(err) => { - print!("failed to migrate to config folder, exiting: {:?}", err); - } - } let (gcx, ask_shutdown_receiver, shutdown_flag, cmdline) = 
global_context::create_global_context(cache_dir.clone(), config_dir.clone()).await; let mut writer_is_stderr = false; let (logs_writer, _guard) = if cmdline.logs_stderr { @@ -117,6 +111,13 @@ async fn main() { tracing::error!("Panic occurred: {:?}\n{:?}", panic_info, backtrace); })); + match global_context::migrate_to_config_folder(&config_dir, &cache_dir).await { + Ok(_) => {} + Err(err) => { + tracing::error!("failed to migrate config files from .cache to .config, exiting: {:?}", err); + } + } + { let build_info = crate::http::routers::info::get_build_info(); for (k, v) in build_info { @@ -186,13 +187,6 @@ async fn main() { let _ = main_handle.unwrap().await; } - // use nix::sys::signal::{kill, Signal}; - // info!("sending SIGTERM to children"); - // kill_children(gcx.clone(), Signal::SIGTERM).await; - // tokio::time::sleep(tokio::time::Duration::from_secs(2)).await; - // info!("sending SIGKILL to children"); - // kill_children(gcx.clone(), Signal::SIGKILL).await; - background_tasks.abort().await; integrations::sessions::stop_sessions(gcx.clone()).await; info!("saving telemetry without sending, so should be quick"); From b973fb9f8bc52a98337c50cdaf7de79e560b8b0b Mon Sep 17 00:00:00 2001 From: Sergey Vakhreev Date: Thu, 12 Dec 2024 20:24:23 +1030 Subject: [PATCH 147/185] Confirmation (#485) * Refactor confirmation logic and command denial rules - Remove hardcoded command confirmation and denial rules from `mod.rs`. - Introduce `confirmation_info()` method to provide custom confirmation and denial configurations for integrations. - Restructure `IntegrationConfirmation` and `IntegrationAvailable` by renaming their fields for clarity. - Integrate confirmation and denial rule parsing directly within each integration's configuration. - Remove redundant function `commands_require_confirmation_rules_from_integrations_yaml`. - Update tool command matching against confirmation/denial rules to utilize the new method within each integration. 
* Refactor struct field names for clarity and consistency * Refactor confirmation fields to improve configuration consistency - Rename `deny_user_default` to `deny_default` in integration schemas - Rename `ask_user_default` to `ask_user` for clarity - Update relevant function calls and data handling logic to use new field names - Enhance `read_integrations_d` and related functions for better data management and default value handling - Adjust tests and other references to reflect changes in field naming conventions * Add leading slash to schema keys in confirmation settings * Remove redundant check for existing integration configuration in records loop --- src/http/routers/v1/at_tools.rs | 7 +-- src/integrations/docker/integr_docker.rs | 17 +++-- src/integrations/docker/integr_isolation.rs | 3 + src/integrations/integr_abstract.rs | 4 +- src/integrations/integr_chrome.rs | 16 +++-- src/integrations/integr_cmdline.rs | 8 ++- src/integrations/integr_cmdline_service.rs | 10 ++- src/integrations/integr_postgres.rs | 15 +++-- src/integrations/mod.rs | 18 +----- src/integrations/setting_up_integrations.rs | 70 ++++++++++++++++++++- src/tools/tools_description.rs | 33 ++++------ src/tools/tools_execute.rs | 57 ++++++----------- 12 files changed, 155 insertions(+), 103 deletions(-) diff --git a/src/http/routers/v1/at_tools.rs b/src/http/routers/v1/at_tools.rs index 04948f5f5..48d934bc1 100644 --- a/src/http/routers/v1/at_tools.rs +++ b/src/http/routers/v1/at_tools.rs @@ -12,7 +12,7 @@ use crate::at_commands::at_commands::AtCommandsContext; use crate::cached_tokenizers; use crate::call_validation::{ChatMessage, ChatToolCall, PostprocessSettings, SubchatParameters}; use crate::http::routers::v1::chat::CHAT_TOP_N; -use crate::tools::tools_description::{commands_require_confirmation_rules_from_integrations_yaml, tool_description_list_from_yaml, tools_merged_and_filtered, MatchConfirmDenyResult}; +use crate::tools::tools_description::{tool_description_list_from_yaml, tools_merged_and_filtered, MatchConfirmDenyResult}; use crate::custom_error::ScratchError; use crate::global_context::{try_load_caps_quickly_if_not_present, GlobalContext}; use crate::tools::tools_execute::run_tools; @@ -111,9 +111,6 @@ pub async fn handle_v1_tools_check_if_confirmation_needed( }; let mut result_messages = vec![]; - let confirmation_rules = Some(commands_require_confirmation_rules_from_integrations_yaml(gcx.clone()).await.map_err(|e| { - ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error loading generic tool config: {}", e)) - })?); for tool_call in &post.tool_calls { let tool = match all_tools.get(&tool_call.function.name) { Some(x) => x, @@ -131,7 +128,7 @@ pub async fn handle_v1_tools_check_if_confirmation_needed( let result = { let tool_locked = tool.lock().await; - tool_locked.match_against_confirm_deny(&args, &confirmation_rules) + tool_locked.match_against_confirm_deny(&args) }.map_err(|e| { ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, e) })?; diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index 586b386c6..6f35ebeaf 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -9,7 +9,7 @@ use serde_json::Value; use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ChatContent, ChatMessage, ContextEnum}; use crate::global_context::GlobalContext; -use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon}; +use 
crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon, IntegrationConfirmation}; use crate::tools::tools_description::Tool; use crate::integrations::docker::docker_ssh_tunnel_utils::{SshConfig, forward_remote_docker_if_needed}; use crate::integrations::utils::{serialize_num_to_str, deserialize_str_to_num}; @@ -178,6 +178,10 @@ impl Tool for ToolDocker { command_args.insert(0, "docker".to_string()); Ok(command_args.join(" ")) } + + fn confirmation_info(&self) -> Option { + Some(self.integr_common().confirmation) + } } fn parse_command(args: &HashMap) -> Result{ @@ -308,9 +312,6 @@ fields: f_desc: "Path to the SSH identity file to connect to remote Docker." f_label: "SSH Identity File" f_extra: true -available: - on_your_laptop_possible: true - when_isolated_possible: false smartlinks: - sl_label: "Test" sl_chat: @@ -318,4 +319,10 @@ smartlinks: content: | šŸ”§ The docker tool should be visible now. To test the tool, list the running containers, briefly describe the containers and express satisfaction and relief if it works, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. -"#; \ No newline at end of file +available: + on_your_laptop_possible: true + when_isolated_possible: false +confirmation: + ask_user_default: [] + deny_default: ["docker* rm *", "docker* rmi *", "docker* pause *", "docker* stop *", "docker* kill *"] +"#; diff --git a/src/integrations/docker/integr_isolation.rs b/src/integrations/docker/integr_isolation.rs index 5f7fd7478..113acb4f3 100644 --- a/src/integrations/docker/integr_isolation.rs +++ b/src/integrations/docker/integr_isolation.rs @@ -94,4 +94,7 @@ fields: available: on_your_laptop_possible: true when_isolated_possible: false +confirmation: + ask_user_default: [] + deny_default: ["docker* rm *", "docker* rmi *", "docker* pause *", "docker* stop *", "docker* kill *"] "#; diff --git a/src/integrations/integr_abstract.rs b/src/integrations/integr_abstract.rs index 446eb1b39..e91684fc1 100644 --- a/src/integrations/integr_abstract.rs +++ b/src/integrations/integr_abstract.rs @@ -35,7 +35,7 @@ pub struct IntegrationConfirmation { #[derive(Deserialize, Serialize, Clone, Default)] pub struct IntegrationCommon { #[serde(default)] - pub available: IntegrationConfirmation, + pub available: IntegrationAvailable, #[serde(default)] - pub confirmation: IntegrationAvailable, + pub confirmation: IntegrationConfirmation, } diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 7e76bcfa7..780e8a763 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -15,7 +15,7 @@ use crate::call_validation::{ChatContent, ChatMessage}; use crate::scratchpads::multimodality::MultimodalElement; use crate::postprocessing::pp_command_output::{CmdlineOutputFilter, output_mini_postprocessing}; use crate::tools::tools_description::{Tool, ToolDesc, ToolParam}; -use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon}; +use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon, IntegrationConfirmation}; use tokio::time::sleep; use chrono::DateTime; @@ -294,6 +294,11 @@ impl Tool for ToolChrome { parameters_required: vec!["commands".to_string()], } } + + + fn confirmation_info(&self) -> Option { + Some(self.integr_common().confirmation) + } } async fn setup_chrome_session( @@ -1240,9 +1245,6 @@ fields: f_type: string_short f_desc: "Scale factor of the browser window in tablet mode." 
f_extra: true -available: - on_your_laptop_possible: true - when_isolated_possible: true smartlinks: - sl_label: "Test" sl_chat: @@ -1277,4 +1279,10 @@ docker: - role: "user" content: | šŸ”§ Your job is to modify chrome config in the current file to connect through websockets to the container, use docker tool to inspect the container if needed. Current config file: %CURRENT_CONFIG%. +available: + on_your_laptop_possible: true + when_isolated_possible: true +confirmation: + ask_user_default: [] + deny_default: [] "#; diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 05d2695f4..760eb467a 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -13,7 +13,7 @@ use crate::at_commands::at_commands::AtCommandsContext; use crate::tools::tools_description::{ToolParam, Tool, ToolDesc}; use crate::call_validation::{ChatMessage, ChatContent, ContextEnum}; use crate::postprocessing::pp_command_output::{CmdlineOutputFilter, output_mini_postprocessing}; -use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon}; +use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon, IntegrationConfirmation}; use crate::integrations::utils::{serialize_num_to_str, deserialize_str_to_num, serialize_opt_num_to_str, deserialize_str_to_opt_num}; @@ -283,6 +283,10 @@ impl Tool for ToolCmdline { let (command, _workdir) = parse_command_args(args, &self.cfg)?; return Ok(command); } + + fn confirmation_info(&self) -> Option { + Some(self.integr_common().confirmation) + } } pub const CMDLINE_INTEGRATION_SCHEMA: &str = r#" @@ -319,5 +323,5 @@ available: when_isolated_possible: true confirmation: ask_user_default: ["*"] - deny_user_default: ["sudo*"] + deny_default: ["sudo*"] "#; diff --git a/src/integrations/integr_cmdline_service.rs b/src/integrations/integr_cmdline_service.rs index 18c231038..4336210cb 100644 --- a/src/integrations/integr_cmdline_service.rs +++ b/src/integrations/integr_cmdline_service.rs @@ -15,7 +15,7 @@ use crate::global_context::GlobalContext; use crate::integrations::process_io_utils::{blocking_read_until_token_or_timeout, is_someone_listening_on_that_tcp_port}; use crate::integrations::sessions::IntegrationSession; use crate::postprocessing::pp_command_output::output_mini_postprocessing; -use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon}; +use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon, IntegrationConfirmation}; use crate::integrations::integr_cmdline::*; @@ -333,6 +333,10 @@ impl Tool for ToolService { parameters_required, } } + + fn confirmation_info(&self) -> Option { + Some(self.integr_common().confirmation) + } } pub const CMDLINE_SERVICE_INTEGRATION_SCHEMA: &str = r#" @@ -363,7 +367,6 @@ fields: f_type: string f_desc: "Wait until a keyword appears in stdout or stderr at startup." 
f_placeholder: "Ready" - description: | As opposed to command line argumenets @@ -372,4 +375,7 @@ description: | available: on_your_laptop_possible: true when_isolated_possible: true +confirmation: + ask_user_default: ["*"] + deny_default: ["sudo*"] "#; diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index e22006ff3..c32a6d594 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -10,7 +10,7 @@ use std::collections::HashMap; use std::sync::Arc; use tokio::process::Command; use tokio::sync::Mutex as AMutex; -use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon}; +use crate::integrations::integr_abstract::{IntegrationTrait, IntegrationCommon, IntegrationConfirmation}; #[derive(Clone, Serialize, Deserialize, Debug, Default)] @@ -163,6 +163,10 @@ impl Tool for ToolPostgres { #[allow(static_mut_refs)] unsafe { &mut DEFAULT_USAGE } } + + fn confirmation_info(&self) -> Option { + Some(self.integr_common().confirmation) + } } // const DEFAULT_POSTGRES_INTEGRATION_YAML: &str = r#" @@ -224,9 +228,6 @@ description: | On this page you can also see Docker containers with Postgres servers. You can ask model to create a new container with a new database for you, or ask model to configure the tool to use an existing container with existing database. -available: - on_your_laptop_possible: true - when_isolated_possible: true smartlinks: - sl_label: "Test" sl_chat: @@ -262,6 +263,12 @@ docker: - role: "user" content: | šŸ”§ Your job is to modify postgres connection config in the current file to match the variables from the container, use docker tool to inspect the container if needed. Current config file: %CURRENT_CONFIG%. +available: + on_your_laptop_possible: true + when_isolated_possible: true +confirmation: + ask_user_default: ["psql*[!SELECT]*"] + deny_default: [] "#; // To think about: PGPASSWORD PGHOST PGUSER PGPORT PGDATABASE maybe tell the model to set that in variables.yaml as well diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index dcf0d77a5..ef5c25d04 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -27,7 +27,7 @@ pub mod setting_up_integrations; pub mod running_integrations; pub mod utils; -use integr_abstract::{IntegrationTrait, IntegrationCommon}; +use integr_abstract::IntegrationTrait; pub fn integration_from_name(n: &str) -> Result, String> @@ -96,22 +96,6 @@ pub const INTEGRATIONS_DEFAULT_YAML: &str = r#"# This file is used to configure # # Here you can set up which commands require confirmation or must be denied. If both apply, the command is denied. 
# Rules use glob patterns for wildcard matching (https://en.wikipedia.org/wiki/Glob_(programming)) -# - -commands_need_confirmation: - - "gh * delete*" - - "glab * delete*" - - "psql*[!SELECT]*" -commands_deny: - - "docker* rm *" - - "docker* remove *" - - "docker* rmi *" - - "docker* pause *" - - "docker* stop *" - - "docker* kill *" - - "gh auth token*" - - "glab auth token*" - # Command line: things you can call and immediately get an answer #cmdline: diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 8203d8f9f..3e61163e9 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -4,11 +4,13 @@ use std::sync::Arc; use std::collections::HashMap; use regex::Regex; use serde::Serialize; +use serde_json::json; use tokio::sync::RwLock as ARwLock; use tokio::fs as async_fs; use tokio::io::AsyncWriteExt; use crate::global_context::GlobalContext; +use crate::integrations::integr_abstract::IntegrationTrait; // use crate::tools::tools_description::Tool; // use crate::yaml_configs::create_configs::{integrations_enabled_cfg, read_yaml_into_value}; @@ -29,6 +31,8 @@ pub struct IntegrationRecord { pub icon_path: String, pub on_your_laptop: bool, pub when_isolated: bool, + pub ask_user: Vec, + pub deny: Vec, #[serde(skip_serializing)] pub config_unparsed: serde_json::Value, } @@ -39,6 +43,19 @@ pub struct IntegrationResult { pub error_log: Vec, } +fn get_array_of_str_or_empty(val: &serde_json::Value, path: &str) -> Vec { + val.pointer(path) + .and_then(|val| { + val.as_array().map(|array| { + array + .iter() + .filter_map(|v| v.as_str().map(ToString::to_string)) + .collect::>() + }) + }) + .unwrap_or_default() +} + pub fn read_integrations_d( config_dirs: &Vec, global_config_dir: &PathBuf, @@ -224,6 +241,28 @@ pub fn read_integrations_d( } } + // 5. 
Fill confirmation in each record + for rec in &mut result { + if let Some(confirmation) = rec.config_unparsed.get("confirmation") { + rec.ask_user = get_array_of_str_or_empty(&confirmation, "ask_user"); + rec.deny = get_array_of_str_or_empty(&confirmation, "deny"); + } else { + let schema = match crate::integrations::integration_from_name(rec.integr_name.as_str()) { + Ok(i) => { + serde_json::to_value( + serde_yaml::from_str::(i.integr_schema()).expect("schema is invalid") + ).expect("schema is invalid") + } + Err(err) => { + tracing::warn!("failed to retrieve schema from {}: {err}", rec.integr_name); + continue; + } + }; + rec.ask_user = get_array_of_str_or_empty(&schema, "/confirmation/ask_user_default"); + rec.deny = get_array_of_str_or_empty(&schema, "/confirmation/deny_default"); + } + } + result } @@ -357,7 +396,9 @@ pub async fn integration_config_get( let mut available = serde_json::json!({ "on_your_laptop": false, "when_isolated": false - }); + }); + let mut confirmation_ask_user = vec![]; + let mut confirmation_deny = vec![]; if exists { match fs::read_to_string(&sanitized_path) { Ok(content) => { @@ -366,6 +407,16 @@ pub async fn integration_config_get( let j = serde_json::to_value(y).unwrap(); available["on_your_laptop"] = j.get("available").and_then(|v| v.get("on_your_laptop")).and_then(|v| v.as_bool()).unwrap_or(false).into(); available["when_isolated"] = j.get("available").and_then(|v| v.get("when_isolated")).and_then(|v| v.as_bool()).unwrap_or(false).into(); + confirmation_ask_user = if j.get("confirmation").is_some() { + get_array_of_str_or_empty(&j, "confirmation/ask_user") + } else { + get_array_of_str_or_empty(&result.integr_schema, "/confirmation/ask_user_default") + }; + confirmation_deny = if j.get("confirmation").is_some() { + get_array_of_str_or_empty(&j, "confirmation/deny") + } else { + get_array_of_str_or_empty(&result.integr_schema, "/confirmation/deny_default") + }; let did_it_work = integration_box.integr_settings_apply(&j); if let Err(e) = did_it_work { tracing::error!("oops: {}", e); @@ -384,13 +435,17 @@ pub async fn integration_config_get( result.integr_values = integration_box.integr_settings_as_json(); result.integr_values["available"] = available; + result.integr_values["confirmation"] = serde_json::json!({ + "ask_user": confirmation_ask_user, + "deny": confirmation_deny + }); Ok(result) } pub async fn integration_config_save( integr_config_path: &String, integr_values: &serde_json::Value, -) -> Result<(), String> { +) -> Result<(), String> { let config_path = crate::files_correction::canonical_path(integr_config_path); let (integr_name, _project_path) = crate::integrations::setting_up_integrations::split_path_into_project_and_integration(&config_path) .map_err(|e| format!("Failed to split path: {}", e))?; @@ -398,6 +453,11 @@ pub async fn integration_config_save( .map_err(|e| format!("Failed to load integrations: {}", e))?; integration_box.integr_settings_apply(integr_values)?; // this will produce "no field XXX" errors + let schema_json = { + let y: serde_yaml::Value = serde_yaml::from_str(integration_box.integr_schema()).unwrap(); + let j = serde_json::to_value(y).unwrap(); + j + }; let mut sanitized_json: serde_json::Value = integration_box.integr_settings_as_json(); tracing::info!("posted values:\n{}", serde_json::to_string_pretty(integr_values).unwrap()); @@ -406,6 +466,12 @@ pub async fn integration_config_save( } sanitized_json["available"]["on_your_laptop"] = 
integr_values.pointer("/available/on_your_laptop").cloned().unwrap_or(serde_json::Value::Bool(false)); sanitized_json["available"]["when_isolated"] = integr_values.pointer("/available/when_isolated").cloned().unwrap_or(serde_json::Value::Bool(false)); + sanitized_json["confirmation"]["ask_user"] = integr_values.pointer("/confirmation/ask_user").cloned().unwrap_or( + json!(get_array_of_str_or_empty(&schema_json, "/confirmation/ask_user_default")) + ); + sanitized_json["confirmation"]["deny"] = integr_values.pointer("/confirmation/deny").cloned().unwrap_or( + json!(get_array_of_str_or_empty(&schema_json, "/confirmation/deny_default")) + ); tracing::info!("writing to {}:\n{}", config_path.display(), serde_json::to_string_pretty(&sanitized_json).unwrap()); let sanitized_yaml = serde_yaml::to_value(sanitized_json).unwrap(); diff --git a/src/tools/tools_description.rs b/src/tools/tools_description.rs index ed1d2ad5e..4330e55bc 100644 --- a/src/tools/tools_description.rs +++ b/src/tools/tools_description.rs @@ -11,16 +11,11 @@ use tokio::sync::Mutex as AMutex; use crate::at_commands::at_commands::AtCommandsContext; use crate::call_validation::{ChatUsage, ContextEnum}; use crate::global_context::GlobalContext; +use crate::integrations::integr_abstract::IntegrationConfirmation; use crate::tools::tools_execute::{command_should_be_confirmed_by_user, command_should_be_denied}; // use crate::integrations::docker::integr_docker::ToolDocker; -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct CommandsRequireConfirmationConfig { - pub commands_need_confirmation: Vec, - pub commands_deny: Vec, -} - #[derive(Clone, Debug)] pub enum MatchConfirmDenyResult { PASS, @@ -48,16 +43,15 @@ pub trait Tool: Send + Sync { fn match_against_confirm_deny( &self, - args: &HashMap, - confirmation_rules: &Option, + args: &HashMap ) -> Result { let command_to_match = self.command_to_match_against_confirm_deny(&args).map_err(|e| { format!("Error getting tool command to match: {}", e) })?; if !command_to_match.is_empty() { - if let Some(rules) = &confirmation_rules { - let (is_denied, deny_rule) = command_should_be_denied(&command_to_match, &rules.commands_deny); + if let Some(rules) = &self.confirmation_info() { + let (is_denied, deny_rule) = command_should_be_denied(&command_to_match, &rules.deny); if is_denied { return Ok(MatchConfirmDeny { result: MatchConfirmDenyResult::DENY, @@ -65,7 +59,7 @@ pub trait Tool: Send + Sync { rule: deny_rule.clone(), }); } - let (needs_confirmation, confirmation_rule) = command_should_be_confirmed_by_user(&command_to_match, &rules.commands_need_confirmation); + let (needs_confirmation, confirmation_rule) = command_should_be_confirmed_by_user(&command_to_match, &rules.ask_user); if needs_confirmation { return Ok(MatchConfirmDeny { result: MatchConfirmDenyResult::CONFIRMATION, @@ -89,6 +83,12 @@ pub trait Tool: Send + Sync { Ok("".to_string()) } + fn confirmation_info( + &self, + ) -> Option { + None + } + fn tool_depends_on(&self) -> Vec { vec![] } // "ast", "vecdb" fn usage(&mut self) -> &mut Option { @@ -228,17 +228,6 @@ pub async fn tools_merged_and_filtered( Ok(filtered_tools) } -pub async fn commands_require_confirmation_rules_from_integrations_yaml(gcx: Arc>) -> Result -{ - // XXX - // let integrations_value = read_integrations_yaml(gcx.clone()).await?; - let config_dir = gcx.read().await.config_dir.clone(); - let integrations_value = read_integrations_yaml(&config_dir).await?; - - serde_yaml::from_value::(integrations_value) - .map_err(|e| format!("Failed to parse 
CommandsRequireConfirmationConfig: {}", e)) -} - const BUILT_IN_TOOLS: &str = r####" tools: - name: "search" diff --git a/src/tools/tools_execute.rs b/src/tools/tools_execute.rs index 9c041fc09..643869233 100644 --- a/src/tools/tools_execute.rs +++ b/src/tools/tools_execute.rs @@ -15,12 +15,11 @@ use crate::integrations::docker::docker_container_manager::docker_container_get_ use crate::postprocessing::pp_context_files::postprocess_context_files; use crate::postprocessing::pp_plain_text::postprocess_plain_text; use crate::scratchpads::scratchpad_utils::{HasRagResults, max_tokens_for_rag_chat}; -use crate::tools::tools_description::{commands_require_confirmation_rules_from_integrations_yaml, Tool}; +use crate::tools::tools_description::{MatchConfirmDenyResult, Tool}; use crate::yaml_configs::customization_loader::load_customization; use crate::caps::get_model_record; use crate::http::routers::v1::at_tools::{ToolExecuteResponse, ToolsExecutePost}; - pub async fn unwrap_subchat_params(ccx: Arc>, tool_name: &str) -> Result { let (gcx, params_mb) = { let ccx_locked = ccx.lock().await; @@ -125,7 +124,6 @@ pub async fn run_tools( original_messages: &Vec, style: &Option, ) -> Result<(Vec, bool), String> { - let gcx = ccx.lock().await.global_context.clone(); let n_ctx = ccx.lock().await.n_ctx; let reserve_for_context = max_tokens_for_rag_chat(n_ctx, maxgen); let tokens_for_rag = reserve_for_context; @@ -149,7 +147,6 @@ pub async fn run_tools( let mut generated_tool = vec![]; // tool results must go first let mut generated_other = vec![]; let mut any_corrections = false; - let mut confirmation_rules = None; for t_call in last_msg_tool_calls { let cmd = match tools.get(&t_call.function.name) { @@ -175,45 +172,29 @@ pub async fn run_tools( } }; info!("tool use {}({:?})", &t_call.function.name, args); - - let command_to_match = match { + + { let cmd_lock = cmd.lock().await; - cmd_lock.command_to_match_against_confirm_deny(&args) - } { - Ok(command_to_match) => command_to_match, - Err(e) => { - let tool_failed_message = tool_answer( - format!("tool use: {}", e), t_call.id.to_string() - ); - generated_tool.push(tool_failed_message); - continue; - } - }; - - if !command_to_match.is_empty() { - if confirmation_rules.is_none() { - confirmation_rules = match commands_require_confirmation_rules_from_integrations_yaml(gcx.clone()).await { - Ok(g) => Some(g), - Err(e) => { - let tool_failed_message = tool_answer(format!("tool use: {}", e), t_call.id.to_string()); - generated_tool.push(tool_failed_message); - continue; + match cmd_lock.match_against_confirm_deny(&args) { + Ok(res) => { + match res.result { + MatchConfirmDenyResult::DENY => { + let command_to_match = cmd_lock + .command_to_match_against_confirm_deny(&args) + .unwrap_or("".to_string()); + generated_tool.push(tool_answer(format!("tool use: command '{command_to_match}' is denied"), t_call.id.to_string())); + continue; + } + _ => {} } - }; - } - - if let Some(rules) = &confirmation_rules { - let (is_denied, _) = command_should_be_denied(&command_to_match, &rules.commands_deny); - if is_denied { - let tool_failed_message = tool_answer( - format!("tool use: command '{}' is denied", command_to_match), t_call.id.to_string() - ); - generated_tool.push(tool_failed_message); + } + Err(err) => { + generated_tool.push(tool_answer(format!("tool use: {}", err), t_call.id.to_string())); continue; } } - } - + }; + let (corrections, tool_execute_results) = { let mut cmd_lock = cmd.lock().await; match cmd_lock.tool_execute(ccx.clone(), &t_call.id.to_string(), 
&args).await { From 98763b4260d5221c9af14d4cd76c34a6f2b9e7ae Mon Sep 17 00:00:00 2001 From: Sergey Vakhreev Date: Thu, 12 Dec 2024 20:33:23 +1030 Subject: [PATCH 148/185] Fixing integrations (#474) * Refactor confirmation logic and command denial rules - Remove hardcoded command confirmation and denial rules from `mod.rs`. - Introduce `confirmation_info()` method to provide custom confirmation and denial configurations for integrations. - Restructure `IntegrationConfirmation` and `IntegrationAvailable` by renaming their fields for clarity. - Integrate confirmation and denial rule parsing directly within each integration's configuration. - Remove redundant function `commands_require_confirmation_rules_from_integrations_yaml`. - Update tool command matching against confirmation/denial rules to utilize the new method within each integration. * Refactor struct field names for clarity and consistency * Refactor confirmation fields to improve configuration consistency - Rename `deny_user_default` to `deny_default` in integration schemas - Rename `ask_user_default` to `ask_user` for clarity - Update relevant function calls and data handling logic to use new field names - Enhance `read_integrations_d` and related functions for better data management and default value handling - Adjust tests and other references to reflect changes in field naming conventions * Add leading slash to schema keys in confirmation settings * Remove redundant check for existing integration configuration in records loop * github integration fixed * gitlab integration fixed * pdb integration fixed * Add IntegrationCommon and IntegrationConfirmation to GitLab, GitHub, and PDB - Update `ToolGitlab`, `ToolGithub`, and `ToolPdb` structs to include `IntegrationCommon`. - Implement handling of common settings and confirmation info in `integr_settings_apply`. - Add `confirmation_info` method returning `IntegrationConfirmation`. - Ensure integration settings and upgrade methods account for common data. * Add confirmation prompt for 'gh * close' command in GitHub integration * fix warning * Update default deny configuration keys across integrations Replace 'deny_user_default' with 'deny_default' in GitLab, GitHub, and PDB integration files for consistency. 
* Update confirmation patterns for GitLab and GitHub integration commands * Remove unused integration YAML constant and related code references --------- Co-authored-by: Kirill Starkov --- src/integrations/integr_github.rs | 112 +++++++++++++++++++------- src/integrations/integr_gitlab.rs | 117 ++++++++++++++++++++-------- src/integrations/integr_pdb.rs | 109 ++++++++++++++++++-------- src/integrations/integr_postgres.rs | 26 ------- src/integrations/mod.rs | 109 +++----------------------- src/telemetry/telemetry_structs.rs | 10 --- src/yaml_configs/create_configs.rs | 3 +- 7 files changed, 254 insertions(+), 232 deletions(-) diff --git a/src/integrations/integr_github.rs b/src/integrations/integr_github.rs index e0906b830..a0ab4e861 100644 --- a/src/integrations/integr_github.rs +++ b/src/integrations/integr_github.rs @@ -7,53 +7,66 @@ use tracing::{error, info}; use serde::{Deserialize, Serialize}; use crate::at_commands::at_commands::AtCommandsContext; -use crate::call_validation::{ContextEnum, ChatMessage, ChatContent}; +use crate::call_validation::{ContextEnum, ChatMessage, ChatContent, ChatUsage}; use crate::tools::tools_description::Tool; use serde_json::Value; -use crate::integrations::integr_abstract::Integration; +use crate::integrations::integr_abstract::{IntegrationCommon, IntegrationConfirmation, IntegrationTrait}; #[derive(Clone, Serialize, Deserialize, Debug, Default)] #[allow(non_snake_case)] -pub struct IntegrationGitHub { - pub gh_binary_path: Option, - pub GH_TOKEN: String, +pub struct SettingsGitHub { + pub gh_binary_path: String, + pub gh_token: String, } #[derive(Default)] pub struct ToolGithub { - pub integration_github: IntegrationGitHub, + pub common: IntegrationCommon, + pub settings_github: SettingsGitHub, } -impl Integration for ToolGithub { +impl IntegrationTrait for ToolGithub { fn as_any(&self) -> &dyn std::any::Any { self } fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { - let integration_github = serde_json::from_value::(value.clone()) - .map_err(|e|e.to_string())?; - self.integration_github = integration_github; + match serde_json::from_value::(value.clone()) { + Ok(settings_github) => { + info!("Github settings applied: {:?}", settings_github); + self.settings_github = settings_github; + }, + Err(e) => { + error!("Failed to apply settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + }; + match serde_json::from_value::(value.clone()) { + Ok(x) => self.common = x, + Err(e) => { + error!("Failed to apply common settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + }; Ok(()) } - fn integr_yaml2json(&self, value: &serde_yaml::Value) -> Result { - let integration_github = serde_yaml::from_value::(value.clone()).map_err(|e| { - let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); - format!("{}{}", e.to_string(), location) - })?; - serde_json::to_value(&integration_github).map_err(|e| e.to_string()) + fn integr_settings_as_json(&self) -> Value { + serde_json::to_value(&self.settings_github).unwrap_or_default() } - fn integr_upgrade_to_tool(&self, integr_name: &str) -> Box { - Box::new(ToolGithub {integration_github: self.integration_github.clone()}) as Box + fn integr_common(&self) -> IntegrationCommon { + self.common.clone() } - fn integr_settings_as_json(&self) -> Result { - serde_json::to_value(&self.integration_github).map_err(|e| e.to_string()) + fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { + Box::new(ToolGithub { + common: 
self.common.clone(), + settings_github: self.settings_github.clone() }) as Box } - fn integr_settings_default(&self) -> String { DEFAULT_GITHUB_INTEGRATION_YAML.to_string() } - fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/25/25231.png".to_string() } + fn integr_schema(&self) -> &str { GITHUB_INTEGRATION_SCHEMA } } #[async_trait] @@ -73,11 +86,14 @@ impl Tool for ToolGithub { }; let command_args = parse_command_args(args)?; - let gh_command = self.integration_github.gh_binary_path.as_deref().unwrap_or("gh"); - let output = Command::new(gh_command) + let mut gh_binary_path = self.settings_github.gh_binary_path.clone(); + if gh_binary_path.is_empty() { + gh_binary_path = "gh".to_string(); + } + let output = Command::new(gh_binary_path) .args(&command_args) .current_dir(&project_dir) - .env("GH_TOKEN", &self.integration_github.GH_TOKEN) + .env("GH_TOKEN", &self.settings_github.gh_token) .output() .await .map_err(|e| e.to_string())?; @@ -123,6 +139,20 @@ impl Tool for ToolGithub { command_args.insert(0, "gh".to_string()); Ok(command_args.join(" ")) } + + fn tool_depends_on(&self) -> Vec { + vec![] + } + + fn usage(&mut self) -> &mut Option { + static mut DEFAULT_USAGE: Option = None; + #[allow(static_mut_refs)] + unsafe { &mut DEFAULT_USAGE } + } + + fn confirmation_info(&self) -> Option { + Some(self.integr_common().confirmation) + } } fn parse_command_args(args: &HashMap) -> Result, String> { @@ -146,9 +176,31 @@ fn parse_command_args(args: &HashMap) -> Result, Stri Ok(parsed_args) } -const DEFAULT_GITHUB_INTEGRATION_YAML: &str = r#" -# GitHub integration - -# GH_TOKEN: "GH_xxx" # To get a token, check out https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens -# gh_binary_path: "/opt/homebrew/bin/gh" # Uncomment to set a custom path for the gh binary, defaults to "gh" +const GITHUB_INTEGRATION_SCHEMA: &str = r#" +fields: + gh_binary_path: + f_type: string_long + f_desc: "Path to the GitHub CLI binary. Leave empty to use the default 'gh' command." + f_placeholder: "/usr/local/bin/gh" + f_label: "GH Binary Path" + gh_token: + f_type: string_long + f_desc: "GitHub Personal Access Token for authentication." + f_placeholder: "ghp_xxxxxxxxxxxxxxxx" +description: | + The GitHub integration allows interaction with GitHub repositories using the GitHub CLI. + It provides functionality for various GitHub operations such as creating issues, pull requests, and more. +smartlinks: + - sl_label: "Test" + sl_chat: + - role: "user" + content: | + šŸ”§ The `github` (`gh`) tool should be visible now. To test the tool, list opened pull requests for `smallcloudai/refact-lsp`, and briefly describe them and express + happiness, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. 
+available: + on_your_laptop_possible: true + when_isolated_possible: true +confirmation: + ask_user_default: ["gh * delete *", "gh * close *"] + deny_default: ["gh auth token *"] "#; diff --git a/src/integrations/integr_gitlab.rs b/src/integrations/integr_gitlab.rs index 908cc78d2..0b4c720f9 100644 --- a/src/integrations/integr_gitlab.rs +++ b/src/integrations/integr_gitlab.rs @@ -8,51 +8,63 @@ use serde::{Deserialize, Serialize}; use serde_json::Value; use crate::at_commands::at_commands::AtCommandsContext; -use crate::call_validation::{ContextEnum, ChatMessage}; +use crate::call_validation::{ContextEnum, ChatMessage, ChatContent, ChatUsage}; use crate::tools::tools_description::Tool; -use crate::integrations::integr_abstract::Integration; - +use crate::integrations::integr_abstract::{IntegrationCommon, IntegrationConfirmation, IntegrationTrait}; #[derive(Clone, Serialize, Deserialize, Debug, Default)] #[allow(non_snake_case)] -pub struct IntegrationGitLab { - pub glab_binary_path: Option, - pub GITLAB_TOKEN: String, +pub struct SettingsGitLab { + pub glab_binary_path: String, + pub glab_token: String, } #[derive(Default)] pub struct ToolGitlab { - pub integration_gitlab: IntegrationGitLab, + pub common: IntegrationCommon, + pub settings_gitlab: SettingsGitLab, } -impl Integration for ToolGitlab{ +impl IntegrationTrait for ToolGitlab { fn as_any(&self) -> &dyn std::any::Any { self } fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { - let integration_gitlab = serde_json::from_value::(value.clone()) - .map_err(|e|e.to_string())?; - self.integration_gitlab = integration_gitlab; + match serde_json::from_value::(value.clone()) { + Ok(settings_gitlab) => { + info!("GitLab settings applied: {:?}", settings_gitlab); + self.settings_gitlab = settings_gitlab; + }, + Err(e) => { + error!("Failed to apply settings: {}\n{:?}", e, value); + return Err(e.to_string()) + } + }; + match serde_json::from_value::(value.clone()) { + Ok(x) => self.common = x, + Err(e) => { + error!("Failed to apply common settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + }; Ok(()) } - fn integr_yaml2json(&self, value: &serde_yaml::Value) -> Result { - let integration_gitlab = serde_yaml::from_value::(value.clone()).map_err(|e| { - let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); - format!("{}{}", e.to_string(), location) - })?; - serde_json::to_value(&integration_gitlab).map_err(|e| e.to_string()) + fn integr_settings_as_json(&self) -> Value { + serde_json::to_value(&self.settings_gitlab).unwrap_or_default() } - fn integr_upgrade_to_tool(&self, integr_name: &str) -> Box { - Box::new(ToolGitlab {integration_gitlab: self.integration_gitlab.clone()}) as Box + fn integr_common(&self) -> IntegrationCommon { + self.common.clone() } - - fn integr_settings_as_json(&self) -> Result { - serde_json::to_value(&self.integration_gitlab).map_err(|e| e.to_string()) + + fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { + Box::new(ToolGitlab { + common: self.common.clone(), + settings_gitlab: self.settings_gitlab.clone() + }) as Box } - fn integr_settings_default(&self) -> String { DEFAULT_GITLAB_INTEGRATION_YAML.to_string() } - fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/5968/5968853.png".to_string() } + fn integr_schema(&self) -> &str { GITLAB_INTEGRATION_SCHEMA } } #[async_trait] @@ -72,11 +84,14 @@ impl Tool for ToolGitlab { }; let command_args = parse_command_args(args)?; - let 
glab_command = self.integration_gitlab.glab_binary_path.as_deref().unwrap_or("glab"); - let output = Command::new(glab_command) + let mut glab_binary_path = self.settings_gitlab.glab_binary_path.clone(); + if glab_binary_path.is_empty() { + glab_binary_path = "glab".to_string(); + } + let output = Command::new(glab_binary_path) .args(&command_args) .current_dir(&project_dir) - .env("GITLAB_TOKEN", &self.integration_gitlab.GITLAB_TOKEN) + .env("GITLAB_TOKEN", &self.settings_gitlab.glab_token) .output() .await .map_err(|e| e.to_string())?; @@ -105,7 +120,7 @@ impl Tool for ToolGitlab { let mut results = vec![]; results.push(ContextEnum::ChatMessage(ChatMessage { role: "tool".to_string(), - content: crate::call_validation::ChatContent::SimpleText(content), + content: ChatContent::SimpleText(content), tool_calls: None, tool_call_id: tool_call_id.clone(), ..Default::default() @@ -122,6 +137,20 @@ impl Tool for ToolGitlab { command_args.insert(0, "glab".to_string()); Ok(command_args.join(" ")) } + + fn tool_depends_on(&self) -> Vec { + vec![] + } + + fn usage(&mut self) -> &mut Option { + static mut DEFAULT_USAGE: Option = None; + #[allow(static_mut_refs)] + unsafe { &mut DEFAULT_USAGE } + } + + fn confirmation_info(&self) -> Option { + Some(self.integr_common().confirmation) + } } fn parse_command_args(args: &HashMap) -> Result, String> { @@ -145,9 +174,31 @@ fn parse_command_args(args: &HashMap) -> Result, Stri Ok(parsed_args) } -const DEFAULT_GITLAB_INTEGRATION_YAML: &str = r#" -# GitLab integration: install on mac using "brew install glab" - -# GITLAB_TOKEN: "glpat-xxx" # To get a token, check out https://docs.gitlab.com/ee/user/profile/personal_access_tokens -# glab_binary_path: "/opt/homebrew/bin/glab" # Uncomment to set a custom path for the glab binary, defaults to "glab" +const GITLAB_INTEGRATION_SCHEMA: &str = r#" +fields: + glab_binary_path: + f_type: string_long + f_desc: "Path to the GitLab CLI binary. Leave empty to use the default 'glab' command." + f_placeholder: "/usr/local/bin/glab" + f_label: "GLAB Binary Path" + glab_token: + f_type: string_long + f_desc: "GitLab Personal Access Token for authentication." + f_placeholder: "glpat_xxxxxxxxxxxxxxxx" +description: | + The GitLab integration allows interaction with GitLab repositories using the GitLab CLI. + It provides functionality for various GitLab operations such as creating issues, merge requests, and more. +smartlinks: + - sl_label: "Test" + sl_chat: + - role: "user" + content: | + šŸ”§ The `gitlab` (`glab`) tool should be visible now. To test the tool, list opened merge requests for your GitLab project, and briefly describe them and express + happiness, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. 
+available: + on_your_laptop_possible: true + when_isolated_possible: true +confirmation: + ask_user_default: ["glab * delete *"] + deny_default: ["glab auth token *"] "#; diff --git a/src/integrations/integr_pdb.rs b/src/integrations/integr_pdb.rs index 5876d2587..30e71b5d2 100644 --- a/src/integrations/integr_pdb.rs +++ b/src/integrations/integr_pdb.rs @@ -14,10 +14,10 @@ use tracing::{error, info}; use serde::{Deserialize, Serialize}; use crate::at_commands::at_commands::AtCommandsContext; -use crate::call_validation::{ContextEnum, ChatMessage, ChatContent}; +use crate::call_validation::{ContextEnum, ChatMessage, ChatContent, ChatUsage}; use crate::integrations::sessions::{IntegrationSession, get_session_hashmap_key}; use crate::global_context::GlobalContext; -use crate::integrations::integr_abstract::Integration; +use crate::integrations::integr_abstract::{IntegrationCommon, IntegrationConfirmation, IntegrationTrait}; use crate::tools::tools_description::{Tool, ToolDesc, ToolParam}; use crate::integrations::process_io_utils::{first_n_chars, last_n_chars, last_n_lines, write_to_stdin_and_flush, blocking_read_until_token_or_timeout}; @@ -25,14 +25,14 @@ use crate::integrations::process_io_utils::{first_n_chars, last_n_chars, last_n_ const SESSION_TIMEOUT_AFTER_INACTIVITY: Duration = Duration::from_secs(30 * 60); const PDB_TOKEN: &str = "(Pdb)"; - #[derive(Clone, Serialize, Deserialize, Debug, Default)] pub struct SettingsPdb { - pub python_path: Option, + pub python_path: String, } #[derive(Default)] pub struct ToolPdb { + pub common: IntegrationCommon, pub settings_pdb: SettingsPdb, } @@ -66,39 +66,51 @@ impl IntegrationSession for PdbSession } } -impl Integration for ToolPdb { - fn as_any(&self) -> &dyn std::any::Any { self } +impl IntegrationTrait for ToolPdb { + fn as_any(&self) -> &dyn Any { self } fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { - let settings_pdb = serde_json::from_value::(value.clone()) - .map_err(|e|e.to_string())?; - self.settings_pdb = settings_pdb; + match serde_json::from_value::(value.clone()) { + Ok(settings_pdb) => { + info!("PDB settings applied: {:?}", settings_pdb); + self.settings_pdb = settings_pdb; + }, + Err(e) => { + error!("Failed to apply settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + }; + match serde_json::from_value::(value.clone()) { + Ok(x) => self.common = x, + Err(e) => { + error!("Failed to apply common settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + }; Ok(()) } - fn integr_yaml2json(&self, value: &serde_yaml::Value) -> Result { - let integration_github = serde_yaml::from_value::(value.clone()).map_err(|e| { - let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); - format!("{}{}", e.to_string(), location) - })?; - serde_json::to_value(&integration_github).map_err(|e| e.to_string()) + fn integr_settings_as_json(&self) -> Value { + serde_json::to_value(&self.settings_pdb).unwrap_or_default() } - fn integr_upgrade_to_tool(&self, integr_name: &str) -> Box { - Box::new(ToolPdb {settings_pdb: self.settings_pdb.clone()}) as Box + fn integr_common(&self) -> IntegrationCommon { + self.common.clone() } - - fn integr_settings_as_json(&self) -> Result { - serde_json::to_value(&self.settings_pdb).map_err(|e| e.to_string()) + + fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { + Box::new(ToolPdb { + common: self.common.clone(), + settings_pdb: self.settings_pdb.clone() + }) as Box } - fn 
integr_settings_default(&self) -> String { DEFAULT_PDB_INTEGRATION_YAML.to_string() } - fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/919/919852.png".to_string() } + fn integr_schema(&self) -> &str { PDB_INTEGRATION_SCHEMA } } #[async_trait] impl Tool for ToolPdb { - fn as_any(&self) -> &dyn std::any::Any { self } + fn as_any(&self) -> &dyn Any { self } async fn tool_execute( &mut self, @@ -115,8 +127,10 @@ impl Tool for ToolPdb { }; let session_hashmap_key = get_session_hashmap_key("pdb", &chat_id); - let python_command = self.settings_pdb.python_path.clone().unwrap_or_else(|| "python3".to_string()); - + let mut python_command = self.settings_pdb.python_path.clone(); + if python_command.is_empty() { + python_command = "python3".to_string(); + } if command_args.windows(2).any(|w| w == ["-m", "pdb"]) { let output = start_pdb_session(&python_command, &mut command_args, &session_hashmap_key, gcx.clone(), 10).await?; return Ok(tool_answer(output, tool_call_id)); @@ -155,8 +169,8 @@ impl Tool for ToolPdb { &self, args: &HashMap, ) -> Result { - let commmand = parse_command(args)?; // todo: fix typo "commmand" - let command_args = split_command(&commmand)?; + let command = parse_command(args)?; + let command_args = split_command(&command)?; Ok(command_args.join(" ")) } @@ -176,6 +190,20 @@ impl Tool for ToolPdb { parameters_required: vec!["command".to_string()], } } + + fn tool_depends_on(&self) -> Vec { + vec![] + } + + fn usage(&mut self) -> &mut Option { + static mut DEFAULT_USAGE: Option = None; + #[allow(static_mut_refs)] + unsafe { &mut DEFAULT_USAGE } + } + + fn confirmation_info(&self) -> Option { + Some(self.integr_common().confirmation) + } } fn parse_command(args: &HashMap) -> Result { @@ -339,8 +367,27 @@ fn format_error(error_title: &str, error: &str) -> String } } -const DEFAULT_PDB_INTEGRATION_YAML: &str = r#" -# Python debugger - -# python_path: "/opt/homebrew/bin/python3" # Uncomment to set a custom python path, defaults to "python3" +const PDB_INTEGRATION_SCHEMA: &str = r#" +fields: + python_path: + f_type: string_long + f_desc: "Path to the Python interpreter. Leave empty to use the default 'python3' command." + f_placeholder: "/opt/homebrew/bin/python3" + f_label: "Python Interpreter Path" +description: | + The PDB integration allows interaction with the Python debugger for inspecting variables and exploring program execution. + It provides functionality for debugging Python scripts and applications. +smartlinks: + - sl_label: "Test" + sl_chat: + - role: "user" + content: | + šŸ”§ The pdb tool should be visible now. To test the tool, start a debugging session for a simple Python script, set a breakpoint, and inspect some variables. + If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. 
+available: + on_your_laptop_possible: true + when_isolated_possible: true +confirmation: + ask_user_default: [] + deny_default: [] "#; diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index c32a6d594..b207e50ec 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -169,32 +169,6 @@ impl Tool for ToolPostgres { } } -// const DEFAULT_POSTGRES_INTEGRATION_YAML: &str = r#" -// postgres: -// enable: true -// psql_binary_path: "/path/to/psql" -// host: "my_postgres_for_django" -// user: "vasya1337" -// password: "$POSTGRES_PASSWORD" -// db: "mydjango" -// available: -// on_your_laptop: -// - project_pattern: "*web_workspace/project1" -// db: "mydjango2" -// enable: true -// when_isolated: -// user: "vasya1338" -// enable: true -// docker: -// my_postgres_for_django: -// image: "postgres:13" -// environment: -// POSTGRES_DB: "mydjango" -// POSTGRES_USER: "vasya1337" -// POSTGRES_PASSWORD: "$POSTGRES_PASSWORD" -// "#; - - pub const POSTGRES_INTEGRATION_SCHEMA: &str = r#" fields: host: diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs index ef5c25d04..333a8661e 100644 --- a/src/integrations/mod.rs +++ b/src/integrations/mod.rs @@ -9,9 +9,9 @@ pub mod integr_abstract; -// pub mod integr_github; -// pub mod integr_gitlab; -// pub mod integr_pdb; +pub mod integr_github; +pub mod integr_gitlab; +pub mod integr_pdb; pub mod integr_chrome; pub mod integr_postgres; pub mod integr_cmdline; @@ -33,9 +33,9 @@ use integr_abstract::IntegrationTrait; pub fn integration_from_name(n: &str) -> Result, String> { match n { - // "github" => Ok(Box::new(ToolGithub { ..Default::default() }) as Box), - // "gitlab" => Ok(Box::new(ToolGitlab { ..Default::default() }) as Box), - // "pdb" => Ok(Box::new(ToolPdb { ..Default::default() }) as Box), + "github" => Ok(Box::new(integr_github::ToolGithub { ..Default::default() }) as Box), + "gitlab" => Ok(Box::new(integr_gitlab::ToolGitlab { ..Default::default() }) as Box), + "pdb" => Ok(Box::new(integr_pdb::ToolPdb { ..Default::default() }) as Box), "postgres" => Ok(Box::new(integr_postgres::ToolPostgres { ..Default::default() }) as Box), "chrome" => Ok(Box::new(integr_chrome::ToolChrome { ..Default::default() }) as Box), "docker" => Ok(Box::new(docker::integr_docker::ToolDocker {..Default::default() }) as Box), @@ -52,30 +52,15 @@ pub fn integration_from_name(n: &str) -> Result String -{ - // match n { - // // "github" => Box::new(ToolGithub { ..Default::default() }) as Box, - // // "gitlab" => Box::new(ToolGitlab { ..Default::default() }) as Box, - // // "pdb" => Box::new(ToolPdb { ..Default::default() }) as Box, - // "postgres" => Box::new(integr_postgres::ToolPostgres { ..Default::default() }) as Box, - // // "chrome" => Box::new(ToolChrome { ..Default::default() }) as Box, - // _ => panic!("Unknown integration name: {}", n), - // } - return "".to_string(); -} - pub fn integrations_list(allow_experimental: bool) -> Vec<&'static str> { let mut integrations = vec![ - // "github", - // "gitlab", - // "pdb", + "github", + "gitlab", + "pdb", "postgres", "chrome", "cmdline_TEMPLATE", "service_TEMPLATE", - // "chrome", "docker", ]; if allow_experimental { @@ -89,79 +74,3 @@ pub fn integrations_list(allow_experimental: bool) -> Vec<&'static str> { pub fn go_to_configuration_message(integration_name: &str) -> String { format!("šŸ§© for configuration go to SETTINGS:{integration_name}") } - - -pub const INTEGRATIONS_DEFAULT_YAML: &str = r#"# This file is used to configure integrations in Refact 
Agent. -# If there is a syntax error in this file, no integrations will work. -# -# Here you can set up which commands require confirmation or must be denied. If both apply, the command is denied. -# Rules use glob patterns for wildcard matching (https://en.wikipedia.org/wiki/Glob_(programming)) - -# Command line: things you can call and immediately get an answer -#cmdline: -# run_make: -# command: "make" -# command_workdir: "%project_path%" -# timeout: 600 -# description: "Run `make` inside a C/C++ project, or a similar project with a Makefile." -# parameters: # this is what the model needs to produce, you can use %parameter% in command and workdir -# - name: "project_path" -# description: "absolute path to the project" -# output_filter: # output filter is optional, can help if the output is very long to reduce it, preserving valuable information -# limit_lines: 50 -# limit_chars: 10000 -# valuable_top_or_bottom: "top" # the useful infomation more likely to be at the top or bottom? (default "top") -# grep: "(?i)error|warning" # in contrast to regular grep this doesn't remove other lines from output, just prefers matching when approaching limit_lines or limit_chars (default "(?i)error") -# grep_context_lines: 5 # leave that many lines around a grep match (default 5) -# remove_from_output: "process didn't exit" # some lines are very long and unwanted, this is also a regular expression (default "") - -#cmdline_services: -# manage_py_runserver: -# command: "python manage.py runserver" -# command_workdir: "%project_path%" -# description: "Start or stop `python manage.py runserver` running in the background" -# parameters: -# - name: "project_path" -# description: "absolute path to the project" -# startup_wait: 10 -# startup_wait_port: 8000 - - -# --- Docker integration --- -docker: - docker_daemon_address: "unix:///var/run/docker.sock" # Path to the Docker daemon. For remote Docker, the path to the daemon on the remote server. - # docker_cli_path: "/usr/local/bin/docker" # Uncomment to set a custom path for the docker cli, defaults to "docker" - - # Uncomment the following to connect to a remote Docker daemon - # Docker and necessary ports will be forwarded for container communication. No additional commands will be executed over SSH. - # ssh_config: - # host: "" - # user: "root" - # port: 22 - # identity_file: "~/.ssh/id_rsa" - - run_chat_threads_inside_container: false - - # The folder inside the container where the workspace is mounted, refact-lsp will start there, defaults to "/app" - # container_workspace_folder: "/app" - - # Image ID for running containers, which can later be selected in the UI before starting a chat thread. - # docker_image_id: "079b939b3ea1" - - # Map container ports to local ports - # ports: - # - local_port: 4000 - # container_port: 3000 - - # Path to the LSP binary on the host machine, to be bound into the containers. - host_lsp_path: "/opt/refact/bin/refact-lsp" - - # Will be added as a label to containers, images, and other resources created by Refact Agent, defaults to "refact" - label: "refact" - - # Uncomment to execute a command inside the container when the thread starts. Regardless, refact-lsp will run independently of this setting. - # command: "npm run dev" - - # The time in minutes that the containers will be kept alive while not interacting with the chat thread, defaults to 60. 
- keep_containers_alive_for_x_minutes: 60 -"#; diff --git a/src/telemetry/telemetry_structs.rs b/src/telemetry/telemetry_structs.rs index 672e4981f..12b808aee 100644 --- a/src/telemetry/telemetry_structs.rs +++ b/src/telemetry/telemetry_structs.rs @@ -150,13 +150,3 @@ pub struct TelemetryChat { pub success: bool, pub error_message: String, // empty if no error } - -impl TelemetryChat { - pub fn new(scope: String, success: bool, error_message: String) -> Self { - Self { - scope, - success, - error_message, - } - } -} diff --git a/src/yaml_configs/create_configs.rs b/src/yaml_configs/create_configs.rs index 658166bbd..c2aeca07d 100644 --- a/src/yaml_configs/create_configs.rs +++ b/src/yaml_configs/create_configs.rs @@ -20,8 +20,7 @@ pub async fn yaml_configs_try_create_all(gcx: Arc>) -> St let files = vec![ ("bring-your-own-key.yaml", crate::caps::BRING_YOUR_OWN_KEY_SAMPLE), ("customization.yaml", crate::yaml_configs::customization_compiled_in::COMPILED_IN_INITIAL_USER_YAML), - ("privacy.yaml", crate::privacy_compiled_in::COMPILED_IN_INITIAL_PRIVACY_YAML), - ("integrations.yaml", crate::integrations::INTEGRATIONS_DEFAULT_YAML), + ("privacy.yaml", crate::privacy_compiled_in::COMPILED_IN_INITIAL_PRIVACY_YAML) ]; for (file_name, content) in files { From d729c29ea194304d1a8c9b6adda7601de8804f37 Mon Sep 17 00:00:00 2001 From: JegernOUTT Date: Thu, 12 Dec 2024 20:36:20 +1030 Subject: [PATCH 149/185] Remove unused `IntegrationTrait` import from setting up integrations module --- src/integrations/setting_up_integrations.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 3e61163e9..889a0a18c 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -10,7 +10,6 @@ use tokio::fs as async_fs; use tokio::io::AsyncWriteExt; use crate::global_context::GlobalContext; -use crate::integrations::integr_abstract::IntegrationTrait; // use crate::tools::tools_description::Tool; // use crate::yaml_configs::create_configs::{integrations_enabled_cfg, read_yaml_into_value}; From cd50907cf4f1b662f87c238bddb6d2fbc74ee4f2 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 12 Dec 2024 10:03:44 +0100 Subject: [PATCH 150/185] disable commit message generation, remove "isolation" and "docker" from recommended --- src/http/routers/v1/links.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index ef92ecec2..781982fe5 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -68,8 +68,9 @@ pub async fn handle_v1_links( ) -> Result, ScratchError> { let post = serde_json::from_slice::(&body_bytes) .map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?; - let mut links = Vec::new(); + let mut links: Vec = Vec::new(); let mut uncommited_changes_warning = String::new(); + tracing::info!("for links, post.meta.chat_mode == {:?}", post.meta.chat_mode); let (integrations_map, integration_yaml_errors) = crate::integrations::running_integrations::load_integrations(gcx.clone(), "".to_string(), gcx.read().await.cmdline.experimental).await; @@ -95,6 +96,9 @@ pub async fn handle_v1_links( if let Some(recommended_integrations) = yaml.get("recommended_integrations").and_then(|rt| rt.as_sequence()) { for igname_value in recommended_integrations { if let Some(igname) = igname_value.as_str() { + if igname == "isolation" || igname == "docker" { + 
continue; + } if !integrations_map.contains_key(igname) { tracing::info!("tool {} not present => link", igname); links.push(Link { @@ -147,7 +151,7 @@ pub async fn handle_v1_links( } } - if post.meta.chat_mode == ChatMode::AGENT { + if post.meta.chat_mode == ChatMode::AGENT && false { let mut project_changes = Vec::new(); for commit in get_commit_information_from_current_changes(gcx.clone()).await { let project_name = commit.project_path.to_file_path().ok() From d082d499b8ae82e43018165bca0a611bc16b9b6d Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 12 Dec 2024 10:04:18 +0100 Subject: [PATCH 151/185] integr_cmdline.rs: auto configure link --- src/integrations/integr_cmdline.rs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index 760eb467a..e7da6385e 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -139,6 +139,7 @@ pub fn create_command_from_string( if command_args.len() > 1 { cmd.args(&command_args[1..]); } + tracing::info!("command_args: {:?}", command_args); if command_workdir.is_empty() { if let Some(first_project_dir) = project_dirs.first() { @@ -163,7 +164,7 @@ pub async fn execute_blocking_command( env_variables: &HashMap, project_dirs: Vec, ) -> Result { - info!("EXEC workdir {}:\n{:?}", command_workdir, command); + info!("EXEC workdir {:?}:\n{:?}", command_workdir, command); let command_future = async { let mut cmd = create_command_from_string(command, command_workdir, env_variables, project_dirs)?; @@ -321,6 +322,12 @@ description: | available: on_your_laptop_possible: true when_isolated_possible: true +smartlinks: + - sl_label: "Auto Configure" + sl_chat: + - role: "user" + content: | + šŸ”§ Test the tool that corresponds to the current config file. If it works express happiness, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. 
confirmation: ask_user_default: ["*"] deny_default: ["sudo*"] From 029956d57318aaeb917ca5dbbd23aaa4dcac99a3 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 12 Dec 2024 11:18:58 +0100 Subject: [PATCH 152/185] reorder links --- src/http/routers/v1/links.rs | 132 ++++++++++++++++++----------------- 1 file changed, 68 insertions(+), 64 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 781982fe5..7cf47258b 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -74,70 +74,15 @@ pub async fn handle_v1_links( tracing::info!("for links, post.meta.chat_mode == {:?}", post.meta.chat_mode); let (integrations_map, integration_yaml_errors) = crate::integrations::running_integrations::load_integrations(gcx.clone(), "".to_string(), gcx.read().await.cmdline.experimental).await; - if post.messages.is_empty() { - let (already_exists, summary_path_option) = crate::scratchpads::chat_utils_prompts::dig_for_project_summarization_file(gcx.clone()).await; - if !already_exists { - // doesn't exist - links.push(Link { - action: LinkAction::SummarizeProject, - text: "Initial project summarization".to_string(), - goto: None, - current_config_file: summary_path_option, - link_tooltip: format!("Project summary is a starting point for Refact Agent."), - link_payload: None, - }); - } else { - // exists - if let Some(summary_path) = summary_path_option { - match fs::read_to_string(&summary_path) { - Ok(content) => { - match serde_yaml::from_str::(&content) { - Ok(yaml) => { - if let Some(recommended_integrations) = yaml.get("recommended_integrations").and_then(|rt| rt.as_sequence()) { - for igname_value in recommended_integrations { - if let Some(igname) = igname_value.as_str() { - if igname == "isolation" || igname == "docker" { - continue; - } - if !integrations_map.contains_key(igname) { - tracing::info!("tool {} not present => link", igname); - links.push(Link { - action: LinkAction::Goto, - text: format!("Configure {igname}"), - goto: Some(format!("SETTINGS:{igname}")), - current_config_file: None, - link_tooltip: format!(""), - link_payload: None, - }); - } else { - tracing::info!("tool {} present => happy", igname); - } - } - } - } - }, - Err(e) => { - tracing::error!("Failed to parse project summary YAML file: {}", e); - } - } - }, - Err(e) => { - tracing::error!("Failed to read project summary file: {}", e); - } - } - } - } - } - if post.meta.chat_mode == ChatMode::CONFIGURE { - links.push(Link { - action: LinkAction::Goto, - text: "Return".to_string(), - goto: Some("SETTINGS:DEFAULT".to_string()), - current_config_file: None, - link_tooltip: format!(""), - link_payload: None, - }); + // links.push(Link { + // action: LinkAction::Goto, + // text: "Return".to_string(), + // goto: Some("SETTINGS:DEFAULT".to_string()), + // current_config_file: None, + // link_tooltip: format!(""), + // link_payload: None, + // }); if !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { links.push(Link { @@ -151,6 +96,7 @@ pub async fn handle_v1_links( } } + // GIT uncommitted if post.meta.chat_mode == ChatMode::AGENT && false { let mut project_changes = Vec::new(); for commit in get_commit_information_from_current_changes(gcx.clone()).await { @@ -186,6 +132,7 @@ pub async fn handle_v1_links( } } + // Failures above if post.meta.chat_mode == ChatMode::AGENT { for failed_integr_name in failed_integration_names_after_last_user_message(&post.messages) { links.push(Link { @@ -199,6 +146,7 @@ pub async fn handle_v1_links( } } + // YAML problems 
for e in integration_yaml_errors { links.push(Link { action: LinkAction::Goto, @@ -210,7 +158,63 @@ pub async fn handle_v1_links( }); } - // hmm maybe (post.meta.chat_mode == ChatMode::EXPLORE || post.meta.chat_mode == ChatMode::AGENT) + // Tool recommendations + if post.messages.is_empty() { + let (already_exists, summary_path_option) = crate::scratchpads::chat_utils_prompts::dig_for_project_summarization_file(gcx.clone()).await; + if !already_exists { + // doesn't exist + links.push(Link { + action: LinkAction::SummarizeProject, + text: "Initial project summarization".to_string(), + goto: None, + current_config_file: summary_path_option, + link_tooltip: format!("Project summary is a starting point for Refact Agent."), + link_payload: None, + }); + } else { + // exists + if let Some(summary_path) = summary_path_option { + match fs::read_to_string(&summary_path) { + Ok(content) => { + match serde_yaml::from_str::(&content) { + Ok(yaml) => { + if let Some(recommended_integrations) = yaml.get("recommended_integrations").and_then(|rt| rt.as_sequence()) { + for igname_value in recommended_integrations { + if let Some(igname) = igname_value.as_str() { + if igname == "isolation" || igname == "docker" { + continue; + } + if !integrations_map.contains_key(igname) { + tracing::info!("tool {} not present => link", igname); + links.push(Link { + action: LinkAction::Goto, + text: format!("Configure {igname}"), + goto: Some(format!("SETTINGS:{igname}")), + current_config_file: None, + link_tooltip: format!(""), + link_payload: None, + }); + } else { + tracing::info!("tool {} present => happy", igname); + } + } + } + } + }, + Err(e) => { + tracing::error!("Failed to parse project summary YAML file: {}", e); + } + } + }, + Err(e) => { + tracing::error!("Failed to read project summary file: {}", e); + } + } + } + } + } + + // Follow-up if post.meta.chat_mode != ChatMode::NO_TOOLS && links.is_empty() && post.messages.len() > 2 { let follow_up_messages: Vec = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.meta.chat_id).await .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error generating follow-up message: {}", e)))?; From 39382ad275faf7365d5338e87eb55beb769e6070 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 12 Dec 2024 11:19:06 +0100 Subject: [PATCH 153/185] less spam --- src/http/utils.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/http/utils.rs b/src/http/utils.rs index efe4ce879..2b17aead9 100644 --- a/src/http/utils.rs +++ b/src/http/utils.rs @@ -16,8 +16,10 @@ pub async fn telemetry_wrapper(func: impl Fn(Extension, hyp body_bytes: hyper::body::Bytes) -> Result, ScratchError> { let handler_name = path.path().trim_start_matches('/'); - let no_spam = handler_name == "rag-status"; - if !no_spam { + let mut spam = false; + spam |= handler_name == "rag-status"; + spam |= handler_name == "ping"; + if !spam { info!("\n--- HTTP {} starts ---\n", handler_name); } let t0 = std::time::Instant::now(); @@ -36,7 +38,7 @@ pub async fn telemetry_wrapper(func: impl Fn(Extension, hyp error!("{} returning \"{}\"", path, e.status_code); return Ok(e.to_response()); } - if !no_spam { + if !spam { info!("{} completed {}ms", path, t0.elapsed().as_millis()); } return Ok(result.unwrap()); From ece52cffc7bc0c5ae626e2dee0347088510b4081 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 12 Dec 2024 11:19:21 +0100 Subject: [PATCH 154/185] some logs to catch deadlock --- src/cached_tokenizers.rs | 2 ++ 1 file changed, 2 
insertions(+) diff --git a/src/cached_tokenizers.rs b/src/cached_tokenizers.rs index 62eb1ef8a..eb0027f41 100644 --- a/src/cached_tokenizers.rs +++ b/src/cached_tokenizers.rs @@ -154,6 +154,8 @@ pub async fn cached_tokenizer( tokenizer.with_padding(None); let arc = Arc::new(StdRwLock::new(tokenizer)); + info!("loading tokenizer done1"); global_context.write().await.tokenizer_map.insert(model_name.clone(), arc.clone()); + info!("loading tokenizer done2"); Ok(arc) } From f2595d42648f58ebfcd5cd92f88a4a6f16c11f4e Mon Sep 17 00:00:00 2001 From: Sergey Vakhreev Date: Thu, 12 Dec 2024 21:22:50 +1030 Subject: [PATCH 155/185] mysql integration (#487) * add mysql and reorder integrations * Add common settings and confirmation info to MySQL integration Enhance the `ToolMysql` struct and `IntegrationTrait` implementation to include common settings and confirmation information. Introduce `integr_common` and `confirmation_info` functions to handle these features. Adjust the schema to incorporate confirmation details, ensuring better configurability and integration consistency across tools. * Refactor integr_upgrade_to_tool to use &str instead of &String for parameter type --------- Co-authored-by: Nick Frolov --- assets/integrations/mysql.png | Bin 0 -> 86410 bytes src/integrations/integr_mysql.rs | 246 ++++++++++++++++++++++++++++ src/integrations/integr_postgres.rs | 2 - src/integrations/mod.rs | 7 +- 4 files changed, 251 insertions(+), 4 deletions(-) create mode 100644 assets/integrations/mysql.png create mode 100644 src/integrations/integr_mysql.rs diff --git a/assets/integrations/mysql.png b/assets/integrations/mysql.png new file mode 100644 index 0000000000000000000000000000000000000000..ba7538fd9369b0b8e6e6512c8394a6c50cba1ddd GIT binary patch literal 86410 zcmd?Rc{J32|35rqCtFJPEkxN0k#&e4|O0tDY8A}qf@9R)@*=324 zU1VPdGv@c2QJ>G}`d;7by3e`Kx&OG&?~hJ#ruV#;*Yo*&Y_Indy=#~0XxM2W5D1-? 
zrn)`^Ldr|{Lq!38Gjfl@9{dIM(7${UlH19-0Dlot?9p`Yi%q)sJrUOi3{EubRI ztKuD3B!fiDW{>C{2& z(AVznsG%BdEm&xFR@Rv&wc!K{<0|dxW}2{GJF0H&n|yAB->V2x*u#yE*R?M9WcQOn@qI^c zBKp>7bUx=5Vy*|UF1_7ka%7@5nw?5U)t6r~x!7DQ;|ngIoumOqm*ZrLsn=BtzUqB&jwnOestznw-eh4Z$dNk|6wU@5)_D7-p9#8NU~^Ncbo7Nj;afS0wME zE^T0Ij?1Qm+O^O{CVZ8C0trL5nv&s*3W$df^Jh1!kqUC{`(Wp|8Apz7J6$EQKRh2s)cN9{{GHnCdvzfsnwIKMLonzZ^@G`ZYzSkhnyRFNpBr+nZ!8= z+_%%zQIIo%!3{qsVRX=SQo5Y=E6L8fLq(o~O$n*Tc?}H>C9NC61e{~%SuzltN9Z56 zD$b`@RG5y|#8kc^y)y>)&_2EA$<e#^0wVuA9jnk-gWW%$)c#c>ZDr#<6vcRASd?u^XD#Clp8hg z!M)H;O&VhBE@ShQFsww?ez$*>wdC5R>QmuiUC&&1DQqW?tO>3%G{JEg^RGQUJqvZF z@kcE8d5?NEhVPf@N^*Ve32JlkT4X6=-#ge!LeiU@d5|CdO;}uI?Rc2uduF$Im}qE?^=3RMTJ}mNsM}Z)WWnDS?HH( zAFl-#+Nj(QS(%y9+uPgAc=@-Ib}9(kH?LMJ425?TRFAUQ3DV#yjec2tqJ_a29;3#W z9+qR8@vG>w&`GJa;oZj%E(-NUhIOsBGn=+6)!H!gDi^I>SM|c}ZS@We#B0CLF}@R6 zT(#xVHT9H{MeoxJ!aPZ8<9reC&E0B>P^6_%3xZ4UHeqQ_NxZtGxymkbkRhar?UOnp zl!-yX)UbrgdGZv$EA{aPUWb{N-u#Yaj@CNq@V&%-K7Kaei`c7uU){YH6Lt~x=vZNu zRHKN|Gny!6Kc6Lu(+v!`UY+$-k!i67^pwA4wp}d?c7mmuctf;;;T-nh^qK}NwfU5NPlhjxK{?UzR1=^<34_SH^wMs?mqOC ziGFz1ULevT3mpH}E}kBSnb8gi^S7jyUK>V7)^(xYI)bA7<*+7Kpu~G!gjdFEK~Bf< zJLzOLyw>oI0D5yKX~1tlwzD?!O748Zdnn92GKdHJi7ZT_fyF#8fCuY-jJNj21N{6N zjgdZ=;D>?OYItuek|}Xo=X%DK*?GGSVioZ|d3h24m9a}g2Ve4cXBaMwYl$Rjn2;i~ zC#|;J_i^%?AFgClIE%$ncl+apE8_oj=y0+)&>9llCoV&+^WEfFhgKh&US(5pk|vhX zWO#^CsVH_LgHHu9(4DGs5|nb|S{mfs*#;=x>Pyv5*vFk`Y6?ev^=H4z~w7<4im z)NPCsuM84FPJui_A^^IZ=_lgXQ9lYfK~A;SI4eQ;QN;_&r2JI@VGnPnkg6E%$7w_@t+?PULKo)eHQ$;jgSPX|TIZ*-3k6@f7w7hM z*mI<1X&`Fs{2END=*tOEvV0^Bc9R}_Jgr+^li?B-xer!Fl#qtm#(B%MLVe1u=KVBb zI`2!Q_HL;!*H#&zlP^ZTW)szb#w_l*SJ3aJx0XXlVR&9rnE!)%NGPb420V87hri4N zaOhm>?;m8nKe^5fEqF3(#>pUUasmXPz2DPJM0nFv;j>v+^yLM2=>_+OKazkM0+vTf zgHPLMF#4$(QByHbub7vzbo4KA;Ztj7vN=)^G1Y|wU6+LoF5vAvb zetv$bUj}l_Gsa|tkd{`n7wWpu&GlUQ6?yPW4QyPU_YG-(zm}sET zD1~N?X%I2dGSaCsM+oLhS5Ej`1oVLpj%wfF^%6d2~w0a4y#?{1ds_DA@%QWd?Mu;gW0q zia1WvUp7;hj*=tt(cl+R_@|5LP2)CcWhO)Q+vA%*HAht;_|N#sJ#F60*gmGk(+n_7 zqo4}nlydV{+*nwb+!4l{m|MHnEWA}Z=r8ndrq4uHx&FL)Vn&X);?C$JHdB#|FAbbH ztb;WiJUIc%gSjq7{Dxg9et4*!@BpIV3!Q-?JJ?7P0hg|@sCLB6r+bOZ@3mZt8E6<$ zh5;5`kB~AV;8_8j!t$>;

cEC@Z{KWESm+NVVDNXO8+}x3%lyp`6<|U-xZAI}QFC zvR!fsdWZ@#Qobrs^G=1LuZH;_EIp#W0UGZuQw$GzSZ7_M*~$B60@sUEP@PG)9EdOr zHwa*s0;WEX5enArMPmEgWdbB`m(%dJNbWdGfm#Wqh z{2+Q=gpQplln0BT7C09$;a|I1^$pzZS^!;Zrz;8n45oTKB}J~uXYGAp^^{!medWB} z+2vd|(Gf16(rYJGZjVokUc~4Bv)K|`id=&o4P(S5wZfU3H$P9=o)?6Cvp+obe|a{S z0@V=!ea!_7VbaIeuB@N@^RZ1hofs1W+!#1$eRR;tBG4b3?2LZ5zO|>Zbljj53#gEM zAUi=ge`&H71^|Sl0!s-k@v!A`s|TE`;}0x7?8|4|0t`;JiO|D1t`eUzRvUE7BNu2G zgCZ7qDIN-R%RB^U9s{9HW?}^c>L&Oh5V9KwfZR0YpT6AoE@&tzO3(zAmW6Y%hCnTtc^FIQMC&}{!W{_WT! zQdud#^;(ySJj}QD1C3H{b}|nZv_Ek=9Z4ehVA6BJs@ind@4O z-~lai)OXUqaSLXoGU;Re*n){%)AhJ>$mOL1RO)f#k_o#4mu0q-BbHTWeK4ovKPbU- z5AzSE+RcHCENf$f;EE)1WB#XACU>>o-C&}=I2)mJuKqGCDCqHPYdFUt#UU~?V#gye zjzU|JXwbLn0sW(RNZk=-z&q>&#>Xxb@4;@2j!Vd!1IHpLc4AH^05hi}&V>iyg4Rui zRFUR4#Ytwe8+sA8{`7N~#xY_OLicu`>0-X}zm*M<%7H!w>jobo`?md9$=ui)kpemQ zkP+Ghi6r|*!GK&fVEWJiz4j!GIf|C#nXAkSktm-zr)&^luP7j(^KTWK3Q2)zeX(s} za1#DeHn#!b*1z%`y#V2V@*F@8-n937yv2(*fvPsm37iT_r$hW)k(L7h6>jJ2ph~MgUO^~Og1bp zd`xUKO!j{@8gP!6pckQl$kH(W{*bT=JG)ce#hf)5$2DSyBLQTWq?~E$i0^Os*{tyG zu*s z|5Z1WyDdH#^%gA6z*_~J-O2iq@8|9KiNyWJr*}H181_P&6nrEC z3mj|{&*g4vY3*Is9}@xhK&1GVc?Sx>PHOcnM66xgL`#3VC`zN zh^XZ?!pbQX|H>RS5#8N&)8ZEOkOo+@eMdF3R8Af&l+VT7!REUWFY3&>gX@1U+4{2o zIAlc3OF0R$ijxIBg^Z$=kpTtcpLLd+Vwgml|BKqU6vzU<$0L~R^O!5p$@=^^V+9)z ztKVFWq3K>NG6kZ+2A>LkD<$5A6aXv0L7uCfE~CYs2_=;Gz>PYJht(Ht4!OTToC+mO z2W!C2D*%|7168*_p%MHr_8}I=EyzEnB5Oe0KiCR^-)tkm{r#Ub#9r_$=m`ILwGS1Y zr%sMI^=JKBQ@sA8vD7sJAV60T1(rLMn>_6I*nX5u)<5fZ01{j~(g@Vgy zy3YtG&H`_#kPSH*@&M4kKiDOJ#PW}Tc9UFPP2^%cw37lfS+JG@`npf5^8V3P212!ti-2G5m(s?p5U)^V zee%%*SA8%Z-UI#P2FbiZaSA|Mdyd@{_@eIE8>n>rp44CJ%H;rT0LE#7uko406Q_#| z$G==j3O5YE1b~xDx+Kv=YjjVS=gz#)_iT;TjRmgNjkI`|OZXB(g}DR$j$e7ihZ3v( z3ho8%9UUE2+UJhTcjW#=9sw%&YV4!J^kjH}u?oo@7u1-** z5oBPf+vzay)tbz2AvhjkRk{j?4iXFlOzw<@7r;(dlk|@4U8)H(0~(Z1htLy}1||57 zVD3S~ZonJ-1QR$9>)v=c>_Vs zh)1*v%Bo$%mkdJ)f~X31LongFKPbhJ3(0?1j@}a~r+@3*b2O)Z7mp&7{U2#kk6QGw z`lER4L31Z@_WhKUBXX`*FMw4s?BBGA1{zHYXp?N>4pHchV+9o?&CL`eSjRf?*AR`n zPUY;U&Pzupb2DWC zgQ626t^_EU^H7T?Yr#sqA<7p4(wX9NK73mLyQmwG2049m3K$a$OG{U@$no?CUbk7I4v@-ir69z-&Pempe9f&^=DFi#7;=r zF|j<%Vc8p|B2qBbFKPI@AJT1q%r%SIlM@ES*;*pC?CJy@v@YL--d->BvcYCi^x>qt z)hifFiBp^6s!6CDDlY6`HQswCfa5>Ca*VHb$RwT=M#IR!K`Hen{qD`pjk9+(jsZ{< zs-jO$bugy}BMjkx8C+3pU~Nq@9%(&W=dHqDue+4(02lxm*<|&h?9}lQ?BHGjt@iyC zbG*0^VHBAs{G`!b)dajVH=oQY4d^EeR04*)NAdSZJ+!2dLl=k_6~_v&>t9QD;y3Oi zF4-C8P}J|)giI&c7WBk9C{9Vr!7Y(}nl$Xs;OYUm6*@$~t;gVhL_a`0?*ofq?WW1K zl_76MtIkte^+;}0jQLNAofQ`+^{$0V;LAI1C_Y8UI@$bU=k9vuDl~ zy+y>QWM+RxI??h7IGzK^%*u+{+w*b5U;U{q$!Z3iiB`9+xOdlNmBIOwjQk${MhWvnH(_?n8>y)M9zY!{|0@Tw{Ag7Tc z)4$|ZK^$CMmXrJac!y-jE0{R0?Gr%BixsxGslbUb?14_UPa_ zexW?rI0~p99arx6ip+!B2*ukTyNTFXWT`nP8$nQe%_Tyx@JY(pQ3(f4vUEd=IcHsbP z!<7j`y(m4D5iYvvQ%k3cOtv$1!ndDM5tUkL9O5}@1zm8)rH-lQ@ihx1)Tiqw@>G2tgzB306N;51PW zkxZxK)E`B8Pw8J-?dh7}O?f{w*aSQ*z)nFq{ELdFJq^_URW_1hsMSTO>EoI*9AE_2 z0@__W9h1nHf&caozT6B-kyMSd`|ry6(7-11Z+4sqdF*c&kej;r|D8}}O)d}wnM&;N zCCFca+Rq3al!3aH>E_?&K%h3~#EURt6=4-754Lqu4(3?DMIX=8S?8PmG!0mt**3=QM%KX~x|-6MZWd{sd~&nzGH7rj(CdukU1j48pd zPXHI!0t(d?;G6=%jD{eXWp!E(7wfKe?c9vht2q(=O6tjx>VYFTCo%%X59FQg@ju_+ zTe!M0b)uLck^$XMnn2#{=D;$&X(CD~{h>eY^xm-M;nX|35YrlTkh1oCOy{PP+3=lb zB7XWY2hlHfo`4Q4+8zkVVQ54UTc`f{vwPmG=hLVO?3g!`gbcrtf+8@hp_m*!A+-Hf8n6K{z8_7fMUb^?&=I)sVOSvH7-1~s=y;L~JQUcJVl_4)DeVFF z$bGVyGf`pw^2-(P*kDea#Ea5?25{_O%XjtA+d5pn|Eest#fuTxJq1TXP$0qQe*tay zJ;Z7OhMj{r5isl%iNF0W5qUuFAD$J>@LE#jmavO`MZDOS8 zwjTsz?>uuplsk`c_)r+e4 z2<4E;gz1M?LJ`O}fwlevcoQ-K4gY17{^v}eq;uQ`&7vxGx z0mHCSMZwvC?}tu`$cUE4g6#G5nEic${;~qAuJvfXixXkDBFM{C=trB zPDmGPGKNXW_=8(?NJt1eeIokoX9^fIl%$x(IQ}g00!ecIjCaBS7gl*{6mY}PP|MG+ 
zrW}9DQ;>~TY>zy2x_nMsU&sqK7$~)Z+FdE0xq>^ zx$KU6i<#7UzSg0U`w&~W9Z)_!O0I{vVLDBlNgO&l(va()XwU-5#UWeXO zO)Dt4-uEuq>GZtu5_43*tFCM{dfq+HT}~{`gA8sGRsUuHIX{VL0=enVDftEF`=B;H z;|r;|Vv4bI>kK!cA)niFhum;`H-DiTE98Z{l9?&KsLt z`rYdFV*^TqB9`^aZ!Y>??3UmP`%M1kHXRyULyj-H1aP+m4>5_*s}?44!bfR51Zc5E zqNR9SX5RQ$JJw5)RCxqH@cK%!k+E@x)rM9o#I1|PFs0d(c@sz+@K!-!w-F#r@Sf4C z*PO^Ro;6(XTEv=IZCL}VKGIhgn^%6&OrvR``z%T)6|xYK+OZm@v~BQWGV26Zbf7~? z&8EuH*l{m3;^g6z&tv36Gv6GseB!?vemS{!UxHjy5%%PT_DR*2YPZv;F!OfzqVx=w z9B&hqzeB;AD0LDHpHe5mb)feW^Dh!9Z04k8P;W{_P?i3arO@kMlmMpxq6m6Z~IgH5$hMdmD-{Q0Oi~ zNe;UX3%RCR5S__~^9OdHi7P=5kcjIL;2zv!2uq1%1gPiX<&l5b2f}$5u)i7Fx(G-& ziB1Ts&KLHO@OL-rd{WOsynBEgB`PqO$MF;?e>J-729`PY3 z2xh?RcKRlSne)xp{$Lf}_~lgtxz%DYQsxoy=`wBrvUbWD+cvAKoKYNbj2$DDfsJ*=3I<1QDet3CB@T^RZlT=fj@@Xx+X4}3x=(_PUb%1hx zZmOrX%VcN8DL#AMLq=f7Q8%EE@Vni*1Sa_}gUbf2t*8qP7bR)$ad&I39^1dm4%omh zBd_XeipC?`G%ynu0NGf07@Xezgs20jSCTL(GVQTF6A^`u%!3n_C(&ZoY=Cj?SoA*Q zk$L`m$%5hb*OJBjXHUSkoyhKkBHHWgLYmv}wn41Z4l5N7%vKKw(QIvC`J-olOkw5% z!u72W)lL`I%i{U`;P5XmKmgBMx?@n!jZN5rE2&PvqTA8_dz z4Ws4xs2m0J^xXFvuPJsC{5V3}iVLnv7W==wVQj3G!jj4cV}&7n&Q66gl-y6$o;Chg z=edAlFkegOFXwsVK?(hkt>i|5>U+ug1mxgQGk^hb3v>#;Nsw8L5{)sle?vO8U>p59ilhDqXgZe%^U4GnZ+>z)}qNbFU?;Qhpfikbi||gx5f0uzfy- z^xdtSu!X@4v%8LW1qNlVPV%ftoxqp^AGM(g|*S_c|)7n|OeWNFOB`Y^8=Jn4rnc11~=RQ-m_4jPK>-C+LyT5X@ z(0_`NO|BfhnVESlC+cjiXS5AAVleNC|JsF_n5_FqQ}Qs|*&5b;ev;4Rtw`5JmM!|b zo}Qj(R0OXB8LyKZiAtrz_2yaBcS*2nblhyLIR@HR<5t|optCqx+13(sua-QXw?b%N zL{fF7drfhv*US8!yqxF|RoqPSdp`G}at%4;c+K?0_0Kk%<7T%I;gRu}%h>Q0u{C3r z$+4nQqCgP;{9ix_Mu2J;$$>o4zKL+|-MNNANmOiJama7ez;_ol;Ay1HYa9|0b12+59Evzh?TKyu92eJg3VgA+%JM(9f`x*h0p!ZOF=h@i;+?*Q4KM zCJ)Q=RNKEpTeje0_LTa{tVar}qm9^2nMkQ{#%hY_rR90F3%c2GN0A>^xz9FS%|ATJ z|I=ZAJ2QI0SGJ{!eq6Ux;cm<`b{XNVpGF{GH<8i9fg_ST!szHPICed;`@C$cpgDD? z=A)Y%#|a`?GLyCTp&43e+x`}{|LH5-t}8(Vzn=ebth$3jZTMSFswV~$?Oy+bvhB;0 z+qU@V&+q)5d1fqqp7!})#Z+7Mbnb-DjKPYf*I;4=lRzB-v}PW-g&%o;Ga#bmxHdLh zX1_%thyOM*-DvCR^pwMl;itKX!H!Fml#p(e7mw8SQ_{3YcfQ4UeI3CT%cr8^P1cUx zNYt1&{z*_%Po)0SRU~8}AKZpc%DnQQ&0}cq;zra?80R|a8kU!gOidf9Z##0HLS1lv z{RZae@4zXYeDCx37Tugt?#-Fb@~&$KObIuNIk8KG;}XLNf0-I2>eNoMr@B4HOvG4l zDBkf4{4a{>ILxXPhXHPpc_1cu-ubL282M?t;v3O9r65zhXdt9CF`qlsOKA+MjJ1boIViT2egI z?=Tcl>puOsW_nX=%BF3MuDAAfm6NkWoa~@$bUH_A+vNE~+k>BUh3aj?0a4LlfU0YzF!{&R|Hl z34nJnE=$8rn5H7)n`hJwk?W7KfB&A$0MOm<4i-0}HZ-3a_H*oRgPsh@wsLLmEUQ}867|d2zXvDjY-Vs*^B^4*}*7Kdk{RXHc;wGAV&v7$+%J>r*q{b`RsaNUkMw zp>7t0%^WuLW}<4`yvj3FvCJuF6xXX?vGUcn(0%H`N}in@4y;E3o(s1M5tIFD>9_Ro zE?jFcb627+(8dQ#2U}ELkTj@>R5{4qj}qW{`A3NnZpKU+skpo@I$J{RXPe6$g}dA? 
zk(Zwn@K~;L{QI}wQya$4FJQ15J(tA&5T~J10|fkwJ0)wa=ibBJPPvR$%ynEt&L~JE zj3pl02QSpHX}`ZLoa8aIVOE*gRpn^we$2V~s&KzS-%wr;-M6XFqKl5K+F4ay-)Ps3 zEjks+QnqulJmx2lm^0UUUa-itm###7GmBV)n1Wf2vHGbwe?>JLpnL{aar16SAEOM#|nlQ}h!Gh7}xe^855Jd}nLK{oiaA zDwf`>aQZejNq)~BUn_ZMhAw?>G1v4C zfwRY&d4*ddVsmeQW?=5brCi1GuR&5mEBt8hakgG*8%UPr7)${m?0 zi|mNq6&VaSPhYFdlD{Nhr5$$D^K;IFa>M7jc(Kg5qh)7$Gv1qxI$ETK&_4yivuEiot0+ zypLA=$M2IhvJG8oy*U=wd&^4ALbg^!LL;4{(oRXY^0JHC^{U<%yt)8>XU$Z^I}zf#*9e zX0*)yNpY$7@{xj9@`WWMFJaaGZrSYvcox;VvXYgUt%tAIZ8yA*A^LxpQE)rU$EStE%V97}|<-24~3@YiYL* zZZpJmp=1o3BH3)E>T(C%rcfes@pH`|8P~4rQzK^In09dju9Mpl()2$iF0b?n zCvf=%aF*z%^?&VGKOh645ki8UPX!v&?QW5o8$fm~0O?k#=E6o8OuU)Umj~G?wRSt> zuntoJaP^|Y12OwbLS?7-eO(pw#g$!N_m_W`W~!WQLT`FW?p)X$$UVi#a~) z6cVEDf2}>QJ932{FY?Y{cDzRy+@w+_uwifVhm_lR^w%-@)0FBkvE8I~Fk2X)dwIl# zb7^CwIL#1wZ)IaWQD`nBX44{J1cBR?qEE=H*?QTZ`J*=B{6Piz^UfO!(Y2#l%~7|& zHmqCQn-nDJ2^=%cp_FBO18tR2Sq{qi}cV^FTFSJ9Fu)~EI1^9d`V9q=LYAyWA znL09bpkF~=2;?9G#8Hz^)ry)GX(=C+HBY~i9heTbfaT{|Y~{vI$M6fK(=QiUu;V7N z&vo%bU<5msImH?qeN5m;GUe^6uGNnJI4qFpRjlclWG(t!Iq;c?`skbwOnF1E!n&zm z(q|}nd9YA=hq^tU1dZ5BLp4?+DL10g@2FR4;yduw*FqYgzVgXEWJ<61Y44HEOG7ck z)kg`cM7KbIi{X#4U7eW?*^ZbQjFAQM;j-!e!SGKpXXa)7%N~k=+1%BRkr_#n0VYO>Ct<^fwjNuKJUHEwKSDU z=Bf)g>4aw`c#T~U(V_?^D1j!>-%8BWa{sOHvO8a!tk^GM8`tMx! z7kq|P3@ax~cZ36k3r87Ea9rGJVOq$LC# zmYz^Q8X3j?szSxVI;~#rFu(iun!N%$q~+crDAZm@Lb*C)ZAOpP_tiA}W^;YxRK-TR z?94z#Ui;+i)!CYpfPPBW-ui&}S-jvrd@j~T6E7dJm~PV^h}uh8m*!O-u{SyizLQp2 zjS!9glV>L8TH>%Y5hIV4{J&D{Mw;8TPlX%Lyj}B3?9Hpi2<+B9F?iQ`+rv}txhp8k zld{Tabr*!olj>SzxRa~Hd1ijxUVqD_%iO{!1wf6Yv#cy`G3}hWjC4bZX+OtUrx?FN z1%gebKz#p4beoV9Ir1cd1}3|Rulr=X9%r5uKRDphxvzJ7k+QvH<<)7stHYOT3Mws{ zWp}b(Pp#Kwx6eHHc5TrSk!^YZOKdDSOhL%z8yFC)tp+pB20O9u`A8dnm7KmGCxX7% z6}PyO-V5u_3s;dNY_@TA6*+bo(SbG{+JM5LQ1uPPV4DS=V5)Tda_2z#Ot9d3$p|Xi zbL7FS)4Yrk>cOhA`&Oe+Tf=pJ0gbUyzdZLH*iP%X;9_aZ=Oe}~ITE$v9%B}<@{Bz@ z?B_n9-<c087rXlC-OjX-kRQxfVvChW~#2@zo+yJo2_wVewaNfhrH4nQ- z*cCO8nJh}HFTTo}fPwB}PKUM)jz9QF`_q_%qcusgwt_jDAaz{rl87ExtWb&nI3iFR zT2MuIKq*a^Ao0enyn1my=pEb{^+s{$IMR*legEEN6W7gf$5#6a>s1kL=wz+HbvWF0 z@3AGTYRuc=RQSD?s6GioJoU|Q%Pltu`Sv>!mss?`^U=`b0}S&LF!Yj`d;QU4>(ASL_b>$yf8E)|eI$3^IHIE;%CU#6^}2^P z+bx@7i0=BLXx8*>ejHaZrSulA%#e=%x$k->TzeYiE`fFDd6Hn=oy!u;=p*bqIYZcY z5>QUucOn?DJpSznZX#pr)fI1S*Tp=MQ$4?fH5^$ecf?Jx{~9EB34>6Sx($lpnN7J8<@+eVNW>k~}`RwaBw7Vu}1tuIb_7;nn;Eq<|tP zXvh9d*-ZITFJOJZpA#_C_%=SQPNtIYt}#KZ3q#G!(zR z%Nkx~4G>M3+@r(Eif7u(VFW+B_9V8?@caAS;C_i#T*)?x-DW=plgG2lxm$D@7o73N z+OI6VP)I_WBC&Ph>zf=Mg*7D|j%j-h(P}e}c3k4kSM6i`nw}N1#bY7JB(gR^|0kj< zil~aqMb;;HYck;vyR>G1Ms1AH}e7abO4A=X_{e4%xgq1`+e$uT?5JVYg z_`65ec7taeXWIM~PfLiy6Qlwkgf3VjFtzJ&@I9X?H-8n(74#99< zj7SUpE!fx+I`{iDNH4h!%@|6fqO0L`Y3+RNfZ>JAux|qV}hkBD!kD$1Cif ze{3n@d52Vi@6J|)wqNSoVJ?g0D<7?Ss#IGMvF?&r?Ys2}cmHY$&NB7W`m9t|M z-JxZPi}$VXx+w8KL(JFLT>G|-cI$Cn?>nno$_l>KJl)FJNjmAvz;S$6N^2LTkZ1FK zfUEH4g1c#zXNIUb`n;!jsKOPuJU-ItrJ&}ve5l_MrMm;;vAq%x+j%`m+L^UD%F5n# z(pSu;co$kv8EqvLj=tjSYnEwa5SLed4!ikmPhbxrg}G~Tlldtkna$ysZJE+e=UG5fm#BKKc{HJg!h%KI|jw;r|P5#Z<<0)bs0BW^^Q{td_EZhxWh{LVvF$ z-AMIB8ydfcbm+WJTcN}RzDStorCXGRTYq4I5lu|5#Dx9_yvy+L8Lyh+E4jYY%&)2n zP2G7rhN(j~S40vxJ);oIn`t)a8VWmRM6i!8AC9Jk0bf(Ln*v9Y#MuMM**UKzDua|` zBKm&>b0sb#U314zqV}4Pt+#xNi|(48U1Q)H;l+FWIOo3F@mEuFd)NR?>A>ez%9Ts~xwIpab5mV4NCI zRb)IS>HP+=f<0!_JAX9mq`-3|yZ<;Bwi~>}Lc3UkJna4D$2JQ<5dX6S^icZswSZ1; zpT#gAKObsik1K9sP0! 
z>$iwH;oY^}=9Zx!J&9A{>8SVqAHP*wA|I|;RICMG0+b>9ZcwH9FS*_10{5+|YcXmX zk-{eS}l~^vE1H-`rI$l&swN_o(-Wj{Nxb(mZN>ejz-W{j z{kL<-p-CtfnACuzB)-swh;Bh5kd|ar0U@0fFp%*#l!ld=?uG8agp9w#WQ9Vh`6*h( zc;wYI=zf`b3eRF@t&I23m#yMmzsm_p-tr=EL5=xly)mI6*M#)vh*!+*juhoRQMhX9 zah`rbXk2dPVa4W!}(ZkxHxCG2vib_nOyS}Eq<>bjf|+>uzg z=?4E?dGOR=dex0iF6Fp6jK{G&;sE^+%Up4gNnUuj@4?sE`(EtWgf2PlF@LO0CeO>U z@MH6;_|*bUum|tAU!Tqjk>a7m(Kd;#(g0 zT0ujui~vaLkg@*WdcjH>rXO5AId7L{8ipEyAZH?U+fa8TTI8f)cjh6xIA)>t{vQ6P zRsrQ1p4q$!_ybjM@0k#8hLzfYt+b8E6FW~PER0V_qL&$4SO%ODA}#R&qNYXHZB5Gd z)?W27#EOi&zoh+;{maF?r8bM_7PfoxOgq^)Hi}JF_|WFL(FW~SIsqaZM+hqgyjCcg zmGq5%uu`qc-hNSDtDUQle(Bs$+S%mXNW{z+wHaJ9rvQKA>zD?X3d&BwpDxVPuR~`& z^B&OxzYZ?%_ZmA=K)H1xLEz(+%t%*$4e6F+;l?e^d1$zmMXAPC@JFCI&v3Y+_;{4O z@9rcq0{wyO&(^@xTj0G)6jL#OXr=%a2oJAaz|Q6j@va>FA}vHr4YcrN=jO(2ykF)Y z_Z_+JK2d&7CfIs@aG^G=#HIvs@ZMC?e@laqWhzJ>Mie#oB zY;lGBy$|-WlNm5nUg(akmXGeY^}Z1n`gQ)w!Q7+UGh%&0cki*h==ka{>*5-{@_qje z7CAt|b}l~;@$2K~>0buhM|T82B8)4B%1Ua~?$nfCtbMRCm1(W+W|i`cbx2p)fA;8l zbZmz9qS1F`%teaREXd#_{^fBt7(osep#UI@W~iZ~rQDR=>#rR-spsy5(cmC^>D0UT z89^vrk0*TJ3o9X>IZbh4FE1BgX`F;y?^dkXt7mBxY`Q+U3iX>And21aQO13!7K(Pv zd7JHD-m?`5bP;10Z!=}@HXHBel&SRb+{xRz1$iU)(D$||yzwi`am`ki@Au>ftkfvq zU%TQz(h%(p*9tFePWq&~=y-j-!H%QRdNup!Xx!EPQH1WxxA`yMnS6PpWigR&1$NgG zw`Y(^Jp~chl44OJ!I)h2c(SUrVWsQ*94Xd3YwsnTlIVv2begigt`eK!;V9ji0oU=Z zsvfA;qPJ&1wR}GRg0WoFT%)6GJB!O}!ALQy)5}ir<%3pg`>nd-isqd#pO5`F45N7( z$P@TjGW_jb%%2}*c-Q7Tog?gl71jSA`Q_@~!yxV0k20H%Q@it9I~g=bjib~prM5Z5 zU^VlsYzj`vS2M%N&SrAN-$p#BM!_qMYW;y?*OD41XPlDC8sW+*Y79pdzESeNICiXW zYF+Ff8pfMQ_Ui!;hXh{!5dBL3zv!Vzhxd6kMM-QmQs~fG%rG zI}$&0T-knFuj1EdyS7zm1p}!lR>y&QDe>xJoFT`IfvjSZk65|DVp_bZS0SEBHK zJDG)DVWUEgEQNHts)sK+`P(KV`}%t?5iw1J{&YyAd`)|Yxqq=w_<5XZMtgZB_Dq%ZCKj1 z_$!<^Y1yXQf9#*U4>nl+$p!q`KIhAh_HG|+V>2EXZXa-cf^gaK=?Z-gGdbA!xS(%r z;H*)Ru_i($;DVe`4#loMVqP!tR~9>xD9c>wyfeN(xD_4wR`F`B&vK8XlgjweFOvH5 zy~dhz+Yvfv%yOB)R>y}J+7&$?mG<8z$Qcz7A=vD5o+z_h^*{yIOwW0>&~;V6-UxjE zj9VOCwRD3`VbnEVX`^X#d=xqLGztI+anwQcvBEp;&xhz|X(Cl*+fEsE0&?_X`l`0dFQ*6x$6!4CiF}StNF?)p(kJa9qgzL9xbNxsU#D zE%rN)O>aI)Ypi79!nd!;iWlJ8-&|VnP)Le9Z$8DscZ@xx4PU)+T+Z&>!_3SlXtA2( zSP7n^n?;SU{+qQgilx~wFMJUDW~a}5W>+?mKX?%+hM{(xq9~Ny)xDhv)TqYyXBgul zZga2Fb_s4j(_9ee`8e`TzdSIkT;evDf1Uj<%ImFYU-S9KyHk(nc?zyyqWPz93HkQ> zIJ)EK@R#5Zi9?%CWzzhGua#KAeq+_4A3KayJVSfw!8Wljkw zMQLLs%rVgE|4{bUVNo?)+czMgAYGC}BhsRDivm)D0#eeUf^@?S(kTrh!hjMg-OV5! 
zQX<_Y9YYRGeH*Xqy6@+{-{*Ue@B7EUI1cyTv-jF-o$EY*+c{3YkQ>z$`0CsGk3M9t zu#ke;Bhol~-XnSw*`U7&1Akn>{}YU-uZX)s)`gq}R)~dIR6_9rb0AOem)vV>-;U!G zGpKufvScVNIscR>y=FS7_1K4D3RYktbCez& zc|_dyziedHa8ldu3S%lL;GDh34j%K4#5AEXtSN;EDjlq9%qga_Z z0q~%$W!4QQLFdkkcO7HE>l>SJ&h?FSW?}2_O9&|mb_}@)2*m^R&KcathRz9+8#IlE zzXA0vrD)MN`lHX1gKGrc@5Wy9le1n&-2br!|A*djWtJ{V05WvH=iSqLkzn`u;;H)k zR6m!%o<}hH!GtPu)*bYH0hG?c)%Fn>GBnnZ=d!#S5-PJi0fydc%SKOmdUX~9qhm{k z1NEdYVis6|aG;&#)&3*dP2gm)QXEma;_w=YiU#tPX}|nd=fK07=@fWo7i| zi+QD7aZaPclA+ylhDp*5$OfD&U+fP^&wAv4im74we+Vp(@5_68c=Zdjf_3FFJw$H% z61RTB;^z)q#cVI@m%~`ajt)-9g&n^>DiS|P5T27Z+5dp_+eNdzfen?F`ouKvdszYW1YZo$ z^L&PyP>w}u=WnmX{PZsMFA+^{g;zfe^oOstZ84f{C^A`9qE~oP=IOf+B6m&+s@NcI zSU_N#Jm@WpmT0}wO7Y+SSZjC=c=OY@_-y~;{}9XlzvLUs?jai76~EN+%;;EV_(l+ zTosrQG{pS-{;vmYn+JfU)x?JYNH`9t<}j}uyE}^)YCnQln|TE9$oC;fsd@VBwX-3puY70d7gST^ImNBJ9a7#FGyjTP9yk9d{viM4Kk)};9thwDG#g zo%#Y$)Biilh-#nhzt9Q&g7QrsXHfUW1Z=%$ZSfpU;N8n0@cpCwI%ZC&!m-PiVLG;4 z=2GFgk(r4-d@J^}h%HR$Qk7j9&J%UFN5WgacX2}?t!z%*q_(!Y z>k{|;axS${GYz6Jot5-L3quvc2ydu|XV50x(tfYO^$)y(&UVbW{)>u&0v-Yil0Xf2 zlY;%l70!)^7qiW8o41@YOjozuKeC*+H&bvsy4P^IlI)f(mMGQm8?z^-loIce-}9#@ zJeAq#b%y%r&sc*SJbsPfx(yQa^!E|Prge7{R&gf7rYH=gfkrO`%SDMJ25F_acJGpb znd=t~a_0FTbN3abExU53wziiSgGrf_fmEp3pJ>N_2ga}N_!ccYZASVX50A?-n4BA} zOU&e(w0O+AuCLeDhv(4WC4e=zij0+i(RYXPt`j!Ukyx}9APup3?yCIa@#v6i$}~_e zw+yQCCB8~)J-H$JcS0Ya4OhYa1GelMP?Ok{xY-~mpyP6~Z^KSIbvRq>foHY``(B0fRg?E3>PqsF?*Kw+ z^p{5;_O#7P_$#jQeiy7K`a6U1Ru80bNy>T$bkPxbv|)7Y4yJrMUhS zcv_;*JHq(Fa$0X=(&VyazVQLDsXaa_ETn(@+Fw;5?`mBDK^;on4RpL~egt@p!?G}m zNxX4z!FsC>NOJHH3Gn6wi68`7)JKvJ5efx}kFU+{zoL@$CGfo+jTY7eG+d1l}j5Aq^76o&^6qK9v@{q%`c|&3;W+M zhul{5qTC!rwXqPnp~sHb`RnAbY!=oHfs3tKZu4OqeYU(>6z96QI}ZaYD2r9D-geNf zL7dgkJeL=LAp`)xvn$T%As-jXALY5_7HY016&O(U;yIkAQ(+}RU!Lo$mMw#>-W^BD>=`@J?cPf z*LXD(v64tN1}PgHblrDM#;j(f>k1UfIKhj>)Hhe&S-=h3cT9-Sx~tWCHeqdp2x{et zi24luO?2fGX+W0k32jYul?wy%*v<$5IQ>6;l+zA$$%~mg5(EBV-$L1=?aJoIjT~#} z9-XwZZ2#(e;RedlU!ifPhC6afuq>y&qnMr-iJ(QS^6oX_b0@|Reu4WZ>d3YEj;E3E z?6$HIF*{5a3b27RDgwsjKY-SMGga}gY<%3u?nDK0hy9sua8E4F`FMUnI=`~K=Zwci zJ8;*IV781-AOE&LmNk1@ZE^bOVos}SEsDFy3QFqM6&tetEk-BFGF+cx_jQ%q-)mxm z{}DR*lNqMuzsG>zzM6^b+#%opD7Xl7uXUWxKB{zF`j{E?`a9+ z6!uC{Ie;q%gE|4`%q*ROZf4%;2bDjdE^??qzfvT|Kk1m;`5Z{ZfquK}OS}ikU zOqWL2_th`&J5RGi(LDkU$ElO<**4I={=jQHyL`xgs6s&6&3(g-Fjt)wdLo<)r7Kh4 zROp-49v+YX!jj=%(I{_&_w|eI7ah`c(5LLa0pE{4644NPXI5YxE_zglQ(MBx@^C6L z9&`zxo>qo2tw+Nag?7ZdWInnmI)N_ski#i4?;q;mxX}-bB>=KuxlLUCKvR3FS>rLc8v0V>}|>~ilO7V4;c^tbz@7oWbVoH=PKJo zXUXQU^P9dq)q(lOzKYUlPP&zk%KxB$0KVRTjIJC8fR&eqadGaoe7vCP9ndB4z#m0> zChqDL=IP8DVh(<0=uOtjsk@isQHA_IoA7GbA5Fhf{rV@eMtP_07Yb?%9swdO%>{;_ zr*8zev4yQO{T5*#Cjbx!{Few#Ag5lRulaAmYL)7}<%27P3>jcy`6s5H3Q{<~Uo5a=g--MB8c_;8}KxLfthCTEhl7{~Du)?!kTck35g2nV<<$&ZViPnmp(0;-7UG zW^ZE;qQ}-F;hg$p>RA2deyUQHzcDls!x2p*yHOL*^1ql@c)0PnzMb^Tg~t71-%)hR z0s${$V>&Q+F4&+c1UhSS7*R;qD{&AYel*s6e>}tvG5U`eK;LaSmwGJ|SY!~rV@lRb z#Vp&~niwpkpj~T#y+$*7G|J}pC$!;Fculz?N=+Xi1i|KA;wr37)(`t z=dXM@{XiRGs^7hbf8zO9DGMI6yS^g|_ZAWbn|KdRS^!%m=C_gC`G7`#3#UK0#Tm+Y z(HRxdd(}{WK7Un-OH#8yjd&UKd}UzH_Vs$!y}X?P%@TaEPrZ&nufW0|qQ+*qs~%oeBshSy$RWB% zh3W+2L=Xf3$OCW@0g~&luEDZxYHMoy{IQ4SvWV@YkMC8mBqcT`aLPYjfwRiUnM zNeuYaAX!zC8}apJeCX3fyMEo(Oueq_@L{`nE=0*(wZ0G*;yn;3+#=MZP2XGeOO*P9 zp1;x=qp6RWAJwI9&{RO)93YVTOf~(R`X};V)IZq07>P-zWOB^9rSI>!`%<~Abq8t} z=yl$K1h5&=%y;K=~!WqQ>gHuEi{5Pja*dDAixOnHF1#a+i;Z|U&3UvF`Mf=OKwf?6-;2+58 z9;X9)?X{DI^o4|Y)H2xDX|Ow(xWV0Y!4m;K8gwT+5aM#b|5zF3mib-y`V;miSCxo{ zjEYWh1GjUdg1;gJ;liQlO_>9kY9_k!5^k0tcE!aOPv)48E=;m}FD~X?JBa$6h2+!= z-hC~^8NDSM@2Gbc7`VrCfdGLLWXw&ec9NTk4(@A~wjMsuX!U-utuK9rb>lLfiE{?p 
zt0b=LKqvR*?{KT$4b8)Tq?MFkx$;jlG~>K%BiZ8wE>^O zI2x@yF=y&EnM!1`jhVzv=Z4j_V(<9~*%D z06;y$zTo|_l1aIRGzRmiGqmxa}6HE(ekg4NyxHG zD8RVfxcQ4qs`ko+<Srr4u&fwL^9jd! zb}V2CUdI@w7g|AbU#}(L9vyqL@QR(y^8=4Z*GO4{{0G=)=oL&s6aio$grwqEBX#4G zrfYAFCm{y`@CL=b4j|}z+wDJszTDMUF<&#$DBu$=6kvdXE?QBCI+&nO2WHu`P)FxM zOh6G5wXd@%^*~70Lpk2{CNixwa8_D+O?X%!$$3sxRlIYGm>T}5OyfwCu(pj*#A=Gfx=dHO$1cuAP5So z@!-EK1A2x25OaU~Le$d2#`-qV#v^ULMxE*f_^6o8VmP}T>vd#WZQw@P0voi+)nKUX zgYBz>g_lx}rXeTm=!gG-vvC(&*d?-%#`Q75v9B|+oKcF4+zmpd66tS#0y`p$tu?D)< z{)!*Ng#qxjJD_BJh#Vgof6TZdaKjD;{4fTe5vvc2EHv%e3im7ox~rU@9`Am^b$Kp&jZ(+z|vG|9Q8 z&Fv8(#%T)&xK(v_;S?=jq1mh$dXd*o2MO5bb=Qk!GaU_o)#bs>2SmU60O?*!NEX$$ zKP)%~Ic0smh2{j9+s{h>%@jL(4rI3rzhStDDb54Z3=05!+hzG=6~SMC&vlOS!p;pK zYJJgNqSIf8mC}&rp!!vD8giw#=)b$AaF>Jt{aN<1ho674Pik_GBPLU#Qx72+vC9cT z_DTVT-|W@+fc;Njoh?vB&^pDpHOzP7$nWk+w-x<9JBZ0z#h+IM;%T-nhC{pV%9&@L z0I}wcKkh()pPln@1)9kY+XZ?hb9)%$MSaA(*D13KzS*%16UQh3XI$oe0uFHH2bnir zxL$lWtFkT$t{1jKZ#}RdxQN(;UUCUvyj_?`T~N5Kcp^VG&k=>ob-bZbaqWM@i4LyS zy_wZ+Seh|e&%My#biPajHXl%VpW1q1%k@SeMdE)#2un9CPChPc(fVCxO0O|p1#IvX zAe)Y^g?Z=U;EI%f_D~>d)Ws(Pq>RKZ0l)!CAfnByuyg`2MgU;7t4}-?=B6rqubcmh zfFI-oj>>jylbLBSW9zGLMQbf$WQj$Ko=L!fsES(&__fdCodipcpmnnBSbwbh&z}=7 zMg&#K7;eLwtqMMhd72y5c{wTh#wki2jmernuchiS2b!v$D=7c~f&4{&_=7V$iZ;9p zgS0e06KdTjGirGGn(lKWuq#=wich)P!0r! zpC2(04M5C&nmyhOpOI={IboF}8oz1xG)bc=v4DW}=bI`2VwrZ`QcSs%uw+khyAZw# zgyi~YVXY?Ojvt^#{Pf)xRrwq^(jIHF>|8T$le$N5l zUQnEM_)C%e>)Q+59m82EfrEj={a1<>;I6Zs_W)iTZu7QLR-MlGu-(N|P)q$Fo}cdlQLK3Pbs^`P^bUO@iB9yG5DR#iQHkvY%sdk*yj&;KBB zBXDExc*uL8LE>G{NU8evi=X-T9e2h zb^4#a{BM(V_-`7uo~a4VyEDUl{Q3H~QBZ>1abZi^wctb%rju{u?HC=F(A*#pH3h9S z8393&j~owMGl53x#UbJe0f_hI6C%)RAtl(hCa(TCgcf19VL39ve15l7r$AA0sPCx7 zHuYM7C-K%zRR503Jm_u5vI3_;*_vc?;4#h3;m5k?mCfxSx<6-QM^^qUsE6zki%%?Q zm6!VM&~=No$ljwqrJ}%j#braiv7p&SByNoRJ6{?Xc7Mv;=rFY_o7Mwi83j{eEPfUvrRoS9rGpVrWTV^Te)FC#a^qtYA8tCx$e{ z&+|Cr%&@e!CP^n?f0_!!ELhTQ1}bjTY2cMD4Qg1Gtu!IV_@3h5+)7$=*T_~te-uPq z`0K#WmmMIQih0Hn5q`3Yz?tG!OCR1+=+@!ETSmN1zM&g|KaxwAll|>mhtn~*joklu zX8D1=e8J6Kp5mYr9i5w!;1nvKg@O>o#S&k$>kdJV#$ojxS$SDb_4iK+tL^EW@K`j@ z4IOCLMvXcSclbh-AO^;gt$NX>8ck2y;%`I}vqS_3>o>n>D%b^X{665c-6$-u97H8& zo(u|3hx}iPJLY@`%B>b}#ItDcL%Asu-e$BO1U=coZle0OISUV)!ZH&Lb>kJx#mOMe zHXrR?nveHykPS`SK%@xr>)~5gu=-p?W5`0cX zvU9l9UNhzQ>Dd%9P@-=NKoGCEdQ=?nIFa@TB6^qWys#JX5P`)zTO!gW0kYz+CHJAC zDsXXuGsyN2X_`Z8ycN|R=`6CLv9Ai93Na8J4UMPv>~Bg^B^A>SZbrsJrQK~%w-mgA zho>CE_pPMI=>QXy-y{dcWtf$rc1F92hkH)SlcYI7xIN>1JeD5`%$nfzY#q1;q%iP< zn&>vp)C?tDbn5J|8%*8kFxvO<75dwyFB3w&hu5A5cp-f^GZ{B(|vL1p4C8gSs>5 z1pjSN>|Ux+R8X%k@AAk+V~fw_Ea~lUZxoK@Ys`5TT3_Bt`ZVs`K}-X7TacLI^^{-{ z{%!^>{?13-8j20&fDn0=?`4XfW>cHWZY)CMmreTN(CMX#M-fhY<(j*5BXBPGyn75>!9L`Z%1a zu^TBmhW0rJesd9DoutGUO9Ad7j+biHB{YwT4G#TYWR`~*MDPGdq^=Z%j-g3up0q?& zca;_N1uF>7gzMCS-p+S8O>MdAB6{>!U6+>xpe7OPm#`IL{@=}~wBoEnM`{6^f`f!| z``NO$;Tr&{>2N6AbVLg0oQnQs zJBeXz$4APRfb6aWF?olb@nn*T8|OMcOR`QXx8^xXh0WzyL+<{Xzrmp@&)1t>a^~Iq zi7Z}dDsnu6gxgH>7PV#jSh`MIVAZ4EM*20!uHnqR(9dX5+{6|2_d(ZzPd(x2TC(na zxC`DMxq6tSuDjrHUX_-IsLiGG*F|S50Rr-wrFw%G(x^o`rfKWlW7jWaMAO1%{<2F+Nm%fB3WJsq65GWzu#LyV|Qg zoo_+RAhn16{sCS1D!Adzs=BxiLRwe9*9|;lg6ocu$0G`3;cyZKRBK;fM+b>`PQmER z{(79B99XJU0pl^+5Gl%}Pf1ZJ*m!uDO>VgK+$hc_=cyG39LB_JZ447inyojq!E&%j zH;qN5gMWP2P}o27lkaJ%yAjxEHG0jEO^WB|M#vOQZ8D_rLvHBRE4B3N7BmaA4AsQto%WUJJ|$S-Jn&`7!jM9j;lkH)tg;2PktBp?ZLcS_6s@X_XmMXw z&K#a|XTs0BOk}eq;9&?s>&w*({+>2Rk=uGV!?X`fB5ev?RFecpg|a{DDlIeva1 z3zCRs<=R?Ed%_4hN;o!^VcxrX7Y!``=ll115BW#U$N&R!dz9Dj^wqPABA3cHO>O<1)O252h8w%wnmb*<5@eaP$<*1FP=u`4S`1L z-|JAf!+4Ol22V`#rDvC_w^?X*A7(h%B_MyuKkR=thBNn5k8l%@Wr1hNI9D#@+rE~G 
zaTWUOgTz0DcxfBDwbIaV4C#KCyw2LRnMxILQ8(1qc7k6PpI`-9+?5S(I1qY}+DTJZ zO8&WH2=?LLbsaO={tY7<9<~_@if!(?JSy+NYC2?05m$zRX&%F{nH+()Hp{kzB~A}f z65FXU-_`5slX6dPjsa#PID%bb z@$)E`N(TIrG+ET&BJB>|(Nua7{AZ<8($6M8zbCM2pJw;tXEydced7Br6J?I&+7b@c zGlxeyP4LSZG#eA#@I;!$&epq!sqTj|^xopjcLkDd-p);`<)l@H?C7t=>P6meLx$)tXLEh4qnT5p5BJ56w+W^75zN=zWRbK8kq{6j#9)+o3Tbl;#S|M3KbD z7I;1Q+Nl@i5M2WEDf}Rob?7;b!AjC(x?fAcnTx>ZtSuQY3&)kJN(k{ zrOe=`rr{nnVWTer1PG6`rCM)Do4pCymlVd4ouG_sm4^pAJi+@!iN?S=KKvFa$B_X2 z{jt;9yg2bAWz6pS^6^j?b^5!9jyC>O?Th5=mpN&EJ)9C+&Xe zxVcq+!ZgdZR;{^16yOIZRi8fLojff{N=o`kD5f*js1UO*6LNjEaEOXQp*tQO^Ov2W;&c9R$(raWi~m@&+e&otyE67c3pB3BpFtG`titA!VYIY+l!3Q zzJP7Uk!C2&C4d0;8bs9hOAiDO*HvGWR` zJ+Ux7f1}=trwj1~lvYNd)vYt{Vhy~!oWXswstIT)_C`}xpTbzvk68~Koq#4<@raG6 zI-_eyC(<_D-Bx;7fe|@&Yd_l@ zh#!Z*>3xw+zLvL_?U(uymTiaBQ$#qI?>A5V-YSmh)QBOCh3&lx0L`l_(bgBm1k#<-Jh8u&MGv){pM(OVV$W-rZi{NNdZlm6ulvj9fz!2`*HQ%%_eC9DR(t`zA+6?%pJnk7UlzA`o_&lVb%KPK( z?{c8hwu#lQO6Mxxk1x#<4dfkUCJu{j=A_@?6r3M$yyPCp%zh4Y6R5u$MK?|<8h;5Y z?thA&Eq9Pf{3$Q1w9dNAzh@E^P&({}cK7vd*iPkun|(FsaM~|=f;q!!q#`?4s0zTG zKe-DQr0YE^98wYk#&rq&p@IXVK^LrTpBFoKS>Z8#b(Y=G;R4}lW2(@0(u{MmyP6LNDdU`$JDy$O$EDf%$oewB5FsCIrx;QOz}EzZ0iMay!l>t z*|=R#1d}^_lfFpT2ym@hE3$ELHs!CJ-MA)Np!&_ikms`!+Z{cU9m6|%**gTp3T8$| z8Oc*|KgF2#RTNQ8^|p`q41^5ZbMTc*)~MHA=CcdN#l;MNY!0u80JCK>q+%gwgSAK&NUGjU=f2{OEP)6=hEyyUVzF2#ZJSX|8LYX+kUDkvoqbcU_wIHX2_%0OuvM2= zO9?@EknEqvd+wc`rS<3es{ePClYhI#mqFsMhy9{OsBo))!$$4{mh z%~R9Th&M)3OM`rkh1JhmNCLd27Y^gA&AZtM#86BEqyKL`WoJ>P7f}S{LeJxBdPtTJMsNtA z5Lm#?QkH2ZhQeXZ->H*J3l|EOTiL5GZrq%2Mz5C%Ka5|KETbrjGx9;zvcPN@Qb>0e zi_TYKvQ%|v)#~zIdpo@y%~5^q$i;KE72S{=zSO+S+U_NIHY}Lo_<&u0oD!Ek>}5@s z+OyV~@4IhQzTY+%>0gUx8YukjK4h)T;&*ihA>=QPD2Ni;4Q$-^Z??92>&Mi2JJRy@f;dTlVAmQ&1%$+-PlU~7aPITgXkae1> z*ppE4 z#j;N#vj}J1Y0oPe>D?7)O=Ni)w7;2jYrgS5e+vAmm2fTv^e(RM zr|B^#<*SaX%q^kD~Jf9~$n zzLUp_n@fmF^o0*9bMSWYn?wG?Jb4D6or14wF>#I$6dm+`&)4|Fm+bjRM|P!>)5xUf zWZZAPsJHvpQzE?!MYosv;k`Y=e|GQQs+tk0*IGI|FAOhJCne`g0p*}U6OT=3EskAh z&vlixf{G9qOhR^Kln8+hp*f3{lS#zY+^K7+gnIiFmvwll=vq9`t=n)=yaP#QP<$mT zUy~L71p`Gy7m4h34eLJFKY-WpX>FGzSHdrahaTjOE{WoBtsHKhaU#Y(Iemdw2KLt+ z^GBR_LP|4lO2_WMGuQe|Ko6_L*&%}-eG7l1?&@=JMk4as#XH+V0)4$>hWgPRy11WGqDWZb!1Knfjk ziAX&zeP;L$oPVNqHKzurq)R&n-Dz6|+Pc4GPvApAHqmXktt5E47<9P|vyatxzvg<6 zqREa$58R~)SR+Lh;rZeF^~9h1tu5^I!$Q-zF;JRjkB9Rt$(O4K>F=m($9>ul21+7C zych14(KG5j@tc2->vM7WvR$CzCysISd_*HSzxlHPVzmFd6L1NPnBvoyHbM%^TXK}u zPUjpdLL9YTif($`8kJ)?*I^2MiEadXtDkFjo)cwyan;qpz(S@VUq6iKQx3YOXygLF zY0EqG;tz~FBXz?92AP5e2Nj`bD1%Jp#dN8<3xO_0F`F8XtI3~5ruBQkP z1oA=dC|1n^IgE~}2p+j6Xoj(^Ug6}KKFfXpBy2eLxlx}eVxvY(WMd92!Z>7V--9vA zl6xHnN=)>Nq16h%j;a?1?T}}+zYDhdc)RTMu|U3BHZ%3xBGr@b8cw8zn%$?#lplJR zRM#B7dglKcBUXSL@OQ?IQJ>%aQS~;8k9JzvJ~@!mWx80-%P^6(EwksB?eM>kBSdX9M%BAL z*RT`R!GzVG2B@6}2Q(1c)?^L11JIA2-_{Sm6gv~zDkPgY>@1WQ)1cf;A*SNh5iW(1Kid~2@_TElS7|9c z5pbe+@Z>N>Hs)#pT+}nWi(h|hxsptHw?33CxHVt*hLq#U=%o1t1kMQEuh+QyB14Ro zvoWjT@co@J!A){V3-03nP)J0~kJ%G?rkCyQO?K8klWFqJ%ihJ+gjWNM!0ZnCMZ)bW zN=VZyD%OvILevyek9I!4VMBwm7;?uex5Mprg}-$F3{*Wd8`5Rw`+D{`pe5?t4u7H4s9huOgY4dh?^n*TdB+nGfy6Ww~_8h ziFsY^H>CK`IV~$OGQBJ#DAz2>QoMF9;l+!^*xBD@?cvr7V;973U~Hq z?JD>jcDv-YV%6Y&dT-;|-B13+tWZ&6YGgj5xpO4>og^mtRYUd;oH$Y%m3xkpZ!+YIWzA4W|})w>l!D=$)t1bav1H-avb$&Mv_e7&90b9?FRugg)jB( z1+yM3IoRgajs^pgG6H-qimJwPxe-4OgCx6cNS6%Raz9-Dt$WP{{Y8Frzmj_x#Ol@^ z0c&kf#W^O9q!~EWiy(bM;3FGSMkagJMMvRKOJ)Q$=-Z>3S5?n_1EB8Ab%Q26HbO@R zkebPoOCr#$LOhi&v-P+>XO*HNS@M$m2S}4A3z1CY|hgu*_o}<^;S3s5$pA>^{?BX*zl+X z87#`u@}2Rn`&wnW{1SHlo@C8lxbw4^l)<)~k!_$*@$0|T#X!X=OiB)L%B%{J~NVwm-~skyeCx+$;NiA+8rv za+ILyO757`9dfBf7`Y^FF441R&#p)R{N~UwlVi}T$i#M9Hd!ZW%pJCFjY^q{L%g*{ 
zJ`G;T5D)=c&<&EeE{DT(P1nqwWkQkimp9f=`-criwtMng84x|R)If>RcEwSH{tW(R zJSXlf1)lT9($JO1- z!@gW#PTBM%#+|tW9cFlerLn@-H-_Tn-7QABsX)+elWj6G|V*2 zAfEA-lc#I=d~uh`Y9zCmtTiruj$agDoFHIW%WuWQn1^^;I!3os6(HyzmP&VZkoGdN z;M92GL{xN9%PtmWWP)5IYC;?BdKJq)kDLTAL}U@zwm)>$BffBYoQT{c`0O%VZOJM| z#Rnf&k)B``Y_HPrW-HA3#KQNf1$#cs7^U#yE}h4%3>c`jhIaRCLrWge$xD2@d^|6- z1?K&4>YomRhr<&DHFswT;?3_7iV=CMKJZMvgS5aJMjPO9klE>4-n%C3dDu{QV=elz z>cGZhytx~=gFaN+R*BK+b?jOoi-x=qK=I;PEfl)-XN9=fi3vl;%p7En@3*x+z5m>! zTyZMRv$Aq%gUddpFS4RV%ysg_-KFbL9_VLDI2=yV`C@kzSnhP1r#bfICe_ljB@u0A zW=9&n`S_M3;tW6E?o{6=Aaoy*lX2eu%`c!*ir%?oA?`t!KG<-pmzB_lXL(_(!*wf1 zv-86+v-SpJ_!vJbGNuqd`&hAa@0y++FKRrOs30#7QN*)z{+$C_e?L0Z@b0zMy=yw} zxp{f(*aO}AuAdGbWnEZ^fH4v8BPnO0kYd?!3)HVf@or(+!)qRqv2hQym89h7KgMu3 z#glx8jxSRTSwFF$e|gW!p^nB>!VgD?Wr9dfZ^a@nxlQO{(E$^SG6ufHCwnqZm@-i# z_xaQ0j>~j;tIsNZRYC@Z;DD$DHx%)N74#Ma(lTHYXW?%LwY~_^QjJ-MtkLsCD%s@YdVzu*W`7L)_Qr|MElUjA*D`aG&??$IPlhuUbRR%Jn8f4 zHJ~%GYokAqdaEF~5RLj4E?#sug?6XRP^SgDF$)#0NB3<3ZHShbO9)Xr#beMqA+b7yl$4$n+ow)^>(a>@{<)x=0f^@DF z>KRI2wl1G2brq`dK6$D4d(yGpT66DtP*m;nX;rW3Rp-%H6VEH{(H1(K))vxJrgw5n zVI64sZazd(YeO3~U67eOWTB7(F+QMo>)=;7pslrTU`;tlx)pN%28Odiy3fv6e={_o zRFnZ&see}4MtpIuT^SQ3Iy~E!n@TejNU%2V`Z8(x7$$_+t7ZpY^=7Wb_Fs=KI7LfE zy>E9GI+5j$;ZTXmj>f1-CG0;oFr6g?J|#J|Hm3KzG*62do_v?aNS^PuZ&}BmH5vu4 z+0}MpPt&YhhvAJCKkzR&&VVUxf1u}>-_0fO@XV&&r!vvj%DsFJ>iA>nae{r=F!BDkwYSS{L(3_4eotVAys=)tcZ^h6GHG z-{iC7D)>#s@I1#rF<41nv~9#Ms6I)2l3Se*(KKOsVO;AGDHak7y@x-A$y;74q5FF^ z>wb7)|6yyvd^FU27<+>#hTmjd&8T8OQ;G`G=*YanVM5k{K3$x+($`Uwj;4WG>Rm0!U_!#G3a8tRuZ*y342L0k)K&v!4H}Z9QxS{x~7mWB; z1S(LbKOGnb5rb=;|Ic~D;pxPS1Z`BzFhRc^WZdYQ}KdoAKu*sV^;&b&&`!7>A~9eoOauR%NEC2tg}dB zNKrYZy5uyYf~}^;8n?1= z*O02k$8_)1{b;x4yhL!bv#>)MnT_P{Z{o_3gLXCFV>u1o;qxDLlxVOuhMnv zMi}m=mD8E~q&>Lp2tL)Io(Q+yFWi3(Iq0gZHvBXUWcIv%-G}mpX(KJa`?2e&RLkK{ z&!?k;eMoj|R$%i^NsQ2~#D4lfhAvd$;wo^JhG4g_LY) zKzI^;q-+`V<;qHCYiry-$wS^i$MWyBD2F2@aXD_axh+q_azM!JxN>_wxa1{f6KeQBhtyL_J{*p&HeD{?(f z?+q|o53vm*_$s8kby!|q^?)q6XT$c3h}&N)mp}uxD_N&ahnX3ihf@TfjfMHv5KEdj z8Ft|@9m_QPCpo_cPC&K|pJ-$4inMwI>^vncYG#&kEW9NZPxpq>#KoJgoz~Rkz($#v zJfxdp7a|Mu2|x!u#G-(s^Fos92CGob%^pPlDUXNfd!C~u>BzMJ)6UtEuUf{&A3hey z?=%u3Sp=bFN_i`bH6z zjXcjJoN=`G16OuxFZ&$JEv?9M~BC-#fYNfQ#U>P{*5h8sz_Q2vX^ZPPAL6biEo_^oWA>)UP8Iwrblk7rQ6JG`+1YcI5KY9yoP6vP5+{gKy^Fb*+9r z?jfQK=P6Nvj8T9X5ulJFOq{@)Rb;R~(f^dU;z5)BJBo zA@U0gi^9|$hL666{3+`kZL5YtR!%!tJDg|u!F>u(!v*@^J+xN_@|N&=yJrwf4YDpi z!2B`pFzff~)MV1JqoV&6urmm^MII*_s)u%P)#BcJH7M4QPmA)%IZ?- zz%QR5)UDI-z!sHa&6@EyV_3~80JyL z43gx|x@N!x8<~#dDI$GX6Nj*b;DTkJ1ep=getGk1|JkSI!8==uevYA((0O^1&;HfS z`oQ58^7He%Qz7;I>^icpM?&c9G{4UGuRp$(QqS=JXIoSbk#9n~+lCN0m(E7e5|O}* z;MZJTy>w3YvzLbjxX{^X4gdF!WJKQ>Kj=O)i{ImY{Qt1^)^Sxt?fxjSflYTKY`RN8 z8aCaj(y1sdAtl}2-QC^N4bt66hjfE9cYMz|zk6={&4<}DYpq%9dA>C@dqyG}Z$^KG zT5_L~c2#$vncm9f0D!JQlY*^zF>|{>>-Gtv~^&M!A0=i61~U{zG&|{!`+BrC$}1qD9v|@xpG? 
zyGCKPn_hc3Wg|mkuXGHKF7M2I3OklUR}sD}I<*m{MG+dmmY|R^mx+Cmh>PxL(n6t) z=Q+Fyj(YpqOoJ)VQK3KCAcZJ8CYXHaLN+o2Ph+U>^Mi5O`t-E=CRxigK9&v2La2A} z$a{bY|K)nj)k;VwczRiyTlXY8A=-#hQs5KSsBBnBxA!%T9(Nnk z!0kBn?~V(NlnkWD#O(Y2Z~ar-*{Vn|7&d3li3FM%?j)Gm`8Bk9F7WoHjB)eOkO8`yVHmoy z_w0T0gVq8PznUmpWjhgMKp%}%r-l@_1`{V1<-aeR|4Sy%QXH)|_Vbi5BL+NI97nls zDpVi>QFy0Vi0ozrB-tpJX^vHnp&B3kp8{j||F6K9CFv|~0$GhQ`z&Su8YDk3uu)86 zYM8$akave7jlOo6pc-h^O39Xy4vN!o4p>QA`OEQD#Sjw)%r1ffH7OM}oBSO+mk3IT z8uor-N+3}k@)S1RrZOg7@+G&#Euc;z|7vX!Gz_D05o(I#=cJBbCfNp^5ckV|Yra{>)fG*|C=iR*{NkJv9AB{Lvg0PwAWed}3^1zx}mW{GzH z@b&5c(2_6;7&<3MZ#|bcHCN5L+1Is00ZdbEfpK*T4qT>&923(cOa?RtzQ)=^`x!X& z>2ulm%v5AAjVd*sb0xmW0H~Sl%>i6`14flV?}X{!c18x`gGT!iAWa?{0tH0&#iQ#Z z@V+7R`PMEl=frr!o-L}_@SSZqr(q-hHygxRiDeL|$ zMF%+qn6ZD59%K?jCy|4ksm}cn>;D!8veLS?}xb+ zVoJo%FkwaJo7GYEY`&UAEtO~X5^ z7Lk~^}ry$K4ouN6wS)I<8H;>Repde=A?m;a6-|BL3AChDtSP`m+wQ&|CaAf?{gDJ{M zJ%84zL#-#x(BPmH+7p(fjDwMWbfd0&U-|2{2t!5${vXQ%{~uWNAIs8EHu|4kX#+;= zzh?d4WC;w55-xCW33)}K_nZD+Z8wlWtw)Te5x zscp7IfNKeSzL99?}JpM7GFKfTk;N7}?RjG#RmWtp31zR}wU>5>~Nr zFiFpYu@HGo{j?y=IKU1*HhdD_sqr-6G~$4t2~f#&PcGBs;=T=%kQ5j`-vQNcjFZD0Rbg)pFK(Ip4Lg`TweN z+SY_XKxRPo8OyoZpafu&meoa#+t3jiM5`J>qnaf(fSL&n{Q6;*ciZd-dSt>}?(uae zkB+rC#FBeIhR9rcqMqh@LRQjA1EqzgFWXX_Wh)QpiFo_U3kasNM@B{hUKnMa_{eJI zf-MCtsuGm_^hJVtk;{60o^o;UG#2veE0Kc~%Iu2k5vArL7|Lw$JXQKlI3SEnaOO4G z|F)wSb7a|w5J`1Kkosq8Y~xAl9ut3z<%kfYUs1bp0_wsG|C&-puz#7?-UVg9z(37) z{^n2mPBiDXPO_CNc_YE+_N4hsMOFb)`B;e!GG}?hX zD6h$;w_?%!DW}2xx+g?#sMx}404PX+qhES_NS%zWWWZ+&g^ayS%&(u0h!f$bq^b~p zE)iloxwIdK+!Qv^G%d133(Y6@2{g2EMR93&yl~fb$v($Ml|DrWzAcqzKiTps1Z1?Y z_prmCEfFFI6H}R#<~Wm$Tf>()p0Lv2g*f$VpV=(MH2Z-!QVn^h>}zMJrCrF zrg@0r-^O9VGpm215_6OKaz6C#5asKWtZJglgQQQ99nta~zj75wUjM zNaBw7f#O+r#OwAQR(w*zOl+?D-`VL%Iz8p8+roUBte2uhIiiLa72SI>S*0_SKl^GX>k6Pv_4|09)wy6ug{PI3N(=CfQfXR5NA3MOZeNt9Il%1`LkRZf9GgVr@l8`_YB~CtJE$>_RTnqu!=2IrtM^*VwpWhyG z*2)O%DHDZ;1>Q>kjVGeiI4?CUdE-`0xV#=kAX-rmzNN>(nj(^dnjz|2{5iFT113eI z!!-;&NWQctub4paYNWa{}-;jeBj)e#7 z6$@5F#i}8W@HR|Wzxv+N%I?O;tb?=V2UB`=oX?x}vl6BietAIG6-dQ*80I}-PYFYC zO&>kTB|yE#_`tm&8s{Dnp=3=*J*Q29h5v@umXcp&7Ws{XD|I$6{2tMM-gI`@khe%7 zYH2E(Em7_Uw=^tXFfPBE8r-g-LqanL*&R&F9t!?6Z010ah$9EpfDj6i`Efn_CIQ8y z|8{q*MO*VNq+`tygm=csiih+MPVY>Qk3bE&7T3!!QBo{FxwDp2ApLeB#te)7^OfOa zqNN~;!+Ul0`u)-d%!W&zm18aEdZH{F(a?xrp=wKJu+OBsaTt<0tXiDd(&2;h{SsV3 zpWLgi-;Mm(;*#>C9*vEQTOn%F`8ByNZEEn9 zVxZ>3JE0ocAcAU*1(mp!tNiPA?#y3_h=&!qAU3t+2wS_@7fKNrseBc6%2}5`(UaUM;nsCl8cZO?t6l zTPfkj_rKZFpnG%K!9-7DM*6=%KagZ|$+|*i?g%|L1&9`eLHZ}ia&L!9O!oP4)igcP zGx#;s>*|C>AXp62#pOS@;@%)#3=g*#TbU*LM-<7swkyld2%6p*3AuMS@5hl-TSbWK zZZkWh!HJ0c{%OQGAFtaBweF5(ra&k{QYby&Yk}8_{;ucLVb(nHy-KArPD9d@i-(jq z@-Xa9w(b|(TqQ}!%AVd&wqjcAN?$}jcL!zD)fg({5T2rJGT5z1gOA_d5pgtl(UGT$ z;Pti^fK5H}6OFnilFC`KJ}9|X5^~>7Sp>8rw*a5L9(EG*I@aAEae`|o%Komi!^&3d zw^MFuP3G_>myKTny3>|z@v3Ozr%{+%mR%-@>@nrKuUrvW`3}>J#bT@it*tGP&_wiXLs<&V5(IXF@tlfxQ-Q2#fl z9ps;|&JH^h`HLV`X?N+yQn{hm@V?a}s_T9FyZu9DF~b_tmfV|3!pfk^TOy-^nw}BD zmuvdnzZPKdO_kXb&Oxy-tp2WnkL+O>uY{&!^{%2%FO}3WM|mEuhMZAo05v1&czBI% zeuO`lIY&CDHo2w@+<~F5k2gc3XvjRS>=RAsMh%G)^o*^%Yvl-I^p(s(#j4YUDfS&>pGxSIr5sdG?u*Qb4AR&iYa#E~ij}BZ zf!&c3b&fdoAU$PQcf8ZgM%BHW0sUBMenM_P#50eBa#tcGh9n9`s7Cn-^iQN zQK1_;zP#y8MG}zd!cev8m_JtsyHT*&jA3?1XSX=?cX!;5p$UYX;mIU;qlU?Ph^f5f z*Vb>Gwi;K$OXkBix}uoNa@jcS?FW;s$eL#kOI3nVwv^iZ`o9|Vz=y4O(f!>|G@yNu zbw?fvl;TgJZ|&x&Rhc2`J9&PxWl?xrn^o_&Oun?^T?m$A?4}h~XaQiEPkeGC#$-Z$ zGh50qzA~C^gA+#vB?W7mXYCC`rf!FXY!#^@t;n?hA2y0GOX=H0yl4kRPy6#v_dD5h z9p8esP0NBd7kMYoya*o-dtt15)nd(MRxN{+EEn*jz3)w@4aX>nZYTUU`!FYY66?zy z%v_2z78ypLrOZ(YGDt&)eDnT(tVVA_4nCRJVOHJdT?7v<;ZY3M>@ND#SdB0bY#Ubg 
z599@iCn^>MIEm8$&W1I{r6%hhm_GS7snvxmxG!Gv>r?IekR5;&;l=8)m?}y)YAG&+ z{U?U)Sa(y$pP3W?rsAPGye66{3db99un;JW^l46Z#ChNB-fVlYG49LVn5m^*eYwVn z=2UqV&|11Y?lBTYXYTU??d3N8Us`REe)i4lzds=M##9*(W*|-8^3%PEb);_1RmL>kf-cs1A;?T9Y=p~-Tj;bLe{<)- zx*@d!Nycj{E7Bq)V_bQu`(LOQ(T?ER*4L9vu&?^E|0I|Em?w47fO{P*n+MAqej4;x zgf8~8&rA+CW~~ns;R#RttcG}LU0VifHnHB5F7&jXTftC)y5{%8V`l5(a1NKVaUlg2 z(mMs?LF^#*QTX)HzTNg*NUO8o0qeQLe96V=K~*xv-SpR|=hE+n;S|}HEP*fknjFVo zox9;O)fi}wr5MBWu+)%w*Zr_}UDdDF`zQxJ9H1sp)Lg?7TW^ z5`3WT_C})-!x@kNCg}fn{sfne&8k{8)b2}wQ$xCFF{!7ZFllc~{+2^5%Th+b_TQ6o zY6y>#hERts7r_o0s+Oz9QsQn7EbFGeP(Bi1zkoAi#!Rr~hT3RN(*msLdceZMDXN9= z^p9xH_UZG>bYx(HHfFd1-<`Cw%l03{l%%n0d`lSkZF{r~l$v)<2 z5anNqeT%3|0)Zu+scfG$kjxfTGRgz1kn(4}AZhM3GGG;=T9{^w2Khe}v1&b{GG7es z!QZ*ecA~jM|4cvW7a3S z7vAqk{pbyr<0HxDeN!Qz7yQ_%wO&G8mA{O+i~Rb}kg4Xxo25`)Zdeiw98=?W2c?)j zO&7@fs0(lyyGfQMGf*^M*=CJEsxSR%1V%-~FJrojW(E#ViW? zH|A6jry|@i=d7;wtSVeKVzhv_RLlrhf5xHGdU&|eGbm->YB!b&1UbA$j8&O-jo8)b z;K?)j(f%qEqt!*ViuFu&o0YVl_e~({E0KrmX8OMLsf(QD6o?HSD+t1E`Pb#3&5cm_ zZEhg7MJN}N!)a#(2mSbcW<>Iw4V9e*oM{%5s81-ttq%1^pEk#Pfq5e{)+NBkl{cgL z32=%)rUB-!^v!m$mAd>y+oz6a@4Gn%D?=^=tLr4+dgK zVn{2{`cr$h?8y@~iX!!PM+KlFHan2R>0o1-z9(i$Gw}a%Wa%yJGQsFXKArJsqOXWJ zy8Q~iIyZ!6(hvvCX)~Wq``Gckb~NXWhfY6h)gKWxlCg`MTjyC(P4iHa{8f5GMMDzG z`6xoll#7?R{vyhF@bfrQ&eRZ>KE^xEpK^G2SO zDVh;K`-WVHblDsBs?lyQ%`?oDk1Vc#%%5AgDoE-5-lqRx-rP6ZS@i2{F%B{`M@B>X z)7x}IoSic6ZhN2z;--W+p_aR?Nn9-#ZIrVgk1lMQK0={Mlc;Kc+EFCd6%I%Yq_hA5 zGf3AmZ#>*^pS=0~$D2jle600TA>V{%dNsZG?3SN&fntt_^k)RcqU-nqA(io855bjM zBWq!`z(1PDJrhVOC3R02-&Tbg3E1`c2^N&d=+Y6UN*(rK5-%|_cLdcw2Bod*hWyT* zL}?m7$qfDL#fe`3Gz`i|@KLQ7Wpr9Geh{uD-z!)U*QC&SQA7{y{;h!ux5cm$G+E!hE@?udN9YCd*+AHPaw1idQN%`w78>7NJT7T`;P1LDng;6f5<^RL-?{-nIjM|4)$U(RT0Du1T4u2zA zo%oXyUH3**GmG)?=RfjJi}rGZq;Hs0`Qn{}MOJLQC&)2l;p}*mT+D!z_m=ot| zm#uJY%|9elc4rCsapBaQzm#!6sMRiijjhY5qL^RNYaSJK9a!bPXQVJ}zD$_q*(&@Y zYOE<1@S( z8|LnzPyz0{5LB-1JpYlRD)OaYpPg7(cX2tv$j_msP+6^Z1I(aa$|oA=EM7%YT>jus zp`S2b25tC3aJDYZHab*@U+6X;g?;!9#g}k0o9(H{+l}h27KQn{p`(z=Qi7$x-&9f} z0#7$fjd?B(-O^fc@%I+v&44Pu6D>%-p1v4<2AAWEaAs;96_#=oE+k7Y!=)5p2_@9H zo{q=N)}C-!j%QWCPAox-h?C9LB>+bAKjC zEGrh>!{VI-XUb&CHV9`r*YI;1Oqxk$W##;bPGn@{=^F)5pD{iTJ0I(fcL8GLFtu&` zXYz-Hg$&xX-IXyIQi5 zrjlcjw4lh8Cw+Y0TJv;7PX6JlZ{A~wP!{fOlO;+fAT8e;*9{yF?E3w|?OPNYi|09t z$-u8-Uj>I?ozrgmPpNu|FpcAHZ&uBLO9|bC?HLW=H9T9lgU!^gM_!2} zw0uGH0?`0lfCuG&Zvn;pA$z-jyPO>4y433r!__!$+d+k!&f-~4%BV{ml3=dQ7kP#U zLa(!(v%jU+M-Dxiodinjt6&*@pH3O`zi#d1Eq5Xun9T@*YlP6O*{UxI3O2VTf3j!U zWB7tTYT?_>;`eH|q2H{$pz2=`?8BwLQAUzT;tX!vPy!}JVLC$4GeZgbI{-uexdlvY zajCYI{&~0I?$aZ-d&>#K@KIXZ+P=ge4@TX+av%Mb^uHf>4S(a2$>04@>ScBe3vRUw z`{yhS0-S{r_J56*->(D0B7hgG-sl)PzYINbaho$xG7aqeTX2GQ<>^TNjW`b7FXsz> z==rs#-cj>Qb6cAp7e8P>OgJqw(HWGfLqxTY+`d3a6La=9)899GEk8bWaX!6o+L^P7 z{;uv8yB*~X!VzT!8j9JRVp>l;Js#F(_}ghRNAVx#6=Cu|Kd#|9GTb`zTQS5}JI!Eo znVy$ZjhD_@SkeqZ$37`-7FUr1@D4R892jjF*q2F+ZCX>QQ*2OK%crQZ;5JmN4W5NLq>jP>8^L@fp|d$!cpzNN2WFED-g+t1|UY%7t`_+DGe zj+XnSMLYdFT9juEN^?NMOX={72N0js_BvwDP-@**C8`}X-O{yVlQmz3`r^EeMj!!dVI;)@6RedlB@19}emfEnqhn^JEdwShmA5dhwT=v8?xk}pT z-447xW}aRXsckLXD05@SZ9MpqW|zx}4St&4!Z!n&)fa56=X3+4{U}y{<6ep!%W096 z!1p%3pSzT*Bc6n@5)KWkXLxjWzb|EyGQCJ$D!7&uJGt+ZC>SlDyX{{RAiX^Ku7PEi zi53HYqnrM{g3+0aRTbChR2C%LZ8chYVqe5?M=*snemRJ!#_WJ^RP(xaWe;|KO0Uk+ z3K%GG?$UPr;7hmj(as?Q<)|DJMqmNhCd)l{yEkhZmoF4D*|kM_M(`#i2wk7KAn!-#Mjzd3gXr=rJ@#|3=P$gijR8G zO-YLa^my1vVGZ<%v!Vrq&wtV{0H{4E?4`B77a2T+#qhKf4K-{5XlL!luXp@b9D5z` zy@{r9Z*@<@PcT;Mc{c`$s4wDwn(B%7cY=EFCKJvA%}?O7vwYabM=z=)?zzYqscEc> zuIN-`4;Bvm^B(wEEHiy?x_kiCJT#|A zMYBA^MV}a^#$Iw!+S*+)I~HBB`gyD;nENG(E{;m_&S&wrv2e}7>v!4p^&j|xDjg8| 
z(v};jAfL|Zq&lFu(U>+FK5IiYcLIO-GM~O#LdSiHZd`x!wXnaqUo^`GbB~a?QYjEl z49RvYj>FF$&c?aNe>_e(d$$E>iW<@wqp8z=wqQ%cpQZ>AVNM9i43>VM@)8L0j&}Fy zQb7&x6VVcJG_&x{YQQc`_v(%Ycni3a%%rY&CyspW`S>>hP+((oB(Yx6N%72>%+P_K zSw_k!6D%p4)Z!hKZVereeS*AA3013EEYbY-oEgb z>O5m{A|t#~$#}gSGEG;_V8C0KtlWVdl$I+Xts|ve(meuu=8H({r)NAguOfIimZG=t zB#d)416qQ4GsYSBdV^b>c?@KpNM0YH1_r>iGT7wU6t{b4dv9sE<>6D^22 zVC{u_S_f8~5?gm5gFylI! zsRa3_9y6A9Rf(>8|IY2wOXcm*6fo#Ce%nIJq%uT$4Bxbb6*BVj47ffMWM_TbENZan z+#Qb46!+sO%V_|wl?@$5LrP&?OqqF+2ZsL84V+*NiGM$D3 zoNsoi+@I_;?+Y$ZAb^RC`V{0}%hP^-<@scJk1?jc+7c>W24SB_@#d#?m5ZbACGbDwZZjPCc)bm2Cc=h;e`vGtrrhBsZK3n+PGq%Jni6M z!Q4JkOx+28$r!#e?V0wfn&}ofaa?@RV{Jyq#Trr#K!?pCELgN#FR_1eydZFIKT);Z zQyHHB@p?@OEV+&OWGQst9FBj_HCrtRoAZ@cL%6W>JRsVM73+&@r{ZXq;CoJLjIZ!4 zCDwhbSyjK5C5qr+-{9npx;#G&@!68!$v@0^`~Rq_vidEqP`ULZ?Nj`%OnMv?d2E`qx|$^CV7T%Lplf!{&9$R zA1&RckBZlTZkZDj4XurPKC;a!H$E4l!$$`qVJ-A%#AWAJl?~^0D`_c@I7xKbDrt%^ z?JVEyAINmX=XZ6TJEihY>xkC@-1b8o1)L>aXf?PIt z)C9Y{6z@qC*{OHj9-@SiwZb~dYN$hiK$uYB#q%zpSU*5I3>^@+50r_Xb~SGIR|{q& z&(h<*Hd&A{?0tCe+w*##0{!>8^k#y=j4or{aKEqjaDXgd`C~Z)o))3Ae&%0{0_k|= zh*~-e{;#_A_^976;cA~lgGu#;TtFh{FS;_4m<)%Y zzqfFX^tQc>W6jT{TTr9&pMy+jlwLROIs8h5h`!*sK&j!RY%q;OwA3&KM zCU})<%_unkk|ksA+OMRcrH8C{H$lu7_mSLxVdmk3!09i;s{Il<1xM0mc788K< zhwtz;fIr^7krm?4eHn>nj;`=x10As_y7yyl;XD6yUu_!knjMMoKdYlIV4H0@Y8@aok6vmoX ztEdqn<{E&bV2%-mGQ`CIGy7O*(S4lVDdH-IEE$?iSf4mkY&>@Vhle<0Tz+K_J9PJ-Bjdkt73TthW#YqiYKnrkl80Q zbDq=ZeHoT@H}R`Fh`<|;LQ{bkyInqjzcwRC z>5Lts>YcO#rcuBr2#(YCG3-bgRNp@;$`FE%vKMAvf3j~vp@*pzW{p$U6Hp+2r~G24 z?Cs`~o*{gl*nPY3tSBzl*asZ4m;YoY(Lq8H9XSTrE7@_t_xoX9wY3-kY{x(*?qxF1 ze?q5T;7bg?&lk~^7mfwaBfNZ=O=z?WcFv;YhwA>lE-#JKzE@VQs0SE5McEYWeZ^j- zyvGLqnGKa|BQ9tV?%mm8``rqrAbjnEnAisE=f`aTHoN6BlL+wsz^ha3aweBvTz-|} z4iOMo&O)w#^Z?=!c_ZHhSBBJ_coJfOcE%i=7oBp^Ci$_ku_;MKqtMf#{zKkrGPga! z-xy0DOyF`+I^IS2^}KSz8vlFgrm`_YFODqXRGSVATB$s}Li|VqjBwA^xhx4t} zng3;HQB8TeV`CkDvJvC21jyW5LJQG4mAZx`i~}F9@f7bqvpUHTKN|%9#$x^FTa;tw z()cF~dnP&{LhHWj>*jBkiAc&1U|9g0C)cmWwv5E98m*>* zID6J2uJ6_V#)NLmm*L>T-!qpYWC?mB=YI zG`Csk&?r`_oWY08U zVA$ElkJgxT_y98ql3YFq#(7iXL!wG5dHWm+0&x7w&ekx)=Ct3(6aLokc2h~%&h_$$ zh{Intb)G)*A(cVv3;RCMWpXX-c%&o}Qgzsnnu-ZVnEUJAh%|M7tYm15g&LE{j@u; zKZ(&Af^nj=dnWt}9^$I%kY#o6)2oq|2!P;EMn?>bE%*h~T6cM#$!WI#1T(5FsDp#Y zPP@)J^!zk6jwjvsQu`F6B6?(gj&%jItUr$WjlnR!UtDO7+{Og*M zKc-(`dO?y`_*h=+ymXKbnQYM~m#4R$nOI(oLwzuMqXM+tt@`TPZcyvV4CJ)*jK1H%w2vzbM07D>p{}i!eR41zW zLlCeep?kuv`Xaos`kWQW`l!5*PB*l@pugT-z8;wj8(*-l41v_{9D#g^&F+)E9lpW5 zF?)V^d~td7IHI&kxj=pFBmlwrWI>^WM$^;FMXVo7%`7x^beOE(Pv^=euup<^dUZ{+ z4K32K+y%)nqLSyhwQXO2F>`utdye_Z^wiqO3yCBPqQe9KyF=S1$7XoRI2A->8=sW) zlkF^I;v$JDaqmo%;#;!|2Rn@J%wP}Q%~9yjy>aQ(5?aR!W{k>9M*`TGFvB0h72XbH zurbV8yrd}7kKYp#c3QXdKHlwJQ##Oo?$#yUL>s(`^Ox9P1A2g~;*7c)vO-5paj#E| z6$Ou$RF8inPwR}+?lH(7WVPeK*V(F*Hr4l{4iMOqND9{EaFRzWtk%ah{HFkG96VJu<4BK@$xX@%X6x(BR{dB=?c;sONwBbpbt^ zwDLmiA;zF=PtPOUuMIZSQ;c)T+Wl&t0@Z>O>Hda@*oVaTjLTMihLy?T>*Hh?b#iBQ zB?L5+j_)L9OZIL1vO4^;>-$u^T4Ady7el_L>5rGmmy()uZ}_DA+2|-)0@TQq^%r!8 z?frovUXLLD`!D~lXl~>RxA*)>;GwAhr+Fc$^Rk6b=(3;5y`GXV{x@`FT9}{U>^C<8 zb!&qO%6qD;4?ldnUw(gS-}G8XB@D9A|3s~Lw?7_cdmr3k0DMl7D`V`VnZbpw)?W`a z5JcNgB!`fS{S_~e4G??9QzaD%bQc?H^81O40Khq~5d28tAK0MV7ZsvjPJQFFYZaQP zrbx3~rhkL!PE^GEd^qgXUBCBp^5dzd#q!Cr+Hd0yGxAXhn}pv*3|SqvZ^A7pyek`N zA2SpGTdx7mX!g)?^qN2py~u%fzO0Sh{N23uKyAKY{*S;b0DC5`#dmW}`Y<$rOd|GQ zLpkWXt`~S*X*zB=849yWLx5csS2R;xDWp}LJo=+ox@yMwHrl{tJ27QHfn=R7*p?o& zwiD;qYN<~JQI`#}OlvP1hKj9c`q~#n-7Hl)%ag6Om@@@_p`{*+a?cQh#&6JLMNepM z=0*6>L*#e9JD=ySBIP|d4@-J=T#V7e(n6BZ�GdP`bAd@LT6aS4gJXZdIzS+z@OR z>nXSfWRqn?!B=~zja{hZKReI92@E(Vv+TBzYP3B2Q6R`90$VZ&LF12DX@Q3C-kBy!h 
zjEnJ`=7jtE!^u*~5&2guy=#PxyUzf>8(8w@B;rU8x3(YeF+pa@rIKgJOLr7=E(#<4 z`l4CK>-^oVRZ!U6`KFGmZOWDoMASEMHtuv)&J{ikpvl5g6?rphl zM~kCRrj{WVTx~dglGQ-PS(SAA`VabCQ%?f&hi*BDX%IN%WlG%tVuy=sU>PL#S)Nzs zNUj8%ILB=AIZR_yv&tROYjBDEUWdX{=m*0$!SAuI*y%CuLlB6@xdIJr4V9tLvmEVn z7~P*AQ5}9sNj99H;S|3x!d)Kkez%=(>So=IIHuO=wJ%1_u_2e<&Q0?%qMm6TBfvoo zqCS}og4~F}`kI}_mVKaDkB?&)!;YT3c8d|+d&KgeFK(iJwp{B;LphT9@Msjx9s{CL zA254JlE;pYuZsf1G`m;4=p|LJ53*`~C9KohU0wFYR}uM%983Tk^81QNsj%Y9$ENG0 ziJF7xE2J%2wZ1PiGMkm(O(DF(?wmP|*2*in(q6UE@8&NWapsr=b)r(7+kMd=Gdj~_ zb_&Ba+4WuD4%{+*c(iedWF!Vj_9jV*R9I0V2jk!89w;&$W*I0eiTtF3Xp2<2c2iq- z*Iw3dAR%<0oaJ8hT4s~2elB1P6oAX`;EM6Q6s06UM*@AunN}O%P1qB4gmA;v?$~&5 ztr3)W)wil~p9(pmCMFbdMcBLr%L+ZI`*=GPUNn@hOTUPa1&4R%`3ge}> zW^sI|@H1XwbBJ<|7-6A`)>0fenNl5+*jP&QF6KiF9*4qN<-Mz6^DW`XQ%$jG=Z6r;~`YY;(dZUKsyBm<2Fp~I6 zO@2om!~_qvrRq=3IS@2o&^wsa1(xkz{LX?K{XtjsIQ;+dfNv?C5^gH;#wGw&QXG8F z7uDRzYBJe{ID8L%UMh(6zg0|ynM2C2wVpV)@!N~NCzI(9aPTq=R;gt9i)J~xw_0nl zfQCY`E%Rdol23*0@d!n3$wgLLCWgzagGz4lc@y1-XdouC&Ee6jxS22X zy)&;LWQC)GqE`gTu+@fSpQ;W@_&Qu6>f#+BY%BqPWsrZP(b9Y;UZY{+g%Yp^xy6-U z)dRfyVka(lu+^r*y}B;P%ox|W5%)AeG@0%p zRFGwmO}@97cDFT!djt}pjqR==-IqNb_Zj~E%%r)m!CJeYi8Vclg-d3GXsB0m_AyWD zI`^8Tic4+YK9claIvno`{<`jNU(X1A8a*fAb`9TAcn8ZFt8ye$3kH|DOT))oJ%`a} zrGX?ze5KB!Oet9I7BUApQ*ckGL zxWe9)%O*#gZ7Vnn^WRSrvpwg5sU)eyC4*^VyOCtg1OI5K%fI5tLhD(yl=mqrwxy5K zmTFXHpJJ`xa7w&Nq@n7=BDoCMNfR#*Q)70Nt66ny24Cs+^0}?$QyDXF&&*PkXci~- zI;kME-Gze$HiXWQ2|L>5t^f{}Gmo%lm`>ppc%v2vRASVcpA64EFHj+>Hm&^c(W84+ z{3XtdBRiqvo$>Rh<-y0cI)Jehl6hKpUO2O+0#+}SkB-hnqRb-_|HkMaE*?zm@gY`H zjkG%r-enpRoer7as)Qd4H;n~@>)h>73Ui+i`y*%#YmsI1y`n8Zec`X4q@!Xtuk7wW zQBPtYC*fjAqenVnf=6H$-XWX^y?RFc(eymJ%fO&{xbSlYV8U@|XYTEfF72Jq#~nK1 zUwm__2*ci;c7L2;>uc#r&Ohro(ZmpDoiaURKkt2q6K#=kRAfcSRXLIeu;n1i0lVbfo$la6U#CU zvA;ixvyhaKmlE1PLPtschY?}VP0CFALVm@**uhQ1wbMSaV9_W;7wd(IUtrTrHr4e+ zxXc>ZSFq2wLH>5(6*yQYpL*6=v?Bd@#=V9{N6t69!;}Z^f1z9BqRS@2zF*!H?Q93V zYTfaq!i}4H44un2z2tK?xjumJgskiS60zE*d)vZI$AE)dWEx)}`5%8uJlr}?RxCe- z1bAbu719=nK|;)1_R;P*!mQxWu6W6rMtrcFp!_^+tYKf;`D5?;EsfyT_3wY| z#RvGSd-17zHi!IUS4r)EGO8kGq4In2;f1Daxv=nq|2axjABHa3<*_5l-Z?<*Je z_Yul#6AxBi=5n?B4?^(bZ)b9ji;}Fnv(_F-9F@im75SRDDWde;alT@n&CuQ0<>E)@ z3IkdAIK1r-&D)Y;w)rQT3*%%V>*;SX*7imqHv+~xkbl1;`->W4j7~`EEcSG)5{==$ zvy2cReP#LUMc;c%E@;~Aowx+MubvRB?1*U#ywuUpsiz)l$OTUyjwe9}^omu>DlygU z)my1_9+=&_;0D%pV0N*w1~?mWVRJN(`DTMh3|_E~Zuz~~sK%%_lZuqO&c>5{(W;6H zWI{B$o02FgnoN|1X3Y-%CA`ijF|43ub$UCjr$!A%j^DDUbN zTdHUHcOe3q@YV%}HHTWMhAdbPWp7(4DWbeNA_G^!;P$VEAK&Z=-%>hh3S>mVYRrN|CU+n)?C5hpt7jcl5-Nsyz350{@uy_Rf-yZ6XcH@jiCaSeN2G( z2l|~`dBOIizCz7%uQbNJW;slA-qhDa#N^HbC!?<@quOyod0ib6Tur$C7D*ogbt0F0`&*WP3@JikC8x8Aqfq1 zW;bi5%w*eo``wtj8SKoa=jSGBP?U*k_mDE%DY*7{zr--CthxJ^IV(meCP~5Kvx>(L z zRy_;GJP@kbQ11$!O*zjhyv^{eooSo=Ccd!zwK-;)z*AB((#prQHxKLf8B<(>7=o03 zsY6x*`;az?Tj)i{(~IL7 zIz13&lwyb@2h}>}1@uAHgQoUGp9xfE?{pH9MojUWZH0NyEt`?`ziWhW???O&A6&NH z|4;;wsETt*WCi6=CLwfT`s3*Y)|uZ z4qt1j3|Vr_(5^q1W-VgO&2AF1vki5A7j-+iPs`UJCiF~!m79?5CvfLv|1X|$bpLg5Vi7zM1=i`uRMvXhKJ|e48ao)rvd+zGv8AwUS<1Xp z?`tWPj59EP)($4g!hO0yJ)0_o`R3t;pQ+u5?-P9$Bs)PSs7gzTkYXMw17#?cYQaA6 zX8T=DswvzGUKvRjHB#}v^6%aZLqfu91deOfT`iKh77ZzH7{bY^s`sM#nIS~yFv=pxv? 
z7GVu*24I$5JxVP8@*fY}!IUujeFMG`Y#&>tOuUX7_jbQGy-Qj>a0StLjsBh5d-r3& zKrqj1fW6M&5Y3c+?I~6k4rhdTW+X`wI6Sj125SJK>#kOArUNf&g2{f<_1ne*5II0# zTuoK#Nfm~+pk>ZZvJy^jmNz7IoI_|A3FM_csibRV{u z!I_N|AA4qc`t}d|VJh6+7^&gLrpkPic=1~g@~4ORnkM&UTbJDYwK)fxME4SLsMgMm z39I0;cUI?}Xj^?cIJi#t<`Xs+*Cp{%?VHEoxyJXG%~DV*YKSyZ|KsXAPkXRjU6(?e>kqd6h;6ap$*M{Rbfb&YHJ@~kjPdEGS&G$0*3 zaG#JgR~`s=cM~tdrW0e_L+BtiB7P$tKfm2q#=9ibz!C8<0D6&7t5c~>$lC0i2h)32Cy=MMMAAKL^RLio}m&5 zmwnXH&R*e@KKM-hF-I6;7Zd>yq@2O-^F-F0blpO)z1u2F=6$P~$!DfTyI0$=I z3qxSZpS)9RT;Yd4wddn4N?Ws2(KdGUOaBj-`{5Z*bLd|JPVI}ZBKg#e!=E-f4VbNW zQgQF&&O!-Af2CPE2^is9r)9HhVP*)T7e(yPeWC~)FR2Xi-=s&PWD?6R!TEF|5`6QR z!<{HmEDD;X2%OYH^e5@%tqgj?DrSknp`t%biJiaKi`6+Jt3_gHV1(f1IwQ<|=mpQ0 z^%d{7%Qap3C$g;sbkVcWY>dyH@M#R`kj z%BP|i=D!@wWB%MheWMpL@YfTq`Iywrg4j6g<>1v{$Uj%{Bx~`u3{vmLNwU_7CO_x& zYiEii)-BV!OHKuT?T#@Y_gyEAk^7kMV#55-4~AaUySNw>BIT2>`Bk{fpWoT%BrGh{ z)e1AFelo`u33LC38(rkZhby6Q^?~!`>%;O#HNPuz4?=~Xf?17;={adCZ-S0gvV6H$ z`~AP7YZ_(__;i>ZWx>zg@STN7ExJ?>E6;Eq-7=5%u85`uO|^%juSC%qniQD)Uu?Z) zRGh)mu8j;~@WCAhCrBW;Gq?qp-~ zzg7f7&XM6PU^p4{4?-Z^xE@1t^iw_@CNNEmTMQDFl*OI0T*Vkay-;`R{Yly^jhiq{ zC8d4bn<};YXw#pG`7$6IJt}n=T3oM zBAVj`2C)IX`>C2hEE}i0<&p)gJioh7kln^Z?|oEgd^K~+O;3%}CTpIAlgBW!^2=#d z_kpUp+|a4)H{O|LRbQwzg)&e7lo^~3;H1%RN!C%SCplfs47EImPs0)+sRQymNeVD| zKe{{M&&VLL9a}@qhXp});cMI@%IbgnRkuU5`K7T{cVZ6U+JJ97+xhM=XZ|SS7EKx^ADF%HYCV}tmJIRVF@pGldeKI%wSI3TdmHyz5;uXi zi@ncgELywIEI6PK zu&Kb(dFnNNTrCf5TC@r;`v$Rw{+9(9k9l)nXQVEs0wZbc!$Oj*DLqu*5jl@z)hkA? zpE~>7c)@9I&Cze%&rwP$uXjOwkMhc=fC;l!|iMiFcN-0()lE;#L6YCk^2+I#r;4#O<$V& z^`Cgu5iFXiR~+)xU;Fk|Ui(lr-P*rBC25s`?NY_Aw%;+pzgzP%3#Wd@q0DKq{sn$K zkSpsQrR}>{aYlG)dvTWf1PtaY`D36xpr7)cBIo9IEIrU;tx#{*&hhNs`yyFv7A7b{ zd_ao}I7beat}J3HLH7`=H)(=wbM$mt?cdv|MC?m9E_GDvAG#5nhuxMF^FQ%u$xs2E zz(lGLQBl(N9fzzpC89X6TfBqpNRrr3sH=xRXUAV5S@b*K@0lb=r+vN0j+-tqYP`qh zN_gS_F93*^>okmqO&CXQTRXqMZ@V3S+&r89JTL%wbNWVf$bW$-1ww^bYx{I`P%?!i zr&rgO_Xv{g6WS3fw;ST#Vw+L!AU_SQhjNMkcR}F#@4FbKQ;&FGkPXF@dKNSy|H%rt zQq4fD1b^p7>36na1~6CAo8fWXZB$Fn{qGI^_d&&rZ-$hx;ibW|Er)=}TavADUT0Be z7NX0}_rv|QamT(e$x8dg8{WnbRy5G?IKFaX?T%JI9C75rX(YgnQ72J>>8hEre%8SF zh8N^;M-BD4V9q*aRPuZ=F14KOEr9qsGOMEn|9W^M|C}58Avjs%nq21nE9ymoewNw) z5k8jbp|0f*^Bnw}WgNaCnPI;TA7Tpo*kvo`RVWrPa7=uhXOzxI+KQ^%C~kb~u-(}W z4}KMRF8E#T+-W*(+6??8-r4BZ-C zp_ai~8#z_rx2+opg^Ha@izAYmj_^X7YBHeC{N&TAu!6_(QHIHX`ho0$BR7&Hts24B%1wtx^UB0(jt_I44d0M|U}re_TBo1=G-0RPIB*BXx-x?~ zQ5m=216<6NwOFeOwfOa)oi|PLHkW3N;*Eoo4>$a8kI4V@1JzSmuq{ysCOBU z^>kel^GIBO^eNFHzE zoSV+G?;bb4n>gcUndzX9YWdLr`u0JP3Cgr=rO4-N3Ya2!P{|wVT5OrrHcshBS#@&->e7P2SU_R#Y|HK5 z^ZFv!j^=sl-}k0xqpJg_8ZM-z*py!eRP&xeHD8y|=hu%+c|h$K^-j++^+5VYBLB!t zhnfy~j~Vpd-%Zr-?jFyl&4N06<*5GzaUBpH8*8-OAp(d_0RhZ3Fl>aNh>b$pGA$YV zvce!PrZ8$Z<-A0f7;BnA~yBb<5eQmw2&_qEV^GWpw15Qllfk{FH6M7wH7A{yVjoFOmpxF zRc$)l0ZH=@%5Dx9iijZn48k)Yr9w3Aswi4Up3*`a5A&BOUjXHeD_NCOdO!O4XEh}! 
zp@FfU)i1W=J{%-=AgaIs^H7wnQcdl2rHT!+DGxC0pz0QV4)$?m*oqoe+sBbW<10r) z1HHtOlzrVBM3bP{5AXv>#krrMmdW{T`{nGCRFN{3qvMlXn+D%Vcy(1_1Tu&v&2Hua z6-TN7LK5r1)*XkgA#ase>#KPBOWW!l*u1o2p%lUIp`X~}bdxu3bw)*Wc=U9A>}0&l zf?GDD!y^G7a`7WE0vwTbZQ3zlZ|4d^O8Rw+n_0mEXioyTh-i``w}i#KHxfdZ49RL; zUx)g>a+J{VTuO$>8X+^ZI%L&!pBdX0`wy)I&FXZ_31~CwqFXya;12u%c%3A!TE>3K-Le z_n9A4j+DO$EkO>3=K>Z@VS)iq6jPf(pN5-{;J@<*=}tz5u=3NoT=alHdIk?A)>Uig zVh%oy!yGSF3$}2~X=!Bd2hijnCpmwP@9UC%>8w8bM1PkI%dw>kbjS_zdG|6IW`6^n`0f&bu-|dZA@A5B25W6e< z1G9PmSbM#W>2g9!GDO;lfPZi3kbkii)0blLx#!U+;+__CK~4lTGh3QaDm2liKZCTJ5PP`{xGL zrZ_=Mc1Z8p;rT|7=2lG zAARs;VN_CX$Mjbt>7Vb*c&GxOFT|mzqxfo%jipCH?Lh%*XC5J?%V;Hz1W@^-%DOR* zAzJ9$*UA zT_xum+4qS#i%8vcb=RYVPa=2B_IuPTdVybFy-y%p_=4bI%#TCo)nJ+69Q1y#lgK)K z^0`Re^YOy%wsQv_4jc5~C?I8fx!^c|*5i0lv6--aA^k-$jRtNtWche~wCzZ9M%YG$ zD=utG@KMBqjbyw>E`^)OFEe*n!7)cXqN2GopwT-=*XIACNB+lA2KWMhDB> z9IZUDUU~0J`P4bZZ-RzFYo$|moBMu*GJT;3C7iE|6dS}ZrPdOb?{k!2kfNi6f&=@-Z%{d0CA|lUtM(Z=jw`(UJKM%Jmsdt38%vX}W zJ5UEW24w1DG}hr8uCmD!3cSSMMdp?EOpFGobT#biFPx_9 zAxPTTg$o@oRh!XH$8Ir1e65gkFI!HcO8h1J$UMZK#KCfWLX51V{2kv_yZHCzM(l0R zBT+j7b}sTH1oj`paVl071_d6lz#SBp-y4GdIa-wR+IL$EPpAHF^m{yQ!Rs~o{jw~vo zfG~0z9Kl%qHvuwj9{W!Ew`r^{XRMkF4bi^~(z(f28$B6bMcHOL)!o;1 z;+Jo@z!An_gJ-cQI{yI}F5NEg$@WfL?5Wv)5xrbRiErOQN2H;BJ|ryLtgc1Q*im*{ zw^gymy>q{JXW1yr+tg4Nd(Iq~yi9Bbfoweg_VZ65JWnmIn>dGiFQ&K6gDLz`(V0w8 zPznX_NUl_^s{fp~tnkbbcp4+2+s5cp{NV~nrW=SQrh!oQ7inBC2tLJ%t! zesaltOTh&dkNe`SsEdEl%x5@w-tM>M7{^SUQcF?6rC^tcuGsXy=^_G>u|vUhDJA~XR{i~oTPeQP zc;w$dnY1*m1>U4bC-7ao0%uIRGrI{qy0#WtKUqfxSTFkPCijMC( z3$`cv{j^UAGeMAO4h@xg?hQ8i`7r;CDCaKBR0xG`#_X(_7#IAa)AB#a z78zw%5M`t>hucA%?>QSsgQzv7Mniur+~bNo5|Wv;f)$U({(6gYLCAc0gNnW)wh5kA zcVPVddSjlq45Vvly*rQw`^;%>o?j(D!XeT)=ofXk3rz^I82;!b$M4dEZ=Kh(;7qsu zCZoxeRXwo|TSRQ!zldw94Gg|*v`^b370SSPid>Q!`l84t1!FvM*t!-($K+^bZ`gXH z(?(=%R2CiAyNj-FM-7J0#_mP;*>_dMebHS`*JSEnm%RPz^=C3NoMAr|6RGejR;Ggp zNkct&IQJ1u?cnN&X#cTIRRXYF>IXz@tmiW197mf0es)^4hr!zJJa?a{`7$ei z8S4ftxx8@hykQB!w6hY-_~$b&-0>A_yGxeOAEW=)??gLjAZ_k(8Ih@UDHD1>YS6nk zUVafx5psFNnvnX8C^?P$2^_14j>q-49bx>NOJQnEc%E8^&>L9X2QIJZz17f1#6f`r zLiuH8Wc|Z%PakC^$k%t4z6ak9!aK2JPmF@Pe$VX=w`bNWANvypX`Td0rlGTNwV4>> zI1Rf9wL)>2!f+TO)VRE%JjY%j)a=-#+^4{I$d9bYVg5roYLDyQ62&W;iGKU$kZ8Gn zikvv14qH%2$lR(%C8^*rF@Y%ZZ#ibbNpV+7S`jo$pTU$IV2ZDo~rvJ3~7wX=D;(h zwnU)4TcU*$AbK75nVp5bqNf>uuSf^c``7gyeYRq=+ZZ2>0{WXd0`RKew&rhP z$*vVMW7+>W!k3jPXrt;fPxhb*aGE!gmBu|3Ar=O}B+ z|C_rK-h)D#$ZV!@2ylb?vaUDxt%%JoXm)N^R4D&o#T#)VFJ~4(w~ANKCT$-dhDYO8 zdsTdkqft-p;h2U(z66zV0*iUpw|j1U86f)Kx3cvwqr8mZGm>meEyGS$m7I&w4`Z2; zf<>*(-i^3^mUAW2f5P=LmV`pSt_Jm;uZPJTYxSyfxU&q&y<{g}?SK^YRO#mP8p%=# zPzzd#puHBY8U0S#6P64C{3jWF`>F}-bTXYfHCVzLsXGIgNZv8 zYkv$NWKMc^{Y_vbJTfobAbkHhuD>jZ1yToB;WU)D1XE!HKX)oXxaR@&2^Ts35+lr7 z9M72cerSLdd?6$1z|h6L_P#g|-jHb#eWmd7u@J0$-WVNVtwFoOs!E>ezqAlC*EIgVlT@%)OF z=lCBu+;{AjZ#zmnE*7{ao0<9diP?mGqeAn3TmDZ*9<}noaoL4>b5whgVihTE{j=ln z5Wdr049$@SXZDKjztS1Fg@%apCp3X-qW^7q) zgMMWQF4;Zu)~GKeh{MM{DdP<52o3^l(Tne1;dvkj4q=n;Tiq_!?8yMWWr2gK!@YEV7ly zS6h`wwq<=&PS)9X)V#kY&!zDa64Ph|w&R$YSs+Mm38{}SlE^AF^9f>L*EcHxx&m+5 zIp!!OHw|{64L96ZtFBzJ?=pe((f+oWRIJlufRVNl}renZakUFQ785!d!;wz zJWBBpBg=+ofzs5+1*_XRt5pH2P}(I`sug_q5n*gsD(PqX9?RGd3pRInGO&?LR-Jk2 zMd0daN_ME>1D8Q+`uSr}p+)@bMcktETujiwBLxb#ze741pT6e#`t1J7*+jvttV7*X zt1cJOo|Onng-64BDn(IRZEOuvtrtddjYDvYccM4yQ5Mi84!JYfm6qlXS>1S&OoBMb zErCxA-KozALSTj81X0nbu>sy43-G*X>^r-=b`)`$u!eOf51p0L<1_b1T+#jY+efMLidba)0vkv&KNq@TP0wX zI|Ek)Leh;e|DM*vW)?l2Ixm`s2Mp+^tO<==ytlXF&+YfZ_M(?#Kkz$VzvV15JO*z9 zs%eq0Oe-pVBC-#lr4ox?zY<$%2v;||HcL*rIj%3ucb8y%^*GM-$MsK+;Ym*`@y3%$ zcC4SwdMg)Rq=_6YwBhI&E>q<+e2`bPa`I@VpPmwSU=`nu6HgxG>TtlIkn;&~G6RVT 
zdUz3}J|w;9P%Ppq&)u*`$iv=>?95<6;w)wwc0YDW3ySYVM#1N3+?ts#u~4Qq3h&a{ z=^SY$r*oD?9Z{eDQ;GVyZ}&!oH1^B)y9`>IMd+7y>Z!QqfWM;WY$u#XVfXx}uD5ZW zC~_iCGiUh(-5bBt*7R-=U=IhB{veNVUmP13IEs)<_xWh3itC&yB?nqB~rT%bNJt=>qaC;$K1<}aHDe)G?`zZJ% z@OT|F#E!axFWT*z3b?Y*@uoP^p_Lv;i(Sc(tCK@=>%wlvdzLj37>+Z-G_P z?>22Itdzmu?t9CKzEn&jJvFmvmBV9W!i(IGzifVYb|2oNU!l%&JWZ9BA^7taeItre z5cBX-k=v{l52huOSi0otr34fZ==r(`8euWdE#otK$4maKsDQ@qnmuq^bWKppjX4IT zWi@R{_w zs6Sl3XO`<3vM$~*U(50=Q|!{qjpOXd4)_*BpWuolX{Iy7`)=h4<8-^}DMq87kdB;8 zH#s^C27ai@araP&G8IKX{SBnPs=W`C&m;K~;-q1_@DvPcZ{m|`H^9Kedis?T6RVO% z0pk?pnwQ=r6Eu&;^p%R%DlvHZ=xqT~_P?}OB&7*Tg2f>3BJR?;2okw*xMA(QZ@952 zRLJBhNpHpuOwO5DLV!}xo>yqvjgoIIFG&@J2{`qnt}Jc%Ujt_Bg{DQ3nkZqvBpdh8 z{q7Eq%URu7Fc{ow^CCR0+`nT7$I5L3^PrM#t?Z3}1UfN$q z_KUVdG-_TZyvV~E?-JbaesaA2*7Tmf_S(+)KvTA(RPhS)?Ym(EmTtmmSz6yQbAcqj z2KLfKy>I6)q+aC!^AR{XQiEaTh)DK?qCYT#GA-Y#JE-HRJhG#g!7(8QCo-C3@%ixW zX()B^p-n)#UI_5;^l+a`GIDCZ=p>nHqL3_G6%P6)Tkigs8>+ubee1#d(PLY4K=Yi; z)NaQ;kA6LK-rVV6UNdpqj`y4Nfgxa2ToVqL-C7;tMb9$!nqAnBP6uAnxJ6z>YeA90 zobF4Um#;=-f6O7*FjpqBu!ie45;Fo@4if@U)U&C&Q8pRn6en7&ntRCwoHzV?EY4Xo zyQLwK>VIN~O>+{I?4(jujmnYLBxCn=s7fkcSOZdYEpB%XHI1tW*4Bd5x{N3;jv(r1 z2N~-LLFQK$t0t>)!8|&JPTeTr<3~M%$1XzIy%NS}7W~B!G zGC7*d*JDBFQK!n|rPM<2l<2pz!gJcA=`<1q-m;~q3hJOp1eD!i7U$eE_ZI$jt%`}2 z8}pa!G4&8@gP8S^^%H#{`aCtm&8o3y?ctRNE^s7~{_%P_t%y;xy!>l0^e!LVU^Fb2 zPHE<66R3qyq7#;E39i#HDaA`zYigdYb43sZu#a{2051(<2^c1MX>x0a$e`4il=bDf z+DsNWcFgsO*V^Zz6?|-13|H;W3T^m&67GIk5~i#fsAcGX736~SlP6|f5)iaYB7YQB zlkJGzW9u9XZFxlL;5(+XEOZJG<2|ER77S$O2yU;RF_%#HUNRSk?E z2N3BfQqaGbFSwLuYPPrjl*gs}38OI_mDhXWBdO#^~F{Y zu3D6=#IYbgfFUB9e0;bA&gNB3UgQMTFHTf9R*MCNn+oT8k;v;B-S-=yUN*(iZOzaC zo)fcT?p4h>WHmLWMOdpMEtBr&wHYaUm z8^?D~$xl82mLGjJ)5$sfl>8-*ZsLwiYbqL7E-#}N4x{NQhe{rPmGsdO%ow7~urw5yg zzBss|%q(uLOE$f#e>$k!W}u$3TE;pFvBMfjX_B0zgm$ynRA4|U&sBww)U_;P zBb8uVa4VoE_`V}iRp!yD-_zUt;{Y!^FYCiAFqRmqyn|>NzGqmNnCz^$KhoAOWz*=w zdZzkk#PM$@+*k&MPppguuAv_Pl4n3_!P@!RcG2&u@CVidd)T1z<>JT5r5GL&$8|PC z9t)3FZ>E0-KQUmUodd`LMje>I2_CAd{Sq65quM%fk>cuW|RyvSPr(GK)mgi1_0R~@wa5~r}#T4#}4;^-K$ zfTwL#@0+CZY4t5@@fA)*qU`rY9*~t7h@oCiDV}zRIFoKaFB25irAApX=Xz`TSaSCY zVh+VSG9gu(X_6*K($LYdwCQwjFz~lo^)NLpD7iLs3j(O-Y(&zPcGwZ|Rcl>c-L@o4 z{hAyrwWp_fHF*l_0ogxRX#=?ZKb%d%@qH&3(uG=>jKA+=UXI|%i8qKbAa}mUhx|U9 z-BcA&XIXh5`qFQ7!Xc;KUccy^O>yvbDHYD>iiMg=K1nw`I4eAPI;D^uK(Co0O}!Op zMaOx^0n?X=rq?-o>mR3x##2trc%Qb6czcH$`X1Y5$sL2XSd0nk0zcR%&D2#be&YJ# zpolQlUk3ehPY&Y8*)L;jc?l)4|B`j*UQ9_Yj#L;|251+v!PfVO%?S2bSH(U2N2=wN z9cO3K7u4$&7kLH%fSqk~BzL6BU1!@|pqZ1DgvYJDnK~<6b+)Ah zLL{9m;*G|~cxi?bcwauimU|)D+1~z1_^Xc>4Ytl)eFyFOHuKf->Ud6^omFf&L2YMx zhyst_KaR7XKPez#4F9aff0E_VWzsfz?uTQB_Rq#5m-d7tR-oai;Rv~$;H^PqlRC#D zJG22MS`HTT&gH~?GFokQ8bbEW%8cU5>U8_nt{KK&j1@1bf5Qpl*CcX3FQ{7*DYjV_K1o4Bew`82LN(JD<-6M{3$+5(1sq; z1UCUP64XY>&p()GCTemquc*jQPF)r(f7yQ8Em}{oerCzUJbTpD77JD65e_b7mQ1yn z$QylLJiisf{rbBzJA^k0sBY|y<+vr_EflL0k=xI7-18^e7;Vf#{N zF~QDXWW>Wpddu)RVB$2Lbk&-^xU@TdC-vjcW$Nm#>+}7P95e-qSw+D_W~6f;W1Pnh<+7$a`jPQ5pI*DU{1Ev}zjy);n_Q1SM~F|z zD_xY{Lfpah=gfPnEpxr+H-$%k9_M0SqKGa)AZF(}wAA|ImoVutyEoU3MozJ#eV_jc ziw9+&y$>U9Q&RU|6}(1>(cLV|y4euMX$$=E!V2eMfs(Z{Z|`DlJzBNDPg7Q!@vY5# zSc>)IKYB3Eq58EoddTTyn9*%dP|Y_lchcP;ML0cm;2=0cJ5mej{O5-3NLw z816I;PAf^l*__lQCFX3V^DjT%@Gvn_?L&S1PqLrd2NWVgU%M2c3wpQ?H_o{8?IKCC z#=m@q>f$&(kSFLYqt%YhKeSg5u2}4+L!m5;Q88U<{;-~%W(f$v)n>?&zgQ6WNic13 zxT!f5swFFONho5n1TOHZ%JfyC@_YHX`4Og26S)2f>B{Qd)9EmPKVQoHYZi*079&wJ3T4B5; zO#tE08(F6Z-D@I4{1si7#|!Cg!clXU zxAt;R7~BOE%lJepigJ#>#c}stO1(;pD1nIfWzaw~(6eF|M4MA}5@z*Ov@cVs_|YF! 
zx)c~RxvKfpxpv%Vt?x>?b&j=f*L!FzFTPD9A3Pr6gWFD}f@GK+#j0Q~WLuXId z8UN6**s~;}yO8>_)!_SONI#hA2k(8@TOwdT=wxM-pT)`rh=IZ8_&u#V%?a3LO&K;M zc$NUSjX@E8QP)^a_I&e;_|^+2k!E_?xkR!v=mRil98ZOGrHbJqg)i>@$TzaTMhw!s zgt|Hq7Cd$^FqkXjag@m|0pdNcr@M=;DCkpQF$K*oOQ%qrEIq%W%?8(v8Cw=2p0zGAs z$;&Ho8V7Gw=h3gHqw|kWD2OuWGy;3$335{|&P)0{Q7 z;Qx=)?nx^U%-M^+S1@@qA{LksGpBRXbwpj=ADFpCEnjBPP?O1rG?ycQ=SfXRBRQF0$B&V_G7Til5R3hoq(+YCkMTOJsd1OStr8`XRm<&mae?N z^`LLDuLl)xOQ}C0%9}i)PaXfQHwXe%-0&2Sw_usoY~n86;coS;XC3!_++5%Dx4q0A zq5-+H2l;qd2zb$!tXto|^Wb|T-xzbB9R!Oyue<}5tsu)Nhjo1t6G(Z~qB+_!rEOcTd}EML z1PDXO-C0-_u9zZp9AP}SZz4z;!f@lQ?a4)$S@^5wKs%5|iY*v1vCa$r;6(~&caSI-M*xxkTy$Yu z(Heg|RxzAFN3;o)LX%j!+~;db*7mO+N>WX zB?hm;0dWW&iS-%nduG$Jh5-_g>00|^LhCw0xDJjCWtxPi=;12-!?V?kxSac&$FtIsjWgYia(0Og9)yta8+q|MHjAUvz3JtJEhE0A$*2dEP{(u0? zEFw9$(SB#3$IEg+HnR&(#5*42tg>4Q$_@|+E~5J)FUD*mb4q|2P&V*BqPhKx8~ z4~4|nWQc*t)jJ$8it7j=zjP!8>hxlh&|?V$I>YkFxq=zK-!YB{G}swqLp#s8dDy>G zy5<&bx5e#->WWb0b}&ip@=XtvLT<^f+4eI7}(lG zbdp2IsGhwHVC)B)wb)#O;F4BA-plXR4MQ3EN`W}}uG$83t^KO?tT%~W*F7H(p6@+h zD~-6Dm=#J*!4KY1Aqf$?n?KSx?k6`+eQlwIQ;Jetg$uzFUYN%Pht9C;qgLA1Hdhj5 z;ZSsso!cH(Rrcl5H45k&h5zVB2#PBFc!55dw}s#o_Olq+M9%Il6`CJa5{kh|>gXko zpyW2wgy;d<&gb6KFWnkH*f$E$a(GZvnzlgX`C2DIhTjz!Xn_n}LGl+YVeW25IPX%F zWmEyMVKJNx2QK_qt)%O~?rJzI)%^R$u@A*TE%sJ~Qe# z+*)Ut#?@AQ5}U^^&$?=KrBbM^@eCpN$(HU~i^|RQ01JfxgKL`8&RW$_&{rVLeHfKe zjDL(_vI#Y^KOQ`ojk?;?AFN%i#+63?`wOJT#2uh@m}B&qY_Q*iNK3QH%Q3sTm_s8f z!x**Z9%^qK1zBv#9rH^Of%KZX@mzqAKNu?zfefimpXpcV_3MX&E3b?*^Oq#xKW|hk z5LPM5rHwvc%v(?}K$B6bKJjDF8`Z?@25t@8C z+{<%S#S@!#@1RNsTCmk>?=fOp`22OnHivJMPb=&^gNwG-EH&XunSm_uH2t9}APgNG zYXRAdi=@kf&K!bQ8>l|0O?OrRZX+mdf4(G*HdsMd8SqwHwn7` zdTH4#edZ1I{+lEU6$MDb-z(@eQvnY@CHIT#uBAj@#}9dU9t`yjMTFPVn>#q6rxRA> zYM+wX7Zqe>AA&C@lfyp#WgZ>sRke(fY+f2kW`fp&W+>Q{sS8Ud_|7E0L@-*T=0%-5 zdw)Ue+KX?CPig(ogfD|$1_o!w!4^(i_IbYTQ?oK#?uZn9GD?fj)}*D_%J$-ddT<3^ zN-~o!7rejGqNA$psjS<$zECUNA*O4i^>Mq~<#Ob3?&zCH^ zF2xH0&E8@vS7d$}d9v!0^j)fyjsB)QaiqoA5^@cpoyAg-n;1NHv$tu2peW+$9|m60pH?*j(tsdIYwgVt$#%xUv;b;9qM2JlVt~c zc9eT;=)CF!DQmP`;Lpi)j<{T{uc+B)UJs&R3=XB6=x!-S1m}Z#RD|v45}($O1h&8# z0&RwB6L1x}Qg|fQ#1mOU1dQ*Q$x~fjZ#tVW;;ATQ!Vx@G*-G=#U-Xe-^^05r)LodW zJF+uSgRA?z+oautQt{c^xwz(zuj#`)9G0AX*M~lYjX~z-gC-@^?6zgV8=Ya;NT{f( z>~TPtDEyjIQXe~HsUzWsYJe^$g(Q)${8=V_dVkmi?q!W8r6fO9(r-K!0Ru|Y0J_&S zNfIIS^wZbm3&!}MCEMGz?M~M1!!x_w4GwtCL3i;_TbZ5v=48y33+hhPXiBp$%OO* zP3R0|fYk*chdsC6HF^B~^%s5W?OEY#aBL@{bIrWz`TNcFYg6PlYzJy|sAj`!Q0T`d zoV(eG--a(v^nQ_iVrBweBvl4``$B0QeQ=rLzx}GK?5sD-%l!~`A|9xSGETRHC3>-M)j-_UT{%y)3Lmb%=rwiG59y{ZX%)<-1JW>Lp7Q}*e z=!G89X9x9YR=sxI>`OVIyLs3+sh-E}tv-VC+tAIqyR%JDZ$F6(axEg2d;ZaX{-sHJ zKwEf?yYNB8=l^O{Z*UZ{&_B@`COYg~)VH6>D?CanL)>2lSRDaRUQfY5&^o`zw}lf51Ku&nSeilX zd0r}U(+am!jjp$yn$Gvg2%O?;UW?mgt0;@RWts-%IoDv`WB8DgsdRS6i}JShCq?I2 zJn-3k{psWpSf?$>_BbMOc?^%tSSRN>7ODIFRlRi3Ynp9bh&RcnmycUe^dRBM+ipT| zia&DT>&9;kA?<=-taeDj8NWn9a(4{Q4Gu|QlzPnndo!r-n5AJlht9bH1n)~WCof7X z(p-eDPVf4tlK!p3WcO6)Rvb{#7 z2UHK#a`wB9gO_g-DqsBEuIDWO!2Lb)MJf@pTgpT7;8#>@5Np5JJXD4JZ=fN#gXDVQgRhuU`ag z&ba}#X1zakfu9tC%>9(I)jHxJkN4@paoX7M?&nUqJ-V(PROJg|pXcpd==AGStAeAZ zyW54}gw46WQ#w42EA~gF6G_LX>HX1_uc8EKdx97RPMraUB4Ycav%o@{1B{pVqX~GdFXeqj-iqlS9+p7k(^=G{G-lzD`^3E@%B8RzFuOG5(%zxNhHY zB2UG%R9P38+QYu?a3pFePmu;3zP=TqRyc|z7g6Swt=HGBYfdjxQ+bj-Tc#?9#CZOKW_x~c|u6;r#n=%ZlX7G%@GC{;2(z?a;dj3 zXU5k&Y}$QFFBi!(sm}kcKcM^wO7TTtUz8e*7!hc{|GIY|L|O|_*4a?K`H0S0=aBakJr@b7yB&cIaz4jlzxx!b)eO9j8cpmzyad!`D|NOP<|q@9#y}F{qkN z%!}_Ht$h8avRT;|`sd=mb)dzjK={Q`fa)G(tniqW30bt-ci^egXVR%PZW@pJg@)d_ z__$8LUQH~q4(ym3Sr`vzBT8V$TCI>nqT5o8F1@~3J3j87np$s=?2NGx6Ha<_ASB7Xvqd)6??71`uB2&&iOD~lbNk*Nv~4=x9Y#V>ILEW@ 
zYg`ij%lt3y`aJlyFR1gL3%_;-l6hRWp*hXBL-udTSd(V)f)HN&bNT?@vsG^>q%DJ; z-RXh%MarVXT+?bQIHKukK-VHh3F7pUg@x;tb)j_SV43|F=wyyrR|xUbTV35h{hqIt zWaSxOnN;Bec8-q|t7xpyzhT*SGf=Uu$(hyIh?J2^IS`T^@7-V~Tw#I+KYQAAdOQtW z@{85}svJ31>My&zbK!!tW5s~rv}$$A_COi3?+_c--+zJ>5<`0%*NQlGdjR$1o@r|F ze+S19Y>o#lpa%E1iiyj8>^4h?tcU=>-E`wIwZUqih2L2AS)@;U^e^kkc{k4Ig$I&> zQSQMsJTIgsM0Tvt1v7Rf0)sYae>pfDDTMiO5opcv7gTnjSu|qlwzpzZSFeUTT&EN~ zTGIvn*ez}}CY?K1pna9chs2*NLiXf!&G{8lBwe{@Qk-tsF~-~JnsKHr35ov@p!S_A z3NOmnKfDmi#5550^b5d_izTci<#GJwx8R7v**<=InHSMyhy~qD=95&TLzZ_z)mHNN zPqPGwrprGqic2eKP3WmqRzwb@feuJk9Q&z?x9!iPDs3&U+(=yPnWH$q{c}fkwzjrh zT$K2o81hON8O+or){@)!6A zZ9QMG^rM(#cFV;%h(>c(o*d)M{I&fnY7E8NVS4<|2coY(9gLqTuHgCYDyIuyX<(D_ z2dZSjcB&+iJn_9pbBt3y@Oo6}0C2%y1it_3z~&5eq*GH=eh!s!CR_akV1+|-a@@LP z(;5>t>Nr3LnhI#1EF_x`^3=rXE1!ho?@Rj1MBs`_RHx9fB4G7jpUA&+0G(6y5XmO) zZLKQe496sRA4Vs)nH?af6hME<`!pMVAdj0mY*mgBl4 zjeDi_4wuKeon?h@y!5{N=*aOIa%9{tcRCV&{0qEH$h#W$3#PBD4aBHKtjPK=ZQJkK zSVug4{G3!Ok_0=yT?P#g1Zf;{QHaOok%E3~+;^izkYYtL4>OUFuQAMbTP2EQ{KLbB z!3-UkiSdXsg156n=1zZi{Z19lq9I03h)&KR0l({sty`zZ5op2)z=x~++fd3Z1faUr zJm!1gAB1@PgqAfYBs0}!!wPohUCo40a;x~hw#g?T`D}kdc8stfm9q&r8Nmo9{-eNf zPqzPTFz~5XDzK3lpi=t}ELt_d5)-H$mD3*eGY<+vyt`|;iWy25tOkjx+HFbMEsfmP zfqN-IM9D(e#4vBs4+dEWd7?ameiY)|V8Fa$>Q6oawb~!v4H8bg@Bk_ofv0!IJ^RL~+m4q!&}W;+n*ttuHD_@vZxeV?Pa0z5t4{qn|H|V-7kNtuk@t zVMxv)~=T;MIKK9}~Gd)aSSnhuowu~yI;^KL0sG2^A*wEIi5H$m} zK27`m_iu*Fb|b%MNadb7N&t>N-0b~m%2mqkeyZ8n=Rv(Qm(lw5bR(2*3;NC#4^Uc< zA?xbuuG)0IHkbHB$;{4fJ`NWD0FmGAFr<7fpa_R$hO^u#gCA#=Ws>1VpXXH};K7paOmGL2Q6DjnoSFe2R&P-k>PnADqBlQ$s6F@Jl zjJ{cBp7wqn`9oIfs1Rf81<>O+!ccsJ)lDx$z_TaRk9ymYi;{uMzFNWv4!nR^kb3O9iqPuL1F0mcRo8(_zVdO2 zN*2M*d&mdXiCt|8oldW7qSoG*-mX$_R_dQ;oj7zX8V7!ra>ZFZ*M(iP1L1nui<%qq zd~Hymk!DTH=&xOD+U{;mi*$TwiW9r_&L)8CUW#3Zva;!pZI;vZUX;UVC+^=1xgvR;al>?w``t;gK7amoqD~<= zG4ls2FfsCRd*?5kZk-lA@y@P#SYFM+Y`PA5qYr=WOhV0K%%nsI{wK?|)!$3y(<#?~ zg(4a6d>Z=hBgG+J9O=2M;jJ8MZ(VB5y?SToPXmYSxU`^r6@}Z_HGEX!jl zdwp8Jt0@hvVevd+a`hZPxB4XzJAu{#*cp!9!5(JFGaxuR|4ZlLI+`w{8t7=+qz!>r zS*(20ri=mO?}B$9Rb{o%_562m_y?udB@g|7Z9IcjSb1Wa$GCq_KG&M=r!Pdaz$k>! 
zE(W?6APE|y(Y#bP2wpiQ)hr-%_#rnqetOW%Lj6+nW>qeItjiw`$T|^N3!74`dTxMtKK**21ylV?hkpPlO1-#dS zg+4%UInes5qCL%s$Jl16va1K{B=XB5 zqmK-YDc#e*a5ADvaXYLf>cS4C74WtA61{d$75n=xWa2K#2i$aFBYwWbUW9{(J2&-q zeMf|BO;fL91sJAkW#!#{CaNgdY#_b7`&9P92GUA-hnmB^aGP6AO+o0Hs|(jpGU|4% zIn*GD(SD|;_xX(S^X>ycvC1}3O8Z>a3}65HY2XJz-61+Y zE6N95u(Nyrk}GrRwbZh2iI?2*q6m%c05>@UAOhOqO)!&|bPO6gH7PAjq*;8HoD_Lo zz;arHAC^-rWB73QxuM9)3h-UE?Qg6RC9hi=RNDo3EYQ1lP z2dZta*UUHQ&}#j}EL-*1_m9IfCXX}j0)+elcFYi>jR{ zj=28$yE@N07Xd=p0m{K3v{TE&+5^DtZ#UWnqzW-=GIav8&7=_Zd^=zbXFl@BRmPR_*&n$lb)S8C3kybO+wU{}~fq zc$L7ZLCnI?4c!8pLH%OLx=mt+_$nQ2CndMqXN9`CUT1EmbJ9Lqp|4tP_~a@6kYJfz z61lWR4xJ~+&%D-5{Sp~%`X{F5UQe7KF?NiVNVmFDe~KSApPQWDsPU#(R_@tt zMHL|^Yb=Ggu-^A)iMb>ijKq!i)EMZ!S5{R&CUYrDU2^IQ2^GG>@wllw))6PwMsugY{j>&vWZ;{yubeBb+y;EnANJI zI%rvhBIkpMtwW=1sk@EltlsuHe>zayv)@QIw8GA}$ohRBZFTZmmX*gc&5`z6_P)4= zzU+E~~tZWGGh?jWRU?|Fs=h#}hg#3{6{B^2JyoVuriJxGF>l@?AD5>BczY^~f z8gpIuVwR?CpPQ8qZQj2i=(w@~8nZ_6jDKI#hKS)oP|m^z%mo6k#z>&^;X|XAZ^wN| z{b_8(x~5!-V_7#DUHqiT${EDNrbN}(*FqwxW3Kuu=AX^UqYLD#v;4!?jk2Cc6!h;u z+c@eq)|WZ<#l%Q{d5Kgt?bXsmirjHvZNMiA(Zq?MB(i3Io+$j>Nn2f>RXL~D(sXqj zdoYDYzQ1SL^!*3huseq7Ys2VAF=_b$4H*YdV!q9qngu!XtrX(5kB8yJWug#08Jche zuATf z(^}1sH07nVPDHNxKL8iU!i|q;|F{oL+5;bQ)^qI^L{RvH-O1z|M{E$G;yti_1+ye5oUfLAgTjs{WQlYXYbcfB~1 zUK4&GMni-vzT+oeb?NPm6pZmFDOpV|%3K@cMZ0g32v>)J3$7z-O}x-2ij(8vx<&BY z1&rCj`Yn_F{h>IyW(PP=@m!udb>SeP!i2|`n(T6~YhZNM6e}FE4Y?(TDQ`W-+0XJ^ zZ59Qiz}lH31lcL(4T}7d$D?9xw`mGCi;JU4;qa`(<>9(h&HV5NkKVCzE`;81fdv(& zumi`ox9W1Y_U!{A&bHK;eE5qDGbW*m0|Y3bFfQua}_51Nen#Y zTN~-~xtRpbXL*&nnVt$sTAGwOJ|+ufq%FVXf6riv7$?}+<$+L~6cT+t@8cFDG@+Uu zx&wO`7<+Xbht{gc7$jQ=|v{DPG$?nk1-yIx5{vr0mm=T(C{1J;XKx8<2jJ>w`0 z`lK?(G?yHtB;Ie330f(DM0)7g4B&Hpf6hH*G-&4Sc%uwWK_N*X)6Jt+TcAhhpH zR3kNkateKMyT(x5wtDZn^m)uFAb9~kyxI1dE)Y?7g^d(78Maso=B*i?6VTon|aSeUC^$% zBY`${--#A0?J^EtXgENK#HqkVFwx?K$??+necsHwm{69d;xvTEXRpji#tAqjGPX{p zig7RBPwmF#6FtUSCU7rKj>czn+eojBPJikTMS-t*qEoyI2j?n046*dJlD>}Ejn^Gs z85s@bddY=AnJxqqyPf>5#)!q{FJ@hWoe^|Xy^CtRmW)w7vET+0l&4nBCJd!F-7AQH zYNgEv#rlXE9oPCi^0qt>A`iPCZ|oYv*elHj5IbQPKp}vqEHWrW7R=wjwVn2XgIer% zc|;?f^n+DtZ6aaB5=`qJ138{ztzsPFR;$R94`Mm9$%EK57hsd{`RBfixuf=oP6*;O zg;s=0KR~lL;Tg-BOfR9Gv05qIMWl>ExuS%Tvm7iSQPBuC?!oPRk^psr)(gI1v((Qd zbtw5VxcK+6#r`+^GI>5YN=J>dl@l$UPB1_mtR3K$We%`uI$oB+Pcu3&S8l1`Lg-)@ z9|srSUxWcZaF3OHi-$C9;6?vnivoj|(ME2!ADT@Ha5g|!Z=%OTKpyOB84k|LAvNhR zCih!cY36sJEves`CHwP%xkDeclQz)MZ5OgcWr%YJ+7mW^vU`7@-NMCSQWY49Qav4p zYI2uH^GjBH%a#lK0UV?w@Dw)8w!3_}Y_6h-+xF(A-)@dK=gwghjMHq^WNy`6Hleon z5s!fIeA)3la27YwH%FPLrOir6NTBd@#>&rKH7LXPRPm1ibATINa@b(+_E?;JY~sOkk5_2ic-@pw@xgde z&d{D=uOc*UHmjo4y|jn+kox?YZom{a)h5L;3*CZ$E{vY2^`~B~i%>HJ3h#o6t0m zgImNDhB`NuqlG5@V2j1MzIxpSD&pQ|c|;SMrF4lWsXu(ECcAY|x25Ch7m@u-DOqCU zDDuT&T+?W7soA3@)l&Dpogu}iRdMw%F@?$O2-VE)u0f%~dOgITqI)eVvP*eQQ-d-W zC|kDju5aZEZMF~Jbhbwqk*lo0I8PhZf8DKLRCjA9uWaT@+^VT|V*7-0ZmR-S9j|Xh zxNkJpx3(g@6CLnKY3)YtJPT#cidPcx5EoZn+uih4(X^<`la$%vL0g{fYs&j=6;*=mLyqQ(JW~vQRA9z(I?UsS4Iu#wUDwGN)V7LvAE3|^l5msZTtpR zG~foat)dy1~P_Kcj`;q2(hWH5^x9XVq~(adn9glleurQ429Er*+;KK zqgY7DSt`t-l<}vr!pg^PDbHCT1N}(-yV*(x7T-^8TYto`YOR~TDBL0JEMmc?;17AXN6+D3Bk4I(3nkhkQQ-}-Ap0S?`57hIul7l`AiVd&I>4`#vdPL#F zI;p~xxpW?AK-cQ8*&Jk<V=+3clrCIV{xTKoc z==J4mUrN6|&ArN+$PSc3H78+5OqOG$1p1`%As`o2$o`k7Vo~Q4QC_eIx7AVEH$w3~ zghQIIN29BJm$MY$AxuX*a}12h>FuujUCzLg1;vQ7@56C%R{>zo%((6stP|B=v$9<4 zX3Mx*O(~}oXxfihL6C>vhrmo@hiWhh410tIW@l&jU7VOy1&oWra(==hT|?>etrN7; z(o!w{n^%+{tmx8a4B7C*+zrR0@R!ox3X=RI;UF-iV#xQ%Mk0!IuGBAWQCD@dy`V$d z0`UV+v9LrpDBSe8z+Y5+oY=G~%7-SY7u_X!6WukKzNyq#Dx$x!I8xLDDznm-KcPFs zV)=tEE8EQ2rsC6!Zd;#!>d$MC_0R{|t^dFa1LpNoKON(<|Aq4hFW-A(xWdU033N;N 
z9n!tJp6CiLybO%KR#6Rrz41Pte2$aQzN1YE(raXHz5cvEN6QD<-j095WN;zx2R~Z1 zB7E(KTw@_5F(mtL>_iLX2U=0=-N|xLtUvAJhq!eRzJ=4^;!s?%h^og!T6lTMWQ!}a zBPayyQSDo@6*}wm*{)FP4-sbzUd7L544CgOBc)$|=6Y8GtqL^S{Po7eA_Y6~y5DDN zeiv%vTM!kK-kvM=g?^2RkyIB5;^wW#P{g`kFnfevQqB9U`L&a7<9!dB7jT1+5}>^N zcil4gk8Zg=LQg*J(|K~pi~j|`wA`X`>!>j)ujl%lNpyo{{G-T_JW*oYT-y+oZLoIp zt-Z(FnE@YAl|=$KgE7oEBt}1%fk+p1P;PgX+@5%`lPt1{9;X0#2-&%%xh#c&u&%kV zeB~c+&jy1MxU+1fuB@we1-pTUh4&c<-(oeclBxh5@GK4rZ46`@5CS6+b`(mZwTd%I zKrME}GdlbRI7mm}>@8BaJem9cW=Z93@YE5tm2dgv=&v)yD{9NyuB_ZEzU<$}o`NM* z{4p5XSHeKPOOn(Mi+QCy7cnX1gM4zl>LCsX(U->A>iQtDBNU?-Gp=p;Qtu0(u!tZl zvmi{SBWpY*BT8sKGI`O6OjhYh+*9D%N)rcc0)w&X8O*czY`q`FKJTKcwjAPL*k8OF zH+%81Z_YfTKFDvQ3;9@sD*X!v-!D?Eg@-!)FqkZ@ymvh&OCIke4KQ&l-v{LJ=)K5C0}d8< z9^HJ2Su_usT=lonRau*;X;5mPi~RK}r559#~K8tYh7%UPT2s!RLML-{EV5$yrNu;0 z;S?(^w%9w1ZB%)q0H$mH=Ep@OD+b4KYf%av%ZCQ8*ki6-1#f$n8=___j8QHZZ2HKD zuk^S|)LN5PYLNIeAYVPZ23ZqZy)?zWb9U?JQLsNvmK23Fi$2yh1*VyKoO2wuHF7Gd zeb+Pn60=-&0G-3_M5*eYH{Dn1bP8()uh+~PaU>DVzyYaI>T9eqofo6HsbR+ z*n?S;I&RNPOs}&S;HqEcXPn;)W15VHV@;T7 zaHFvSW)9=|BH(eQ$DT6M!n&lVn<|IM8=gg0zw}kScZZu{9HDSmCHyhdJGJAWI_*WG zqan1JU53n!j;9RDa|TN5yt%ty;4if}|QoDWCq! z*H<=Gf?i89a^gM;Ojp@}VaFeG5pYVk>ApVA2h&3}%Y6I!1a);67kJmA# z92{no#PE{g@*&5k7_Ac!h6OZ80J92R$YJEs{*2j;TXAzZc?nB&a=Q1Yk^}^ExZd12?Pf{O^HvDmHV@| zeBm_6-C04IG^GcV!&(x9>3d*mh6ws{2QXf%XFo>Mqzf7ghP{>z5<@~BB>)p4h4oA+ zSlAB;4myC?!Ug|YL{r<(_UM-k%Jz2vENuEZepRf-y8=2z5hi?4D7TwOv>#$W{70&- z8$>Wctdy|E|FyGI;LrR+LuX!PLnT>!II*9BfxE#__vl%%c#0fyo6gpxV13vCUA-Y; zyZ;@R{zbj*1V+ueXH4dNzRU)SoF{BR&0U`o;A2HdVR&2e4Tr-OQ_-~MPnT{PX3+?Q zoMP+!Da&uirKlBIP6-Y9cAz8{i*1sV7c4vRhXLC)$-?#JGZ@}p_0#-xGw3kLuDgP* zW-mxz*Ip${3owON^na=~py57`vR%JoAAanK)42CSpmw0vif>1`;*ojxn*Pwf(^$$e zv`DXC=rorEdPiKigF78+{msjlp2J023u)a}>(jjufIAEptdzv7sD0{EOZL^u)Hhhu78A089sYSzL zcW$`zsI2%(go7Un**4ge3n;}Nh*JEDiRzLED&PU+CY;6k*$^Kz<}P2Xu*dwwpO?tP zy;HKXW?mWHGXVJy1gpPov2{W?_6!#)aZFL;?1FH!^>c&#w}O&=Kk!A%m(TGJ+q@Wa za3#(zDTef$KQyj(16W!Z#Q(3iemTv_LZVVMFdE^5wC@-SIGYaN<=d|jacJ;{j#b$e zQ$yMWlQ3rSyxBe|QK3Tpmpbd(M@I`kcA=k#OIu!^l1v1w7g)$bfDiCxLlyoZYCSW7 zjo5wzSdGftM6g@MH4N5aO(9$NFR9dx^sAyAgV|TIuvY)EGv*@r5)6oW>ge^MGqU(I z-2}m@$8;X?2YM73^pt#%ntwMqfaan@=pkOoUJ-5aMtJkOYy(BfZGrQhZ^)1{WdJ@D z_tECaflXir>S{w(88Z%Yer5(7Foa=J9vCN+TQ)$T_c**#LuV$tUkEflDyQeg4$TUL zp}@C`%9e; znMH7|6c$ITQ{L%#f9J-7iwDO02=F&6Lxsdh{e>lgc|R7%J2-101;P!|{$=c&Qc{sp zZ5Cl|?P8$IxnkT@-`S(FYx@`9R+*!zXR&fs`e=`pWq%vS0ihlDqVP|9I>`3r3w2elJcyWFf4ezQ|oJt`=T6@x7}np>)^OR*|?_bRN7W9YVq6O zX{k@Fx&)hQ4}giR)H+b~8tVqbP$rZSbR zZNLoY$1MlMvX(q1q66w}#FtfMfF?P`2{eg6U6VvAjH-D5niYlD7?(hfkJ*;ivJ(0Ue*2iX1M^NTUO+I2~K#=Oiwt3o@)SnX@e8c zbvFe_VXP)gp24AQNDMDWat1Hl6930RA_umkAa#*rXi#@ef8uD8lV!@C=F8tF&D&u6)_z%Zv`xQl*7s42F!{@^Dh8f9e*}X@&n&G{bta#%%9gHy z_V0{WQB-f*>qsIO9&|rCLI|T{)1Vi+d*TtN!>G_3n|1V$Jr(X@3?XRn6x?bNCth$(`aQMN6 zrLTI1#A7A`7w!*)to39upchl_6MT!`azjm*XE{(mxg+YK-44*Je&y7CbwBbQ z6Q<6_4oRiz!#)(Ognh#=0Lhy#x#%mW~GRrt;aV8~mI@$uO+ILn~nFYki385aeb(A;Cb_OIh2 zy$_Md3K1IPJFzIy(k>30c4p6tM931pf(TU7e9=Fn3tAe)#J(Qn@5EwMr7w2|JlbEc1cy^~`yTku_Y z;SiM35s^Z>4fXW*t?$7-v%ATCeM)`pCoJ6N;iPFMTWt5;S-H-rHV!vfx}FR*UC5U5 z!^$Xs9g#NGAk^-xb}Rf1DVM>0*0zJhjh0H60oKo%e?u6`Xcy!tL9`Ltda-R|>xFwz_7;1`khbx>lla%4f0!3BWbAqWq&IHOzXNnG zpV$MBoRGdD!+}+RkRQBDNp`tuhF&`*-!tI(l;YX}HdRcBr`;tDdyy?;^=DsyE4Ax| zk!RM1k0mLv00+V?M;3A*PVLbi>zg?;GMCQz!&HX?cC8qIIe>D=Cvow{@6f*anU?b% zX`uTE_CZ)8LE48NOI{in{UV9427Sosm*=L@A24Q2N7u!eD5D=)t$9nlq#=WaQwsYZ7N)-@-kxba%7Tl&%L88JCVB2&u})00&8rZiAp* zHSp6WX>E%Rk~M>#UoWos*K>tY9cfK`2v z80csz-uEG)={paeq%Iu`0GJ1#*gLrbqF6fZ5{!3G#FJ0Pv?q5GY4oiRi?6F&T~*ee 
z+zIPDgC&gqG1uEax^0$=L_*}2(cZ_bmBr%pgro6ijy~~}Rf5%Li9wT=GzPE(VDMF_ga%;aJ7L@X6l~X9hIDHo@lYCO zgR`SRKl7tFR3(LPgPk3ly$iZi5e!e|wq2V$&b7&7x(h;~{Y&dyWMfAL#6_4kc4Uw{ zQXn`<(Q zDH$9FP2b8fxd3e&VdVqe8c>$$ySij;|K6m+G$KDD#KtXs+~D7KpCqR>5;nHG+!wd}kn&eYK1M{2A&Q>%Ko`d+YxI D36FSH literal 0 HcmV?d00001 diff --git a/src/integrations/integr_mysql.rs b/src/integrations/integr_mysql.rs new file mode 100644 index 000000000..912e46328 --- /dev/null +++ b/src/integrations/integr_mysql.rs @@ -0,0 +1,246 @@ +use crate::at_commands::at_commands::AtCommandsContext; +use crate::call_validation::ContextEnum; +use crate::call_validation::{ChatContent, ChatMessage, ChatUsage}; +use crate::integrations::go_to_configuration_message; +use crate::tools::tools_description::Tool; +use async_trait::async_trait; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::process::Command; +use tokio::sync::Mutex as AMutex; +use crate::integrations::integr_abstract::{IntegrationCommon, IntegrationConfirmation, IntegrationTrait}; + + +#[derive(Clone, Serialize, Deserialize, Debug, Default)] +pub struct SettingsMysql { + #[serde(default)] + pub mysql_binary_path: String, + pub host: String, + pub port: String, + pub user: String, + pub password: String, + pub database: String, +} + +#[derive(Default)] +pub struct ToolMysql { + pub common: IntegrationCommon, + pub settings_mysql: SettingsMysql, +} + +impl IntegrationTrait for ToolMysql { + fn as_any(&self) -> &dyn std::any::Any { self } + + fn integr_settings_apply(&mut self, value: &Value) -> Result<(), String> { + match serde_json::from_value::(value.clone()) { + Ok(settings_mysql) => self.settings_mysql = settings_mysql, + Err(e) => { + tracing::error!("Failed to apply settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } + match serde_json::from_value::(value.clone()) { + Ok(x) => self.common = x, + Err(e) => { + tracing::error!("Failed to apply common settings: {}\n{:?}", e, value); + return Err(e.to_string()); + } + } + Ok(()) + } + + fn integr_settings_as_json(&self) -> Value { + serde_json::to_value(&self.settings_mysql).unwrap() + } + + fn integr_common(&self) -> IntegrationCommon { + self.common.clone() + } + + fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { + Box::new(ToolMysql { + common: self.common.clone(), + settings_mysql: self.settings_mysql.clone() + }) as Box + } + + fn integr_schema(&self) -> &str + { + MYSQL_INTEGRATION_SCHEMA + } +} + +impl ToolMysql { + async fn run_mysql_command(&self, query: &str) -> Result { + let mut mysql_command = self.settings_mysql.mysql_binary_path.clone(); + if mysql_command.is_empty() { + mysql_command = "mysql".to_string(); + } + let output_future = Command::new(mysql_command) + .arg("-h") + .arg(&self.settings_mysql.host) + .arg("-P") + .arg(&self.settings_mysql.port) + .arg("-u") + .arg(&self.settings_mysql.user) + .arg(format!("-p{}", &self.settings_mysql.password)) + .arg(&self.settings_mysql.database) + .arg("-e") + .arg(query) + .output(); + if let Ok(output) = tokio::time::timeout(tokio::time::Duration::from_millis(10_000), output_future).await { + if output.is_err() { + let err_text = format!("{}", output.unwrap_err()); + tracing::error!("mysql didn't work:\n{}\n{}", query, err_text); + return Err(format!("{}, mysql failed:\n{}", go_to_configuration_message("mysql"), err_text)); + } + let output = output.unwrap(); + if output.status.success() { + Ok(String::from_utf8_lossy(&output.stdout).to_string()) + } else { + // XXX: limit 
stderr, can be infinite
+                let stderr_string = String::from_utf8_lossy(&output.stderr);
+                tracing::error!("mysql didn't work:\n{}\n{}", query, stderr_string);
+                Err(format!("{}, mysql failed:\n{}", go_to_configuration_message("mysql"), stderr_string))
+            }
+        } else {
+            tracing::error!("mysql timed out:\n{}", query);
+            Err("mysql command timed out".to_string())
+        }
+    }
+}
+
+#[async_trait]
+impl Tool for ToolMysql {
+    fn as_any(&self) -> &dyn std::any::Any { self }
+
+    async fn tool_execute(
+        &mut self,
+        _ccx: Arc<AMutex<AtCommandsContext>>,
+        tool_call_id: &String,
+        args: &HashMap<String, Value>,
+    ) -> Result<(bool, Vec<ContextEnum>), String> {
+        let query = match args.get("query") {
+            Some(Value::String(v)) => v.clone(),
+            Some(v) => return Err(format!("argument `query` is not a string: {:?}", v)),
+            None => return Err("no `query` argument found".to_string()),
+        };
+
+        let result = self.run_mysql_command(&query).await?;
+
+        let mut results = vec![];
+        results.push(ContextEnum::ChatMessage(ChatMessage {
+            role: "tool".to_string(),
+            content: ChatContent::SimpleText(serde_json::to_string(&result).unwrap()),
+            tool_calls: None,
+            tool_call_id: tool_call_id.clone(),
+            ..Default::default()
+        }));
+        Ok((true, results))
+    }
+
+    fn command_to_match_against_confirm_deny(
+        &self,
+        args: &HashMap<String, Value>,
+    ) -> Result<String, String> {
+        let query = match args.get("query") {
+            Some(Value::String(v)) => v.clone(),
+            Some(v) => return Err(format!("argument `query` is not a string: {:?}", v)),
+            None => return Err("no `query` argument found".to_string()),
+        };
+        Ok(format!("mysql {}", query))
+    }
+
+    fn tool_depends_on(&self) -> Vec<String> {
+        vec![]
+    }
+
+    fn usage(&mut self) -> &mut Option<ChatUsage> {
+        static mut DEFAULT_USAGE: Option<ChatUsage> = None;
+        #[allow(static_mut_refs)]
+        unsafe { &mut DEFAULT_USAGE }
+    }
+
+    fn confirmation_info(&self) -> Option<IntegrationConfirmation> {
+        Some(self.integr_common().confirmation)
+    }
+}
+
+pub const MYSQL_INTEGRATION_SCHEMA: &str = r#"
+fields:
+  host:
+    f_type: string_long
+    f_desc: "Connect to this host, for example 127.0.0.1 or a docker container name."
+    f_placeholder: marketing_db_container
+  port:
+    f_type: string_short
+    f_desc: "Which port to use."
+    f_default: "3306"
+  user:
+    f_type: string_short
+    f_placeholder: john_doe
+  password:
+    f_type: string_short
+    f_default: "$MYSQL_PASSWORD"
+    smartlinks:
+      - sl_label: "Open passwords.yaml"
+        sl_goto: "EDITOR:passwords.yaml"
+  database:
+    f_type: string_short
+    f_placeholder: marketing_db
+  mysql_binary_path:
+    f_type: string_long
+    f_desc: "If it can't find a path to `mysql` you can provide it here, leave blank if not sure."
+    f_placeholder: "mysql"
+    f_label: "MySQL Binary Path"
+    f_extra: true
+description: |
+  The MySQL tool is for the AI model to call when it wants to look at data inside your database, or make any changes.
+  On this page you can also see Docker containers with MySQL servers.
+  You can ask the model to create a new container with a new database for you,
+  or ask the model to configure the tool to use an existing container with an existing database.
+smartlinks:
+  - sl_label: "Test"
+    sl_chat:
+      - role: "user"
+        content: |
+          šŸ”§ The mysql tool should be visible now. To test the tool, list the tables available, briefly describe the tables and express
+          happiness, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt.
+          The current config file is %CURRENT_CONFIG%.
+  - sl_label: "Look at the project, fill in automatically"
+    sl_chat:
+      - role: "user"
+        content: |
+          šŸ”§ Your goal is to set up the mysql client.
+          Look at the project, especially files like "docker-compose.yaml" or ".env". Call tree() to see what files the project has.
+          After that is completed, go through the usual plan in the system prompt.
+          The current config file is %CURRENT_CONFIG%.
+docker:
+  filter_label: ""
+  filter_image: "mysql"
+  new_container_default:
+    image: "mysql:8.4"
+    environment:
+      MYSQL_DATABASE: db_name
+      MYSQL_USER: $MYSQL_USER
+      MYSQL_PASSWORD: $MYSQL_PASSWORD
+  smartlinks:
+    - sl_label: "Add Database Container"
+      sl_chat:
+        - role: "user"
+          content: |
+            šŸ”§ Your job is to create a mysql container, using the image and environment from new_container_default section in the current config file: %CURRENT_CONFIG%. Follow the system prompt.
+  smartlinks_for_each_container:
+    - sl_label: "Use for integration"
+      sl_chat:
+        - role: "user"
+          content: |
+            šŸ”§ Your job is to modify mysql connection config in the current file to match the variables from the container, use docker tool to inspect the container if needed. Current config file: %CURRENT_CONFIG%.
+available:
+  on_your_laptop_possible: true
+  when_isolated_possible: true
+confirmation:
+  ask_user_default: []
+  deny_default: []
+"#;
diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs
index b207e50ec..d84e8700d 100644
--- a/src/integrations/integr_postgres.rs
+++ b/src/integrations/integr_postgres.rs
@@ -70,8 +70,6 @@ impl IntegrationTrait for ToolPostgres {
     {
         POSTGRES_INTEGRATION_SCHEMA
     }
-
-    // fn icon_link(&self) -> String { "https://cdn-icons-png.flaticon.com/512/5968/5968342.png".to_string() }
 }
 
 impl ToolPostgres {
diff --git a/src/integrations/mod.rs b/src/integrations/mod.rs
index 333a8661e..7adfcfc2a 100644
--- a/src/integrations/mod.rs
+++ b/src/integrations/mod.rs
@@ -14,6 +14,7 @@ pub mod integr_gitlab;
 pub mod integr_pdb;
 pub mod integr_chrome;
 pub mod integr_postgres;
+pub mod integr_mysql;
 pub mod integr_cmdline;
 pub mod integr_cmdline_service;
 
@@ -36,8 +37,9 @@ pub fn integration_from_name(n: &str) -> Result<Box<dyn IntegrationTrait + Send + Sync>, String> {
        "github" => Ok(Box::new(integr_github::ToolGithub { ..Default::default() }) as Box<dyn IntegrationTrait + Send + Sync>),
        "gitlab" => Ok(Box::new(integr_gitlab::ToolGitlab { ..Default::default() }) as Box<dyn IntegrationTrait + Send + Sync>),
        "pdb" => Ok(Box::new(integr_pdb::ToolPdb { ..Default::default() }) as Box<dyn IntegrationTrait + Send + Sync>),
-       "postgres" => Ok(Box::new(integr_postgres::ToolPostgres { ..Default::default() }) as Box<dyn IntegrationTrait + Send + Sync>),
        "chrome" => Ok(Box::new(integr_chrome::ToolChrome { ..Default::default() }) as Box<dyn IntegrationTrait + Send + Sync>),
+       "postgres" => Ok(Box::new(integr_postgres::ToolPostgres { ..Default::default() }) as Box<dyn IntegrationTrait + Send + Sync>),
+       "mysql" => Ok(Box::new(integr_mysql::ToolMysql { ..Default::default() }) as Box<dyn IntegrationTrait + Send + Sync>),
        "docker" => Ok(Box::new(docker::integr_docker::ToolDocker {..Default::default() }) as Box<dyn IntegrationTrait + Send + Sync>),
        cmdline if cmdline.starts_with("cmdline_") => {
            // let tool_name = cmdline.strip_prefix("cmdline_").unwrap();
@@ -57,8 +59,9 @@ pub fn integrations_list(allow_experimental: bool) -> Vec<&'static str> {
        "github",
        "gitlab",
        "pdb",
-       "postgres",
        "chrome",
+       "postgres",
+       "mysql",
        "cmdline_TEMPLATE",
        "service_TEMPLATE",
        "docker",

From 97dfae84ffd8b94d17baebe6ce3c3879fcfc47fa Mon Sep 17 00:00:00 2001
From: Dimitry Ageev
Date: Thu, 12 Dec 2024 12:04:20 +0100
Subject: [PATCH 156/185] scale_factor and desc (#488)

---
 src/integrations/integr_chrome.rs | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs
index 780e8a763..e42d98f94 100644
--- a/src/integrations/integr_chrome.rs
+++ b/src/integrations/integr_chrome.rs
@@ -1199,7 +1199,7 @@ const
CHROME_INTEGRATION_SCHEMA: &str = r#"
 fields:
   chrome_path:
     f_type: string_long
-    f_desc: "Path to Google Chrome or Chromium binary. If empty, it searches for Google Chrome in your system"
+    f_desc: "Path to the Google Chrome, Chromium or Edge binary. If empty, it searches for the binary on your system"
   idle_browser_timeout:
     f_type: string_short
     f_desc: "Idle timeout for the browser in seconds."
@@ -1217,7 +1217,7 @@ fields:
     f_type: string_short
     f_desc: "Height of the browser window."
     f_extra: true
-  window_scale:
+  scale_factor:
     f_type: string_short
     f_desc: "Scale factor of the browser window."
     f_extra: true
@@ -1229,7 +1229,7 @@ fields:
     f_type: string_short
     f_desc: "Height of the browser window in mobile mode."
     f_extra: true
-  mobile_window_scale:
+  mobile_scale_factor:
     f_type: string_short
     f_desc: "Scale factor of the browser window in mobile mode."
     f_extra: true
@@ -1241,7 +1241,7 @@ fields:
     f_type: string_short
     f_desc: "Height of the browser window in tablet mode."
     f_extra: true
-  tablet_window_scale:
+  tablet_scale_factor:
     f_type: string_short
     f_desc: "Scale factor of the browser window in tablet mode."
     f_extra: true

From ce95611aef65c6daa4490ed6c67be57d6435f5f9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?=
Date: Thu, 12 Dec 2024 12:27:45 +0100
Subject: [PATCH 157/185] fix: separate warning and commit link (warning is activated but commit link is not)

---
 src/git.rs                   | 39 +++++++++++++++++++++++++-----
 src/http/routers/v1/links.rs | 47 ++++++++++++++++++++----------------
 2 files changed, 59 insertions(+), 27 deletions(-)

diff --git a/src/git.rs b/src/git.rs
index b6b98a7ac..28100fe51 100644
--- a/src/git.rs
+++ b/src/git.rs
@@ -15,6 +15,13 @@ pub struct CommitInfo {
     pub commit_message: String,
     pub file_changes: Vec<FileChange>,
 }
+impl CommitInfo {
+    pub fn get_project_name(&self) -> String {
+        self.project_path.to_file_path().ok()
+            .and_then(|path| path.file_name().map(|name| name.to_string_lossy().into_owned()))
+            .unwrap_or_else(|| "".to_string())
+    }
+}
 
 #[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct FileChange {
@@ -225,7 +232,6 @@ pub fn git_diff(repository: &Repository, file_changes: &Vec<FileChange>, max_siz
 }
 
 pub async fn get_commit_information_from_current_changes(gcx: Arc<ARwLock<GlobalContext>>) -> Vec<CommitInfo> {
-    const MAX_DIFF_SIZE: usize = 4096;
     let mut commits = Vec::new();
 
     for project_path in crate::files_correction::get_project_dirs(gcx.clone()).await {
@@ -240,7 +246,28 @@ pub async fn get_commit_information_from_current_changes(gcx: Arc<ARwLock<GlobalContext>>) -> Vec<CommitInfo> {
            Err(e) => { error!("{}", e); continue; }
        };
 
-        let diff = match git_diff(&repository, &file_changes, MAX_DIFF_SIZE) {
+        commits.push(CommitInfo {
+            project_path: Url::from_file_path(&project_path).ok().unwrap_or_else(|| Url::parse("file:///").unwrap()),
+            commit_message: "".to_string(),
+            file_changes,
+        });
+    }
+
+    commits
+}
+
+pub async fn generate_commit_messages(gcx: Arc<ARwLock<GlobalContext>>, commits: Vec<CommitInfo>) -> Vec<CommitInfo> {
+    const MAX_DIFF_SIZE: usize = 4096;
+    let mut commits_with_messages = Vec::new();
+    for commit in commits {
+        let project_path = commit.project_path.to_file_path().ok().unwrap_or_default();
+
+        let repository = match git2::Repository::open(&project_path) {
+            Ok(repo) => repo,
+            Err(e) => { error!("{}", e); continue; }
+        };
+
+        let diff = match git_diff(&repository, &commit.file_changes, MAX_DIFF_SIZE) {
            Ok(d) if d.is_empty() => { continue; }
            Ok(d) => d,
            Err(e) => { error!("{}", e); continue; }
        };
@@ -251,12 +278,12 @@ pub async fn get_commit_information_from_current_changes(gcx: Arc<ARwLock<GlobalContext>>
            Err(e) => { error!("{}", e); continue; }
        };
 
-        commits.push(CommitInfo {
-            project_path:
Url::from_file_path(&project_path).ok().unwrap_or_else(|| Url::parse("file:///").unwrap()), + commits_with_messages.push(CommitInfo { + project_path: commit.project_path, commit_message: commit_msg, - file_changes, + file_changes: commit.file_changes, }); } - commits + commits_with_messages } diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 7cf47258b..17c609f81 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -12,7 +12,7 @@ use crate::global_context::GlobalContext; use crate::integrations::go_to_configuration_message; use crate::tools::tool_patch_aux::tickets_parsing::get_tickets_from_messages; use crate::agentic::generate_follow_up_message::generate_follow_up_message; -use crate::git::get_commit_information_from_current_changes; +use crate::git::{get_commit_information_from_current_changes, generate_commit_messages}; use crate::http::routers::v1::git::GitCommitPost; #[derive(Deserialize, Clone, Debug)] @@ -97,31 +97,17 @@ pub async fn handle_v1_links( } // GIT uncommitted - if post.meta.chat_mode == ChatMode::AGENT && false { + if post.meta.chat_mode == ChatMode::AGENT { + let commits = get_commit_information_from_current_changes(gcx.clone()).await; + let mut project_changes = Vec::new(); - for commit in get_commit_information_from_current_changes(gcx.clone()).await { - let project_name = commit.project_path.to_file_path().ok() - .and_then(|path| path.file_name().map(|name| name.to_string_lossy().into_owned())) - .unwrap_or_else(|| "".to_string()); - let tooltip_message = format!( - "git commit -m \"{}{}\"\n{}", - commit.commit_message.lines().next().unwrap_or(""), - if commit.commit_message.lines().count() > 1 { "..." } else { "" }, - commit.file_changes.iter().map(|f| format!("{} {}", f.status.initial(), f.path)).collect::>().join("\n"), - ); + for commit in &commits { project_changes.push(format!( - "In project {project_name}: {}{}", + "In project {}: {}{}", + commit.get_project_name(), commit.file_changes.iter().take(3).map(|f| format!("{} {}", f.status.initial(), f.path)).collect::>().join(", "), if commit.file_changes.len() > 3 { ", ..." } else { "" }, )); - links.push(Link { - action: LinkAction::Commit, - text: format!("Commit {} files in `{}`", commit.file_changes.len(), project_name), - goto: Some("LINKS_AGAIN".to_string()), - current_config_file: None, - link_tooltip: tooltip_message, - link_payload: Some(LinkPayload::CommitPayload(GitCommitPost { commits: vec![commit] })), - }); } if !project_changes.is_empty() && post.messages.is_empty() { if project_changes.len() > 4 { @@ -130,6 +116,25 @@ pub async fn handle_v1_links( } uncommited_changes_warning = format!("You have uncommitted changes:\n```\n{}\n```\nāš ļø You might have a problem rolling back agent's changes.", project_changes.join("\n")); } + + if false { + for commit_with_msg in generate_commit_messages(gcx.clone(), commits).await { + let tooltip_message = format!( + "git commit -m \"{}{}\"\n{}", + commit_with_msg.commit_message.lines().next().unwrap_or(""), + if commit_with_msg.commit_message.lines().count() > 1 { "..." 
} else { "" }, + commit_with_msg.file_changes.iter().map(|f| format!("{} {}", f.status.initial(), f.path)).collect::>().join("\n"), + ); + links.push(Link { + action: LinkAction::Commit, + text: format!("Commit {} files in `{}`", commit_with_msg.file_changes.len(), commit_with_msg.get_project_name()), + goto: Some("LINKS_AGAIN".to_string()), + current_config_file: None, + link_tooltip: tooltip_message, + link_payload: Some(LinkPayload::CommitPayload(GitCommitPost { commits: vec![commit_with_msg] })), + }); + } + } } // Failures above From e2d6866ff23913a3fc5c496f014b901feae6c921 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 12 Dec 2024 13:08:59 +0100 Subject: [PATCH 158/185] links: comment follow-up --- src/http/routers/v1/links.rs | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 17c609f81..a3df17fda 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -220,21 +220,21 @@ pub async fn handle_v1_links( } // Follow-up - if post.meta.chat_mode != ChatMode::NO_TOOLS && links.is_empty() && post.messages.len() > 2 { - let follow_up_messages: Vec = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.meta.chat_id).await - .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error generating follow-up message: {}", e)))?; - for follow_up_message in follow_up_messages { - tracing::info!("follow-up {:?}", follow_up_message); - links.push(Link { - action: LinkAction::FollowUp, - text: follow_up_message, - goto: None, - current_config_file: None, - link_tooltip: format!(""), - link_payload: None, - }); - } - } + // if post.meta.chat_mode != ChatMode::NO_TOOLS && links.is_empty() && post.messages.len() > 2 { + // let follow_up_messages: Vec = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.meta.chat_id).await + // .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error generating follow-up message: {}", e)))?; + // for follow_up_message in follow_up_messages { + // tracing::info!("follow-up {:?}", follow_up_message); + // links.push(Link { + // action: LinkAction::FollowUp, + // text: follow_up_message, + // goto: None, + // current_config_file: None, + // link_tooltip: format!(""), + // link_payload: None, + // }); + // } + // } tracing::info!("generated links2: {:?}", links); From b208202f3fec82ff3156f93596605b0565d5e116 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 12 Dec 2024 13:09:26 +0100 Subject: [PATCH 159/185] minor (project summary system prompt) --- src/yaml_configs/customization_compiled_in.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index 8c5443775..b61fd2203 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -165,7 +165,8 @@ PROMPT_PROJECT_SUMMARY: | Think of typical command line things that might be required to work on the project, how do you run the webserver, how do you compile it? For webserver to work you most likely need a service_* so it runs in the background and you can open and navigate web pages at the same time. Turn those things into recommendations, replace _TEMPLATE with lowercase name with underscores, don't overthink it, "cargo build" should become "cmdline_cargo_build", etc. - Recommendations here means just a list. 
The user will fill in the settings later. + If there's no web server detectable, skip it. + Recommendations here means just a list. Details will be filled later. 4. Write a summary in natural language to the user, get their feedback, just ask if it looks alright, or if any of it needs improving. 5. Finally use šŸ“REWRITE_WHOLE_FILE to overwrite %CONFIG_PATH% From 5d440510cac54ffc1b953e4f3f7335d7532d13ad Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Thu, 12 Dec 2024 14:02:50 +0100 Subject: [PATCH 160/185] pdb, chrome, knowledge -= experimental --- src/integrations/integr_chrome.rs | 2 +- src/integrations/integr_pdb.rs | 4 ++-- src/tools/tools_description.rs | 1 - 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index e42d98f94..f1e602686 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -284,7 +284,7 @@ impl Tool for ToolChrome { ToolDesc { name: "chrome".to_string(), agentic: true, - experimental: true, + experimental: false, description: "A real web browser with graphical interface.".to_string(), parameters: vec![ToolParam { name: "commands".to_string(), diff --git a/src/integrations/integr_pdb.rs b/src/integrations/integr_pdb.rs index 30e71b5d2..9b99e1580 100644 --- a/src/integrations/integr_pdb.rs +++ b/src/integrations/integr_pdb.rs @@ -97,7 +97,7 @@ impl IntegrationTrait for ToolPdb { fn integr_common(&self) -> IntegrationCommon { self.common.clone() } - + fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { Box::new(ToolPdb { common: self.common.clone(), @@ -178,7 +178,7 @@ impl Tool for ToolPdb { ToolDesc { name: "pdb".to_string(), agentic: true, - experimental: true, + experimental: false, description: "Python debugger for inspecting variables and exploring what the program really does. This tool executes only one command at a time. Start with python -m pdb ...".to_string(), parameters: vec![ ToolParam { diff --git a/src/tools/tools_description.rs b/src/tools/tools_description.rs index 4330e55bc..3012aba53 100644 --- a/src/tools/tools_description.rs +++ b/src/tools/tools_description.rs @@ -400,7 +400,6 @@ tools: - name: "knowledge" agentic: true description: "Fetches successful trajectories to help you accomplish your task. Call each time you have a new task to increase your chances of success." - experimental: true parameters: - name: "im_going_to_use_tools" type: "string" From 89d13b23e1a46e7232aec43cd49de971b7d35175 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Thu, 12 Dec 2024 13:39:23 +0100 Subject: [PATCH 161/185] fix: add mysql to list of tools built (#490) --- src/tools/tools_description.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/tools/tools_description.rs b/src/tools/tools_description.rs index 3012aba53..a2976d591 100644 --- a/src/tools/tools_description.rs +++ b/src/tools/tools_description.rs @@ -385,6 +385,19 @@ tools: parameters_required: - "query" + - name: "mysql" + agentic: true + description: "MySQL integration, can run a single query per call." 
+ parameters: + - name: "query" + type: "string" + description: | + Don't forget semicolon at the end, examples: + SELECT * FROM table_name; + CREATE INDEX my_index_users_email ON my_users (email); + parameters_required: + - "query" + - name: "docker" agentic: true experimental: true From bc5965ddce004c41f0b0636a4dde6e0dba40e3b6 Mon Sep 17 00:00:00 2001 From: mitya Date: Thu, 12 Dec 2024 18:49:07 +0100 Subject: [PATCH 162/185] get -> delete, return empty string json --- src/http/routers/v1.rs | 3 ++- src/http/routers/v1/v1_integrations.rs | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/http/routers/v1.rs b/src/http/routers/v1.rs index 1cca1e7fa..7e7027573 100644 --- a/src/http/routers/v1.rs +++ b/src/http/routers/v1.rs @@ -3,6 +3,7 @@ use axum::Extension; use axum::Router; use axum::routing::get; use axum::routing::post; +use axum::routing::delete; use futures::Future; use hyper::Body; use hyper::Response; @@ -128,7 +129,7 @@ pub fn make_v1_router() -> Router { .route("/integrations-filtered/:integr_name", get(handle_v1_integrations_filtered)) .route("/integration-get", telemetry_post!(handle_v1_integration_get)) .route("/integration-save", telemetry_post!(handle_v1_integration_save)) - .route("/integration-delete", get(handle_v1_integration_delete)) + .route("/integration-delete", delete(handle_v1_integration_delete)) .route("/integration-icon/:icon_name", get(handle_v1_integration_icon)) .route("/docker-container-list", telemetry_post!(handle_v1_docker_container_list)) diff --git a/src/http/routers/v1/v1_integrations.rs b/src/http/routers/v1/v1_integrations.rs index 35ce6462d..455f3c973 100644 --- a/src/http/routers/v1/v1_integrations.rs +++ b/src/http/routers/v1/v1_integrations.rs @@ -176,6 +176,7 @@ pub async fn handle_v1_integration_delete( Ok(Response::builder() .status(StatusCode::OK) - .body(Body::from(format!("File {} deleted ", integration_path))) + .header("Content-Type", "application/json") + .body(Body::from(format!(""))) .unwrap()) } From 9a5368cf610cb95fcf6a88c715ae9803b9ad9e5f Mon Sep 17 00:00:00 2001 From: Kirill Starkov Date: Fri, 13 Dec 2024 22:35:15 +0800 Subject: [PATCH 163/185] temporary disable integrations test --- src/integrations/setting_up_integrations.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 889a0a18c..772ca6a46 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -502,6 +502,7 @@ mod tests { use std::io::Write; #[tokio::test] + #[ignore] async fn test_integration_schemas() { let integrations = crate::integrations::integrations_list(true); for name in integrations { From 05e5113e5ea8ba07add045883884a1a8084fc518 Mon Sep 17 00:00:00 2001 From: Kirill Starkov Date: Fri, 13 Dec 2024 23:12:45 +0800 Subject: [PATCH 164/185] Integrated the `ping_http_server` method in the `LanguageServer` implementation to wait for the HTTP server to be ready before proceeding. 
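
The polling pattern this adds, in isolation: retry a GET against the local
/v1/ping endpoint a bounded number of times before letting LSP initialization
continue. Below is a minimal sketch of that loop, assuming a reqwest-style
client and a tokio runtime; the actual implementation is the
`LspBackend::ping_http_server` method in the diff that follows, which reads
the port and client from the global context.

    use std::time::Duration;

    // Hypothetical free-standing helper for illustration only; the patch
    // implements this logic as a method on `LspBackend`.
    async fn wait_for_http_ready(client: &reqwest::Client, port: u16) -> Result<(), String> {
        let url = format!("http://127.0.0.1:{}/v1/ping", port);
        for _attempt in 0..15 {
            // Any 2xx response means the HTTP server is up and serving.
            if let Ok(resp) = client.get(&url).send().await {
                if resp.status().is_success() {
                    return Ok(());
                }
            }
            // Brief pause before the next attempt.
            tokio::time::sleep(Duration::from_millis(150)).await;
        }
        Err("HTTP server is not ready after 15 attempts".to_string())
    }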
--- src/lsp.rs | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/src/lsp.rs b/src/lsp.rs index 055fa78cc..33d3d0d54 100644 --- a/src/lsp.rs +++ b/src/lsp.rs @@ -179,6 +179,29 @@ impl LspBackend { self.gcx.write().await.documents_state.active_file_path = Some(path); Ok(SuccessRes { success: true }) } + + async fn ping_http_server(&self) -> Result<()> { + let (port, http_client) = { + let gcx_locked = self.gcx.write().await; + (gcx_locked.cmdline.http_port, gcx_locked.http_client.clone()) + }; + + let url = "http://127.0.0.1:".to_string() + &port.to_string() + &"/v1/ping".to_string(); + let mut attempts = 0; + while attempts < 15 { + let response = http_client.get(&url).send().await; + match response { + Ok(res) if res.status().is_success() => { + return Ok(()); + } + _ => { + attempts += 1; + tokio::time::sleep(tokio::time::Duration::from_millis(150)).await; + } + } + } + Err(internal_error("HTTP server is not ready after 15 attempts")) + } } @@ -220,6 +243,10 @@ impl LanguageServer for LspBackend { } }], }; + + + // wait for http server to be ready + self.ping_http_server().await?; Ok(InitializeResult { server_info: Some(ServerInfo { From bf60155c884b8297e2783df570c58430a3ef010a Mon Sep 17 00:00:00 2001 From: Sergey Vakhreev Date: Mon, 16 Dec 2024 16:57:26 +1030 Subject: [PATCH 165/185] Fixes for integrations confirmation (#497) * Refactor integration settings handling to use common settings abstraction * Remove unused import of Map in setting_up_integrations.rs file. --- src/integrations/setting_up_integrations.rs | 70 +++++++-------------- 1 file changed, 22 insertions(+), 48 deletions(-) diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 772ca6a46..df43396fb 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -4,7 +4,7 @@ use std::sync::Arc; use std::collections::HashMap; use regex::Regex; use serde::Serialize; -use serde_json::json; +use serde_json::{json, Value}; use tokio::sync::RwLock as ARwLock; use tokio::fs as async_fs; use tokio::io::AsyncWriteExt; @@ -243,8 +243,8 @@ pub fn read_integrations_d( // 5. 
Fill confirmation in each record for rec in &mut result { if let Some(confirmation) = rec.config_unparsed.get("confirmation") { - rec.ask_user = get_array_of_str_or_empty(&confirmation, "ask_user"); - rec.deny = get_array_of_str_or_empty(&confirmation, "deny"); + rec.ask_user = get_array_of_str_or_empty(&confirmation, "/ask_user"); + rec.deny = get_array_of_str_or_empty(&confirmation, "/deny"); } else { let schema = match crate::integrations::integration_from_name(rec.integr_name.as_str()) { Ok(i) => { @@ -392,33 +392,27 @@ pub async fn integration_config_get( j }; - let mut available = serde_json::json!({ - "on_your_laptop": false, - "when_isolated": false - }); - let mut confirmation_ask_user = vec![]; - let mut confirmation_deny = vec![]; if exists { match fs::read_to_string(&sanitized_path) { Ok(content) => { match serde_yaml::from_str::(&content) { Ok(y) => { let j = serde_json::to_value(y).unwrap(); - available["on_your_laptop"] = j.get("available").and_then(|v| v.get("on_your_laptop")).and_then(|v| v.as_bool()).unwrap_or(false).into(); - available["when_isolated"] = j.get("available").and_then(|v| v.get("when_isolated")).and_then(|v| v.as_bool()).unwrap_or(false).into(); - confirmation_ask_user = if j.get("confirmation").is_some() { - get_array_of_str_or_empty(&j, "confirmation/ask_user") - } else { - get_array_of_str_or_empty(&result.integr_schema, "/confirmation/ask_user_default") - }; - confirmation_deny = if j.get("confirmation").is_some() { - get_array_of_str_or_empty(&j, "confirmation/deny") - } else { - get_array_of_str_or_empty(&result.integr_schema, "/confirmation/deny_default") - }; - let did_it_work = integration_box.integr_settings_apply(&j); - if let Err(e) = did_it_work { - tracing::error!("oops: {}", e); + match integration_box.integr_settings_apply(&j) { + Ok(_) => { + let common_settings = integration_box.integr_common(); + result.integr_values["available"]["on_your_laptop"] = common_settings.available.on_your_laptop.into(); + result.integr_values["available"]["when_isolated"] = common_settings.available.when_isolated.into(); + result.integr_values["confirmation"]["ask_user"] = common_settings.confirmation.ask_user.into(); + result.integr_values["confirmation"]["deny"] = common_settings.confirmation.deny.into(); + } + Err(err) => { + tracing::error!("cannot deserialize some fields in the integration cfg correctly: `{err}`. 
Use default empty values instead"); + result.integr_values["available"]["on_your_laptop"] = false.into(); + result.integr_values["available"]["when_isolated"] = false.into(); + result.integr_values["confirmation"]["ask_user"] = Vec::::new().into(); + result.integr_values["confirmation"]["deny"] = Vec::::new().into(); + } } } Err(e) => { @@ -431,13 +425,6 @@ pub async fn integration_config_get( } }; } - - result.integr_values = integration_box.integr_settings_as_json(); - result.integr_values["available"] = available; - result.integr_values["confirmation"] = serde_json::json!({ - "ask_user": confirmation_ask_user, - "deny": confirmation_deny - }); Ok(result) } @@ -452,25 +439,12 @@ pub async fn integration_config_save( .map_err(|e| format!("Failed to load integrations: {}", e))?; integration_box.integr_settings_apply(integr_values)?; // this will produce "no field XXX" errors - let schema_json = { - let y: serde_yaml::Value = serde_yaml::from_str(integration_box.integr_schema()).unwrap(); - let j = serde_json::to_value(y).unwrap(); - j - }; - let mut sanitized_json: serde_json::Value = integration_box.integr_settings_as_json(); - tracing::info!("posted values:\n{}", serde_json::to_string_pretty(integr_values).unwrap()); - if !sanitized_json.as_object_mut().unwrap().contains_key("available") { - sanitized_json["available"] = serde_json::Value::Object(serde_json::Map::new()); + let common_settings = integration_box.integr_common(); + if let (Value::Object(sanitized_json_m), Value::Object(common_settings_m)) = (&mut sanitized_json, json!(common_settings)) { + sanitized_json_m.extend(common_settings_m); } - sanitized_json["available"]["on_your_laptop"] = integr_values.pointer("/available/on_your_laptop").cloned().unwrap_or(serde_json::Value::Bool(false)); - sanitized_json["available"]["when_isolated"] = integr_values.pointer("/available/when_isolated").cloned().unwrap_or(serde_json::Value::Bool(false)); - sanitized_json["confirmation"]["ask_user"] = integr_values.pointer("/confirmation/ask_user").cloned().unwrap_or( - json!(get_array_of_str_or_empty(&schema_json, "/confirmation/ask_user_default")) - ); - sanitized_json["confirmation"]["deny"] = integr_values.pointer("/confirmation/deny").cloned().unwrap_or( - json!(get_array_of_str_or_empty(&schema_json, "/confirmation/deny_default")) - ); + tracing::info!("writing to {}:\n{}", config_path.display(), serde_json::to_string_pretty(&sanitized_json).unwrap()); let sanitized_yaml = serde_yaml::to_value(sanitized_json).unwrap(); From 9f4c9f18ed7f9ebaab7de12cc745176c39dbdb4a Mon Sep 17 00:00:00 2001 From: Sergey Vakhreev Date: Mon, 16 Dec 2024 17:24:37 +1030 Subject: [PATCH 166/185] Add check for existing destination file before migration (#494) --- src/global_context.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/global_context.rs b/src/global_context.rs index 9515f7060..8d339643a 100644 --- a/src/global_context.rs +++ b/src/global_context.rs @@ -182,6 +182,10 @@ pub async fn migrate_to_config_folder( let is_yaml_cfg = file_type.is_file() && path.extension().and_then(|e| e.to_str()) == Some("yaml"); if is_yaml_cfg { let new_path = config_dir.join(&file_name); + if new_path.exists() { + tracing::info!("cannot migrate {:?} to {:?}: destination exists".to_string(), path, new_path); + continue; + } tokio::fs::rename(&path, &new_path).await?; tracing::info!("migrated {:?} to {:?}", path, new_path); } From 364216b82842f08dea316ca0b1d68a91882c276d Mon Sep 17 00:00:00 2001 From: Sergey Vakhreev Date: Mon, 16 Dec 2024 17:24:55 +1030 Subject: 
[PATCH 167/185] Integration delete handler fixes (#493) * Add telemetry support for DELETE HTTP operations - Introduce a new `telemetry_delete` macro to wrap DELETE requests with telemetry functionality. - Modify the `/integration-delete` route to use the `telemetry_delete` macro for enhanced tracking. - Update `handle_v1_integration_delete` function to parse request body as JSON and validate integration paths more robustly. - Switch query parameter integration path type to `PathBuf` for better path handling. - Improve logging and error messages for clarity in path deletion and error responses. * Remove obsolete telemetry_delete macro and update integration delete handling in v1 routers - Remove the `telemetry_delete` macro from `utils.rs`, as it is no longer needed. - Update `handle_v1_integration_delete` function to use query parameters instead of parsing JSON from the request body. - Modify the `/integration-delete` route in `v1.rs` to directly use the `delete` handler without the telemetry macro. --- src/http/routers/v1/v1_integrations.rs | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/src/http/routers/v1/v1_integrations.rs b/src/http/routers/v1/v1_integrations.rs index 455f3c973..14863f172 100644 --- a/src/http/routers/v1/v1_integrations.rs +++ b/src/http/routers/v1/v1_integrations.rs @@ -1,3 +1,4 @@ +use std::path::PathBuf; use std::sync::Arc; use axum::Extension; use axum::http::{Response, StatusCode}; @@ -11,7 +12,7 @@ use axum::extract::Query; use crate::custom_error::ScratchError; use crate::global_context::GlobalContext; - +use crate::integrations::setting_up_integrations::split_path_into_project_and_integration; pub async fn handle_v1_integrations( Extension(gcx): Extension>>, @@ -155,28 +156,30 @@ pub async fn handle_v1_integration_icon( // Define a structure to match query parameters #[derive(Deserialize)] pub struct HTTPIntegrationDeleteQueryParams { - integration_path: String, // Optional field for flexibility + integration_path: PathBuf } pub async fn handle_v1_integration_delete( Query(params): Query, -) -> axum::response::Result, ScratchError> { - +) -> axum::response::Result, ScratchError> { let integration_path = params.integration_path; - log::info!("Deleting integration path: {}", integration_path); + log::info!("Deleting integration path: {:?}", integration_path); + + split_path_into_project_and_integration(&integration_path).map_err( + |_| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, "integration_path is invalid".to_string()) + )?; - // If file path exists, delete it - if !std::path::Path::new(&integration_path).exists() { + if !integration_path.exists() { return Err(ScratchError::new(StatusCode::NOT_FOUND, "integration_path not found".to_string())); } std::fs::remove_file(&integration_path).map_err(|e| { - ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Failed to delete file: {}", e)) + ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("failed to delete integration config: {}", e)) })?; Ok(Response::builder() .status(StatusCode::OK) .header("Content-Type", "application/json") - .body(Body::from(format!(""))) + .body(Body::from("{}")) .unwrap()) } From 0318a239b142ab603c82bb34bffba12ec90bae9f Mon Sep 17 00:00:00 2001 From: JegernOUTT Date: Mon, 16 Dec 2024 17:30:05 +1030 Subject: [PATCH 168/185] Remove unnecessary `to_string()` call in migration log message --- src/global_context.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/global_context.rs b/src/global_context.rs 
index 8d339643a..51abff304 100644 --- a/src/global_context.rs +++ b/src/global_context.rs @@ -183,7 +183,7 @@ pub async fn migrate_to_config_folder( if is_yaml_cfg { let new_path = config_dir.join(&file_name); if new_path.exists() { - tracing::info!("cannot migrate {:?} to {:?}: destination exists".to_string(), path, new_path); + tracing::info!("cannot migrate {:?} to {:?}: destination exists", path, new_path); continue; } tokio::fs::rename(&path, &new_path).await?; From 99e938fc078a3082e9eb6baec5fed0584a1bc521 Mon Sep 17 00:00:00 2001 From: Nick Frolov Date: Mon, 16 Dec 2024 12:22:36 +0100 Subject: [PATCH 169/185] cmdline -move timeout back to main params --- src/integrations/integr_cmdline.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index e7da6385e..dbf0b75c6 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -310,7 +310,6 @@ fields: f_type: string_short f_desc: "The command must immediately return the results, it can't be interactive. If the command runs for too long, it will be terminated and stderr/stdout collected will be presented to the model." f_default: "10" - f_extra: true output_filter: f_type: "output_filter" f_desc: "The output from the command can be long or even quasi-infinite. This section allows to set limits, prioritize top or bottom, or use regexp to show the model the relevant part." From 89d8179a9fb39a511a4fef267514b0f39418d0f7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Mon, 16 Dec 2024 12:27:46 +0100 Subject: [PATCH 170/185] fix: re-include integration values in integration-get endpoint --- src/integrations/setting_up_integrations.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index df43396fb..fa806b918 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -401,6 +401,7 @@ pub async fn integration_config_get( match integration_box.integr_settings_apply(&j) { Ok(_) => { let common_settings = integration_box.integr_common(); + result.integr_values = integration_box.integr_settings_as_json(); result.integr_values["available"]["on_your_laptop"] = common_settings.available.on_your_laptop.into(); result.integr_values["available"]["when_isolated"] = common_settings.available.when_isolated.into(); result.integr_values["confirmation"]["ask_user"] = common_settings.confirmation.ask_user.into(); @@ -408,6 +409,7 @@ pub async fn integration_config_get( } Err(err) => { tracing::error!("cannot deserialize some fields in the integration cfg correctly: `{err}`. 
Use default empty values instead"); + result.integr_values = integration_box.integr_settings_as_json(); result.integr_values["available"]["on_your_laptop"] = false.into(); result.integr_values["available"]["when_isolated"] = false.into(); result.integr_values["confirmation"]["ask_user"] = Vec::::new().into(); From 108318bc6eb6d0365ad6041387a0aa8f01ee9db0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Mon, 16 Dec 2024 10:52:17 +0100 Subject: [PATCH 171/185] refactor: renames in link fields, and doing if false instead of comment for not available links to avoid warnings and get compile checks --- src/http/routers/v1/links.rs | 101 +++++++++++++++++------------------ 1 file changed, 49 insertions(+), 52 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index a3df17fda..803c0c44a 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -34,17 +34,12 @@ enum LinkAction { #[derive(Serialize, Debug)] pub struct Link { - // XXX rename: - // link_action - // link_text - // link_goto - // link_tooltip - action: LinkAction, - text: String, + link_action: LinkAction, + link_text: String, #[serde(skip_serializing_if = "Option::is_none")] - goto: Option, + link_goto: Option, #[serde(skip_serializing_if = "Option::is_none")] - current_config_file: Option, // XXX rename + link_summary_path: Option, link_tooltip: String, link_payload: Option, } @@ -76,20 +71,20 @@ pub async fn handle_v1_links( if post.meta.chat_mode == ChatMode::CONFIGURE { // links.push(Link { - // action: LinkAction::Goto, - // text: "Return".to_string(), - // goto: Some("SETTINGS:DEFAULT".to_string()), - // current_config_file: None, + // link_action: LinkAction::Goto, + // link_text: "Return".to_string(), + // link_goto: Some("SETTINGS:DEFAULT".to_string()), + // link_summary_path: None, // link_tooltip: format!(""), // link_payload: None, // }); if !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { links.push(Link { - action: LinkAction::PatchAll, - text: "Save and return".to_string(), - goto: Some("SETTINGS:DEFAULT".to_string()), - current_config_file: None, + link_action: LinkAction::PatchAll, + link_text: "Save and return".to_string(), + link_goto: Some("SETTINGS:DEFAULT".to_string()), + link_summary_path: None, link_tooltip: format!(""), link_payload: None, }); @@ -126,10 +121,10 @@ pub async fn handle_v1_links( commit_with_msg.file_changes.iter().map(|f| format!("{} {}", f.status.initial(), f.path)).collect::>().join("\n"), ); links.push(Link { - action: LinkAction::Commit, - text: format!("Commit {} files in `{}`", commit_with_msg.file_changes.len(), commit_with_msg.get_project_name()), - goto: Some("LINKS_AGAIN".to_string()), - current_config_file: None, + link_action: LinkAction::Commit, + link_text: format!("Commit {} files in `{}`", commit_with_msg.file_changes.len(), commit_with_msg.get_project_name()), + link_goto: Some("LINKS_AGAIN".to_string()), + link_summary_path: None, link_tooltip: tooltip_message, link_payload: Some(LinkPayload::CommitPayload(GitCommitPost { commits: vec![commit_with_msg] })), }); @@ -141,10 +136,10 @@ pub async fn handle_v1_links( if post.meta.chat_mode == ChatMode::AGENT { for failed_integr_name in failed_integration_names_after_last_user_message(&post.messages) { links.push(Link { - action: LinkAction::Goto, - text: format!("Configure {failed_integr_name}"), - goto: Some(format!("SETTINGS:{failed_integr_name}")), - current_config_file: None, + link_action: LinkAction::Goto, + 
link_text: format!("Configure {failed_integr_name}"), + link_goto: Some(format!("SETTINGS:{failed_integr_name}")), + link_summary_path: None, link_tooltip: format!(""), link_payload: None, }) @@ -154,10 +149,10 @@ pub async fn handle_v1_links( // YAML problems for e in integration_yaml_errors { links.push(Link { - action: LinkAction::Goto, - text: format!("Syntax error in {}", crate::nicer_logs::last_n_chars(&e.integr_config_path, 20)), - goto: Some(format!("SETTINGS:{}", e.integr_config_path)), - current_config_file: None, + link_action: LinkAction::Goto, + link_text: format!("Syntax error in {}", crate::nicer_logs::last_n_chars(&e.integr_config_path, 20)), + link_goto: Some(format!("SETTINGS:{}", e.integr_config_path)), + link_summary_path: None, link_tooltip: format!("Error at line {}: {}", e.error_line, e.error_msg), link_payload: None, }); @@ -169,10 +164,10 @@ pub async fn handle_v1_links( if !already_exists { // doesn't exist links.push(Link { - action: LinkAction::SummarizeProject, - text: "Initial project summarization".to_string(), - goto: None, - current_config_file: summary_path_option, + link_action: LinkAction::SummarizeProject, + link_text: "Initial project summarization".to_string(), + link_goto: None, + link_summary_path: summary_path_option, link_tooltip: format!("Project summary is a starting point for Refact Agent."), link_payload: None, }); @@ -192,10 +187,10 @@ pub async fn handle_v1_links( if !integrations_map.contains_key(igname) { tracing::info!("tool {} not present => link", igname); links.push(Link { - action: LinkAction::Goto, - text: format!("Configure {igname}"), - goto: Some(format!("SETTINGS:{igname}")), - current_config_file: None, + link_action: LinkAction::Goto, + link_text: format!("Configure {igname}"), + link_goto: Some(format!("SETTINGS:{igname}")), + link_summary_path: None, link_tooltip: format!(""), link_payload: None, }); @@ -220,21 +215,23 @@ pub async fn handle_v1_links( } // Follow-up - // if post.meta.chat_mode != ChatMode::NO_TOOLS && links.is_empty() && post.messages.len() > 2 { - // let follow_up_messages: Vec = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.meta.chat_id).await - // .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error generating follow-up message: {}", e)))?; - // for follow_up_message in follow_up_messages { - // tracing::info!("follow-up {:?}", follow_up_message); - // links.push(Link { - // action: LinkAction::FollowUp, - // text: follow_up_message, - // goto: None, - // current_config_file: None, - // link_tooltip: format!(""), - // link_payload: None, - // }); - // } - // } + if false { + if post.meta.chat_mode != ChatMode::NO_TOOLS && links.is_empty() && post.messages.len() > 2 { + let follow_up_messages: Vec = generate_follow_up_message(post.messages.clone(), gcx.clone(), &post.model_name, &post.meta.chat_id).await + .map_err(|e| ScratchError::new(StatusCode::INTERNAL_SERVER_ERROR, format!("Error generating follow-up message: {}", e)))?; + for follow_up_message in follow_up_messages { + tracing::info!("follow-up {:?}", follow_up_message); + links.push(Link { + link_action: LinkAction::FollowUp, + link_text: follow_up_message, + link_goto: None, + link_summary_path: None, + link_tooltip: format!(""), + link_payload: None, + }); + } + } + } tracing::info!("generated links2: {:?}", links); From ec431ce30eb7438357d26edf65011d8f850f3aa8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Fri, 13 Dec 2024 11:41:09 +0100 Subject: 
[PATCH 172/185] fix: horrible deadlock --- src/http/routers/v1/links.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 803c0c44a..8ab1f6d45 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -67,7 +67,8 @@ pub async fn handle_v1_links( let mut uncommited_changes_warning = String::new(); tracing::info!("for links, post.meta.chat_mode == {:?}", post.meta.chat_mode); - let (integrations_map, integration_yaml_errors) = crate::integrations::running_integrations::load_integrations(gcx.clone(), "".to_string(), gcx.read().await.cmdline.experimental).await; + let experimental = gcx.read().await.cmdline.experimental; + let (integrations_map, integration_yaml_errors) = crate::integrations::running_integrations::load_integrations(gcx.clone(), "".to_string(), experimental).await; if post.meta.chat_mode == ChatMode::CONFIGURE { // links.push(Link { From d0c24d82da368200003624c8a7e42f2079cb0b3b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Fri, 13 Dec 2024 19:42:37 +0100 Subject: [PATCH 173/185] fix: get project dirs does not need .write() to the global context --- src/files_correction.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/files_correction.rs b/src/files_correction.rs index 15e76d3d3..5e6a26bee 100644 --- a/src/files_correction.rs +++ b/src/files_correction.rs @@ -268,9 +268,9 @@ pub async fn correct_to_nearest_dir_path( } pub async fn get_project_dirs(gcx: Arc>) -> Vec { - let gcx_locked = gcx.write().await; - let workspace_folders = gcx_locked.documents_state.workspace_folders.lock().unwrap(); - workspace_folders.iter().cloned().collect::>() + let workspace_folders = gcx.read().await.documents_state.workspace_folders.clone(); + let workspace_folders_locked = workspace_folders.lock().unwrap(); + workspace_folders_locked.iter().cloned().collect::>() } pub async fn get_active_project_path(gcx: Arc>) -> Option { From ffc0b6525f0590f7b6f6a7991e62e6a203f888ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Tue, 10 Dec 2024 23:00:18 +0100 Subject: [PATCH 174/185] fix: tool tree check if path is inside project workspaces --- src/tools/tool_tree.rs | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/tools/tool_tree.rs b/src/tools/tool_tree.rs index 871414969..374d01a37 100644 --- a/src/tools/tool_tree.rs +++ b/src/tools/tool_tree.rs @@ -54,10 +54,17 @@ impl Tool for ToolTree { return Err("Cannot execute tree() because 'path' provided refers to a file.".to_string()); } + let project_dirs = get_project_dirs(gcx.clone()).await; let candidate = return_one_candidate_or_a_good_error( - gcx.clone(), &path, &dir_candidates, &get_project_dirs(gcx.clone()).await, true + gcx.clone(), &path, &dir_candidates, &project_dirs, true ).await?; - let true_path = PathBuf::from(candidate); + let true_path = crate::files_correction::to_pathbuf_normalize(&candidate); + + let is_within_project_dirs = project_dirs.iter().any(|p| true_path.starts_with(&p)); + if !is_within_project_dirs && !gcx.read().await.cmdline.inside_container { + return Err(format!("Cannot execute tree(), '{path}' is not within the project directories.")); + } + let paths_in_dir = ls_files(&true_path, true).unwrap_or(vec![]); construct_tree_out_of_flat_list_of_paths(&paths_in_dir) }, From a9047a9d31ea041ce2a5ee87d35dd7eab9ca622e Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Mon, 16 Dec 2024 18:35:58 +0100 Subject: 
[PATCH 175/185] 10.6 that has gpt-4o in FREE --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index b84f116c6..15d40ec48 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,7 +6,7 @@ lto = true [package] name = "refact-lsp" -version = "0.10.5" +version = "0.10.6" edition = "2021" build = "build.rs" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From 65b948e8a8108c798d1339839272d0603dcb02f5 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Mon, 16 Dec 2024 18:36:03 +0100 Subject: [PATCH 176/185] a warning --- src/tools/tool_tree.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/tools/tool_tree.rs b/src/tools/tool_tree.rs index 374d01a37..07b15cf5f 100644 --- a/src/tools/tool_tree.rs +++ b/src/tools/tool_tree.rs @@ -1,6 +1,5 @@ use std::sync::Arc; use std::collections::HashMap; -use std::path::PathBuf; use serde_json::Value; use tracing::warn; use async_trait::async_trait; @@ -24,7 +23,7 @@ fn preformat_path(path: &String) -> String { #[async_trait] impl Tool for ToolTree { fn as_any(&self) -> &dyn std::any::Any { self } - + async fn tool_execute( &mut self, ccx: Arc>, From 336d3df96ccdcbba86cd762f0e7d9ee79ad34e6e Mon Sep 17 00:00:00 2001 From: Dimitry Ageev Date: Tue, 17 Dec 2024 09:51:22 +0100 Subject: [PATCH 177/185] Chrome improvements 16.12.24 (#501) * wait_for command, fix logs filter * press key extended --- src/integrations/integr_chrome.rs | 134 +++++++++++++++++++----------- 1 file changed, 85 insertions(+), 49 deletions(-) diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index f1e602686..45f131f4c 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -22,6 +22,7 @@ use chrono::DateTime; use std::path::PathBuf; use headless_chrome::{Browser, Element, LaunchOptions, Tab as HeadlessTab}; use headless_chrome::browser::tab::point::Point; +use headless_chrome::browser::tab::ModifierKey; use headless_chrome::protocol::cdp::Page; use headless_chrome::protocol::cdp::Emulation; use headless_chrome::protocol::cdp::types::Event; @@ -265,11 +266,12 @@ impl Tool for ToolChrome { "screenshot ", "html ", "reload ", - "press_key_at ", + "press_key [,...]", "type_text_at ", "tab_log ", "eval ", "styles ", + "wait_for <1-5>", "click_at_element ", ]; if self.supports_clicks { @@ -280,6 +282,7 @@ impl Tool for ToolChrome { let description = format!( "One or several commands separated by newline. \ The is an integer, for example 10, for you to identify the tab later. \ + Most of web pages are dynamic. If you see that it's still loading try again with wait_for command. 
\ Supported commands:\n{}", supported_commands.join("\n")); ToolDesc { name: "chrome".to_string(), @@ -602,10 +605,11 @@ enum Command { ClickAtPoint(ClickAtPointArgs), ClickAtElement(TabElementArgs), TypeTextAt(TypeTextAtArgs), - PressKeyAt(PressKeyAtArgs), + PressKey(PressKeyArgs), TabLog(TabArgs), Eval(EvalArgs), Styles(StylesArgs), + WaitFor(WaitForArgs), } async fn chrome_command_exec( @@ -814,7 +818,7 @@ async fn chrome_command_exec( }; tool_log.push(log); }, - Command::PressKeyAt(args) => { + Command::PressKey(args) => { let tab = { let mut chrome_session_locked = chrome_session.lock().await; let chrome_session = chrome_session_locked.as_any_mut().downcast_mut::().ok_or("Failed to downcast to ChromeSession")?; @@ -823,17 +827,17 @@ async fn chrome_command_exec( let log = { let tab_lock = tab.lock().await; match { - tab_lock.headless_tab.press_key(args.key.to_string().as_str()).map_err(|e| e.to_string())?; + tab_lock.headless_tab.press_key_with_modifiers( + args.key.as_str(), args.key_modifiers.as_deref()) + .map_err(|e| e.to_string())?; tab_lock.headless_tab.wait_until_navigated().map_err(|e| e.to_string())?; - // TODO: sometimes page isn't ready for next step - sleep(Duration::from_secs(1)).await; Ok::<(), String>(()) } { Ok(_) => { - format!("press `{}` at {}", args.key, tab_lock.state_string()) + format!("press_key at {}", tab_lock.state_string()) }, Err(e) => { - format!("press `{}` failed at {}: {}", args.key, tab_lock.state_string(), e.to_string()) + format!("press_key failed at {}: {}", tab_lock.state_string(), e.to_string()) }, } }; @@ -847,14 +851,20 @@ async fn chrome_command_exec( }; let tab_log = { let tab_lock = tab.lock().await; - // NOTE: we're waiting for log to be collected for 3 seconds - sleep(Duration::from_secs(3)).await; let mut tab_log_lock = tab_lock.tab_log.lock().unwrap(); let tab_log = tab_log_lock.join("\n"); tab_log_lock.clear(); tab_log }; - let filter = CmdlineOutputFilter::default(); + // let filter = CmdlineOutputFilter::default(); + let filter = CmdlineOutputFilter { + limit_lines: 100, + limit_chars: 10000, + valuable_top_or_bottom: "top".to_string(), + grep: "".to_string(), + grep_context_lines: 0, + remove_from_output: "".to_string(), + }; let filtered_log = output_mini_postprocessing(&filter, tab_log.as_str()); tool_log.push(filtered_log.clone()); }, @@ -915,6 +925,22 @@ async fn chrome_command_exec( }; tool_log.push(log); }, + Command::WaitFor(args) => { + let tab = { + let mut chrome_session_locked = chrome_session.lock().await; + let chrome_session = chrome_session_locked.as_any_mut().downcast_mut::().ok_or("Failed to downcast to ChromeSession")?; + session_get_tab_arc(chrome_session, &args.tab_id).await? 
+ }; + let log = { + let tab_lock = tab.lock().await; + if args.seconds < 1.0 && args.seconds > 5.0 { + return Err(format!("wait_for at {} failed: `seconds` should be integer in interval [1, 5]", tab_lock.state_string())) + } + sleep(Duration::from_secs(3)).await; + format!("wait_for {} seconds at {} successful.", args.seconds, tab_lock.state_string()) + }; + tool_log.push(log); + }, } Ok((tool_log, multimodal_els)) @@ -949,32 +975,10 @@ struct TypeTextAtArgs { tab_id: String, } -#[derive(Clone, Debug)] -enum Key { - ENTER, - ESC, - PAGEUP, - PAGEDOWN, - HOME, - END, -} - -impl std::fmt::Display for Key { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - match self { - Key::ENTER => write!(f, "Enter"), - Key::ESC => write!(f, "Escape"), - Key::PAGEUP => write!(f, "PageUp"), - Key::PAGEDOWN => write!(f, "PageDown"), - Key::HOME => write!(f, "Home"), - Key::END => write!(f, "End"), - } - } -} - #[derive(Debug)] -struct PressKeyAtArgs { - key: Key, +struct PressKeyArgs { + key: String, + key_modifiers: Option>, tab_id: String, } @@ -997,6 +1001,12 @@ struct StylesArgs { property_filter: String, } +#[derive(Debug)] +struct WaitForArgs { + tab_id: String, + seconds: f64, +} + fn parse_single_command(command: &String) -> Result { let args = shell_words::split(&command).map_err(|e| e.to_string())?; if args.is_empty() { @@ -1130,23 +1140,35 @@ fn parse_single_command(command: &String) -> Result { } } }, - "press_key_at" => { + "press_key" => { match parsed_args.as_slice() { - [tab_id, key_str] => { - let key = match key_str.to_lowercase().as_str() { - "enter" => Key::ENTER, - "esc" => Key::ESC, - "pageup" => Key::PAGEUP, - "pagedown" => Key::PAGEDOWN, - "home" => Key::HOME, - "end" => Key::END, - _ => return Err(format!("Unknown key: {}", key_str)), - }; - Ok(Command::PressKeyAt(PressKeyAtArgs { - key, + [tab_id, key] => { + Ok(Command::PressKey(PressKeyArgs { + key: key.clone(), + key_modifiers: None, tab_id: tab_id.clone(), })) }, + [tab_id, key, key_modifiers] => { + let modifiers: Result, String> = key_modifiers.split(',') + .map(|modifier_str| match modifier_str.trim() { + "Alt" => Ok(ModifierKey::Alt), + "Ctrl" => Ok(ModifierKey::Ctrl), + "Meta" => Ok(ModifierKey::Meta), + "Shift" => Ok(ModifierKey::Shift), + _ => Err(format!("Unknown key modifier: {}", modifier_str)), + }) + .collect(); + + match modifiers { + Ok(modifiers) => Ok(Command::PressKey(PressKeyArgs { + key: key.clone(), + key_modifiers: Some(modifiers), + tab_id: tab_id.clone(), + })), + Err(e) => Err(e), + } + }, _ => { Err("Missing one or several arguments `tab_id`, `key`".to_string()) } @@ -1191,6 +1213,20 @@ fn parse_single_command(command: &String) -> Result { } } }, + "wait_for" => { + match parsed_args.as_slice() { + [tab_id, seconds_str] => { + let seconds = seconds_str.parse::().map_err(|e| format!("Failed to parse seconds: {}", e))?; + Ok(Command::WaitFor(WaitForArgs { + seconds: seconds.clone(), + tab_id: tab_id.clone(), + })) + }, + _ => { + Err("Missing one or several arguments `tab_id`, `seconds`.".to_string()) + } + } + }, _ => Err(format!("Unknown command: {:?}.", command_name)), } } From 06d07be099ead677d649cbd7e6b0b84e640c6f44 Mon Sep 17 00:00:00 2001 From: mitya Date: Wed, 18 Dec 2024 13:04:19 +0100 Subject: [PATCH 178/185] env for gh token --- src/integrations/integr_github.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/integrations/integr_github.rs b/src/integrations/integr_github.rs index a0ab4e861..4dd2006b9 100644 --- a/src/integrations/integr_github.rs +++ 
b/src/integrations/integr_github.rs @@ -93,7 +93,8 @@ impl Tool for ToolGithub { let output = Command::new(gh_binary_path) .args(&command_args) .current_dir(&project_dir) - .env("gh_token", &self.settings_github.gh_token) + .env("GH_TOKEN", &self.settings_github.gh_token) + .env("GITHUB_TOKEN", &self.settings_github.gh_token) .output() .await .map_err(|e| e.to_string())?; From 837c381df36a110dc6c90c6db328fe9e4d5e9a10 Mon Sep 17 00:00:00 2001 From: V4LER11 Date: Tue, 17 Dec 2024 13:32:58 +0000 Subject: [PATCH 179/185] added xai models --- src/known_models.rs | 44 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 43 insertions(+), 1 deletion(-) diff --git a/src/known_models.rs b/src/known_models.rs index f9a781a13..8eb38506a 100644 --- a/src/known_models.rs +++ b/src/known_models.rs @@ -453,6 +453,39 @@ pub const KNOWN_MODELS: &str = r####" "cerebras-llama3.1-70b" ] }, + "grok-beta": { + "n_ctx": 128000, + "supports_tools": true, + "supports_multimodality": false, + "supports_scratchpads": { + "PASSTHROUGH": {} + } + }, + "grok-vision-beta": { + "n_ctx": 8192, + "supports_tools": false, + "supports_multimodality": true, + "supports_scratchpads": { + "PASSTHROUGH": {} + } + }, + "grok-2-vision-1212": { + "n_ctx": 32000, + "supports_tools": true, + "supports_multimodality": true, + "supports_scratchpads": { + "PASSTHROUGH": {} + } + }, + "grok-2-1212": { + "n_ctx": 128000, + "supports_tools": true, + "supports_multimodality": false, + "supports_scratchpads": { + "PASSTHROUGH": {} + } + }, + "qwen2.5/coder/0.5b/instruct": { "n_ctx": 8192, "supports_tools": false, @@ -512,7 +545,16 @@ pub const KNOWN_MODELS: &str = r####" "claude-3-5-sonnet-20240620": "Xenova/claude-tokenizer", "claude-3-5-sonnet-20241022": "Xenova/claude-tokenizer", "groq-llama-3.1-8b": "Xenova/Meta-Llama-3.1-Tokenizer", - "cerebras-llama3.1-8b": "Xenova/Meta-Llama-3.1-Tokenizer" + "cerebras-llama3.1-8b": "Xenova/Meta-Llama-3.1-Tokenizer", + + "grok-beta": "Xenova/grok-1-tokenizer", + "grok-vision-beta": "Xenova/grok-1-tokenizer", + "grok-2-vision-1212": "Xenova/grok-1-tokenizer", + "grok-2-1212": "Xenova/grok-1-tokenizer" } } "####; + +// XAI WARNING: tokenizer is non-precise as there's no publicly available tokenizer for these models +// XAI says that for exact same model different tokenizers could be used +// therefore, using tokenizer for grok-1 which may or may not provide proximate enough results From 285484d9214847a90c15a8d1ea8072ea62078c11 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Humberto=20Yusta=20G=C3=B3mez?= Date: Mon, 16 Dec 2024 23:15:35 +0100 Subject: [PATCH 180/185] improve defaults and new container defaults --- src/integrations/integr_mysql.rs | 19 ++++++++++++------- src/integrations/integr_postgres.rs | 21 +++++++++++++-------- src/integrations/yaml_schema.rs | 2 ++ 3 files changed, 27 insertions(+), 15 deletions(-) diff --git a/src/integrations/integr_mysql.rs b/src/integrations/integr_mysql.rs index 912e46328..9b87875cc 100644 --- a/src/integrations/integr_mysql.rs +++ b/src/integrations/integr_mysql.rs @@ -173,14 +173,17 @@ fields: host: f_type: string_long f_desc: "Connect to this host, for example 127.0.0.1 or docker container name." - f_placeholder: marketing_db_container + f_default: "127.0.0.1" port: f_type: string_short f_desc: "Which port to use." 
- f_default: "5432" + f_default: "3306" user: f_type: string_short - f_placeholder: john_doe + f_placeholder: "$MYSQL_USER" + smartlinks: + - sl_label: "Open variables.yaml" + sl_goto: "EDITOR:variables.yaml" password: f_type: string_short f_default: "$MYSQL_PASSWORD" @@ -189,7 +192,7 @@ fields: sl_goto: "EDITOR:passwords.yaml" database: f_type: string_short - f_placeholder: marketing_db + f_placeholder: "mysql" mysql_binary_path: f_type: string_long f_desc: "If it can't find a path to `mysql` you can provide it here, leave blank if not sure." @@ -222,9 +225,11 @@ docker: new_container_default: image: "mysql:8.4" environment: - MYSQL_DATABASE: db_name - MYSQL_USER: $MYSQL_USER - MYSQL_PASSWORD: $MYSQL_PASSWORD + MYSQL_DATABASE: "$MYSQL_DB" + MYSQL_USER: "$MYSQL_USER" + MYSQL_PASSWORD: "$MYSQL_PASSWORD" + ports: + - "3306:3306" smartlinks: - sl_label: "Add Database Container" sl_chat: diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index d84e8700d..1680a0bce 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -172,23 +172,26 @@ fields: host: f_type: string_long f_desc: "Connect to this host, for example 127.0.0.1 or docker container name." - f_placeholder: marketing_db_container + f_default: "127.0.0.1" port: f_type: string_short f_desc: "Which port to use." f_default: "5432" user: f_type: string_short - f_placeholder: john_doe + f_placeholder: "$POSTGRES_USER" + smartlinks: + - sl_label: "Open variables.yaml" + sl_goto: "EDITOR:variables.yaml" password: f_type: string_short f_default: "$POSTGRES_PASSWORD" smartlinks: - - sl_label: "Open passwords.yaml" - sl_goto: "EDITOR:passwords.yaml" + - sl_label: "Open variables.yaml" + sl_goto: "EDITOR:variables.yaml" database: f_type: string_short - f_placeholder: marketing_db + f_placeholder: "postgres" psql_binary_path: f_type: string_long f_desc: "If it can't find a path to `psql` you can provide it here, leave blank if not sure." 
@@ -218,11 +221,13 @@ docker: filter_label: "" filter_image: "postgres" new_container_default: - image: "postgres:13" + image: "postgres:latest" environment: - POSTGRES_DB: "marketing_db" - POSTGRES_USER: "john_doe" + POSTGRES_DB: "$POSTGRES_DB" + POSTGRES_USER: "$POSTGRES_USER" POSTGRES_PASSWORD: "$POSTGRES_PASSWORD" + ports: + - "5432:5432" smartlinks: - sl_label: "Add Database Container" sl_chat: diff --git a/src/integrations/yaml_schema.rs b/src/integrations/yaml_schema.rs index 9a5d13a72..1ab72905a 100644 --- a/src/integrations/yaml_schema.rs +++ b/src/integrations/yaml_schema.rs @@ -8,6 +8,8 @@ pub struct DockerService { pub image: String, #[serde(default)] pub environment: IndexMap, + #[serde(default)] + pub ports: Vec, } #[derive(Serialize, Deserialize, Debug, Default)] From beac40c33b90539ef4e8cfa8b745f62ec4929c7e Mon Sep 17 00:00:00 2001 From: Kirill Starkov Date: Thu, 19 Dec 2024 18:04:57 +0800 Subject: [PATCH 181/185] link with static openssl --- .github/workflows/build.yml | 4 ++-- .github/workflows/release.yml | 5 ++--- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3f3c35918..210b37466 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -117,8 +117,8 @@ jobs: - name: Build and test artifact if: matrix.cross == null && !startsWith(matrix.os, 'windows') run: | - cargo test --release --target ${{ matrix.target }} || exit 1 - cargo build --release --target ${{ matrix.target }} || exit 1 + OPENSSL_STATIC=1 cargo test --release --target ${{ matrix.target }} || exit 1 + OPENSSL_STATIC=1 cargo build --release --target ${{ matrix.target }} || exit 1 mkdir python_binding_and_cmdline/refact/bin cp ./target/${{ matrix.target }}/release/refact-lsp${{ matrix.ext }} python_binding_and_cmdline/refact/bin cp ./target/${{ matrix.target }}/release/refact-lsp${{ matrix.ext }} . diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6e1f2798d..95c53e284 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -6,7 +6,6 @@ on: tags: - '*' - env: CARGO_INCREMENTAL: 0 CARGO_NET_RETRY: 10 @@ -110,8 +109,8 @@ jobs: - name: Build and test artifact if: matrix.cross == null && !startsWith(matrix.os, 'windows') run: | - cargo test --release --target ${{ matrix.target }} || exit 1 - cargo build --release --target ${{ matrix.target }} || exit 1 + OPENSSL_STATIC=1 cargo test --release --target ${{ matrix.target }} || exit 1 + OPENSSL_STATIC=1 cargo build --release --target ${{ matrix.target }} || exit 1 mkdir python_binding_and_cmdline/refact/bin cp ./target/${{ matrix.target }}/release/refact-lsp${{ matrix.ext }} python_binding_and_cmdline/refact/bin cp ./target/${{ matrix.target }}/release/refact-lsp${{ matrix.ext }} . 
From 48f2c0096921aabb7fd493fa76b19dae046f3be3 Mon Sep 17 00:00:00 2001 From: V4LER11 Date: Fri, 20 Dec 2024 03:34:36 +0000 Subject: [PATCH 182/185] use rust_embed instead (#513) --- Cargo.toml | 1 + build.rs | 46 -------------------------- src/http/routers/v1/v1_integrations.rs | 19 ++++++----- 3 files changed, 11 insertions(+), 55 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 15d40ec48..761402348 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -110,3 +110,4 @@ resvg = "0.44.0" async-tar = "0.5.0" git2 = "0.19.0" process-wrap = { version = "8.0.2", features = ["tokio1"] } +rust-embed = "8.5.0" diff --git a/build.rs b/build.rs index bd1ac2696..4d883eac5 100644 --- a/build.rs +++ b/build.rs @@ -1,50 +1,4 @@ -use std::env; -use std::fs::{self, read_dir, File}; -use std::io::Write; - fn main() -> shadow_rs::SdResult<()> { - let assets_dir = "assets/integrations"; - let out_dir = env::var("OUT_DIR").unwrap(); - let output_file_path = format!("{}/available_icons.rs", out_dir); - let mut output_file = File::create(&output_file_path).expect("Failed to create output file"); - - writeln!( - output_file, - "use std::collections::HashMap;\n\npub fn get_available_icons() -> HashMap<&'static str, &'static [u8]> {{\n let mut icons = HashMap::new();" - ) - .expect("Failed to write to output file"); - - for entry in read_dir(assets_dir).expect("Failed to read assets directory") { - let entry = entry.expect("Failed to read directory entry"); - let path = entry.path(); - - if path.extension().and_then(|ext| ext.to_str()) == Some("png") { - let image_data = fs::read(&path).expect("Failed to read image file"); - let file_stem = path - .file_stem() - .and_then(|stem| stem.to_str()) - .expect("Failed to get file stem"); - - let constant_name = format!("{}_ICON_BYTES", file_stem.to_uppercase()); - - writeln!( - output_file, - " pub const {}: &[u8] = &{:?};", - constant_name, image_data - ) - .expect("Failed to write constant definition"); - - writeln!( - output_file, - " icons.insert(\"{}.png\", {});", - file_stem, constant_name - ) - .expect("Failed to write HashMap entry"); - } - } - - writeln!(output_file, " icons\n}}").expect("Failed to write closing brace"); - shadow_rs::new() } diff --git a/src/http/routers/v1/v1_integrations.rs b/src/http/routers/v1/v1_integrations.rs index 14863f172..6cb1839c3 100644 --- a/src/http/routers/v1/v1_integrations.rs +++ b/src/http/routers/v1/v1_integrations.rs @@ -1,3 +1,4 @@ +use std::borrow::Cow; use std::path::PathBuf; use std::sync::Arc; use axum::Extension; @@ -8,12 +9,12 @@ use tokio::sync::RwLock as ARwLock; use regex::Regex; use axum::extract::Path; use axum::extract::Query; - - +use rust_embed::RustEmbed; use crate::custom_error::ScratchError; use crate::global_context::GlobalContext; use crate::integrations::setting_up_integrations::split_path_into_project_and_integration; + pub async fn handle_v1_integrations( Extension(gcx): Extension>>, _: hyper::body::Bytes, @@ -130,29 +131,29 @@ pub async fn handle_v1_integration_save( .unwrap()) } -mod generated { - include!(concat!(env!("OUT_DIR"), "/available_icons.rs")); -} +#[derive(RustEmbed)] +#[folder = "assets/integrations/"] +struct IntegrationAsset; pub async fn handle_v1_integration_icon( Path(icon_name): Path, ) -> axum::response::Result, ScratchError> { - let icons = generated::get_available_icons(); let sanitized_icon_name = icon_name .split('/').last() .map(|x| x.replace("_TEMPLATE", "")).ok_or( ScratchError::new(StatusCode::BAD_REQUEST, "invalid file name".to_string()) )?; - if let Some(icon_bytes) = 
icons.get(sanitized_icon_name.as_str()) { + if let Some(icon_bytes) = IntegrationAsset::get(&sanitized_icon_name).map(|file| file.data) { return Ok(Response::builder() .status(StatusCode::OK) .header("Content-Type", "image/png") .header("Content-Disposition", "inline") - .body(Body::from(*icon_bytes)) + .body(Body::from(icon_bytes)) .unwrap()); } - Err(ScratchError::new(StatusCode::NOT_FOUND, "icon not found".to_string())) + Err(ScratchError::new(StatusCode::NOT_FOUND, format!("icon {} not found", sanitized_icon_name))) } + // Define a structure to match query parameters #[derive(Deserialize)] pub struct HTTPIntegrationDeleteQueryParams { From 5db2b5f9972d64d6b922c0b7ec7ff7c5226e72b7 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Fri, 20 Dec 2024 05:42:03 +0100 Subject: [PATCH 183/185] get rid of integrations.yaml code --- python_binding_and_cmdline/refact/cli_main.py | 2 +- src/global_context.rs | 4 +- src/http/routers/v1/links.rs | 4 +- src/integrations/integr_pdb.rs | 2 +- src/integrations/setting_up_integrations.rs | 6 +- src/tools/tools_description.rs | 77 +------------------ 6 files changed, 9 insertions(+), 86 deletions(-) diff --git a/python_binding_and_cmdline/refact/cli_main.py b/python_binding_and_cmdline/refact/cli_main.py index 43a5adb55..720a4a1a2 100644 --- a/python_binding_and_cmdline/refact/cli_main.py +++ b/python_binding_and_cmdline/refact/cli_main.py @@ -55,7 +55,7 @@ async def welcome_message(settings: cli_settings.CmdlineArgs, tip: str): text = f""" ~/.cache/refact/cli.yaml -- set up this program ~/.cache/refact/bring-your-own-key.yaml -- set up models you want to use -~/.cache/refact/integrations.yaml -- set up github, jira, make, gdb, and other tools, including which actions require confirmation +~/.cache/refact/integrations.d/* -- set up github, jira, make, gdb, and other tools, including which actions require confirmation ~/.cache/refact/privacy.yaml -- which files should never leave your computer Project: {settings.project_path} To exit, type 'exit' or Ctrl+D. {tip}. diff --git a/src/global_context.rs b/src/global_context.rs index 51abff304..67666e4c6 100644 --- a/src/global_context.rs +++ b/src/global_context.rs @@ -82,7 +82,7 @@ pub struct CommandLine { #[structopt(long, short="w", default_value="", help="Workspace folder to find all the files. 
An LSP or HTTP request can override this later.")] pub workspace_folder: String, - #[structopt(long, help="create manually bring-your-own-key.yaml, integrations.yaml, customization.yaml and privacy.yaml and EXIT")] + #[structopt(long, help="create manually bring-your-own-key.yaml, customization.yaml and privacy.yaml and exit.")] pub only_create_yaml_configs: bool, #[structopt(long, help="Print combined customization settings from both system defaults and customization.yaml.")] pub print_customization: bool, @@ -93,7 +93,7 @@ pub struct CommandLine { #[structopt(long, help="A way to tell this binary it can run more tools without confirmation.")] pub inside_container: bool, - #[structopt(long, default_value="", help="Specify an alternative integrations.yaml, this also disables the global integrations.d")] + #[structopt(long, default_value="", help="Specify the integrations.yaml, this also disables the global integrations.d")] pub integrations_yaml: String, } diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 8ab1f6d45..99dbd82ff 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs @@ -95,7 +95,7 @@ pub async fn handle_v1_links( // GIT uncommitted if post.meta.chat_mode == ChatMode::AGENT { let commits = get_commit_information_from_current_changes(gcx.clone()).await; - + let mut project_changes = Vec::new(); for commit in &commits { project_changes.push(format!( @@ -133,7 +133,7 @@ pub async fn handle_v1_links( } } - // Failures above + // Failures in integrations if post.meta.chat_mode == ChatMode::AGENT { for failed_integr_name in failed_integration_names_after_last_user_message(&post.messages) { links.push(Link { diff --git a/src/integrations/integr_pdb.rs b/src/integrations/integr_pdb.rs index 9b99e1580..f3fef15a8 100644 --- a/src/integrations/integr_pdb.rs +++ b/src/integrations/integr_pdb.rs @@ -231,7 +231,7 @@ async fn start_pdb_session( timeout_seconds: u64, ) -> Result { if !(command_args.len() >= 3 && command_args[0] == "python" && command_args[1] == "-m" && command_args[2] == "pdb") { - return Err("Usage: python -m pdb ... To use a different Python environment, set `python_path` in `integrations.yaml`.".to_string()); + return Err("Usage: python -m pdb ... 
To use a different Python environment, use a path to python binary.".to_string()); } command_args.remove(0); diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index fa806b918..939993e7d 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -269,10 +269,6 @@ pub fn read_integrations_d( pub async fn get_integrations_yaml_path(gcx: Arc>) -> String { let gcx_locked = gcx.read().await; let r = gcx_locked.cmdline.integrations_yaml.clone(); - // if r.is_empty() { - // let config_dir = gcx_locked.config_dir.join("integrations.yaml"); - // return config_dir.to_string_lossy().to_string(); - // } r } @@ -433,7 +429,7 @@ pub async fn integration_config_get( pub async fn integration_config_save( integr_config_path: &String, integr_values: &serde_json::Value, -) -> Result<(), String> { +) -> Result<(), String> { let config_path = crate::files_correction::canonical_path(integr_config_path); let (integr_name, _project_path) = crate::integrations::setting_up_integrations::split_path_into_project_and_integration(&config_path) .map_err(|e| format!("Failed to split path: {}", e))?; diff --git a/src/tools/tools_description.rs b/src/tools/tools_description.rs index a2976d591..c62167fd7 100644 --- a/src/tools/tools_description.rs +++ b/src/tools/tools_description.rs @@ -1,4 +1,3 @@ -use std::path::PathBuf; use std::collections::HashMap; use std::sync::Arc; use indexmap::IndexMap; @@ -102,50 +101,19 @@ pub trait Tool: Send + Sync { } } -pub async fn read_integrations_yaml(config_dir: &PathBuf) -> Result { - let yaml_path = config_dir.join("integrations.yaml"); - - let file = std::fs::File::open(&yaml_path).map_err( - |e| format!("Failed to open {}: {}", yaml_path.display(), e) - )?; - - let reader = std::io::BufReader::new(file); - serde_yaml::from_reader(reader).map_err( - |e| { - let location = e.location().map(|loc| format!(" at line {}, column {}", loc.line(), loc.column())).unwrap_or_default(); - format!("Failed to parse {}{}: {}", yaml_path.display(), location, e) - } - ) -} - pub async fn tools_merged_and_filtered( gcx: Arc>, _supports_clicks: bool, // XXX ) -> Result>>>, String> { - let (ast_on, vecdb_on, allow_experimental, config_dir) = { + let (ast_on, vecdb_on, allow_experimental) = { let gcx_locked = gcx.read().await; #[cfg(feature="vecdb")] let vecdb_on = gcx_locked.vec_db.lock().await.is_some(); #[cfg(not(feature="vecdb"))] let vecdb_on = false; - (gcx_locked.ast_service.is_some(), vecdb_on, gcx_locked.cmdline.experimental, gcx_locked.config_dir.clone()) - }; - - let integrations_value = match read_integrations_yaml(&config_dir).await { - Ok(value) => value, - Err(e) => return Err(format!("Problem in integrations.yaml: {}", e)), + (gcx_locked.ast_service.is_some(), vecdb_on, gcx_locked.cmdline.experimental) }; - if let Some(env_vars) = integrations_value.get("environment_variables") { - if let Some(env_vars_map) = env_vars.as_mapping() { - for (key, value) in env_vars_map { - if let (Some(key_str), Some(value_str)) = (key.as_str(), value.as_str()) { - std::env::set_var(key_str, value_str); - } - } - } - } - let mut tools_all = IndexMap::from([ ("definition".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_ast_definition::ToolAstDefinition{}) as Box))), ("references".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_ast_reference::ToolAstReference{}) as Box))), @@ -164,47 +132,6 @@ pub async fn tools_merged_and_filtered( #[cfg(feature="vecdb")] 
tools_all.insert("knowledge".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_knowledge::ToolGetKnowledge{}) as Box))); - if allow_experimental { - // The approach here: if it exists, it shouldn't have syntax errors, note the "?" - // if let Some(gh_config) = integrations_value.get("github") { - // tools_all.insert("github".to_string(), Arc::new(AMutex::new(Box::new(ToolGithub::new_from_yaml(gh_config)?) as Box))); - // } - // if let Some(gl_config) = integrations_value.get("gitlab") { - // tools_all.insert("gitlab".to_string(), Arc::new(AMutex::new(Box::new(ToolGitlab::new_from_yaml(gl_config)?) as Box))); - // } - // if let Some(pdb_config) = integrations_value.get("pdb") { - // tools_all.insert("pdb".to_string(), Arc::new(AMutex::new(Box::new(ToolPdb::new_from_yaml(pdb_config)?) as Box))); - // } - // if let Some(chrome_config) = integrations_value.get("chrome") { - // tools_all.insert("chrome".to_string(), Arc::new(AMutex::new(Box::new(ToolChrome::new_from_yaml(chrome_config, supports_clicks)?) as Box))); - // } - // if let Some(postgres_config) = integrations_value.get("postgres") { - // tools_all.insert("postgres".to_string(), Arc::new(AMutex::new(Box::new(ToolPostgres::new_from_yaml(postgres_config)?) as Box))); - // } - // if let Some(docker_config) = integrations_value.get("docker") { - // tools_all.insert("docker".to_string(), Arc::new(AMutex::new(Box::new(ToolDocker::new_from_yaml(docker_config)?) as Box))); - // } - // if let Ok(caps) = crate::global_context::try_load_caps_quickly_if_not_present(gcx.clone(), 0).await { - // let have_thinking_model = { - // let caps_locked = caps.read().unwrap(); - // caps_locked.running_models.contains(&"o1-mini".to_string()) - // }; - // if have_thinking_model { - // tools_all.insert("deep_thinking".to_string(), Arc::new(AMutex::new(Box::new(crate::tools::tool_deep_thinking::ToolDeepThinking{}) as Box))); - // } - // } - } - - // if let Some(cmdline) = integrations_value.get("cmdline") { - // let cmdline_tools = crate::tools::tool_cmdline::cmdline_tool_from_yaml_value(cmdline, false)?; - // tools_all.extend(cmdline_tools); - // } - - // if let Some(cmdline) = integrations_value.get("cmdline_services") { - // let cmdline_tools = crate::tools::tool_cmdline::cmdline_tool_from_yaml_value(cmdline, true)?; - // tools_all.extend(cmdline_tools); - // } - let integrations = crate::integrations::running_integrations::load_integration_tools( gcx.clone(), "".to_string(), From c5fb75a3d92be139230af958568e04e02ff367f4 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Fri, 20 Dec 2024 09:23:23 +0100 Subject: [PATCH 184/185] fix all schemas, restore schema test --- src/http/routers/v1/links.rs | 24 ++++++++------- src/integrations/docker/integr_docker.rs | 13 +++++---- src/integrations/integr_chrome.rs | 20 +++++-------- src/integrations/integr_cmdline.rs | 7 +++-- src/integrations/integr_github.rs | 28 +++++++++++------- src/integrations/integr_gitlab.rs | 29 +++++++++++-------- src/integrations/integr_mysql.rs | 21 +++++++------- src/integrations/integr_pdb.rs | 13 +++++---- src/integrations/integr_postgres.rs | 15 +++++----- src/integrations/project_summary_chat.rs | 1 - src/integrations/setting_up_integrations.rs | 1 - src/integrations/yaml_schema.rs | 13 ++++++++- src/yaml_configs/customization_compiled_in.rs | 2 +- 13 files changed, 106 insertions(+), 81 deletions(-) diff --git a/src/http/routers/v1/links.rs b/src/http/routers/v1/links.rs index 99dbd82ff..35d0fe914 100644 --- a/src/http/routers/v1/links.rs +++ b/src/http/routers/v1/links.rs 
@@ -71,15 +71,6 @@ pub async fn handle_v1_links( let (integrations_map, integration_yaml_errors) = crate::integrations::running_integrations::load_integrations(gcx.clone(), "".to_string(), experimental).await; if post.meta.chat_mode == ChatMode::CONFIGURE { - // links.push(Link { - // link_action: LinkAction::Goto, - // link_text: "Return".to_string(), - // link_goto: Some("SETTINGS:DEFAULT".to_string()), - // link_summary_path: None, - // link_tooltip: format!(""), - // link_payload: None, - // }); - if !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { links.push(Link { link_action: LinkAction::PatchAll, @@ -92,6 +83,19 @@ pub async fn handle_v1_links( } } + if post.meta.chat_mode == ChatMode::PROJECT_SUMMARY { + if !get_tickets_from_messages(gcx.clone(), &post.messages).await.is_empty() { + links.push(Link { + link_action: LinkAction::PatchAll, + link_text: "Save and return".to_string(), + link_goto: Some("NEWCHAT".to_string()), + link_summary_path: None, + link_tooltip: format!(""), + link_payload: None, + }); + } + } + // GIT uncommitted if post.meta.chat_mode == ChatMode::AGENT { let commits = get_commit_information_from_current_changes(gcx.clone()).await; @@ -110,7 +114,7 @@ pub async fn handle_v1_links( project_changes.truncate(4); project_changes.push("...".to_string()); } - uncommited_changes_warning = format!("You have uncommitted changes:\n```\n{}\n```\nāš ļø You might have a problem rolling back agent's changes.", project_changes.join("\n")); + uncommited_changes_warning = format!("You have uncommitted changes:\n```\n{}\n```\nIt's fine, but you might have a problem rolling back agent's changes.", project_changes.join("\n")); } if false { diff --git a/src/integrations/docker/integr_docker.rs b/src/integrations/docker/integr_docker.rs index 6f35ebeaf..fe77f9574 100644 --- a/src/integrations/docker/integr_docker.rs +++ b/src/integrations/docker/integr_docker.rs @@ -312,6 +312,12 @@ fields: f_desc: "Path to the SSH identity file to connect to remote Docker." f_label: "SSH Identity File" f_extra: true +available: + on_your_laptop_possible: true + when_isolated_possible: false +confirmation: + ask_user_default: [] + deny_default: ["docker* rm *", "docker* rmi *", "docker* pause *", "docker* stop *", "docker* kill *"] smartlinks: - sl_label: "Test" sl_chat: @@ -319,10 +325,5 @@ smartlinks: content: | šŸ”§ The docker tool should be visible now. To test the tool, list the running containers, briefly describe the containers and express satisfaction and relief if it works, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. -available: - on_your_laptop_possible: true - when_isolated_possible: false -confirmation: - ask_user_default: [] - deny_default: ["docker* rm *", "docker* rmi *", "docker* pause *", "docker* stop *", "docker* kill *"] + sl_enable_only_with_tool: true "#; diff --git a/src/integrations/integr_chrome.rs b/src/integrations/integr_chrome.rs index 45f131f4c..48ceaf3b8 100644 --- a/src/integrations/integr_chrome.rs +++ b/src/integrations/integr_chrome.rs @@ -1281,22 +1281,24 @@ fields: f_type: string_short f_desc: "Scale factor of the browser window in tablet mode." f_extra: true +available: + on_your_laptop_possible: true + when_isolated_possible: true +confirmation: + ask_user_default: [] + deny_default: [] smartlinks: - sl_label: "Test" sl_chat: - role: "user" content: | šŸ”§ The chrome tool should be visible now. 
To test the tool, navigate to a website like https://example.com/ take a screenshot, and express happiness if it works. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. + sl_enable_only_with_tool: true - sl_label: "Help me install Chrome for Testing" sl_chat: - role: "user" content: | - šŸ”§ Help user to install Chrome for Testing using npm, once that done rewrite the current config file %CURRENT_CONFIG% to use it. - - sl_label: "Help me connect regular Chrome via ws:// protocol" - sl_chat: - - role: "user" - content: | - šŸ”§ Help user to connect regular Chrome via ws:// protocol, rewrite the current config file %CURRENT_CONFIG% to use it. The `chrome_path` accepts the "ws://..." notation. + šŸ”§ Help the user to install Chrome for Testing using npm, once that is done rewrite the current config file %CURRENT_CONFIG% to use chrome_path to use it. docker: filter_label: "" filter_image: "standalone-chrome" @@ -1315,10 +1317,4 @@ docker: - role: "user" content: | šŸ”§ Your job is to modify chrome config in the current file to connect through websockets to the container, use docker tool to inspect the container if needed. Current config file: %CURRENT_CONFIG%. -available: - on_your_laptop_possible: true - when_isolated_possible: true -confirmation: - ask_user_default: [] - deny_default: [] "#; diff --git a/src/integrations/integr_cmdline.rs b/src/integrations/integr_cmdline.rs index dbf0b75c6..849194d89 100644 --- a/src/integrations/integr_cmdline.rs +++ b/src/integrations/integr_cmdline.rs @@ -321,13 +321,14 @@ description: | available: on_your_laptop_possible: true when_isolated_possible: true +confirmation: + ask_user_default: ["*"] + deny_default: ["sudo*"] smartlinks: - sl_label: "Auto Configure" sl_chat: - role: "user" content: | šŸ”§ Test the tool that corresponds to the current config file. If it works express happiness, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. -confirmation: - ask_user_default: ["*"] - deny_default: ["sudo*"] + sl_enable_only_with_tool: true "#; diff --git a/src/integrations/integr_github.rs b/src/integrations/integr_github.rs index 4dd2006b9..2e10c456b 100644 --- a/src/integrations/integr_github.rs +++ b/src/integrations/integr_github.rs @@ -179,18 +179,29 @@ fn parse_command_args(args: &HashMap) -> Result, Stri const GITHUB_INTEGRATION_SCHEMA: &str = r#" fields: + gh_token: + f_type: string_long + f_desc: "GitHub Personal Access Token, you can create one at https://github.com/settings/tokens. If you don't want to send your key to the AI model that helps you to configure the agent, put it into secrets.yaml and write $MY_SECRET_VARIABLE in this field." + f_placeholder: "ghp_xxxxxxxxxxxxxxxx" + f_label: "Token" + smartlinks: + - sl_label: "Open secrets.yaml" + sl_goto: "EDITOR:secrets.yaml" gh_binary_path: f_type: string_long - f_desc: "Path to the GitHub CLI binary. Leave empty to use the default 'gh' command." + f_desc: "Path to the GitHub CLI binary. Leave empty if you have it in PATH." f_placeholder: "/usr/local/bin/gh" f_label: "GH Binary Path" - gh_token: - f_type: string_long - f_desc: "GitHub Personal Access Token for authentication." - f_placeholder: "ghp_xxxxxxxxxxxxxxxx" + f_extra: true description: | The GitHub integration allows interaction with GitHub repositories using the GitHub CLI. It provides functionality for various GitHub operations such as creating issues, pull requests, and more. 
+available: + on_your_laptop_possible: true + when_isolated_possible: true +confirmation: + ask_user_default: ["gh * delete *", "gh * close *"] + deny_default: ["gh auth token *"] smartlinks: - sl_label: "Test" sl_chat: @@ -198,10 +209,5 @@ smartlinks: content: | šŸ”§ The `github` (`gh`) tool should be visible now. To test the tool, list opened pull requests for `smallcloudai/refact-lsp`, and briefly describe them and express happiness, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. -available: - on_your_laptop_possible: true - when_isolated_possible: true -confirmation: - ask_user_default: ["gh * delete *", "gh * close *"] - deny_default: ["gh auth token *"] + sl_enable_only_with_tool: true "#; diff --git a/src/integrations/integr_gitlab.rs b/src/integrations/integr_gitlab.rs index 0b4c720f9..5c2e0ead7 100644 --- a/src/integrations/integr_gitlab.rs +++ b/src/integrations/integr_gitlab.rs @@ -56,7 +56,7 @@ impl IntegrationTrait for ToolGitlab { fn integr_common(&self) -> IntegrationCommon { self.common.clone() } - + fn integr_upgrade_to_tool(&self, _integr_name: &str) -> Box { Box::new(ToolGitlab { common: self.common.clone(), @@ -176,18 +176,28 @@ fn parse_command_args(args: &HashMap) -> Result, Stri const GITLAB_INTEGRATION_SCHEMA: &str = r#" fields: + glab_token: + f_type: string_long + f_desc: "GitLab Personal Access Token, you can get one at https://gitlab.com/-/user_settings/personal_access_tokens. If you don't want to send your key to the AI model that helps you to configure the agent, put it into secrets.yaml and write $MY_SECRET_VARIABLE in this field." + f_placeholder: "glpat_xxxxxxxxxxxxxxxx" + smartlinks: + - sl_label: "Open secrets.yaml" + sl_goto: "EDITOR:secrets.yaml" glab_binary_path: f_type: string_long f_desc: "Path to the GitLab CLI binary. Leave empty to use the default 'glab' command." f_placeholder: "/usr/local/bin/glab" - f_label: "GLAB Binary Path" - glab_token: - f_type: string_long - f_desc: "GitLab Personal Access Token for authentication." - f_placeholder: "glpat_xxxxxxxxxxxxxxxx" + f_label: "glab binary path" + f_extra: true description: | The GitLab integration allows interaction with GitLab repositories using the GitLab CLI. It provides functionality for various GitLab operations such as creating issues, merge requests, and more. +available: + on_your_laptop_possible: true + when_isolated_possible: true +confirmation: + ask_user_default: ["glab * delete *"] + deny_default: ["glab auth token *"] smartlinks: - sl_label: "Test" sl_chat: @@ -195,10 +205,5 @@ smartlinks: content: | šŸ”§ The `gitlab` (`glab`) tool should be visible now. To test the tool, list opened merge requests for your GitLab project, and briefly describe them and express happiness, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. 
-available: - on_your_laptop_possible: true - when_isolated_possible: true -confirmation: - ask_user_default: ["glab * delete *"] - deny_default: ["glab auth token *"] + sl_enable_only_with_tool: true "#; diff --git a/src/integrations/integr_mysql.rs b/src/integrations/integr_mysql.rs index 9b87875cc..e6bd35d90 100644 --- a/src/integrations/integr_mysql.rs +++ b/src/integrations/integr_mysql.rs @@ -188,8 +188,8 @@ fields: f_type: string_short f_default: "$MYSQL_PASSWORD" smartlinks: - - sl_label: "Open passwords.yaml" - sl_goto: "EDITOR:passwords.yaml" + - sl_label: "Open secrets.yaml" + sl_goto: "EDITOR:secrets.yaml" database: f_type: string_short f_placeholder: "mysql" @@ -204,6 +204,12 @@ description: | On this page you can also see Docker containers with Mysql servers. You can ask model to create a new container with a new database for you, or ask model to configure the tool to use an existing container with existing database. +available: + on_your_laptop_possible: true + when_isolated_possible: true +confirmation: + ask_user_default: [] + deny_default: [] smartlinks: - sl_label: "Test" sl_chat: @@ -212,13 +218,14 @@ smartlinks: šŸ”§ The mysql tool should be visible now. To test the tool, list the tables available, briefly describe the tables and express happiness, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. The current config file is %CURRENT_CONFIG%. - - sl_label: "Look at the project, fill in automatically" + sl_enable_only_with_tool: true + - sl_label: "Look at the project, help me set it up" sl_chat: - role: "user" content: | šŸ”§ Your goal is to set up mysql client. Look at the project, especially files like "docker-compose.yaml" or ".env". Call tree() to see what files the project has. After that is completed, go through the usual plan in the system prompt. - The current config file is %CURRENT_CONFIG%. + Keep MYSQL_HOST MYSQL_PORT MYSQL_USER MYSQL_PASSWORD MYSQL_DATABASE in variables.yaml so they can be reused by command line tools later. docker: filter_label: "" filter_image: "mysql" @@ -242,10 +249,4 @@ docker: - role: "user" content: | šŸ”§ Your job is to modify mysql connection config in the current file to match the variables from the container, use docker tool to inspect the container if needed. Current config file: %CURRENT_CONFIG%. -available: - on_your_laptop_possible: true - when_isolated_possible: true -confirmation: - ask_user_default: [] - deny_default: [] "#; diff --git a/src/integrations/integr_pdb.rs b/src/integrations/integr_pdb.rs index f3fef15a8..b6b7ac577 100644 --- a/src/integrations/integr_pdb.rs +++ b/src/integrations/integr_pdb.rs @@ -377,6 +377,12 @@ fields: description: | The PDB integration allows interaction with the Python debugger for inspecting variables and exploring program execution. It provides functionality for debugging Python scripts and applications. +available: + on_your_laptop_possible: true + when_isolated_possible: true +confirmation: + ask_user_default: [] + deny_default: [] smartlinks: - sl_label: "Test" sl_chat: @@ -384,10 +390,5 @@ smartlinks: content: | šŸ”§ The pdb tool should be visible now. To test the tool, start a debugging session for a simple Python script, set a breakpoint, and inspect some variables. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. 
-available: - on_your_laptop_possible: true - when_isolated_possible: true -confirmation: - ask_user_default: [] - deny_default: [] + sl_enable_only_with_tool: true "#; diff --git a/src/integrations/integr_postgres.rs b/src/integrations/integr_postgres.rs index 1680a0bce..6d60af615 100644 --- a/src/integrations/integr_postgres.rs +++ b/src/integrations/integr_postgres.rs @@ -191,7 +191,7 @@ fields: sl_goto: "EDITOR:variables.yaml" database: f_type: string_short - f_placeholder: "postgres" + f_placeholder: "my_marketing_db" psql_binary_path: f_type: string_long f_desc: "If it can't find a path to `psql` you can provide it here, leave blank if not sure." @@ -203,6 +203,12 @@ description: | On this page you can also see Docker containers with Postgres servers. You can ask model to create a new container with a new database for you, or ask model to configure the tool to use an existing container with existing database. +available: + on_your_laptop_possible: true + when_isolated_possible: true +confirmation: + ask_user_default: ["psql*[!SELECT]*"] + deny_default: [] smartlinks: - sl_label: "Test" sl_chat: @@ -210,6 +216,7 @@ smartlinks: content: | šŸ”§ The postgres tool should be visible now. To test the tool, list the tables available, briefly describe the tables and express happiness, and change nothing. If it doesn't work or the tool isn't available, go through the usual plan in the system prompt. + sl_enable_only_with_tool: true - sl_label: "Look at the project, fill in automatically" sl_chat: - role: "user" @@ -240,12 +247,6 @@ docker: - role: "user" content: | šŸ”§ Your job is to modify postgres connection config in the current file to match the variables from the container, use docker tool to inspect the container if needed. Current config file: %CURRENT_CONFIG%. 
-available: - on_your_laptop_possible: true - when_isolated_possible: true -confirmation: - ask_user_default: ["psql*[!SELECT]*"] - deny_default: [] "#; // To think about: PGPASSWORD PGHOST PGUSER PGPORT PGDATABASE maybe tell the model to set that in variables.yaml as well diff --git a/src/integrations/project_summary_chat.rs b/src/integrations/project_summary_chat.rs index b9392712f..4fdedfdc4 100644 --- a/src/integrations/project_summary_chat.rs +++ b/src/integrations/project_summary_chat.rs @@ -15,7 +15,6 @@ pub async fn mix_project_summary_messages( ) { assert!(messages[0].role != "system"); // we are here to add this, can't already exist - // XXX should be a better way to load the prompt let custom: crate::yaml_configs::customization_loader::CustomizationYaml = match crate::yaml_configs::customization_loader::load_customization(gcx.clone(), true).await { Ok(x) => x, diff --git a/src/integrations/setting_up_integrations.rs b/src/integrations/setting_up_integrations.rs index 939993e7d..0daa88011 100644 --- a/src/integrations/setting_up_integrations.rs +++ b/src/integrations/setting_up_integrations.rs @@ -474,7 +474,6 @@ mod tests { use std::io::Write; #[tokio::test] - #[ignore] async fn test_integration_schemas() { let integrations = crate::integrations::integrations_list(true); for name in integrations { diff --git a/src/integrations/yaml_schema.rs b/src/integrations/yaml_schema.rs index 1ab72905a..315472531 100644 --- a/src/integrations/yaml_schema.rs +++ b/src/integrations/yaml_schema.rs @@ -8,7 +8,7 @@ pub struct DockerService { pub image: String, #[serde(default)] pub environment: IndexMap, - #[serde(default)] + #[serde(default, skip_serializing_if="is_empty")] pub ports: Vec, } @@ -36,6 +36,8 @@ pub struct ISmartLink { pub sl_chat: Vec, #[serde(default, skip_serializing_if="is_default")] pub sl_goto: String, + #[serde(default, skip_serializing_if="is_default")] + pub sl_enable_only_with_tool: bool, } #[derive(Serialize, Deserialize, Debug, Default)] @@ -55,12 +57,21 @@ pub struct ISchemaDocker { pub smartlinks_for_each_container: Vec, } +#[derive(Serialize, Deserialize, Debug, Default)] +pub struct ISchemaConfirmation { + #[serde(default)] + pub ask_user_default: Vec, + #[serde(default)] + pub deny_default: Vec, +} + #[derive(Serialize, Deserialize, Debug, Default)] pub struct ISchema { pub fields: IndexMap, #[serde(default, skip_serializing_if="is_default")] pub description: String, pub available: ISchemaAvailable, + pub confirmation: ISchemaConfirmation, #[serde(default, skip_serializing_if="is_empty")] pub smartlinks: Vec, #[serde(skip_serializing_if = "Option::is_none")] diff --git a/src/yaml_configs/customization_compiled_in.rs b/src/yaml_configs/customization_compiled_in.rs index b61fd2203..7cca1e382 100644 --- a/src/yaml_configs/customization_compiled_in.rs +++ b/src/yaml_configs/customization_compiled_in.rs @@ -162,7 +162,7 @@ PROMPT_PROJECT_SUMMARY: | Most of those integrations are easy, you can just repeat the name. But two of those are special: cmdline_TEMPLATE and service_TEMPLATE. Those can integrate a blocking command line utility (such as cmake) and a blocking background command (such as hypercorn server that runs forever until you hit Ctrl+C), respectively. - Think of typical command line things that might be required to work on the project, how do you run the webserver, how do you compile it? + Think of typical command line things that might be required to work on the project, how do you run the webserver, how do you compile the project? 
For webserver to work you most likely need a service_* so it runs in the background and you can open and navigate web pages at the same time. Turn those things into recommendations, replace _TEMPLATE with lowercase name with underscores, don't overthink it, "cargo build" should become "cmdline_cargo_build", etc. If there's no web server detectable, skip it. From 6b8f9a014dc5523fcd9c9635cab0f2811b99e620 Mon Sep 17 00:00:00 2001 From: Oleg Klimov Date: Fri, 20 Dec 2024 09:44:59 +0100 Subject: [PATCH 185/185] a warning --- src/http/routers/v1/v1_integrations.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/http/routers/v1/v1_integrations.rs b/src/http/routers/v1/v1_integrations.rs index 6cb1839c3..d67469a61 100644 --- a/src/http/routers/v1/v1_integrations.rs +++ b/src/http/routers/v1/v1_integrations.rs @@ -1,4 +1,3 @@ -use std::borrow::Cow; use std::path::PathBuf; use std::sync::Arc; use axum::Extension;
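Taken together, the last two patches settle on the rust_embed pattern for serving integration icons. Below is a minimal standalone sketch of that pattern outside the axum handler; the folder attribute is taken from the diff, while the icon file name is hypothetical and the code assumes such a directory exists at compile time.

```rust
use rust_embed::RustEmbed;

// Folder attribute mirrors the handler above; this standalone lookup is
// illustrative only and is not the crate's actual endpoint code.
#[derive(RustEmbed)]
#[folder = "assets/integrations/"]
struct IntegrationAsset;

fn main() {
    // get() returns Option<rust_embed::EmbeddedFile>, whose `data` field is a
    // Cow<'static, [u8]>. The type never has to be named at the call site,
    // which is why the final patch can drop the `use std::borrow::Cow;` import.
    match IntegrationAsset::get("postgres.png") {
        Some(file) => println!("postgres.png: {} bytes embedded", file.data.len()),
        None => println!("postgres.png not found among embedded assets"),
    }
}
```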