diff --git a/dbgpt/app/base.py b/dbgpt/app/base.py
index 54c6f5122..622440666 100644
--- a/dbgpt/app/base.py
+++ b/dbgpt/app/base.py
@@ -192,7 +192,8 @@ def _create_mysql_database(db_name: str, db_url: str, try_to_create_db: bool = F
         with engine_no_db.connect() as conn:
             conn.execute(
                 DDL(
-                    f"CREATE DATABASE {db_name} CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci"
+                    f"CREATE DATABASE {db_name} CHARACTER SET utf8mb4 COLLATE "
+                    f"utf8mb4_unicode_ci"
                 )
             )
             logger.info(f"Database {db_name} successfully created")
@@ -218,26 +219,31 @@ class WebServerParameters(BaseParameters):
     controller_addr: Optional[str] = field(
         default=None,
         metadata={
-            "help": "The Model controller address to connect. If None, read model controller address from environment key `MODEL_SERVER`."
+            "help": "The Model controller address to connect. If None, read model "
+            "controller address from environment key `MODEL_SERVER`."
         },
     )
     model_name: str = field(
         default=None,
         metadata={
-            "help": "The default model name to use. If None, read model name from environment key `LLM_MODEL`.",
+            "help": "The default model name to use. If None, read model name from "
+            "environment key `LLM_MODEL`.",
             "tags": "fixed",
         },
     )
     share: Optional[bool] = field(
         default=False,
         metadata={
-            "help": "Whether to create a publicly shareable link for the interface. Creates an SSH tunnel to make your UI accessible from anywhere. "
+            "help": "Whether to create a publicly shareable link for the interface. "
+            "Creates an SSH tunnel to make your UI accessible from anywhere. "
         },
     )
     remote_embedding: Optional[bool] = field(
         default=False,
         metadata={
-            "help": "Whether to enable remote embedding models. If it is True, you need to start a embedding model through `dbgpt start worker --worker_type text2vec --model_name xxx --model_path xxx`"
+            "help": "Whether to enable remote embedding models. If it is True, you need"
+            " to start a embedding model through `dbgpt start worker --worker_type "
+            "text2vec --model_name xxx --model_path xxx`"
         },
     )
     log_level: Optional[str] = field(
@@ -286,3 +292,10 @@ class WebServerParameters(BaseParameters):
             "help": "The directories to search awel files, split by `,`",
         },
     )
+    default_thread_pool_size: Optional[int] = field(
+        default=None,
+        metadata={
+            "help": "The default thread pool size, If None, "
+            "use default config of python thread pool",
+        },
+    )
diff --git a/dbgpt/app/component_configs.py b/dbgpt/app/component_configs.py
index fefe57a16..d68c6de2f 100644
--- a/dbgpt/app/component_configs.py
+++ b/dbgpt/app/component_configs.py
@@ -25,7 +25,9 @@ def initialize_components(
     from dbgpt.model.cluster.controller.controller import controller
 
     # Register global default executor factory first
-    system_app.register(DefaultExecutorFactory)
+    system_app.register(
+        DefaultExecutorFactory, max_workers=param.default_thread_pool_size
+    )
     system_app.register_instance(controller)
 
     from dbgpt.serve.agent.hub.controller import module_agent
diff --git a/dbgpt/app/scene/base_chat.py b/dbgpt/app/scene/base_chat.py
index e2647fb4c..f48feceb0 100644
--- a/dbgpt/app/scene/base_chat.py
+++ b/dbgpt/app/scene/base_chat.py
@@ -313,8 +313,9 @@ async def stream_call(self):
             )
             ### store current conversation
             span.end(metadata={"error": str(e)})
-            # self.memory.append(self.current_message)
-            self.current_message.end_current_round()
+            await blocking_func_to_async(
+                self._executor, self.current_message.end_current_round
+            )
 
     async def nostream_call(self):
         payload = await self._build_model_request()
@@ -381,8 +382,9 @@ async def nostream_call(self):
             )
             span.end(metadata={"error": str(e)})
             ### store dialogue
-            # self.memory.append(self.current_message)
-            self.current_message.end_current_round()
+            await blocking_func_to_async(
+                self._executor, self.current_message.end_current_round
+            )
         return self.current_ai_response()
 
     async def get_llm_response(self):