diff --git a/invokeai/backend/install/invokeai_configure.py b/invokeai/backend/install/invokeai_configure.py
index 5b713516beb..a0104bef25e 100755
--- a/invokeai/backend/install/invokeai_configure.py
+++ b/invokeai/backend/install/invokeai_configure.py
@@ -7,8 +7,6 @@
 # Coauthor: Kevin Turner http://github.com/keturn
 #
 import sys
-print("Loading Python libraries...\n",file=sys.stderr)
-
 import argparse
 import io
 import os
@@ -706,18 +704,6 @@ def write_opts(opts: Namespace, init_file: Path):
 def default_output_dir() -> Path:
     return config.root_path / "outputs"
 
-# # -------------------------------------
-# def default_embedding_dir() -> Path:
-#     return config.root_path / "embeddings"
-
-# # -------------------------------------
-# def default_lora_dir() -> Path:
-#     return config.root_path / "loras"
-
-# # -------------------------------------
-# def default_controlnet_dir() -> Path:
-#     return config.root_path / "controlnets"
-
 # -------------------------------------
 def write_default_options(program_opts: Namespace, initfile: Path):
     opt = default_startup_options(initfile)
diff --git a/invokeai/backend/install/model_install_backend.py b/invokeai/backend/install/model_install_backend.py
index f6cde2c90f5..1c2f4d2fc13 100644
--- a/invokeai/backend/install/model_install_backend.py
+++ b/invokeai/backend/install/model_install_backend.py
@@ -155,8 +155,6 @@ def default_model(self)->str:
     def install(self, selections: InstallSelections):
         job = 1
         jobs = len(selections.remove_models) + len(selections.install_models)
-#        if selections.scan_directory:
-#            jobs += 1
 
         # remove requested models
         for key in selections.remove_models:
@@ -218,7 +216,7 @@ def heuristic_install(self,
     # the model from being probed twice in the event that it has already been probed.
     def _install_path(self, path: Path, info: ModelProbeInfo=None)->Path:
         try:
-            logger.info(f'Probing {path}')
+            # logger.debug(f'Probing {path}')
             info = info or ModelProbe().heuristic_probe(path,self.prediction_helper)
             model_name = path.stem if info.format=='checkpoint' else path.name
             if self.mgr.model_exists(model_name, info.base_type, info.model_type):
diff --git a/invokeai/backend/model_management/model_manager.py b/invokeai/backend/model_management/model_manager.py
index 292b7061762..66206ac165a 100644
--- a/invokeai/backend/model_management/model_manager.py
+++ b/invokeai/backend/model_management/model_manager.py
@@ -714,9 +714,12 @@ def scan_models_directory(
                     if model_path.is_relative_to(self.app_config.root_path):
                         model_path = model_path.relative_to(self.app_config.root_path)
 
-                    model_config: ModelConfigBase = model_class.probe_config(str(model_path))
-                    self.models[model_key] = model_config
-                    new_models_found = True
+                    try:
+                        model_config: ModelConfigBase = model_class.probe_config(str(model_path))
+                        self.models[model_key] = model_config
+                        new_models_found = True
+                    except NotImplementedError as e:
+                        self.logger.warning(e)
 
         imported_models = self.autoimport()
 
@@ -737,10 +740,10 @@ def autoimport(self)->set[Path]:
                                  )
 
         installed = set()
-        
+        scanned_dirs = set()
+
         config = self.app_config
         known_paths = {(self.app_config.root_path / x['path']) for x in self.list_models()}
-        scanned_dirs = set()
 
         for autodir in [config.autoimport_dir,
                         config.lora_dir,
@@ -748,19 +751,25 @@ def autoimport(self)->set[Path]:
                         config.controlnet_dir]:
             if autodir is None:
                 continue
+
+            self.logger.info(f'Scanning {autodir} for models to import')
             autodir = self.app_config.root_path / autodir
             if not autodir.exists():
                 continue
-
+
+            items_scanned = 0
+            new_models_found = set()
+
             for root, dirs, files in os.walk(autodir):
+                items_scanned += len(dirs) + len(files)
                 for d in dirs:
                     path = Path(root) / d
                     if path in known_paths or path.parent in scanned_dirs:
                         scanned_dirs.add(path)
                         continue
                     if any([(path/x).exists() for x in {'config.json','model_index.json','learned_embeds.bin'}]):
-                        installed.update(installer.heuristic_install(path))
+                        new_models_found.update(installer.heuristic_install(path))
                     scanned_dirs.add(path)
 
                 for f in files:
@@ -768,7 +777,11 @@ def autoimport(self)->set[Path]:
                     if path in known_paths or path.parent in scanned_dirs:
                         continue
                     if path.suffix in {'.ckpt','.bin','.pth','.safetensors','.pt'}:
-                        installed.update(installer.heuristic_install(path))
+                        new_models_found.update(installer.heuristic_install(path))
+
+            self.logger.info(f'Scanned {items_scanned} files and directories, imported {len(new_models_found)} models')
+            installed.update(new_models_found)
+
         return installed
 
     def heuristic_import(self,
diff --git a/invokeai/backend/model_management/models/stable_diffusion.py b/invokeai/backend/model_management/models/stable_diffusion.py
index ee95e3a8499..a5d43c98a2c 100644
--- a/invokeai/backend/model_management/models/stable_diffusion.py
+++ b/invokeai/backend/model_management/models/stable_diffusion.py
@@ -69,7 +69,7 @@ def probe_config(cls, path: str, **kwargs):
                 in_channels = unet_config['in_channels']
             else:
-                raise Exception("Not supported stable diffusion diffusers format(possibly onnx?)")
+                raise NotImplementedError(f"{path} is not a supported stable diffusion diffusers format")
 
         else:
             raise NotImplementedError(f"Unknown stable diffusion 1.* format: {model_format}")
 
diff --git a/invokeai/frontend/install/model_install.py b/invokeai/frontend/install/model_install.py
index 04dabca5903..33ef1149128 100644
--- a/invokeai/frontend/install/model_install.py
+++ b/invokeai/frontend/install/model_install.py
@@ -316,31 +316,6 @@ def add_pipeline_widgets(self,
                 **kwargs,
             )
 
-        # label = "Directory to scan for models to automatically import ( autocompletes):"
-        # self.nextrely += 1
-        # widgets.update(
-        #     autoload_directory = self.add_widget_intelligent(
-        #         FileBox,
-        #         max_height=3,
-        #         name=label,
-        #         value=str(config.root_path / config.autoimport_dir) if config.autoimport_dir else None,
-        #         select_dir=True,
-        #         must_exist=True,
-        #         use_two_lines=False,
-        #         labelColor="DANGER",
-        #         begin_entry_at=len(label)+1,
-        #         scroll_exit=True,
-        #     )
-        # )
-        # widgets.update(
-        #     autoscan_on_startup = self.add_widget_intelligent(
-        #         npyscreen.Checkbox,
-        #         name="Scan and import from this directory each time InvokeAI starts",
-        #         value=config.autoimport_dir is not None,
-        #         relx=4,
-        #         scroll_exit=True,
-        #     )
-        # )
         return widgets
 
     def resize(self):
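
For reference, here is a minimal, self-contained sketch (not part of the change itself) of the scanning pattern the reworked `autoimport()` follows: walk each watched directory, skip paths that are already known or whose parent was already scanned, and report how many items were examined. `scan_for_models` and `pretend_install` are hypothetical names standing in for the method and for `ModelInstall.heuristic_install()`:

```python
# Standalone sketch of the autoimport scanning pattern; not InvokeAI code.
# `pretend_install` is a hypothetical stand-in for ModelInstall.heuristic_install().
import os
from pathlib import Path
from typing import Iterable, Set


def pretend_install(path: Path) -> Set[Path]:
    # A real installer would probe the model and register it here.
    return {path}


def scan_for_models(autodirs: Iterable[Path], known_paths: Set[Path]) -> Set[Path]:
    installed: Set[Path] = set()
    scanned_dirs: Set[Path] = set()

    for autodir in autodirs:
        if not autodir.exists():
            continue

        items_scanned = 0
        new_models_found: Set[Path] = set()

        for root, dirs, files in os.walk(autodir):
            items_scanned += len(dirs) + len(files)

            for d in dirs:
                path = Path(root) / d
                if path in known_paths or path.parent in scanned_dirs:
                    scanned_dirs.add(path)
                    continue
                # A diffusers-style model directory is recognized by one of these marker files.
                if any((path / x).exists() for x in {"config.json", "model_index.json", "learned_embeds.bin"}):
                    new_models_found.update(pretend_install(path))
                scanned_dirs.add(path)

            for f in files:
                path = Path(root) / f
                if path in known_paths or path.parent in scanned_dirs:
                    continue
                # Single-file checkpoints are recognized by suffix.
                if path.suffix in {".ckpt", ".bin", ".pth", ".safetensors", ".pt"}:
                    new_models_found.update(pretend_install(path))

        print(f"Scanned {items_scanned} files and directories, imported {len(new_models_found)} models")
        installed.update(new_models_found)

    return installed


if __name__ == "__main__":
    print(scan_for_models([Path("models/autoimport")], known_paths=set()))
```

Per-directory counters (`items_scanned`, `new_models_found`) rather than a single running set are what let the summary line report each scanned directory separately, mirroring the logging added in the diff.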