warn but do not crash when model scan finds random cruft in models directory
Lincoln Stein committed Jun 28, 2023
1 parent 72209d0 commit 79fc708
Showing 5 changed files with 23 additions and 51 deletions.
14 changes: 0 additions & 14 deletions invokeai/backend/install/invokeai_configure.py
@@ -7,8 +7,6 @@
# Coauthor: Kevin Turner http://github.com/keturn
#
import sys
print("Loading Python libraries...\n",file=sys.stderr)

import argparse
import io
import os
@@ -706,18 +704,6 @@ def write_opts(opts: Namespace, init_file: Path):
def default_output_dir() -> Path:
return config.root_path / "outputs"

# # -------------------------------------
# def default_embedding_dir() -> Path:
# return config.root_path / "embeddings"

# # -------------------------------------
# def default_lora_dir() -> Path:
# return config.root_path / "loras"

# # -------------------------------------
# def default_controlnet_dir() -> Path:
# return config.root_path / "controlnets"

# -------------------------------------
def write_default_options(program_opts: Namespace, initfile: Path):
opt = default_startup_options(initfile)
4 changes: 1 addition & 3 deletions invokeai/backend/install/model_install_backend.py
@@ -155,8 +155,6 @@ def default_model(self)->str:
def install(self, selections: InstallSelections):
job = 1
jobs = len(selections.remove_models) + len(selections.install_models)
# if selections.scan_directory:
# jobs += 1

# remove requested models
for key in selections.remove_models:
@@ -218,7 +216,7 @@ def heuristic_install(self,
# the model from being probed twice in the event that it has already been probed.
def _install_path(self, path: Path, info: ModelProbeInfo=None)->Path:
try:
logger.info(f'Probing {path}')
# logger.debug(f'Probing {path}')
info = info or ModelProbe().heuristic_probe(path,self.prediction_helper)
model_name = path.stem if info.format=='checkpoint' else path.name
if self.mgr.model_exists(model_name, info.base_type, info.model_type):
29 changes: 21 additions & 8 deletions invokeai/backend/model_management/model_manager.py
@@ -714,9 +714,12 @@ def scan_models_directory(

if model_path.is_relative_to(self.app_config.root_path):
model_path = model_path.relative_to(self.app_config.root_path)
model_config: ModelConfigBase = model_class.probe_config(str(model_path))
self.models[model_key] = model_config
new_models_found = True
try:
model_config: ModelConfigBase = model_class.probe_config(str(model_path))
self.models[model_key] = model_config
new_models_found = True
except NotImplementedError as e:
self.logger.warning(e)

imported_models = self.autoimport()

@@ -737,38 +740,48 @@ def autoimport(self)->set[Path]:
)

installed = set()

scanned_dirs = set()

config = self.app_config
known_paths = {(self.app_config.root_path / x['path']) for x in self.list_models()}
scanned_dirs = set()

for autodir in [config.autoimport_dir,
config.lora_dir,
config.embedding_dir,
config.controlnet_dir]:
if autodir is None:
continue

self.logger.info(f'Scanning {autodir} for models to import')

autodir = self.app_config.root_path / autodir
if not autodir.exists():
continue


items_scanned = 0
new_models_found = set()

for root, dirs, files in os.walk(autodir):
items_scanned += len(dirs) + len(files)
for d in dirs:
path = Path(root) / d
if path in known_paths or path.parent in scanned_dirs:
scanned_dirs.add(path)
continue
if any([(path/x).exists() for x in {'config.json','model_index.json','learned_embeds.bin'}]):
installed.update(installer.heuristic_install(path))
new_models_found.update(installer.heuristic_install(path))
scanned_dirs.add(path)

for f in files:
path = Path(root) / f
if path in known_paths or path.parent in scanned_dirs:
continue
if path.suffix in {'.ckpt','.bin','.pth','.safetensors','.pt'}:
installed.update(installer.heuristic_install(path))
new_models_found.update(installer.heuristic_install(path))

self.logger.info(f'Scanned {items_scanned} files and directories, imported {len(new_models_found)} models')
installed.update(new_models_found)

return installed

def heuristic_import(self,
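The substance of the commit lives in this file: probe_config failures during the directory scan are now caught as NotImplementedError and logged as warnings instead of aborting the scan, and autoimport reports how many items it scanned and how many new models it found. A minimal sketch of the warn-but-continue pattern, using hypothetical probe_model and scan_models_directory stand-ins rather than the real InvokeAI classes:

import logging
from pathlib import Path

logger = logging.getLogger(__name__)

def probe_model(path: Path) -> dict:
    # Hypothetical stand-in for model_class.probe_config(): refuse files it
    # does not recognize by raising NotImplementedError rather than crashing.
    if path.suffix not in {'.ckpt', '.bin', '.pth', '.safetensors', '.pt'}:
        raise NotImplementedError(f'{path} is not a recognized model format')
    return {'path': str(path), 'format': 'checkpoint'}

def scan_models_directory(models_dir: Path) -> dict:
    # Probe every file under models_dir; random cruft produces a warning,
    # not an aborted scan.
    found = {}
    for path in models_dir.rglob('*'):
        if not path.is_file():
            continue
        try:
            found[path.stem] = probe_model(path)
        except NotImplementedError as e:
            logger.warning(e)  # warn but do not crash, then keep scanning
    return found

With this shape, a stray text file or half-downloaded checkpoint shows up in the log as a warning while every valid model is still registered.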
2 changes: 1 addition & 1 deletion
@@ -69,7 +69,7 @@ def probe_config(cls, path: str, **kwargs):
in_channels = unet_config['in_channels']

else:
raise Exception("Not supported stable diffusion diffusers format(possibly onnx?)")
raise NotImplementedError(f"{path} is not a supported stable diffusion diffusers format")

else:
raise NotImplementedError(f"Unknown stable diffusion 1.* format: {model_format}")
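On the probing side, the generic Exception becomes a NotImplementedError that names the offending path, which is exactly what the scanner above catches. A rough sketch of that shape, assuming a simplified diffusers-style layout check (probe_diffusers_folder is an illustrative name, not the real ModelProbe API):

import json
from pathlib import Path

def probe_diffusers_folder(path: Path) -> dict:
    # Sketch: accept a minimal diffusers layout, refuse anything else politely.
    unet_config = path / 'unet' / 'config.json'
    if (path / 'model_index.json').exists() and unet_config.exists():
        with open(unet_config) as f:
            in_channels = json.load(f)['in_channels']
        return {'in_channels': in_channels}
    # e.g. an ONNX export or a stray folder: let the caller warn and move on
    raise NotImplementedError(f'{path} is not a supported stable diffusion diffusers format')

Raising NotImplementedError rather than a bare Exception keeps genuine bugs loud: only the "format not supported" case is downgraded to a warning.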
25 changes: 0 additions & 25 deletions invokeai/frontend/install/model_install.py
@@ -316,31 +316,6 @@ def add_pipeline_widgets(self,
**kwargs,
)

# label = "Directory to scan for models to automatically import (<tab> autocompletes):"
# self.nextrely += 1
# widgets.update(
# autoload_directory = self.add_widget_intelligent(
# FileBox,
# max_height=3,
# name=label,
# value=str(config.root_path / config.autoimport_dir) if config.autoimport_dir else None,
# select_dir=True,
# must_exist=True,
# use_two_lines=False,
# labelColor="DANGER",
# begin_entry_at=len(label)+1,
# scroll_exit=True,
# )
# )
# widgets.update(
# autoscan_on_startup = self.add_widget_intelligent(
# npyscreen.Checkbox,
# name="Scan and import from this directory each time InvokeAI starts",
# value=config.autoimport_dir is not None,
# relx=4,
# scroll_exit=True,
# )
# )
return widgets

def resize(self):
