diff --git a/ai_diffusion/custom_workflow.py b/ai_diffusion/custom_workflow.py
index 804f15a9c1..edb70b1278 100644
--- a/ai_diffusion/custom_workflow.py
+++ b/ai_diffusion/custom_workflow.py
@@ -438,14 +438,17 @@ def collect_parameters(self, layers: "LayerManager", bounds: Bounds):
         params = copy(self.params)
         for md in self.metadata:
             param = params.get(md.name)
-            assert param is not None, f"Parameter {md.name} not found"
             if md.kind is ParamKind.image_layer:
+                if param is None and len(layers.images) > 0:
+                    param = layers.images[0].id
                 layer = layers.find(QUuid(param))
                 if layer is None:
                     raise ValueError(f"Input layer for parameter {md.name} not found")
                 params[md.name] = layer.get_pixels(bounds)
             elif md.kind is ParamKind.mask_layer:
+                if param is None and len(layers.masks) > 0:
+                    param = layers.masks[0].id
                 layer = layers.find(QUuid(param))
                 if layer is None:
                     raise ValueError(f"Input layer for parameter {md.name} not found")
@@ -455,6 +458,9 @@ def collect_parameters(self, layers: "LayerManager", bounds: Bounds):
             if style is None:
                 raise ValueError(f"Style {param} not found")
             params[md.name] = style
+            elif param is None:
+                raise ValueError(f"Parameter {md.name} not found")
+
         return params
 
     def _handle_job_finished(self, job: Job):
diff --git a/ai_diffusion/model.py b/ai_diffusion/model.py
index 941e6aed47..573bb82f54 100644
--- a/ai_diffusion/model.py
+++ b/ai_diffusion/model.py
@@ -487,16 +487,18 @@ def show_preview(self, job_id: str, index: int, name_prefix="Preview"):
         job = self.jobs.find(job_id)
         assert job is not None, "Cannot show preview, invalid job id"
         name = f"[{name_prefix}] {trim_text(job.params.name, 77)}"
+        image = job.results[index]
+        bounds = job.params.bounds
+        if image.extent != bounds.extent:
+            image = Image.crop(image, Bounds(0, 0, *bounds.extent))
         if self._layer and self._layer.was_removed:
             self._layer = None  # layer was removed by user
         if self._layer is not None:
             self._layer.name = name
-            self._layer.write_pixels(job.results[index], job.params.bounds)
+            self._layer.write_pixels(image, bounds)
             self._layer.move_to_top()
         else:
-            self._layer = self.layers.create(
-                name, job.results[index], job.params.bounds, make_active=False
-            )
+            self._layer = self.layers.create(name, image, bounds, make_active=False)
         self._layer.is_locked = True
 
     def hide_preview(self):
@@ -504,6 +506,8 @@
             self._layer.hide()
 
     def apply_result(self, image: Image, params: JobParams, behavior: ApplyBehavior, prefix=""):
+        if image.extent != params.bounds.extent:
+            image = Image.crop(image, Bounds(0, 0, *params.bounds.extent))
         if len(params.regions) == 0:
             if behavior is ApplyBehavior.replace:
                 self.layers.update_layer_image(self.layers.active, image, params.bounds)
diff --git a/ai_diffusion/ui/custom_workflow.py b/ai_diffusion/ui/custom_workflow.py
index 85def2632b..4cd7f9931c 100644
--- a/ai_diffusion/ui/custom_workflow.py
+++ b/ai_diffusion/ui/custom_workflow.py
@@ -30,8 +30,11 @@ class LayerSelect(QComboBox):
 
     def __init__(self, filter: str | None = None, parent: QWidget | None = None):
         super().__init__(parent)
-        self.setContentsMargins(0, 0, 0, 0)
         self.filter = filter
+
+        self.setContentsMargins(0, 0, 0, 0)
+        self.setMinimumContentsLength(20)
+        self.setSizeAdjustPolicy(QComboBox.SizeAdjustPolicy.AdjustToMinimumContentsLength)
         self.currentIndexChanged.connect(lambda _: self.value_changed.emit())
         self._update()
 
@@ -257,6 +260,9 @@ class ChoiceParamWidget(QComboBox):
 
     def __init__(self, param: CustomParam, parent: QWidget | None = None):
        super().__init__(parent)
+        self.setMinimumContentsLength(20)
+        self.setSizeAdjustPolicy(QComboBox.SizeAdjustPolicy.AdjustToMinimumContentsLength)
+
         if param.choices:
             self.addItems(param.choices)
         self.currentIndexChanged.connect(lambda _: self.value_changed.emit())
diff --git a/tests/test_workflow.py b/tests/test_workflow.py
index 4820465beb..8717185923 100644
--- a/tests/test_workflow.py
+++ b/tests/test_workflow.py
@@ -180,7 +180,6 @@ def test_prepare_lora():
     files = FileLibrary(FileCollection(), FileCollection())
     fractal = files.loras.add(File.remote("x/FRACTAL.safetensors"))
     files.loras.set_meta(fractal, "lora_strength", 0.55)
-    files.loras.set_meta(fractal, "lora_triggers", "FRACTAL HEART")
     mop = files.loras.add(File.remote("MOTHER_OF_PEARL.safetensors"))
     files.loras.set_meta(mop, "lora_triggers", "crab")
 
@@ -200,7 +199,7 @@
         files=files,
         perf=default_perf,
     )
-    assert job.conditioning and job.conditioning.positive == "test baloon FRACTAL HEART space crab"
+    assert job.conditioning and job.conditioning.positive == "test baloon space crab"
     assert (
         job.models and LoraInput("PINK_UNICORNS.safetensors", 0.77) in job.models.loras