diff --git a/kohya_gui/lora_gui.py b/kohya_gui/lora_gui.py
index 68eb6780..f1713edf 100644
--- a/kohya_gui/lora_gui.py
+++ b/kohya_gui/lora_gui.py
@@ -287,6 +287,9 @@ def save_configuration(
     split_qkv,
     train_t5xxl,
     cpu_offload_checkpointing,
+    blocks_to_swap,
+    single_blocks_to_swap,
+    double_blocks_to_swap,
     img_attn_dim,
     img_mlp_dim,
     img_mod_dim,
@@ -553,6 +556,9 @@ def open_configuration(
     split_qkv,
     train_t5xxl,
     cpu_offload_checkpointing,
+    blocks_to_swap,
+    single_blocks_to_swap,
+    double_blocks_to_swap,
     img_attn_dim,
     img_mlp_dim,
     img_mod_dim,
@@ -853,6 +859,9 @@ def train_model(
     split_qkv,
     train_t5xxl,
     cpu_offload_checkpointing,
+    blocks_to_swap,
+    single_blocks_to_swap,
+    double_blocks_to_swap,
     img_attn_dim,
     img_mlp_dim,
     img_mod_dim,
@@ -1558,6 +1567,9 @@ def train_model(
         "mem_eff_save": mem_eff_save if flux1_checkbox else None,
         "apply_t5_attn_mask": apply_t5_attn_mask if flux1_checkbox else None,
         "cpu_offload_checkpointing": cpu_offload_checkpointing if flux1_checkbox else None,
+        "blocks_to_swap": blocks_to_swap if flux1_checkbox else None,
+        "single_blocks_to_swap": single_blocks_to_swap if flux1_checkbox else None,
+        "double_blocks_to_swap": double_blocks_to_swap if flux1_checkbox else None,
     }

     # Given dictionary `config_toml_data`
@@ -2745,6 +2757,9 @@ def update_LoRA_settings(
     flux1_training.split_qkv,
     flux1_training.train_t5xxl,
     flux1_training.cpu_offload_checkpointing,
+    flux1_training.blocks_to_swap,
+    flux1_training.single_blocks_to_swap,
+    flux1_training.double_blocks_to_swap,
     flux1_training.img_attn_dim,
     flux1_training.img_mlp_dim,
     flux1_training.img_mod_dim,
diff --git a/venv3/Scripts/python.exe b/venv3/Scripts/python.exe
new file mode 100644
index 00000000..8655d9d5
Binary files /dev/null and b/venv3/Scripts/python.exe differ
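
Note for reviewers: the new keys follow the same pattern as the existing FLUX.1 options in the diff above. They are only given a value when flux1_checkbox is set and stay None otherwise, with None entries expected to be dropped before the config TOML is written out. The snippet below is a minimal Python sketch of that filtering step; the save_config_toml helper, the file name, and the use of the `toml` package are assumptions for illustration, not the repository's actual code.

    import toml  # assumption: the config is serialized with the `toml` package

    def save_config_toml(config_toml_data: dict, path: str) -> None:
        # Drop unset entries (e.g. the swap options when the FLUX.1 checkbox
        # is off), then write the remaining keys to a TOML file.
        # Hypothetical helper for illustration only.
        cleaned = {k: v for k, v in config_toml_data.items() if v is not None}
        with open(path, "w", encoding="utf-8") as f:
            toml.dump(cleaned, f)

    # Illustrative values only, as they might look with the FLUX.1 checkbox enabled.
    config_toml_data = {
        "cpu_offload_checkpointing": True,
        "blocks_to_swap": 10,           # illustrative value, not a recommended default
        "single_blocks_to_swap": None,  # dropped by the filter above
        "double_blocks_to_swap": None,  # dropped by the filter above
    }
    save_config_toml(config_toml_data, "config_lora.toml")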