From c20a10d7fd12a97cec563fb1fc6bd49204c36ba3 Mon Sep 17 00:00:00 2001
From: bmaltais
Date: Fri, 6 Jan 2023 07:13:12 -0500
Subject: [PATCH] Emergency fix for dreambooth_ui no longer working, sorry

- Add LoRA network merge to the GUI. Run `pip install -U -r requirements.txt` after pulling this new release.
---
 README.md                 |   5 +-
 dreambooth_gui.py         |   4 +-
 finetune_gui.py           |   4 +-
 library/merge_lora_gui.py | 145 ++++++++++++++++++++++++++++++++++++++
 lora_gui.py               |   6 +-
 5 files changed, 157 insertions(+), 7 deletions(-)
 create mode 100644 library/merge_lora_gui.py

diff --git a/README.md b/README.md
index b674e38..c64f3b0 100644
--- a/README.md
+++ b/README.md
@@ -30,7 +30,10 @@ Once you have created the LoRA network you can generate images via auto1111 by i
 
 ## Change history
 
-* 2023/01/05 (v19.2):
+* 2023/01/06 (v19.3.1):
+  - Emergency fix for dreambooth_ui no longer working, sorry
+  - Add LoRA network merge to the GUI. Run `pip install -U -r requirements.txt` after pulling this new release.
+* 2023/01/05 (v19.3):
   - Add support for `--clip_skip` option
   - Add missing `detect_face_rotate.py` to tools folder
   - Add `gui.cmd` for easy start of GUI
diff --git a/dreambooth_gui.py b/dreambooth_gui.py
index 01822d2..fdff172 100644
--- a/dreambooth_gui.py
+++ b/dreambooth_gui.py
@@ -429,8 +429,8 @@ def train_model(
         run_cmd += f' --resume={resume}'
     if not float(prior_loss_weight) == 1.0:
         run_cmd += f' --prior_loss_weight={prior_loss_weight}'
-    if clip_skip > 1:
-        run_cmd += f' --clip_skip={int(clip_skip)}'
+    if int(clip_skip) > 1:
+        run_cmd += f' --clip_skip={str(clip_skip)}'
     print(run_cmd)
 
     # Run the command
diff --git a/finetune_gui.py b/finetune_gui.py
index c7e279a..5bb3eeb 100644
--- a/finetune_gui.py
+++ b/finetune_gui.py
@@ -363,8 +363,8 @@ def train_model(
     run_cmd += f' --save_precision={save_precision}'
     if not save_model_as == 'same as source model':
         run_cmd += f' --save_model_as={save_model_as}'
-    if clip_skip > 1:
-        run_cmd += f' --clip_skip={int(clip_skip)}'
+    if int(clip_skip) > 1:
+        run_cmd += f' --clip_skip={str(clip_skip)}'
     print(run_cmd)
 
     # Run the command
diff --git a/library/merge_lora_gui.py b/library/merge_lora_gui.py
new file mode 100644
index 0000000..381d411
--- /dev/null
+++ b/library/merge_lora_gui.py
@@ -0,0 +1,145 @@
+import gradio as gr
+from easygui import msgbox
+import subprocess
+import os
+from .common_gui import get_folder_path, get_any_file_path
+
+folder_symbol = '\U0001f4c2'  # 📂
+refresh_symbol = '\U0001f504'  # 🔄
+save_style_symbol = '\U0001f4be'  # 💾
+document_symbol = '\U0001F4C4'  # 📄
+
+
+def merge_lora(
+    lora_a_model, lora_b_model, ratio, save_to, precision, save_precision,
+):
+    # Check that both LoRA model paths were provided
+    if lora_a_model == '':
+        msgbox('Invalid model A file')
+        return
+
+    if lora_b_model == '':
+        msgbox('Invalid model B file')
+        return
+
+    # Check that the source model files exist
+    if not os.path.isfile(lora_a_model):
+        msgbox('The provided model A is not a file')
+        return
+
+    if not os.path.isfile(lora_b_model):
+        msgbox('The provided model B is not a file')
+        return
+
+    ratio_a = ratio
+    ratio_b = 1 - ratio
+
+    run_cmd = f'.\\venv\\Scripts\\python.exe "networks\\merge_lora.py"'
+    run_cmd += f' --save_precision {save_precision}'
+    run_cmd += f' --precision {precision}'
+    run_cmd += f' --save_to {save_to}'
+    run_cmd += f' --models {lora_a_model} {lora_b_model}'
+    run_cmd += f' --ratios {ratio_a} {ratio_b}'
+
+    print(run_cmd)
+
+    # Run the command
+    subprocess.run(run_cmd)
+
+
+###
+# Gradio UI
+###
+
+
+def gradio_merge_lora_tab():
+    with gr.Tab('Merge LoRA'):
+        gr.Markdown(
+            'This utility can merge LoRA networks.'
+        )
+        # with gr.Row():
+        #     sd_model = gr.Textbox(
+        #         label='Stable Diffusion model',
+        #         placeholder='(Optional) only select if merging a LoRA into a ckpt or tensorflow model',
+        #         interactive=True,
+        #     )
+        #     button_sd_model_dir = gr.Button(
+        #         folder_symbol, elem_id='open_folder_small'
+        #     )
+        #     button_sd_model_dir.click(
+        #         get_folder_path, outputs=sd_model
+        #     )
+
+        #     button_sd_model_file = gr.Button(
+        #         document_symbol, elem_id='open_folder_small'
+        #     )
+        #     button_sd_model_file.click(
+        #         get_any_file_path,
+        #         inputs=[sd_model],
+        #         outputs=sd_model,
+        #     )
+
+        with gr.Row():
+            lora_a_model = gr.Textbox(
+                label='LoRA model "A"',
+                placeholder='Path to the LoRA A model',
+                interactive=True,
+            )
+            button_lora_a_model_file = gr.Button(
+                document_symbol, elem_id='open_folder_small'
+            )
+            button_lora_a_model_file.click(
+                get_any_file_path,
+                inputs=[lora_a_model],
+                outputs=lora_a_model,
+            )
+
+            lora_b_model = gr.Textbox(
+                label='LoRA model "B"',
+                placeholder='Path to the LoRA B model',
+                interactive=True,
+            )
+            button_lora_b_model_file = gr.Button(
+                document_symbol, elem_id='open_folder_small'
+            )
+            button_lora_b_model_file.click(
+                get_any_file_path,
+                inputs=[lora_b_model],
+                outputs=lora_b_model,
+            )
+        with gr.Row():
+            ratio = gr.Slider(label="Merge ratio (e.g. 0.7 means 70% of model A and 30% of model B)", minimum=0, maximum=1, step=0.01, value=0.5,
+                interactive=True,)
+
+        with gr.Row():
+            save_to = gr.Textbox(
+                label='Save to',
+                placeholder='path for the file to save...',
+                interactive=True,
+            )
+            button_save_to = gr.Button(
+                folder_symbol, elem_id='open_folder_small'
+            )
+            button_save_to.click(
+                get_any_file_path, inputs=save_to, outputs=save_to
+            )
+            precision = gr.Dropdown(
+                label='Merge precision',
+                choices=['fp16', 'bf16', 'float'],
+                value='float',
+                interactive=True,
+            )
+            save_precision = gr.Dropdown(
+                label='Save precision',
+                choices=['fp16', 'bf16', 'float'],
+                value='float',
+                interactive=True,
+            )
+
+        convert_button = gr.Button('Merge model')
+
+        convert_button.click(
+            merge_lora,
+            inputs=[lora_a_model, lora_b_model, ratio, save_to, precision, save_precision,
+            ],
+        )
diff --git a/lora_gui.py b/lora_gui.py
index c95bac8..2e0d865 100644
--- a/lora_gui.py
+++ b/lora_gui.py
@@ -24,6 +24,7 @@ from library.dreambooth_folder_creation_gui import (
 )
 from library.dataset_balancing_gui import gradio_dataset_balancing_tab
 from library.utilities import utilities_tab
+from library.merge_lora_gui import gradio_merge_lora_tab
 from easygui import msgbox
 
 folder_symbol = '\U0001f4c2'  # 📂
@@ -473,7 +474,7 @@ def train_model(
     if not lora_network_weights == '':
         run_cmd += f' --network_weights={lora_network_weights}'
     if int(clip_skip) > 1:
-        run_cmd += f' --clip_skip={int(clip_skip)}'
+        run_cmd += f' --clip_skip={str(clip_skip)}'
     print(run_cmd)
 
     # Run the command
@@ -779,7 +780,7 @@ def lora_tab(
                 #     )
                 network_dim = gr.Slider(
                     minimum=1,
-                    maximum=32,
+                    maximum=128,
                     label='Network Dimension',
                     value=4,
                     step=1,
@@ -904,6 +905,7 @@ def lora_tab(
             logging_dir_input=logging_dir_input,
         )
         gradio_dataset_balancing_tab()
+        gradio_merge_lora_tab()
 
     button_run = gr.Button('Train model')
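
Note: for a merge ratio of 0.7, the Merge LoRA tab added above builds and runs a command of roughly this shape. The flags and the 0.7/0.3 ratio split come from merge_lora() in this patch; the LoRA and output file names are illustrative placeholders only:

    .\venv\Scripts\python.exe "networks\merge_lora.py" --save_precision float --precision float --save_to merged_lora.safetensors --models loraA.safetensors loraB.safetensors --ratios 0.7 0.3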