Undo LoCon commit
parent 04f0f0cf4f
commit dfd155a8e1

lora_gui.py: 22 changed lines
@@ -10,7 +10,6 @@ import os
 import subprocess
 import pathlib
 import argparse
-import shutil
 from library.common_gui import (
     get_folder_path,
     remove_doublequote,
@@ -44,12 +43,6 @@ folder_symbol = '\U0001f4c2' # 📂
 refresh_symbol = '\U0001f504' # 🔄
 save_style_symbol = '\U0001f4be' # 💾
 document_symbol = '\U0001F4C4' # 📄
-locon_path = os.getcwd()+'\\locon\\'
-
-def getlocon():
-    os.system('git clone https://github.com/KohakuBlueleaf/LoCon.git')
-    os.system('ren '+locon_path[:-6]+'\\LoCon\\'+' locon_github-sourcecode')
-    shutil.copytree(locon_path[:-6]+'locon_github-sourcecode\\locon\\', locon_path)

 def save_configuration(
     save_as,
@@ -111,7 +104,7 @@ def save_configuration(
     bucket_reso_steps,
     caption_dropout_every_n_epochs, caption_dropout_rate,
     optimizer,
-    optimizer_args,noise_offset, locon = 0
+    optimizer_args,noise_offset,
 ):
     # Get list of function parameters and values
     parameters = list(locals().items())
@@ -217,7 +210,7 @@ def open_configuration(
     bucket_reso_steps,
     caption_dropout_every_n_epochs, caption_dropout_rate,
     optimizer,
-    optimizer_args,noise_offset, locon=0
+    optimizer_args,noise_offset,
 ):
     # Get list of function parameters and values
     parameters = list(locals().items())
@@ -301,7 +294,7 @@ def train_model(
     bucket_reso_steps,
     caption_dropout_every_n_epochs, caption_dropout_rate,
     optimizer,
-    optimizer_args,noise_offset, locon
+    optimizer_args,noise_offset,
 ):
     if pretrained_model_name_or_path == '':
         msgbox('Source model information is missing')
@@ -437,11 +430,6 @@ def train_model(
         run_cmd += f' --save_model_as={save_model_as}'
     if not float(prior_loss_weight) == 1.0:
         run_cmd += f' --prior_loss_weight={prior_loss_weight}'
-    if locon:
-        if not os.path.exists(locon_path):
-            getlocon()
-        run_cmd += ' --network_module=locon.locon_kohya'
-    else:
     run_cmd += f' --network_module=networks.lora'

     if not (float(text_encoder_lr) == 0) or not (float(unet_lr) == 0):
@@ -690,8 +678,6 @@ def lora_tab(
             )
             enable_bucket = gr.Checkbox(label='Enable buckets', value=True)
         with gr.Accordion('Advanced Configuration', open=False):
-            with gr.Row():
-                locon= gr.Checkbox(label='Train a LoCon instead of a general LoRA (may not be able to merge now)', value=False)
             with gr.Row():
                 no_token_padding = gr.Checkbox(
                     label='No token padding', value=False
@@ -833,7 +819,7 @@ def lora_tab(
         bucket_reso_steps,
         caption_dropout_every_n_epochs, caption_dropout_rate,
         optimizer,
-        optimizer_args,noise_offset,locon
+        optimizer_args,noise_offset,
     ]

     button_open_config.click(
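For reference, the reverted change had wired up two things: a helper that fetched KohakuBlueleaf's LoCon repository and copied its locon package next to the GUI, and a branch in train_model() that swapped --network_module between locon.locon_kohya and networks.lora. The sketch below restates that flow in cross-platform form; the removed code used the Windows-only ren command and backslash paths, so the subprocess/shutil calls and the network_module_arg helper name here are illustrative assumptions, not code from either commit.

import os
import shutil
import subprocess

LOCON_REPO = 'https://github.com/KohakuBlueleaf/LoCon.git'
locon_path = os.path.join(os.getcwd(), 'locon')

def getlocon():
    # Clone the LoCon sources once and copy the `locon` package next to
    # the GUI so the trainer can import locon.locon_kohya.
    checkout = os.path.join(os.getcwd(), 'locon_github-sourcecode')
    if not os.path.exists(checkout):
        subprocess.run(['git', 'clone', LOCON_REPO, checkout], check=True)
    if not os.path.exists(locon_path):
        shutil.copytree(os.path.join(checkout, 'locon'), locon_path)

def network_module_arg(locon_enabled):
    # Mirrors the branch removed from train_model(): use the LoCon module
    # when the checkbox was set, otherwise fall back to the stock LoRA one.
    if locon_enabled:
        getlocon()
        return ' --network_module=locon.locon_kohya'
    return ' --network_module=networks.lora'

Undoing the feature therefore also drops the on-the-fly git clone at training time, which is why the diff removes both the import of shutil and every locon parameter threaded through save_configuration, open_configuration, train_model, and the lora_tab settings list.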