network_dim=128
network_alpha=128
network_train_on=both
learning_rate=1e-4
unet_lr=0
text_encoder_lr=5e-5
lr_scheduler=constant
lr_scheduler_num_cycles=1
lr_scheduler_power=1
train_batch_size=6
num_epochs=20
caption_extension=".txt"
mixed_precision=fp16
save_precision=fp16
save_n_epochs_type=save_n_epoch_ratio
save_n_epochs_type_value=3
save_model_as=safetensors
resolution=512
enable_bucket=1
cache_latents=1
max_token_length=225
clip_skip=2
use_8bit_adam=1
gradient_checkpointing=0
gradient_accumulation_steps=1
seed=0
additional_arguments="--shuffle_caption --xformers"
print_hyperparameter=1
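For reference, below is a minimal sketch of how these notebook-style settings could map onto a kohya-ss sd-scripts train_network.py invocation. It is not a verbatim command from this run: the model, dataset, and output paths are placeholders; unet_lr=0 is assumed to mean "fall back to learning_rate" and seed=0 to mean "no fixed seed", so neither flag is passed; network_train_on=both is expressed by simply not restricting training to the UNet or text encoder; gradient_checkpointing=0 means that flag is omitted.

# Sketch only; assumes the kohya-ss sd-scripts trainer. Placeholder paths marked below.
accelerate launch train_network.py \
  --pretrained_model_name_or_path /path/to/base_model.safetensors \
  --train_data_dir /path/to/train_data \
  --output_dir /path/to/output \
  --network_module networks.lora \
  --network_dim 128 --network_alpha 128 \
  --learning_rate 1e-4 --text_encoder_lr 5e-5 \
  --lr_scheduler constant --lr_scheduler_num_cycles 1 --lr_scheduler_power 1 \
  --train_batch_size 6 --max_train_epochs 20 \
  --caption_extension ".txt" --shuffle_caption \
  --mixed_precision fp16 --save_precision fp16 \
  --save_n_epoch_ratio 3 --save_model_as safetensors \
  --resolution 512 --enable_bucket --cache_latents \
  --max_token_length 225 --clip_skip 2 \
  --use_8bit_adam --gradient_accumulation_steps 1 \
  --xformers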