Changed the default output folder to be shorter.

ZeroCool940711 2022-10-02 09:46:14 -07:00
parent 0dfab1ad92
commit 4e0511dbac
4 changed files with 19 additions and 16 deletions

@@ -30,14 +30,14 @@ general:
 use_sd_concepts_library: True
 sd_concepts_library_folder: "models/custom/sd-concepts-library"
 GFPGAN_dir: "./src/gfpgan"
-GFPGAN_model: "GFPGANv1.3"
+GFPGAN_model: "GFPGANv1.4"
 LDSR_dir: "./models/ldsr"
 LDSR_model: "model"
 RealESRGAN_dir: "./src/realesrgan"
 RealESRGAN_model: "RealESRGAN_x4plus"
 upscaling_method: "RealESRGAN"
-outdir_txt2img: outputs/txt2img-samples
-outdir_img2img: outputs/img2img-samples
+outdir_txt2img: outputs/txt2img
+outdir_img2img: outputs/img2img
 gfpgan_cpu: False
 esrgan_cpu: False
 extra_models_cpu: False
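
The two outdir_* defaults above lose their "-samples" suffix. As a rough illustration only (plain Python, not code from this repository; the defaults dict and the resolve_outdir helper are hypothetical), this is how the shortened values would map to folders on disk:

import os

# Hypothetical stand-in for the general: section shown above; only the two
# keys touched by this commit are listed.
defaults = {
    "outdir_txt2img": "outputs/txt2img",
    "outdir_img2img": "outputs/img2img",
}

def resolve_outdir(mode: str) -> str:
    # Look up the per-mode output folder and create it if it does not exist yet.
    outdir = defaults[f"outdir_{mode}"]
    os.makedirs(outdir, exist_ok=True)
    return outdir

print(resolve_outdir("txt2img"))  # -> outputs/txt2img
print(resolve_outdir("img2img"))  # -> outputs/img2img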

@@ -59,7 +59,7 @@ def img2img(prompt: str = '', init_info: any = None, init_info_mask: any = None,
 random_seed_loopback: bool = False
 ):
-outpath = st.session_state['defaults'].general.outdir_img2img or st.session_state['defaults'].general.outdir or "outputs/img2img-samples"
+outpath = st.session_state['defaults'].general.outdir_img2img
 seed = seed_to_int(seed)
 batch_size = 1
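
The replaced outpath line drops the old chain of fallbacks (per-mode key, then a generic outdir key, then the hard-coded "outputs/img2img-samples") in favour of reading outdir_img2img directly, which relies on that key always being defined (the config hunk above defines it); the txt2img hunk below gets the same treatment. A rough sketch of the two lookup styles, using a plain dict in place of st.session_state['defaults'].general (dict and variable names are illustrative, not the project's API):

# Plain-dict stand-in for st.session_state['defaults'].general; names are illustrative.
general = {"outdir_img2img": "outputs/img2img"}

# Old style: chained fallbacks ending in a hard-coded default path.
outpath_old = general.get("outdir_img2img") or general.get("outdir") or "outputs/img2img-samples"

# New style: a single direct read, valid as long as the config always defines the key.
outpath_new = general["outdir_img2img"]

assert outpath_old == outpath_new == "outputs/img2img"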

@@ -352,6 +352,8 @@ def load_models(use_LDSR = False, LDSR_model='model', use_GFPGAN=False, GFPGAN_m
 print("Model loaded.")
 return True
 def load_model_from_config(config, ckpt, verbose=False):

@@ -99,7 +99,7 @@ def txt2img(prompt: str, ddim_steps: int, sampler_name: str, n_iter: int, batch_
 fp = None, variant_amount: float = None,
 variant_seed: int = None, ddim_eta:float = 0.0, write_info_files:bool = True):
-outpath = st.session_state['defaults'].general.outdir_txt2img or st.session_state['defaults'].general.outdir or "outputs/txt2img-samples"
+outpath = st.session_state['defaults'].general.outdir_txt2img
 seed = seed_to_int(seed)
@@ -385,18 +385,19 @@ def layout():
 CustomModel_available=server_state["CustomModel_available"], custom_model=st.session_state["custom_model"])
-#print(st.session_state['use_RealESRGAN'])
-#print(st.session_state['use_LDSR'])
-#try:
-#
-output_images, seeds, info, stats = txt2img(prompt, st.session_state.sampling_steps, sampler_name, batch_count, batch_size,
-cfg_scale, seed, height, width, separate_prompts, normalize_prompt_weights, save_individual_images,
-save_grid, group_by_prompt, save_as_jpg, st.session_state["use_GFPGAN"], st.session_state['GFPGAN_model'],
-use_RealESRGAN=st.session_state["use_RealESRGAN"], RealESRGAN_model=st.session_state["RealESRGAN_model"],
-use_LDSR=st.session_state["use_LDSR"], LDSR_model=st.session_state["LDSR_model"],
-variant_amount=variant_amount, variant_seed=variant_seed, write_info_files=write_info_files)
+#print(st.session_state['use_RealESRGAN'])
+#print(st.session_state['use_LDSR'])
+#try:
+#
-message.success('Render Complete: ' + info + '; Stats: ' + stats, icon="")
+output_images, seeds, info, stats = txt2img(prompt, st.session_state.sampling_steps, sampler_name, batch_count, batch_size,
+cfg_scale, seed, height, width, separate_prompts, normalize_prompt_weights, save_individual_images,
+save_grid, group_by_prompt, save_as_jpg, st.session_state["use_GFPGAN"], st.session_state['GFPGAN_model'],
+use_RealESRGAN=st.session_state["use_RealESRGAN"], RealESRGAN_model=st.session_state["RealESRGAN_model"],
+use_LDSR=st.session_state["use_LDSR"], LDSR_model=st.session_state["LDSR_model"],
+variant_amount=variant_amount, variant_seed=variant_seed, write_info_files=write_info_files)
+message.success('Render Complete: ' + info + '; Stats: ' + stats, icon="")
 #history_tab,col1,col2,col3,PlaceHolder,col1_cont,col2_cont,col3_cont = st.session_state['historyTab']