Mirror of https://github.com/Sygil-Dev/sygil-webui.git (synced 2024-12-14 22:13:41 +03:00)
Simple templating with ESRGAN bug fix
This commit is contained in:
parent 97f413c17a
commit 4d13c894aa
webui.py (20 changed lines)
@@ -399,10 +399,13 @@ def image_grid(imgs, batch_size, force_n_rows=None, captions=None):

     for i, img in enumerate(imgs):
         grid.paste(img, box=(i % cols * w, i // cols * h))
-        if captions:
-            d = ImageDraw.Draw( grid )
-            size = d.textbbox( (0,0), captions[i], font=fnt, stroke_width=2, align="center" )
-            d.multiline_text((i % cols * w + w/2, i // cols * h + h - size[3]), captions[i], font=fnt, fill=(255,255,255), stroke_width=2, stroke_fill=(0,0,0), anchor="mm", align="center")
+        try:
+            if captions:
+                d = ImageDraw.Draw( grid )
+                size = d.textbbox( (0,0), captions[i], font=fnt, stroke_width=2, align="center" )
+                d.multiline_text((i % cols * w + w/2, i // cols * h + h - size[3]), captions[i], font=fnt, fill=(255,255,255), stroke_width=2, stroke_fill=(0,0,0), anchor="mm", align="center")
+        except:
+            pass

     return grid
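For context, a minimal standalone sketch of the captioned-grid technique this hunk wraps in try/except. It is a hypothetical Pillow example, not the webui.py implementation: the font lookup, grid sizing and placeholder tiles are assumptions. The point of the fix shows in the except branch: a bad caption entry no longer aborts grid assembly.

from PIL import Image, ImageDraw, ImageFont

def captioned_grid(imgs, cols, captions=None):
    # Sketch only: webui.py computes cols/rows and loads its own font.
    w, h = imgs[0].size
    rows = (len(imgs) + cols - 1) // cols
    grid = Image.new("RGB", (cols * w, rows * h))
    try:
        fnt = ImageFont.truetype("DejaVuSans.ttf", 24)  # assumption: some TTF is installed
    except OSError:
        fnt = ImageFont.load_default()
    for i, img in enumerate(imgs):
        grid.paste(img, box=(i % cols * w, i // cols * h))
        try:
            if captions:
                d = ImageDraw.Draw(grid)
                size = d.textbbox((0, 0), captions[i], font=fnt, stroke_width=2, align="center")
                d.multiline_text((i % cols * w + w / 2, i // cols * h + h - size[3]),
                                 captions[i], font=fnt, fill=(255, 255, 255),
                                 stroke_width=2, stroke_fill=(0, 0, 0),
                                 anchor="mm", align="center")
        except Exception:
            pass  # as in the patch: never let a caption failure break the grid
    return grid

# Example: four solid-colour tiles with labels.
tiles = [Image.new("RGB", (128, 128), c) for c in ("red", "green", "blue", "gray")]
captioned_grid(tiles, cols=2, captions=["red", "green", "blue", "gray"]).save("grid.png")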
@@ -723,14 +726,17 @@ def process_images(

     precision_scope = autocast if opt.precision == "autocast" else nullcontext
     output_images = []
+    new_prompt_matrix_parts = []
     stats = []
     with torch.no_grad(), precision_scope("cuda"), (model.ema_scope() if not opt.optimized else nullcontext()):
         init_data = func_init()
         tic = time.time()


         for n in range(n_iter):
             print(f"Iteration: {n+1}/{n_iter}")
             prompts = all_prompts[n * batch_size:(n + 1) * batch_size]
+            parts = prompt_matrix_parts[n * batch_size:(n + 1) * batch_size]
             seeds = all_seeds[n * batch_size:(n + 1) * batch_size]

             if opt.optimized:
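The slicing added here keeps a caption list (parts) aligned with the prompts and seeds of each batch. A small hedged sketch of the pattern, with made-up data standing in for the real prompt lists (the names all_prompts, prompt_matrix_parts and all_seeds mirror webui.py):

all_prompts = ["a cat", "a dog", "a fox", "a bird"]
prompt_matrix_parts = ["cat", "dog", "fox", "bird"]   # caption text, one entry per prompt
all_seeds = [101, 102, 103, 104]
batch_size, n_iter = 2, 2

for n in range(n_iter):
    prompts = all_prompts[n * batch_size:(n + 1) * batch_size]
    parts = prompt_matrix_parts[n * batch_size:(n + 1) * batch_size]
    seeds = all_seeds[n * batch_size:(n + 1) * batch_size]
    # prompts[i], parts[i] and seeds[i] describe the same image, so parts[i]
    # can later be extended with a post-processing tag such as "\ngfpgan".
    print(f"Iteration: {n+1}/{n_iter}", prompts, parts, seeds)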
@@ -810,6 +816,7 @@ skip_grid, sort_samples, sampler_name, ddim_eta, n_iter, batch_size, i, denoisin
                                 normalize_prompt_weights, use_GFPGAN, write_info_files, prompt_matrix, init_img, uses_loopback, uses_random_seed_loopback, skip_save,
                                 skip_grid, sort_samples, sampler_name, ddim_eta, n_iter, batch_size, i, denoising_strength, resize_mode)
                     output_images.append(gfpgan_image) #287
+                    new_prompt_matrix_parts.append(parts[i]+"\ngfpgan")

                 if use_RealESRGAN and RealESRGAN is not None and not use_GFPGAN:
                     skip_save = True # #287 >_>
@@ -827,6 +834,7 @@ skip_grid, sort_samples, sampler_name, ddim_eta, n_iter, batch_size, i, denoisin
                                 normalize_prompt_weights, use_GFPGAN, write_info_files, prompt_matrix, init_img, uses_loopback, uses_random_seed_loopback, skip_save,
                                 skip_grid, sort_samples, sampler_name, ddim_eta, n_iter, batch_size, i, denoising_strength, resize_mode)
                     output_images.append(esrgan_image) #287
+                    new_prompt_matrix_parts.append(parts[i]+"\nesrgan4x")

                 if use_RealESRGAN and RealESRGAN is not None and use_GFPGAN and GFPGAN is not None:
                     skip_save = True # #287 >_>
@@ -846,6 +854,7 @@ skip_grid, sort_samples, sampler_name, ddim_eta, n_iter, batch_size, i, denoisin
                                 normalize_prompt_weights, use_GFPGAN, write_info_files, prompt_matrix, init_img, uses_loopback, uses_random_seed_loopback, skip_save,
                                 skip_grid, sort_samples, sampler_name, ddim_eta, n_iter, batch_size, i, denoising_strength, resize_mode)
                     output_images.append(gfpgan_esrgan_image) #287
+                    new_prompt_matrix_parts.append(parts[i]+"\ngfpgan-esrgan4x")


                 if init_mask:
@@ -872,6 +881,7 @@ skip_grid, sort_samples, sampler_name, ddim_eta, n_iter, batch_size, i, denoisin
                                 normalize_prompt_weights, use_GFPGAN, write_info_files, prompt_matrix, init_img, uses_loopback, uses_random_seed_loopback, skip_save,
                                 skip_grid, sort_samples, sampler_name, ddim_eta, n_iter, batch_size, i, denoising_strength, resize_mode)
                 output_images.append(image)
+                new_prompt_matrix_parts.append(parts[i])

             if opt.optimized:
                 mem = torch.cuda.memory_allocated()/1e6
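Taken together, the appends above keep new_prompt_matrix_parts in lockstep with output_images, tagging each caption with the post-processing that produced the image. A hedged sketch of that bookkeeping follows; the branch conditions are simplified and placeholder strings stand in for the PIL images and the GFPGAN/RealESRGAN calls.

parts = ["castle, oil painting", "castle, photo"]
use_GFPGAN, use_RealESRGAN = True, True           # assumption: both post-processors enabled
output_images, new_prompt_matrix_parts = [], []

for i, _ in enumerate(parts):
    if use_GFPGAN and not use_RealESRGAN:
        output_images.append(f"<gfpgan image {i}>")
        new_prompt_matrix_parts.append(parts[i] + "\ngfpgan")
    if use_RealESRGAN and not use_GFPGAN:
        output_images.append(f"<esrgan image {i}>")
        new_prompt_matrix_parts.append(parts[i] + "\nesrgan4x")
    if use_RealESRGAN and use_GFPGAN:
        output_images.append(f"<gfpgan+esrgan image {i}>")
        new_prompt_matrix_parts.append(parts[i] + "\ngfpgan-esrgan4x")
    output_images.append(f"<raw image {i}>")       # the unprocessed sample
    new_prompt_matrix_parts.append(parts[i])       # no suffix for the raw sample

# One caption per image, so image_grid() can label every variant in the grid.
assert len(output_images) == len(new_prompt_matrix_parts)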
@@ -882,7 +892,7 @@ skip_grid, sort_samples, sampler_name, ddim_eta, n_iter, batch_size, i, denoisin

         if (prompt_matrix or not skip_grid) and not do_not_save_grid:
             if prompt_matrix:
                 if prompt.startswith("@"):
-                    grid = image_grid(output_images, batch_size, force_n_rows=frows, captions=prompt_matrix_parts)
+                    grid = image_grid(output_images, batch_size, force_n_rows=frows, captions=new_prompt_matrix_parts)
                 else:
                     grid = image_grid(output_images, batch_size, force_n_rows=1 << ((len(prompt_matrix_parts)-1)//2))
                 try:
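After this change, the "@" (templated) path captions the grid with new_prompt_matrix_parts, so the post-processing tags appear under each tile. In the else branch, the forced row count grows by powers of two with the number of matrix parts; a quick worked illustration of that expression (not webui.py code):

# 1 << ((len(prompt_matrix_parts) - 1) // 2): rows double every two extra parts.
for n_parts in range(1, 7):
    print(n_parts, "parts ->", 1 << ((n_parts - 1) // 2), "rows")
# 1-2 parts -> 1 row, 3-4 parts -> 2 rows, 5-6 parts -> 4 rows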