2022-08-27 21:32:28 +03:00
|
|
|
import os
|
2022-09-03 12:08:45 +03:00
|
|
|
import threading
|
2022-08-31 11:04:19 +03:00
|
|
|
|
2022-09-03 12:08:45 +03:00
|
|
|
from modules.paths import script_path
|
2022-08-29 01:58:15 +03:00
|
|
|
|
2022-08-22 17:15:46 +03:00
|
|
|
import torch
|
|
|
|
from omegaconf import OmegaConf
|
2022-09-03 12:08:45 +03:00
|
|
|
|
2022-08-31 11:04:19 +03:00
|
|
|
import signal
|
2022-08-22 17:15:46 +03:00
|
|
|
|
|
|
|
from ldm.util import instantiate_from_config
|
2022-09-01 21:20:25 +03:00
|
|
|
|
2022-09-03 12:08:45 +03:00
|
|
|
from modules.shared import opts, cmd_opts, state
|
|
|
|
import modules.shared as shared
|
|
|
|
import modules.ui
|
|
|
|
import modules.scripts
|
|
|
|
import modules.sd_hijack
|
2022-09-07 12:32:28 +03:00
|
|
|
import modules.codeformer_model
|
|
|
|
import modules.gfpgan_model
|
|
|
|
import modules.face_restoration
|
2022-09-03 12:08:45 +03:00
|
|
|
import modules.realesrgan_model as realesrgan
|
2022-09-04 18:54:12 +03:00
|
|
|
import modules.esrgan_model as esrgan
|
2022-09-11 18:48:36 +03:00
|
|
|
import modules.extras
|
2022-09-03 12:08:45 +03:00
|
|
|
import modules.lowvram
|
|
|
|
import modules.txt2img
|
|
|
|
import modules.img2img
|
|
|
|
|
|
|
|
|
# --- one-time startup: face restoration and upscaler model setup ---

# Initialize the CodeFormer and GFPGAN face-restoration backends.
modules.codeformer_model.setup_codeformer()
modules.gfpgan_model.setup_gfpgan()
# Register the base FaceRestoration instance as well
# (NOTE(review): presumably the "none"/identity option — confirm in modules.face_restoration).
shared.face_restorers.append(modules.face_restoration.FaceRestoration())

# Load ESRGAN model files from the configured path and set up RealESRGAN.
esrgan.load_models(cmd_opts.esrgan_models_path)
realesrgan.setup_realesrgan()
2022-08-22 17:15:46 +03:00
|
|
|
def load_model_from_config(config, ckpt, verbose=False):
    """Instantiate the model described by *config* and load weights from *ckpt*.

    The state dict is applied with strict=False, so missing/unexpected keys are
    tolerated; they are printed only when *verbose* is set. The model is
    returned in eval mode, optionally converted to channels-last layout.
    """
    print(f"Loading model [{shared.sd_model_hash}] from {ckpt}")

    checkpoint = torch.load(ckpt, map_location="cpu")
    if "global_step" in checkpoint:
        print(f"Global Step: {checkpoint['global_step']}")
    state_dict = checkpoint["state_dict"]

    model = instantiate_from_config(config.model)
    missing, unexpected = model.load_state_dict(state_dict, strict=False)

    if verbose:
        if missing:
            print("missing keys:")
            print(missing)
        if unexpected:
            print("unexpected keys:")
            print(unexpected)

    # Optional channels-last memory format (can speed up some hardware paths).
    if cmd_opts.opt_channelslast:
        model = model.to(memory_format=torch.channels_last)

    model.eval()
    return model
|
|
|
|
|
# Serializes GPU work: wrap_gradio_gpu_call acquires this lock around each job,
# so concurrent gradio requests never run on the GPU simultaneously.
queue_lock = threading.Lock()
2022-09-03 12:08:45 +03:00
|
|
|
def wrap_gradio_gpu_call(func):
    """Wrap a gradio handler so GPU work is serialized via queue_lock.

    Resets the shared per-job progress state before each call, runs *func*
    while holding the lock, and clears the job state afterwards. The result
    is additionally wrapped with modules.ui.wrap_gradio_call.
    """
    def f(*args, **kwargs):
        # Reset progress/preview state for the new job.
        shared.state.sampling_step = 0
        shared.state.job_count = -1
        shared.state.job_no = 0
        shared.state.current_latent = None
        shared.state.current_image = None
        shared.state.current_image_sampling_step = 0

        try:
            # Only one GPU job may run at a time.
            with queue_lock:
                res = func(*args, **kwargs)
        finally:
            # Fix: clear job state even when func raises, so the UI does not
            # keep showing a stale in-progress job after an error.
            shared.state.job = ""
            shared.state.job_count = 0

        return res

    return modules.ui.wrap_gradio_call(f)
|
2022-08-25 21:52:05 +03:00
|
|
|
|
# Load user-provided scripts from the "scripts" directory under script_path.
modules.scripts.load_scripts(os.path.join(script_path, "scripts"))
|
try:
    # this silences the annoying "Some weights of the model checkpoint were not used when initializing..." message at start.
    from transformers import logging

    logging.set_verbosity_error()
except Exception:
    # Best-effort only: transformers may be missing or its API may differ.
    # The suppression is purely cosmetic, so any failure is deliberately ignored.
    pass
|
import hashlib

# Short identifying hash for the checkpoint: SHA-256 over a 64 KiB slice taken
# at offset 1 MiB into the file, truncated to 8 hex characters.
with open(cmd_opts.ckpt, "rb") as ckpt_file:
    ckpt_file.seek(0x100000)
    digest = hashlib.sha256(ckpt_file.read(0x10000))

shared.sd_model_hash = digest.hexdigest()[:8]
|
# Load the model configuration and checkpoint weights.
sd_config = OmegaConf.load(cmd_opts.config)
shared.sd_model = load_model_from_config(sd_config, cmd_opts.ckpt)
# Convert to half precision unless --no-half was given.
shared.sd_model = (shared.sd_model if cmd_opts.no_half else shared.sd_model.half())

if cmd_opts.lowvram or cmd_opts.medvram:
    # Low-VRAM mode: let modules.lowvram manage device placement instead of
    # moving the whole model to the device at once.
    modules.lowvram.setup_for_low_vram(shared.sd_model, cmd_opts.medvram)
else:
    shared.sd_model = shared.sd_model.to(shared.device)

# Apply project-specific modifications to the loaded model (see modules.sd_hijack).
modules.sd_hijack.model_hijack.hijack(shared.sd_model)
|
|
|
def webui():
|
2022-09-06 08:54:11 +03:00
|
|
|
# make the program just exit at ctrl+c without waiting for anything
|
|
|
|
def sigint_handler(sig, frame):
|
2022-09-06 19:33:51 +03:00
|
|
|
print(f'Interrupted with signal {sig} in {frame}')
|
2022-09-06 08:54:11 +03:00
|
|
|
os._exit(0)
|
2022-08-22 17:15:46 +03:00
|
|
|
|
2022-09-06 08:54:11 +03:00
|
|
|
signal.signal(signal.SIGINT, sigint_handler)
|
2022-08-31 22:19:30 +03:00
|
|
|
|
2022-09-06 08:54:11 +03:00
|
|
|
demo = modules.ui.create_ui(
|
|
|
|
txt2img=wrap_gradio_gpu_call(modules.txt2img.txt2img),
|
|
|
|
img2img=wrap_gradio_gpu_call(modules.img2img.img2img),
|
2022-09-11 11:31:16 +03:00
|
|
|
run_extras=wrap_gradio_gpu_call(modules.extras.run_extras),
|
|
|
|
run_pnginfo=modules.extras.run_pnginfo
|
2022-09-06 08:54:11 +03:00
|
|
|
)
|
2022-08-31 11:04:19 +03:00
|
|
|
|
2022-09-12 15:10:05 +03:00
|
|
|
demo.launch(
|
|
|
|
share=cmd_opts.share,
|
|
|
|
server_name="0.0.0.0" if cmd_opts.listen else None,
|
|
|
|
server_port=cmd_opts.port,
|
|
|
|
debug=cmd_opts.gradio_debug,
|
2022-09-12 15:52:16 +03:00
|
|
|
auth=[tuple(cred.split(':')) for cred in cmd_opts.gradio_auth.strip('"').split(',')] if cmd_opts.gradio_auth else None,
|
2022-09-12 15:10:05 +03:00
|
|
|
)
|
2022-09-08 12:17:26 +03:00
|
|
|
|
2022-09-11 18:48:36 +03:00
|
|
|
|
# Script entry point: start the web UI when run directly.
if __name__ == "__main__":
    webui()
|