Fixed an issue with the new ldm folder requiring personalization_config to be set even if empty.

Added shutup as a dependency to shut up Python warnings for good.
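
For reference, the library's entire public surface is the single call added in the diff below; a minimal sketch of its effect (the warnings.warn line is illustrative, not part of this commit):

    import warnings
    import shutup

    shutup.please()         # suppress all Python warnings from here on
    warnings.warn("noisy")  # no longer printed to stderr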
ZeroCool940711 2022-11-26 17:59:34 -07:00
parent 5841dc85a6
commit 2e2b35ff71
2 changed files with 27 additions and 19 deletions


@@ -67,6 +67,7 @@ retry==0.9.2 # used by sd_utils
python-slugify==6.1.2 # used by sd_utils
piexif==1.1.3 # used by sd_utils
pywebview==3.6.3 # used by streamlit_webview.py
+shutup==0.2.0 # remove all the annoying warnings
accelerate==0.12.0
albumentations==0.4.3


@@ -72,6 +72,7 @@ from io import BytesIO
from packaging import version
from pathlib import Path
from huggingface_hub import hf_hub_download
+import shutup
#import librosa
from logger import logger, set_logger_verbosity, quiesce_logger
@@ -91,6 +92,9 @@ except ImportError as e:
# end of imports
#---------------------------------------------------------------------------------------------------------------
+# remove all the annoying python warnings.
+shutup.please()
try:
# this silences the annoying "Some weights of the model checkpoint were not used when initializing..." message at start.
from transformers import logging
@@ -261,10 +265,13 @@ def set_page_title(title):
def make_grid(n_items=5, n_cols=5):
+# Compute number of rows
n_rows = 1 + n_items // int(n_cols)
+# Create rows
rows = [st.container() for _ in range(n_rows)]
+# Create columns in each row
cols_per_row = [r.columns(n_cols) for r in rows]
cols = [column for row in cols_per_row for column in row]
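
A hypothetical use of make_grid() above, assuming it goes on to return the flattened cols list (the return statement falls outside this hunk):

    cols = make_grid(n_items=8, n_cols=4)  # 4-wide grid with room for 8 items
    for i in range(8):
        cols[i].write(f"item {i}")         # place content cell by cell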
@@ -272,29 +279,29 @@ def make_grid(n_items=5, n_cols=5):
def merge(file1, file2, out, weight):
alpha = (weight)/100
if not(file1.endswith(".ckpt")):
file1 += ".ckpt"
if not(file2.endswith(".ckpt")):
file2 += ".ckpt"
if not(out.endswith(".ckpt")):
out += ".ckpt"
try:
#Load Models
model_0 = torch.load(file1)
model_1 = torch.load(file2)
theta_0 = model_0['state_dict']
theta_1 = model_1['state_dict']
alpha = (weight)/100
for key in theta_0.keys():
if 'model' in key and key in theta_1:
theta_0[key] = (alpha) * theta_0[key] + (1-alpha) * theta_1[key]
logger.info("RUNNING...\n(STAGE 2)")
for key in theta_1.keys():
if 'model' in key and key not in theta_0:
theta_0[key] = theta_1[key]
torch.save(model_0, out)
except:
logger.error("Error in merging")
def human_readable_size(size, decimal_places=3):
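
A hypothetical invocation of merge() above (the .ckpt file names are made up; weight is the percentage kept from the first model):

    # writes merged.ckpt where each shared 'model' tensor is
    # 0.7 * modelA's weight + 0.3 * modelB's weight
    merge("modelA", "modelB", "merged", 70)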
@@ -483,7 +490,7 @@ def load_model_from_config(config, ckpt, verbose=False):
if "global_step" in pl_sd:
logger.info(f"Global Step: {pl_sd['global_step']}")
sd = pl_sd["state_dict"]
-model = instantiate_from_config(config.model)
+model = instantiate_from_config(config.model, personalization_config='')
m, u = model.load_state_dict(sd, strict=False)
if len(m) > 0 and verbose:
logger.info("missing keys:")