stable-diffusion-webui/modules/textual_inversion/learn_schedule.py

import tqdm


class LearnScheduleIterator:
    def __init__(self, learn_rate, max_steps, cur_step=0):
        """
        Specify learn_rate as "0.001:100, 0.00001:1000, 1e-5:10000" to use a learning rate
        of 0.001 until step 100, 0.00001 until step 1000, and 1e-5 until step 10000.
        """

        pairs = learn_rate.split(',')
        self.rates = []
        self.it = 0
        self.maxit = 0
        try:
            for pair in pairs:
                if not pair.strip():
                    continue
                tmp = pair.split(':')
                if len(tmp) == 2:
                    step = int(tmp[1])
                    if step > cur_step:
                        # Keep only segments that end after the current step; clamp the end step to max_steps.
                        self.rates.append((float(tmp[0]), min(step, max_steps)))
                        self.maxit += 1
                        if step > max_steps:
                            return
                    elif step == -1:
                        # An end step of -1 means "use this rate until the end of training".
                        self.rates.append((float(tmp[0]), max_steps))
                        self.maxit += 1
                        return
                else:
                    # A bare number without ":step" applies for all remaining steps.
                    self.rates.append((float(tmp[0]), max_steps))
                    self.maxit += 1
                    return
            assert self.rates
        except (ValueError, AssertionError) as e:
            raise Exception('Invalid learning rate schedule. It should be a number or, for example, like "0.001:100, 0.00001:1000, 1e-5:10000" to have lr of 0.001 until step 100, 0.00001 until 1000, and 1e-5 until 10000.') from e

    def __iter__(self):
        return self

    def __next__(self):
        if self.it < self.maxit:
            self.it += 1
            return self.rates[self.it - 1]
        else:
            raise StopIteration
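
# Illustrative sketch (not part of the original module): for a schedule string such as
# "0.005:100, 1e-4:500, 1e-5" with max_steps=1000, the iterator yields (rate, end_step)
# pairs in order, so:
#
#     list(LearnScheduleIterator("0.005:100, 1e-4:500, 1e-5", 1000))
#     # -> [(0.005, 100), (0.0001, 500), (1e-05, 1000)]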


class LearnRateScheduler:
    def __init__(self, learn_rate, max_steps, cur_step=0, verbose=True):
        self.schedules = LearnScheduleIterator(learn_rate, max_steps, cur_step)
        (self.learn_rate, self.end_step) = next(self.schedules)
        self.verbose = verbose

        if self.verbose:
            print(f'Training at rate of {self.learn_rate} until step {self.end_step}')

        self.finished = False

    def step(self, step_number):
        if step_number < self.end_step:
            return False

        try:
            (self.learn_rate, self.end_step) = next(self.schedules)
        except StopIteration:
            self.finished = True
            return False
        return True

    def apply(self, optimizer, step_number):
        if not self.step(step_number):
            return

        if self.verbose:
            tqdm.tqdm.write(f'Training at rate of {self.learn_rate} until step {self.end_step}')

        for pg in optimizer.param_groups:
            pg['lr'] = self.learn_rate
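

if __name__ == "__main__":
    # Minimal usage sketch, not part of the original module. FakeOptimizer is a stand-in
    # that only mimics the `param_groups` attribute apply() touches; in the webui this
    # would be a real torch optimizer (e.g. torch.optim.AdamW).
    class FakeOptimizer:
        def __init__(self, initial_lr):
            self.param_groups = [{'lr': initial_lr}]

    scheduler = LearnRateScheduler("0.005:100, 1e-4:500, 1e-5", max_steps=1000)
    optimizer = FakeOptimizer(initial_lr=scheduler.learn_rate)

    for step_number in range(1001):
        # apply() only rewrites the optimizer's lr when the schedule enters its next segment.
        scheduler.apply(optimizer, step_number)

    print(f"final lr: {optimizer.param_groups[0]['lr']}, finished: {scheduler.finished}")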