Interrupt after current generation

Anthony Fu 2023-10-16 14:12:18 +08:00
parent 861cbd5636
commit 8aa13d5dce
7 changed files with 17 additions and 8 deletions
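
Taken together, the changes below add a deferred interrupt: State.interrupt() sets a new interrupted_next flag instead of interrupted when the interrupt_after_current option is enabled and more than one job remains, and the generation loops break on either flag between images, so the image currently being generated always finishes. A minimal standalone sketch of that pattern (an illustration only, with the option passed in as a parameter rather than read from shared.opts):

class State:
    """Simplified stand-in for the webui State class (illustration only)."""
    interrupted = False        # hard stop: abort as soon as possible
    interrupted_next = False   # soft stop: abort once the current image finishes
    job_count = 0

    def interrupt(self, interrupt_after_current=False):
        # Defer the stop only when the option is on and a batch is still running.
        if interrupt_after_current and self.job_count > 1:
            self.interrupted_next = True
        else:
            self.interrupted = True

state = State()
state.job_count = 4

for i in range(state.job_count):
    # Both flags are checked only at image boundaries.
    if state.interrupted or state.interrupted_next:
        break
    if i == 1:
        # Simulate the user pressing Interrupt while image 2 is being generated.
        state.interrupt(interrupt_after_current=True)
    print(f"image {i + 1} finished")
# -> images 1 and 2 finish; images 3 and 4 are skipped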

View File

@@ -78,6 +78,7 @@ def wrap_gradio_call(func, extra_outputs=None, add_stats=False):
         shared.state.skipped = False
         shared.state.interrupted = False
+        shared.state.interrupted_next = False
         shared.state.job_count = 0
 
         if not add_stats:

View File

@@ -49,7 +49,7 @@ def process_batch(p, input_dir, output_dir, inpaint_mask_dir, args, to_scale=Fal
         if state.skipped:
             state.skipped = False
 
-        if state.interrupted:
+        if state.interrupted or state.interrupted_next:
             break
 
         try:

View File

@@ -819,7 +819,7 @@ def process_images_inner(p: StableDiffusionProcessing) -> Processed:
             if state.skipped:
                 state.skipped = False
 
-            if state.interrupted:
+            if state.interrupted or state.interrupted_next:
                 break
 
             sd_models.reload_model_weights()  # model can be changed for example by refiner

View File

@@ -113,6 +113,7 @@ options_templates.update(options_section(('system', "System"), {
     "disable_mmap_load_safetensors": OptionInfo(False, "Disable memmapping for loading .safetensors files.").info("fixes very slow loading speed in some cases"),
     "hide_ldm_prints": OptionInfo(True, "Prevent Stability-AI's ldm/sgm modules from printing noise to console."),
     "dump_stacks_on_signal": OptionInfo(False, "Print stack traces before exiting the program with ctrl+c."),
+    "interrupt_after_current": OptionInfo(False, "Interrupt generation after current image is finished on batch processing"),
 }))
 
 options_templates.update(options_section(('API', "API"), {
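
The new checkbox is registered as a plain boolean in the System section and is read at interrupt time as shared.opts.interrupt_after_current (see the State change below). As a hedged aside, it should also be reachable from an API client; the sketch below assumes a local instance started with --api and the stock /sdapi/v1/options endpoint, which writes through to shared.opts:

import requests

BASE = "http://127.0.0.1:7860"  # assumed local webui started with --api

# Enable the new behaviour without opening the settings UI.
# The JSON key matches the option name registered above.
requests.post(f"{BASE}/sdapi/v1/options", json={"interrupt_after_current": True}).raise_for_status()

# Read the setting back to confirm it took effect.
print(requests.get(f"{BASE}/sdapi/v1/options").json().get("interrupt_after_current"))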

View File

@@ -12,6 +12,7 @@ log = logging.getLogger(__name__)
 class State:
     skipped = False
     interrupted = False
+    interrupted_next = False
     job = ""
     job_no = 0
     job_count = 0
@@ -76,6 +77,10 @@ class State:
         log.info("Received skip request")
 
     def interrupt(self):
-        self.interrupted = True
-        log.info("Received interrupt request")
+        if shared.opts.interrupt_after_current and self.job_count > 1:
+            self.interrupted_next = True
+            log.info("Received interrupt request, interrupt after current job")
+        else:
+            self.interrupted = True
+            log.info("Received interrupt request")
 
@@ -91,6 +96,7 @@ class State:
         obj = {
             "skipped": self.skipped,
             "interrupted": self.interrupted,
+            "interrupted_next": self.interrupted_next,
             "job": self.job,
             "job_count": self.job_count,
             "job_timestamp": self.job_timestamp,
@@ -114,6 +120,7 @@ class State:
         self.id_live_preview = 0
         self.skipped = False
         self.interrupted = False
+        self.interrupted_next = False
         self.textinfo = None
         self.job = job
         devices.torch_gc()
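
Since interrupted_next is now part of State.dict(), it is visible wherever that dict is exposed, so external callers can tell that a deferred interrupt is pending. A hedged polling sketch, assuming the usual /sdapi/v1/progress endpoint reports State.dict() under its "state" key:

import time

import requests

BASE = "http://127.0.0.1:7860"  # assumed local webui started with --api

while True:
    # "state" is assumed to mirror State.dict(), including the new flag.
    state = requests.get(f"{BASE}/sdapi/v1/progress").json().get("state", {})
    if state.get("interrupted"):
        print("generation aborted immediately")
        break
    if state.get("interrupted_next"):
        print("interrupt received; the current image will finish first")
        break
    time.sleep(1)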

View File

@@ -95,7 +95,7 @@ class Script(scripts.Script):
                 processed = processing.process_images(p)
 
                 # Generation cancelled.
-                if state.interrupted:
+                if state.interrupted or state.interrupted_next:
                     break
 
                 if initial_seed is None:
if initial_seed is None:
@@ -122,7 +122,7 @@ class Script(scripts.Script):
             p.inpainting_fill = original_inpainting_fill
 
-            if state.interrupted:
+            if state.interrupted or state.interrupted_next:
                 break
 
             if len(history) > 1:
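
Scripts that drive their own loops over process_images(), like loopback here and the X/Y/Z grid below, now break on either flag. A custom script can follow the same pattern; a minimal sketch (not part of this commit) assuming the usual scripts.Script interface:

from modules import processing, scripts
from modules.shared import state

class Script(scripts.Script):
    def title(self):
        return "Repeat four times (illustrative)"

    def run(self, p, *args):
        images = []
        for _ in range(4):
            # Stop at an image boundary on a hard or deferred interrupt.
            if state.interrupted or state.interrupted_next:
                break
            processed = processing.process_images(p)
            images += processed.images
        return processing.Processed(p, images, p.seed, "")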

View File

@@ -688,7 +688,7 @@ class Script(scripts.Script):
         grid_infotext = [None] * (1 + len(zs))
 
         def cell(x, y, z, ix, iy, iz):
-            if shared.state.interrupted:
+            if shared.state.interrupted or state.interrupted_next:
                 return Processed(p, [], p.seed, "")
 
             pc = copy(p)