Move cleanup_models to improve performance.

main
comfyanonymous 11 months ago
parent a28a9dc836
commit 6a32c06f06

@@ -368,7 +368,6 @@ class PromptExecutor:
                 d = self.outputs_ui.pop(x)
                 del d
-            comfy.model_management.cleanup_models()
             self.add_message("execution_cached",
                               { "nodes": list(current_outputs) , "prompt_id": prompt_id},
                               broadcast=False)

@@ -139,6 +139,7 @@ def prompt_worker(q, server):
         if need_gc:
             current_time = time.perf_counter()
             if (current_time - last_gc_collect) > gc_collect_interval:
+                comfy.model_management.cleanup_models()
                 gc.collect()
                 comfy.model_management.soft_empty_cache()
                 last_gc_collect = current_time
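
Net effect of the two hunks: cleanup_models() no longer runs as part of every prompt's cached-output bookkeeping in PromptExecutor; it now runs only inside prompt_worker's timer-gated GC block, together with gc.collect() and soft_empty_cache(). A minimal sketch of that block after the change, assuming a hypothetical interval value and function wrapper that are not shown in the diff:

import gc
import time

import comfy.model_management

GC_COLLECT_INTERVAL = 10.0  # assumption: the actual gc_collect_interval value is not shown in this diff

def periodic_cleanup(last_gc_collect, need_gc):
    # Sketch of the timer-gated cleanup path after this commit: model cleanup is
    # batched with the existing garbage-collection pass instead of running on
    # every cached prompt execution.
    if need_gc:
        current_time = time.perf_counter()
        if (current_time - last_gc_collect) > GC_COLLECT_INTERVAL:
            comfy.model_management.cleanup_models()    # unload models that are no longer referenced
            gc.collect()                               # reclaim Python-level garbage
            comfy.model_management.soft_empty_cache()  # release cached GPU memory
            last_gc_collect = current_time
    return last_gc_collect

Since prompts are typically executed far more often than the GC interval elapses, this presumably removes the per-prompt cost of checking and unloading models, which is the performance improvement the commit message refers to.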
