From 08844019cca3ab05636fcd8a5d9c38ff10f63391 Mon Sep 17 00:00:00 2001
From: asagi4 <130366179+asagi4@users.noreply.github.com>
Date: Sun, 15 Dec 2024 15:33:50 +0200
Subject: [PATCH] Cache hack causes lots of parser calls, memoize parser

---
 prompt_control/nodes_lazy.py | 8 +++++---
 prompt_control/parser.py     | 7 +++++--
 2 files changed, 10 insertions(+), 5 deletions(-)

diff --git a/prompt_control/nodes_lazy.py b/prompt_control/nodes_lazy.py
index 04971bf..27dec32 100644
--- a/prompt_control/nodes_lazy.py
+++ b/prompt_control/nodes_lazy.py
@@ -8,12 +8,14 @@ from .utils import consolidate_schedule, find_nonscheduled_loras
 
 
 
+
 def cache_key_hack(inputs):
     out = inputs.copy()
     if not is_link(inputs["text"]):
         out["text"] = cache_key_from_inputs(**inputs)
     return out
 
+
 def create_lora_loader_nodes(graph, model, clip, loras):
     for path, info in loras.items():
         log.info("Creating LoraLoader for %s", path)
@@ -156,7 +158,7 @@ def INPUT_TYPES(s):
     FUNCTION = "apply"
 
     def apply(self, model, clip, text, unique_id, apply_hooks=True, tags="", start=0.0, end=1.0):
-        schedule = parse_prompt_schedules(text).with_filters(filters=tags, start=start, end=end)
+        schedule = parse_prompt_schedules(text, filters=tags, start=start, end=end)
         graph = GraphBuilder(f"PCLazyLoraLoaderAdvanced-{unique_id}")
         return build_lora_schedule(graph, schedule, model, clip, apply_hooks=apply_hooks, return_hooks=True)
 
@@ -226,7 +228,7 @@ def build_scheduled_prompts(graph, schedules, clip):
 
 
 def cache_key_from_inputs(text, tags="", start=0.0, end=1.0, **kwargs):
-    schedules = parse_prompt_schedules(text).with_filters(start=start, end=end, filters=tags)
+    schedules = parse_prompt_schedules(text, filters=tags, start=start, end=end)
     return [(pct, s["prompt"]) for pct, s in schedules]
 
 
@@ -270,7 +272,7 @@ def INPUT_TYPES(s):
     FUNCTION = "apply"
 
     def apply(self, clip, text, unique_id, tags="", start=0.1, end=1.0):
-        schedules = parse_prompt_schedules(text).with_filters(start=start, end=end, filters=tags)
+        schedules = parse_prompt_schedules(text, filters=tags, start=start, end=end)
         graph = GraphBuilder(f"PCLazyTextEncodeAdvanced-{unique_id}")
         return build_scheduled_prompts(graph, schedules, clip)
 
diff --git a/prompt_control/parser.py b/prompt_control/parser.py
index 7879430..2a0a384 100644
--- a/prompt_control/parser.py
+++ b/prompt_control/parser.py
@@ -5,6 +5,8 @@ logging.basicConfig()
 log = logging.getLogger("comfyui-prompt-control")
 
 
+from functools import lru_cache
+
 if lark.__version__ == "0.12.0":
     x = "Your lark package reports an ancient version (0.12.0) and will not work. If you have the 'lark-parser' package in your Python environment, remove that and *reinstall* lark!"
     log.error(x)
@@ -323,5 +325,6 @@ def at_step_idx(self, step, total_steps=1):
         return len(self.parsed_prompt) - 1, self.parsed_prompt[-1]
 
 
-def parse_prompt_schedules(prompt):
-    return PromptSchedule(prompt)
+@lru_cache
+def parse_prompt_schedules(prompt, **kwargs):
+    return PromptSchedule(prompt, **kwargs)
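
Note (outside the patch itself): a minimal sketch of the memoization idea the
commit relies on, assuming only that parsing a prompt is expensive and that
every argument is hashable. The names Schedule and parse below are
illustrative placeholders, not identifiers from prompt_control;
functools.lru_cache is the same decorator the patch puts on
parse_prompt_schedules.

    from functools import lru_cache


    class Schedule:
        # Stand-in for the parsed schedule object; constructing it is assumed
        # to be the expensive step worth caching.
        def __init__(self, text, filters="", start=0.0, end=1.0):
            self.text = text
            self.filters = filters
            self.start = start
            self.end = end


    @lru_cache
    def parse(text, filters="", start=0.0, end=1.0):
        # All arguments are hashable (str/float), so lru_cache can key on them
        # and reuse the result when the same prompt is requested again.
        return Schedule(text, filters=filters, start=start, end=end)


    # The cache-key computation and a node's apply() can both ask for the
    # schedule without paying the parsing cost twice for identical inputs.
    first = parse("a [b:c:0.5]", filters="", start=0.0, end=1.0)
    second = parse("a [b:c:0.5]", filters="", start=0.0, end=1.0)
    assert first is second  # the second call is served from the cache

One caveat when caching on keyword arguments: functools builds the cache key
from the keywords in the order they are passed, so call sites that reorder
filters/start/end would create separate (still correct) cache entries. The
patch passes them in the same order at every call site.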