Commit

Cache hack causes lots of parser calls, memoize parser
asagi4 committed Dec 15, 2024
1 parent f7d78e5 commit 0884401
Showing 2 changed files with 10 additions and 5 deletions.
8 changes: 5 additions & 3 deletions prompt_control/nodes_lazy.py
@@ -8,12 +8,14 @@

from .utils import consolidate_schedule, find_nonscheduled_loras


def cache_key_hack(inputs):
    out = inputs.copy()
    if not is_link(inputs["text"]):
        out["text"] = cache_key_from_inputs(**inputs)
    return out


def create_lora_loader_nodes(graph, model, clip, loras):
    for path, info in loras.items():
        log.info("Creating LoraLoader for %s", path)
@@ -156,7 +158,7 @@ def INPUT_TYPES(s):
FUNCTION = "apply"

def apply(self, model, clip, text, unique_id, apply_hooks=True, tags="", start=0.0, end=1.0):
schedule = parse_prompt_schedules(text).with_filters(filters=tags, start=start, end=end)
schedule = parse_prompt_schedules(text, filters=tags, start=start, end=end)
graph = GraphBuilder(f"PCLazyLoraLoaderAdvanced-{unique_id}")
return build_lora_schedule(graph, schedule, model, clip, apply_hooks=apply_hooks, return_hooks=True)

@@ -226,7 +228,7 @@ def build_scheduled_prompts(graph, schedules, clip):


def cache_key_from_inputs(text, tags="", start=0.0, end=1.0, **kwargs):
-    schedules = parse_prompt_schedules(text).with_filters(start=start, end=end, filters=tags)
+    schedules = parse_prompt_schedules(text, filters=tags, start=start, end=end)
    return [(pct, s["prompt"]) for pct, s in schedules]


@@ -270,7 +272,7 @@ def INPUT_TYPES(s):
FUNCTION = "apply"

def apply(self, clip, text, unique_id, tags="", start=0.1, end=1.0):
schedules = parse_prompt_schedules(text).with_filters(start=start, end=end, filters=tags)
schedules = parse_prompt_schedules(text, filters=tags, start=start, end=end)
graph = GraphBuilder(f"PCLazyTextEncodeAdvanced-{unique_id}")
return build_scheduled_prompts(graph, schedules, clip)

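In this file the filter arguments (tags, start, end) are now passed straight into parse_prompt_schedules instead of being applied afterwards via .with_filters(), so each call site goes through the memoized parser (added in parser.py below) with plain hashable arguments. The following sketch is illustrative only; toy_parse, toy_cache_key and the counter are made-up stand-ins, not project code. It shows why memoizing the parser helps when a cache-key helper like cache_key_from_inputs keeps re-deriving a key from the same prompt:

from functools import lru_cache

parse_count = 0


@lru_cache
def toy_parse(prompt, filters="", start=0.0, end=1.0):
    # Stand-in for the real Lark parse; pretend this is expensive.
    global parse_count
    parse_count += 1
    return (prompt, filters, start, end)


def toy_cache_key(text, tags="", start=0.0, end=1.0):
    # Same shape as cache_key_from_inputs: derive a key from the parse result.
    return toy_parse(text, filters=tags, start=start, end=end)


for _ in range(100):
    toy_cache_key("a [cat:dog:0.5]")

print(parse_count)  # 1 -- after the first miss, every repeat call is a cache hit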
7 changes: 5 additions & 2 deletions prompt_control/parser.py
@@ -5,6 +5,8 @@
logging.basicConfig()
log = logging.getLogger("comfyui-prompt-control")

+from functools import lru_cache

if lark.__version__ == "0.12.0":
x = "Your lark package reports an ancient version (0.12.0) and will not work. If you have the 'lark-parser' package in your Python environment, remove that and *reinstall* lark!"
log.error(x)
@@ -323,5 +325,6 @@ def at_step_idx(self, step, total_steps=1):
        return len(self.parsed_prompt) - 1, self.parsed_prompt[-1]


-def parse_prompt_schedules(prompt):
-    return PromptSchedule(prompt)
+@lru_cache
+def parse_prompt_schedules(prompt, **kwargs):
+    return PromptSchedule(prompt, **kwargs)
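
Because lru_cache builds its key from the exact arguments of each call, this works as long as prompt and every keyword value are hashable (strings and floats here), and repeated identical calls return the same already-parsed PromptSchedule without re-running the parser. One subtlety of memoizing a **kwargs signature, shown in the illustrative snippet below (the toy function f is not project code): passing the same values in a different keyword order can create separate cache entries, so call sites benefit from using one consistent keyword form.

from functools import lru_cache


@lru_cache
def f(prompt, **kwargs):
    # Returns a fresh object on every real (non-cached) call.
    return object()


a = f("p", filters="", start=0.0, end=1.0)
b = f("p", filters="", start=0.0, end=1.0)
c = f("p", start=0.0, end=1.0, filters="")  # same values, different keyword order

print(a is b)          # True: identical call, served from the cache
print(a is c)          # False in CPython: keyword order is part of the cache key
print(f.cache_info())  # CacheInfo(hits=1, misses=2, maxsize=128, currsize=2)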
