From 75d3ff4ffe03ab10b902c7bddf559339c76360e2 Mon Sep 17 00:00:00 2001
From: psychedelicious <4822129+psychedelicious@users.noreply.github.com>
Date: Mon, 9 Oct 2023 21:06:18 +1100
Subject: [PATCH] fix: on error parsing, fall back to "raw" prompt

Closes #69
---
 src/compel/compel.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/compel/compel.py b/src/compel/compel.py
index 3c34dfc..f011c8d 100644
--- a/src/compel/compel.py
+++ b/src/compel/compel.py
@@ -4,6 +4,7 @@
 import torch
 from torch import Tensor
 from transformers import CLIPTokenizer, CLIPTextModel
+from pyparsing.exceptions import ParseException
 
 from . import cross_attention_control
 from .conditioning_scheduler import ConditioningScheduler, StaticConditioningScheduler
@@ -155,7 +156,10 @@ def parse_prompt_string(cls, prompt_string: str) -> Conjunction:
         Parse the given prompt string and return a structured Conjunction object that represents the prompt it contains.
         """
         pp = PromptParser()
-        conjunction = pp.parse_conjunction(prompt_string)
+        try:
+            conjunction = pp.parse_conjunction(prompt_string)
+        except ParseException:
+            return Conjunction(prompts=[FlattenedPrompt([(prompt_string, 1.0)])], weights=[1.0])
         return conjunction
 
     def describe_tokenization(self, text: str) -> List[str]:
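
For reviewers, a minimal usage sketch of the new behaviour. The malformed prompt below is illustrative only; any input that makes PromptParser raise pyparsing's ParseException now takes the fallback path instead of propagating the exception. The attribute access on the returned Conjunction assumes the constructor fields shown in the patch.

# sketch: behaviour of Compel.parse_prompt_string after this change
from compel import Compel

# A well-formed prompt still parses into a structured Conjunction as before.
ok = Compel.parse_prompt_string("a cat playing with a ball++ in the forest")

# A prompt the parser cannot handle (here, a hypothetical unbalanced group)
# no longer raises; it comes back as a single FlattenedPrompt wrapping the
# raw prompt string at weight 1.0.
raw = Compel.parse_prompt_string("a cat playing with a ball++ in the (forest")
print(raw.prompts[0])  # FlattenedPrompt containing the unparsed prompt text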