From f12b4bd54e0dd12aa41c08e2a000612a85fb76d9 Mon Sep 17 00:00:00 2001
From: "Dr.Lt.Data"
Date: Mon, 27 Jan 2025 11:51:03 +0900
Subject: [PATCH] feat: wildcards - add `reproduce` mode

---
 __init__.py               |  2 +-
 inspire/inspire_server.py | 28 ++++++++++++++++++++++------
 inspire/prompt_support.py | 14 ++++++++++++--
 js/prompt.js              | 26 +++++++++++++++++++-------
 pyproject.toml            |  2 +-
 5 files changed, 55 insertions(+), 17 deletions(-)

diff --git a/__init__.py b/__init__.py
index 7106b2d..f931283 100644
--- a/__init__.py
+++ b/__init__.py
@@ -7,7 +7,7 @@
 import importlib
 
 
-version_code = [1, 10]
+version_code = [1, 11]
 version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
 print(f"### Loading: ComfyUI-Inspire-Pack ({version_str})")
 
diff --git a/inspire/inspire_server.py b/inspire/inspire_server.py
index 01b8a88..505d85a 100644
--- a/inspire/inspire_server.py
+++ b/inspire/inspire_server.py
@@ -272,7 +272,17 @@ def populate_wildcards(json_data):
     for k, v in prompt.items():
         if 'class_type' in v and v['class_type'] == 'WildcardEncode //Inspire':
             inputs = v['inputs']
-            if inputs['mode'] and isinstance(inputs['populated_text'], str):
+
+            # legacy adapter
+            if isinstance(inputs['mode'], bool):
+                if inputs['mode']:
+                    new_mode = 'populate'
+                else:
+                    new_mode = 'fixed'
+
+                inputs['mode'] = new_mode
+
+            if inputs['mode'] == 'populate' and isinstance(inputs['populated_text'], str):
                 if isinstance(inputs['seed'], list):
                     try:
                         input_node = prompt[inputs['seed'][0]]
@@ -293,14 +303,17 @@ def populate_wildcards(json_data):
                     input_seed = int(inputs['seed'])
 
                 inputs['populated_text'] = wildcard_process(text=inputs['wildcard_text'], seed=input_seed)
-                inputs['mode'] = False
+                inputs['mode'] = 'reproduce'
 
                 server.PromptServer.instance.send_sync("inspire-node-feedback", {"node_id": k, "widget_name": "populated_text", "type": "text", "data": inputs['populated_text']})
                 updated_widget_values[k] = inputs['populated_text']
 
+            if inputs['mode'] == 'reproduce':
+                server.PromptServer.instance.send_sync("inspire-node-feedback", {"node_id": k, "widget_name": "mode", "type": "text", "value": 'populate'})
+
         elif 'class_type' in v and v['class_type'] == 'MakeBasicPipe //Inspire':
             inputs = v['inputs']
-            if inputs['wildcard_mode'] and (isinstance(inputs['positive_populated_text'], str) or isinstance(inputs['negative_populated_text'], str)):
+            if inputs['wildcard_mode'] == 'populate' and (isinstance(inputs['positive_populated_text'], str) or isinstance(inputs['negative_populated_text'], str)):
                 if isinstance(inputs['seed'], list):
                     try:
                         input_node = prompt[inputs['seed'][0]]
@@ -328,9 +341,12 @@ def populate_wildcards(json_data):
                     inputs['negative_populated_text'] = wildcard_process(text=inputs['negative_wildcard_text'], seed=input_seed)
                     server.PromptServer.instance.send_sync("inspire-node-feedback", {"node_id": k, "widget_name": "negative_populated_text", "type": "text", "data": inputs['negative_populated_text']})
 
-                inputs['wildcard_mode'] = False
+                inputs['wildcard_mode'] = 'reproduce'
                 mbp_updated_widget_values[k] = inputs['positive_populated_text'], inputs['negative_populated_text']
 
+            if inputs['wildcard_mode'] == 'reproduce':
+                server.PromptServer.instance.send_sync("inspire-node-feedback", {"node_id": k, "widget_name": "wildcard_mode", "type": "text", "value": 'populate'})
+
     if 'extra_data' in json_data and 'extra_pnginfo' in json_data['extra_data']:
         extra_pnginfo = json_data['extra_data']['extra_pnginfo']
         if 'workflow' in extra_pnginfo and extra_pnginfo['workflow'] is not None and 'nodes' in extra_pnginfo['workflow']:
@@ -338,11 +354,11 @@ def populate_wildcards(json_data):
                 key = str(node['id'])
                 if key in updated_widget_values:
                     node['widgets_values'][3] = updated_widget_values[key]
-                    node['widgets_values'][4] = False
+                    node['widgets_values'][4] = 'reproduce'
                 if key in mbp_updated_widget_values:
                     node['widgets_values'][7] = mbp_updated_widget_values[key][0]
                     node['widgets_values'][8] = mbp_updated_widget_values[key][1]
-                    node['widgets_values'][5] = False
+                    node['widgets_values'][5] = 'reproduce'
 
 
 def force_reset_useless_params(json_data):
diff --git a/inspire/prompt_support.py b/inspire/prompt_support.py
index 050c55a..4753f11 100644
--- a/inspire/prompt_support.py
+++ b/inspire/prompt_support.py
@@ -557,7 +557,13 @@ def INPUT_TYPES(s):
                         "weight_interpretation": (["comfy", "A1111", "compel", "comfy++", "down_weight"], {'default': 'comfy++'}),
                         "wildcard_text": ("STRING", {"multiline": True, "dynamicPrompts": False, 'placeholder': 'Wildcard Prompt (User Input)'}),
                         "populated_text": ("STRING", {"multiline": True, "dynamicPrompts": False, 'placeholder': 'Populated Prompt (Will be generated automatically)'}),
-                        "mode": ("BOOLEAN", {"default": True, "label_on": "Populate", "label_off": "Fixed"}),
+
+                        "mode": (["populate", "fixed", "reproduce"], {"default": "populate", "tooltip":
+                            "populate: Before running the workflow, it overwrites the existing value of 'populated_text' with the prompt processed from 'wildcard_text'. In this mode, 'populated_text' cannot be edited.\n"
+                            "fixed: Ignores wildcard_text and keeps 'populated_text' as is. You can edit 'populated_text' in this mode.\n"
+                            "reproduce: This mode operates as 'fixed' mode only once for reproduction, and then it switches to 'populate' mode."
+                        }),
+
                         "Select to add LoRA": (["Select the LoRA to add to the text"] + folder_paths.get_filename_list("loras"), ),
                         "Select to add Wildcard": (["Select the Wildcard to add to the text"],),
                         "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}),
@@ -598,7 +604,11 @@ def INPUT_TYPES(s):
                         "Add selection to": ("BOOLEAN", {"default": True, "label_on": "Positive", "label_off": "Negative"}),
                         "Select to add LoRA": (["Select the LoRA to add to the text"] + folder_paths.get_filename_list("loras"),),
                         "Select to add Wildcard": (["Select the Wildcard to add to the text"],),
-                        "wildcard_mode": ("BOOLEAN", {"default": True, "label_on": "Populate", "label_off": "Fixed"}),
+                        "wildcard_mode": (["populate", "fixed", "reproduce"], {"default": "populate", "tooltip":
+                            "populate: Before running the workflow, it overwrites the existing value of 'populated_text' with the prompt processed from 'wildcard_text'. In this mode, 'populated_text' cannot be edited.\n"
+                            "fixed: Ignores wildcard_text and keeps 'populated_text' as is. You can edit 'populated_text' in this mode.\n"
+                            "reproduce: This mode operates as 'fixed' mode only once for reproduction, and then it switches to 'populate' mode."
+                        }),
                         "positive_populated_text": ("STRING", {"multiline": True, "dynamicPrompts": False, 'placeholder': 'Populated Positive Prompt (Will be generated automatically)'}),
                         "negative_populated_text": ("STRING", {"multiline": True, "dynamicPrompts": False, 'placeholder': 'Populated Negative Prompt (Will be generated automatically)'}),
 
diff --git a/js/prompt.js b/js/prompt.js
index 8ecca37..7c3b3b3 100644
--- a/js/prompt.js
+++ b/js/prompt.js
@@ -95,14 +95,20 @@ app.registerExtension({
 			// mode combo
 			Object.defineProperty(mode_widget, "value", {
 				set: (value) => {
-					node._mode_value = value == true || value == "Populate";
-					populated_text_widget.inputEl.disabled = value == true || value == "Populate";
+					if(value == true)
+						node._mode_value = "populate";
+					else if(value == false)
+						node._mode_value = "fixed";
+					else
+						node._mode_value = value; // combo value
+
+					populated_text_widget.inputEl.disabled = node._mode_value != 'populate';
 				},
 				get: () => {
 					if(node._mode_value != undefined)
 						return node._mode_value;
 					else
-						return true;
+						return 'populate';
 				}
 			});
 		}
@@ -180,15 +186,21 @@ app.registerExtension({
 			// mode combo
 			Object.defineProperty(mode_widget, "value", {
 				set: (value) => {
-					pos_populated_text_widget.inputEl.disabled = node._mode_value;
-					neg_populated_text_widget.inputEl.disabled = node._mode_value;
-					node._mode_value = value;
+					if(value == true)
+						node._mode_value = "populate";
+					else if(value == false)
+						node._mode_value = "fixed";
+					else
+						node._mode_value = value; // combo value
+
+					pos_populated_text_widget.inputEl.disabled = node._mode_value != 'populate';
+					neg_populated_text_widget.inputEl.disabled = node._mode_value != 'populate';
 				},
 				get: () => {
 					if(node._mode_value != undefined)
 						return node._mode_value;
 					else
-						return true;
+						return 'populate';
 				}
 			});
 		}
diff --git a/pyproject.toml b/pyproject.toml
index ae23a28..2c44115 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
 [project]
 name = "comfyui-inspire-pack"
 description = "This extension provides various nodes to support Lora Block Weight, Regional Nodes, Backend Cache, Prompt Utils, List Utils, Noise(Seed) Utils, ... and the Impact Pack."
-version = "1.10"
+version = "1.11"
 license = { file = "LICENSE" }
 dependencies = ["matplotlib", "cachetools"]
 
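
Note (not part of the patch): the sketch below is a minimal, self-contained Python illustration of the mode lifecycle that the populate_wildcards hook above implements. The names process_wildcards, send_feedback, and handle_mode are hypothetical stand-ins, not Inspire Pack APIs; the real behaviour is the wildcard_process call and the "inspire-node-feedback" send_sync messages shown in the diff.

# handle_mode() mirrors the per-node flow of populate_wildcards(); the helpers
# below are stand-ins for wildcard_process() and the send_sync() feedback calls.

def process_wildcards(text: str, seed: int) -> str:
    # stand-in for wildcard_process(): deterministic result per seed
    return text.replace("__color__", "red" if seed % 2 else "blue")


def send_feedback(node_id: str, widget: str, value: str) -> None:
    # stand-in for server.PromptServer.instance.send_sync("inspire-node-feedback", ...)
    print(f"feedback -> node {node_id}: set widget {widget!r} to {value!r}")


def handle_mode(node_id: str, inputs: dict) -> None:
    # legacy adapter: older workflows stored the mode as a BOOLEAN widget
    if isinstance(inputs['mode'], bool):
        inputs['mode'] = 'populate' if inputs['mode'] else 'fixed'

    if inputs['mode'] == 'populate':
        # overwrite populated_text from wildcard_text, and record 'reproduce' in
        # the executed prompt so a workflow saved from this run replays the same
        # populated text once before switching back to 'populate'
        inputs['populated_text'] = process_wildcards(inputs['wildcard_text'], inputs['seed'])
        inputs['mode'] = 'reproduce'
        send_feedback(node_id, 'populated_text', inputs['populated_text'])

    if inputs['mode'] == 'reproduce':
        # 'reproduce' behaves like 'fixed' for this run (no repopulation above),
        # then asks the UI to flip the mode widget back to 'populate'
        send_feedback(node_id, 'mode', 'populate')


node = {'mode': True, 'wildcard_text': 'a __color__ cat', 'populated_text': '', 'seed': 3}
handle_mode('12', node)
print(node['populated_text'], node['mode'])   # -> a red cat reproduce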