diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..6224f90
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,12 @@
+## 2.0.0
+
+* Added a Stack version of the Randomize LoRAs node;
+* Added trigger word fields for the Randomize LoRAs nodes;
+* Fixed Randomize LoRAs outputting duplicated LoRAs if the user selected the same LoRA multiple times;
+* Added Random Text from Multiline node;
+* Added Text Multiline With Variables node;
+
+## 1.0.0
+
+* Initial launch
+* Added Randomize LoRAs node
\ No newline at end of file
diff --git a/README.md b/README.md
index 936e911..1b396a6 100644
--- a/README.md
+++ b/README.md
@@ -1,24 +1,43 @@
 # unwdef Custom Nodes for ComfyUI
-At the moment, only one node is available.
+This is a work-in-progress repo.
 
 ## Randomize LoRAs Node
 The Randomize LoRAs node randomly loads LoRAs from a predefined selection, with randomized weights. This enables users to experiment with different artistic effects on their generated images.
 
-![preview](https://github.com/unwdef/unwdef-nodes-comfyui/assets/166751903/686f12e1-ed35-4165-94f7-048c0550c2fc)
+![nodes_lora](https://github.com/unwdef/unwdef-nodes-comfyui/assets/166751903/e3ae5179-06ac-4154-94a9-1fb31a47fe35)
 
 Note: The "Show Text" node is part of [pythongosssss/ComfyUI-Custom-Scripts](https://github.com/pythongosssss/ComfyUI-Custom-Scripts)
 
+There is also a "Stack" version for use with other LoRA nodes that support stacking, such as [Efficiency Nodes](https://github.com/jags111/efficiency-nodes-comfyui).
+
 ### How It Works
 Connect the **model** and **clip** outputs from this node to your KSampler or other processing nodes. The output, **chosen loras**, provides a textual representation detailing which LoRAs and corresponding weights were applied during the generation.
 
+You can also provide the **trigger words** for each LoRA. They are output as comma-separated text that you can concatenate into your prompts.
+
 ### Configuration Fields
 - **seed**: Ensures reproducibility. Maintain the same seed for consistent results across generations. _Note: Keep the same selected LoRAs for this to work._
 - **max_random**: Limits the maximum number of LoRAs to apply. Even if you select up to 10, you can choose to apply fewer.
 - **lora_x**: Specifies the LoRA file to use.
 - **min_str_x** and **max_str_x**: Define the minimum and maximum strengths for each LoRA, allowing for a range of intensities.
+- **trigger_words_x**: The trigger words for the selected LoRA.
+
+## Random Text from Multiline Node
+Outputs one or more randomly chosen lines from a multiline text field.
+
+![node_random_text_from_multiline](https://github.com/unwdef/unwdef-nodes-comfyui/assets/166751903/432196cc-067f-4f84-9ca4-769d3a3c46d7)
+
+## Text Multiline with Variables
+
+Replaces instances of `!var_x` in your text with the contents of the corresponding `var_x` inputs.
+
+![nodes_text](https://github.com/unwdef/unwdef-nodes-comfyui/assets/166751903/cd9c0724-1dcc-426b-b66e-6e733b3be264)
+
+
+## Installation
+You can use the [ComfyUI-Manager](https://github.com/ltdrdata/ComfyUI-Manager). Search for "unwdef" or "unwdef-nodes".
-### Installation
-To install the Randomize LoRAs node in ComfyUI:
+Or you can install it manually:
 1. Open your terminal and navigate to your `ComfyUI/custom_nodes` directory.
 2. Clone the repository using:
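The trigger-word output described in the README above is built by splitting each selected LoRA's comma-separated words, stripping and deduplicating them, and joining the result back into one comma-separated string (see `nodes_lora.py` further down). A minimal standalone sketch of that formatting step, using hypothetical sample words:

```python
# Standalone sketch of the trigger-word formatting used by the Randomize LoRAs
# nodes: split each LoRA's comma-separated words, strip and deduplicate them,
# then join everything back together. The sample words are hypothetical.
def combine_trigger_words(per_lora_words):
    combined = set()
    for words in per_lora_words:
        combined |= {w.strip() for w in words.split(",") if w.strip()}
    return ", ".join(sorted(combined))

print(combine_trigger_words(["photo, realistic", "realistic, 4k"]))
# -> "4k, photo, realistic"
```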
diff --git a/__init__.py b/__init__.py
index 4ac5cc2..30e7834 100644
--- a/__init__.py
+++ b/__init__.py
@@ -1,3 +1,18 @@
-from .unwdef_nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
+from .unwdef_nodes.nodes_lora import *
+from .unwdef_nodes.nodes_text import *
+
+NODE_CLASS_MAPPINGS = {
+    "RandomizeLoras": RandomizeLoras,
+    "RandomizeLorasStack": RandomizeLorasStack,
+    "RandomTextFromMultiline": RandomTextFromMultiline,
+    "TextMultilineWithVariables": TextMultilineWithVariables,
+}
+
+NODE_DISPLAY_NAME_MAPPINGS = {
+    "RandomizeLoras": "Randomize LoRAs",
+    "RandomizeLorasStack": "Randomize LoRAs (Stack)",
+    "RandomTextFromMultiline": "Random Text From Multiline",
+    "TextMultilineWithVariables": "Text Multiline with Variables",
+}
 
 __all__ = ["NODE_CLASS_MAPPINGS", "NODE_DISPLAY_NAME_MAPPINGS"]
\ No newline at end of file
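ComfyUI discovers custom nodes through the `NODE_CLASS_MAPPINGS` and `NODE_DISPLAY_NAME_MAPPINGS` dictionaries exported by the package `__init__.py` above. As a rough sketch of that registration pattern, a hypothetical extra node (not part of this repository) would be wired up the same way:

```python
# Sketch of the registration pattern used in __init__.py above. The ReverseText
# class and its display name are hypothetical, for illustration only.
class ReverseText:
    @classmethod
    def INPUT_TYPES(cls):
        return {"required": {"text": ("STRING", {"multiline": True, "default": ""})}}

    RETURN_TYPES = ("STRING",)
    RETURN_NAMES = ("STRING",)
    FUNCTION = "reverse"
    CATEGORY = "unwdef/text"

    def reverse(self, text):
        # ComfyUI expects a tuple matching RETURN_TYPES
        return (text[::-1],)

# Added to the dictionaries exported from the package __init__.py:
NODE_CLASS_MAPPINGS = {"ReverseText": ReverseText}
NODE_DISPLAY_NAME_MAPPINGS = {"ReverseText": "Reverse Text"}
```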
diff --git a/preview.png b/preview.png
deleted file mode 100644
index 600086d..0000000
Binary files a/preview.png and /dev/null differ
diff --git a/previews/nodes_lora.png b/previews/nodes_lora.png
new file mode 100644
index 0000000..0a6732a
Binary files /dev/null and b/previews/nodes_lora.png differ
diff --git a/previews/nodes_text.png b/previews/nodes_text.png
new file mode 100644
index 0000000..2e66470
Binary files /dev/null and b/previews/nodes_text.png differ
diff --git a/unwdef_nodes.py b/unwdef_nodes.py
deleted file mode 100644
index dedff6a..0000000
--- a/unwdef_nodes.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import random
-from nodes import LoraLoader
-import folder_paths
-
-class RandomizeLoras:
-    def __init__(self):
-        pass
-
-    @classmethod
-    def INPUT_TYPES(cls):
-        loras = ["None"] + folder_paths.get_filename_list("loras")
-        inputs = {
-            "required": {
-                "model": ("MODEL",),
-                "clip": ("CLIP", ),
-                "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}),
-                "max_random": ("INT", {"default": 10, "min": 1, "max": 10}),
-            }
-        }
-        for i in range(1, 11):
-            inputs["required"][f"lora_{i}"] = (loras,)
-            inputs["required"][f"min_str_{i}"] = ("FLOAT", {"default": 0.5, "min": -10.0, "max": 10.0, "step": 0.01})
-            inputs["required"][f"max_str_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01})
-
-        return inputs
-
-    RETURN_TYPES = ("MODEL", "CLIP", "STRING")
-    RETURN_NAMES = ("model", "clip", "chosen loras")
-    FUNCTION = "load_lora"
-    CATEGORY = "unwdef"
-
-    def load_lora(self, model, clip, seed, max_random, **kwargs):
-        if seed is not None:
-            random.seed(seed)  # For reproducibility
-
-        # Initialize list to hold lora configurations
-        lora_configs = []
-
-        # Dynamically extract lora configurations from kwargs
-        for i in range(1, 11):
-            lora_name = kwargs.get(f"lora_{i}")
-            min_str = kwargs.get(f"min_str_{i}")
-            max_str = kwargs.get(f"max_str_{i}")
-
-            if lora_name != "None":
-                lora_configs.append({"name": lora_name, "min_str": min_str, "max_str": max_str})
-
-        # Initialize the string to hold chosen loras and values
-        chosen_str = ""
-
-        # Check if no loras are selected
-        if len(lora_configs) == 0:
-            return (model, clip, chosen_str)
-
-        # Adjust max_random
-        if (max_random > len(lora_configs)):
-            max_random = len(lora_configs)
-
-        # Randomly choose some of these loras
-        chosen_loras = random.sample(lora_configs, random.randint(1, max_random))
-
-        for lora in chosen_loras:
-            # Randomly determine a value between min_str and max_str
-            strength = random.uniform(lora['min_str'], lora['max_str'])
-
-            # Apply changes to model and clip
-            model, clip = LoraLoader().load_lora(model, clip, lora['name'], strength, strength)
-
-            # Append the current lora and its value to the string
-            chosen_str += f"{lora['name']}: {round(strength, 2)}, "
-
-        # Find the last occurrence of the comma to remove it
-        last_comma_index = chosen_str.rfind(',')
-        # Slice the string to remove the last comma and everything after it
-        if last_comma_index != -1:
-            chosen_str = chosen_str[:last_comma_index]
-
-        return (model, clip, chosen_str)
-
-
-NODE_CLASS_MAPPINGS = {
-    "RandomizeLoras": RandomizeLoras
-}
-
-NODE_DISPLAY_NAME_MAPPINGS = {
-    "RandomizeLoras": "Randomize LoRAs"
-}
\ No newline at end of file
diff --git a/unwdef_nodes/nodes_lora.py b/unwdef_nodes/nodes_lora.py
new file mode 100644
index 0000000..cbc3f17
--- /dev/null
+++ b/unwdef_nodes/nodes_lora.py
@@ -0,0 +1,186 @@
+import random
+from nodes import LoraLoader
+import folder_paths
+
+class RandomizeLoras:
+    def __init__(self):
+        pass
+
+    @classmethod
+    def INPUT_TYPES(cls):
+        loras = ["None"] + folder_paths.get_filename_list("loras")
+        inputs = {
+            "required": {
+                "model": ("MODEL",),
+                "clip": ("CLIP", ),
+                "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}),
+                "max_random": ("INT", {"default": 10, "min": 1, "max": 10}),
+            }
+        }
+        for i in range(1, 11):
+            inputs["required"][f"lora_{i}"] = (loras,)
+            inputs["required"][f"min_str_{i}"] = ("FLOAT", {"default": 0.5, "min": -10.0, "max": 10.0, "step": 0.01})
+            inputs["required"][f"max_str_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01})
+            inputs["required"][f"trigger_words_{i}"] = ("STRING", { "multiline": False, "default": "" })
+
+        return inputs
+
+    RETURN_TYPES = ("MODEL", "CLIP", "STRING", "STRING")
+    RETURN_NAMES = ("model", "clip", "trigger_words", "chosen_loras")
+    FUNCTION = "load_lora"
+    CATEGORY = "unwdef/lora"
+
+    def load_lora(self, model, clip, seed, max_random, **kwargs):
+        if seed is not None:
+            random.seed(seed)  # For reproducibility
+
+        # Initialize list to hold lora configurations
+        lora_configs = []
+
+        # Dynamically extract lora configurations from kwargs,
+        # skipping "None" entries and duplicate selections
+        for i in range(1, 11):
+            lora_name = kwargs.get(f"lora_{i}")
+            min_str = kwargs.get(f"min_str_{i}")
+            max_str = kwargs.get(f"max_str_{i}")
+            trigger_words = kwargs.get(f"trigger_words_{i}")
+
+            if lora_name != "None" and not any(config['name'] == lora_name for config in lora_configs):
+                lora_configs.append({"name": lora_name, "min_str": min_str, "max_str": max_str,
+                                     "trigger_words": ', '.join([s.strip() for s in trigger_words.strip().split(',') if s.strip()])})
+
+        # Initialize the string to hold chosen loras and values
+        chosen_str = ""
+
+        # Initialize the string to hold the trigger words
+        chosen_trigger_words = ""
+
+        # Check if no loras are selected
+        if len(lora_configs) == 0:
+            return (model, clip, chosen_trigger_words, chosen_str)
+
+        # Adjust max_random
+        if (max_random > len(lora_configs)):
+            max_random = len(lora_configs)
+
+        # Randomly choose some of these loras
+        chosen_loras = random.sample(lora_configs, random.randint(1, max_random))
+
+        for lora in chosen_loras:
+            # Randomly determine a value between min_str and max_str
+            strength = random.uniform(lora['min_str'], lora['max_str'])
+
+            # Apply changes to model and clip
+            model, clip = LoraLoader().load_lora(model, clip, lora['name'], strength, strength)
+
+            # Append the current lora and its value to the string
+            chosen_str += f"{lora['name']}: {round(strength, 2)}, "
+
+            # Merge this lora's trigger words into the combined set, removing duplicates
+            existing_chosen_trigger_words = set(chosen_trigger_words.split(', '))
+            chosen_trigger_words = set(lora['trigger_words'].split(', '))
+            combined_words = existing_chosen_trigger_words | chosen_trigger_words
+            chosen_trigger_words = ', '.join(sorted(combined_words))
+
+
+        # Find the last occurrence of the comma to remove it
+        last_comma_index = chosen_str.rfind(',')
+        # Slice the string to remove the last comma and everything after it
+        if last_comma_index != -1:
+            chosen_str = chosen_str[:last_comma_index]
+
+        return (model, clip, chosen_trigger_words.lstrip(", "), chosen_str)
+
+class RandomizeLorasStack:
+    def __init__(self):
+        pass
+
+    @classmethod
+    def INPUT_TYPES(cls):
+        loras = ["None"] + folder_paths.get_filename_list("loras")
+        inputs = {
+            "required": {
+                "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}),
+                "max_random": ("INT", {"default": 10, "min": 1, "max": 10}),
+            }
+        }
+        for i in range(1, 11):
+            inputs["required"][f"lora_{i}"] = (loras,)
+            inputs["required"][f"min_str_{i}"] = ("FLOAT", {"default": 0.5, "min": -10.0, "max": 10.0, "step": 0.01})
+            inputs["required"][f"max_str_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01})
+            inputs["required"][f"trigger_words_{i}"] = ("STRING", { "multiline": False, "default": "" })
+
+        inputs["optional"] = {
+            "lora_stack": ("LORA_STACK",)
+        }
+
+        return inputs
+
+    RETURN_TYPES = ("LORA_STACK", "STRING", "STRING")
+    RETURN_NAMES = ("LORA_STACK", "trigger_words", "chosen_loras")
+    FUNCTION = "load_lora_stack"
+    CATEGORY = "unwdef/lora"
+
+    def load_lora_stack(self, seed, max_random, lora_stack=None, **kwargs):
+        if seed is not None:
+            random.seed(seed)  # For reproducibility
+
+        # Initialize list to hold lora configurations
+        lora_configs = []
+
+        # Initialize lora stack list, keeping any incoming stack entries
+        lora_list = list()
+        if lora_stack is not None:
+            lora_list.extend([l for l in lora_stack if l[0] != "None"])
+
+        # Dynamically extract lora configurations from kwargs,
+        # skipping "None" entries and duplicate selections
+        for i in range(1, 11):
+            lora_name = kwargs.get(f"lora_{i}")
+            min_str = kwargs.get(f"min_str_{i}")
+            max_str = kwargs.get(f"max_str_{i}")
+            trigger_words = kwargs.get(f"trigger_words_{i}")
+
+            if lora_name != "None" and not any(config['name'] == lora_name for config in lora_configs):
+                lora_configs.append({"name": lora_name, "min_str": min_str, "max_str": max_str,
+                                     "trigger_words": ', '.join([s.strip() for s in trigger_words.strip().split(',') if s.strip()])})
+
+        # Initialize the string to hold chosen loras and values
+        chosen_str = ""
+
+        # Initialize the string to hold the trigger words
+        chosen_trigger_words = ""
+
+        # Check if no loras are selected
+        if len(lora_configs) == 0:
+            return (lora_list, chosen_trigger_words, chosen_str, )
+
+        # Adjust max_random
+        if (max_random > len(lora_configs)):
+            max_random = len(lora_configs)
+
+        # Randomly choose some of these loras
+        chosen_loras = random.sample(lora_configs, random.randint(1, max_random))
+
+        for lora in chosen_loras:
+            # Randomly determine a value between min_str and max_str
+            strength = random.uniform(lora['min_str'], lora['max_str'])
+
+            # Add to the stack
+            lora_list.extend([(lora['name'], strength, strength)])
+
+            # Append the current lora and its value to the string
+            chosen_str += f"{lora['name']}: {round(strength, 2)}, "
+
+            # Merge this lora's trigger words into the combined set, removing duplicates
+            existing_chosen_trigger_words = set(chosen_trigger_words.split(', '))
+            chosen_trigger_words = set(lora['trigger_words'].split(', '))
+            combined_words = existing_chosen_trigger_words | chosen_trigger_words
+            chosen_trigger_words = ', '.join(sorted(combined_words))
+
+        # Find the last occurrence of the comma to remove it
+        last_comma_index = chosen_str.rfind(',')
+        # Slice the string to remove the last comma and everything after it
+        if last_comma_index != -1:
+            chosen_str = chosen_str[:last_comma_index]
+
+        return (lora_list, chosen_trigger_words.lstrip(", "), chosen_str,)
\ No newline at end of file
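The selection logic shared by `RandomizeLoras` and `RandomizeLorasStack` above boils down to: seed the RNG, drop `"None"` and duplicate entries, pick between one and `max_random` LoRAs, and draw each strength uniformly from `[min_str, max_str]`. A standalone sketch of just that step, with hypothetical LoRA filenames:

```python
import random

# Standalone sketch of the seeded selection step used by both LoRA nodes.
# The filenames below are hypothetical examples.
def pick_loras(configs, seed, max_random):
    random.seed(seed)  # same seed + same configs -> same picks
    unique, seen = [], set()
    for cfg in configs:
        if cfg["name"] != "None" and cfg["name"] not in seen:
            seen.add(cfg["name"])
            unique.append(cfg)
    if not unique:
        return []
    count = random.randint(1, min(max_random, len(unique)))
    return [(c["name"], random.uniform(c["min_str"], c["max_str"]))
            for c in random.sample(unique, count)]

print(pick_loras(
    [{"name": "style_a.safetensors", "min_str": 0.5, "max_str": 1.0},
     {"name": "style_b.safetensors", "min_str": 0.3, "max_str": 0.8}],
    seed=42, max_random=2))
```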
diff --git a/unwdef_nodes/nodes_text.py b/unwdef_nodes/nodes_text.py
new file mode 100644
index 0000000..3592f88
--- /dev/null
+++ b/unwdef_nodes/nodes_text.py
@@ -0,0 +1,88 @@
+import random
+import json
+
+class RandomTextFromMultiline:
+    def __init__(self):
+        pass
+
+    @classmethod
+    def INPUT_TYPES(cls):
+        inputs = {
+            "required": {
+                "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}),
+                "lines_count": ("INT", {"default": 1, "min": 1, "max": 0xffffffffffffffff}),
+                "delimiter": ("STRING", { "multiline": False, "default": ", " }),
+                "text": ("STRING", { "multiline": True, "default": "" }),
+            }
+        }
+
+        return inputs
+
+    RETURN_TYPES = ("STRING",)
+    RETURN_NAMES = ("STRING",)
+    FUNCTION = "random_text_from_multiline"
+    CATEGORY = "unwdef/text"
+
+    def random_text_from_multiline(self, seed, lines_count, delimiter, text):
+        if seed is not None:
+            random.seed(seed)  # For reproducibility
+
+        # Split the text into separate lines
+        lines = text.splitlines()
+
+        # If lines_count is greater than the number of available lines, adjust it
+        lines_count = min(lines_count, len(lines))
+
+        # Select random unique lines
+        selected_lines = random.sample(lines, lines_count)
+
+        # Join the selected lines with the delimiter
+        result = delimiter.join(selected_lines)
+
+        return (result, )
+
+class TextMultilineWithVariables:
+    def __init__(self):
+        pass
+
+    @classmethod
+    def INPUT_TYPES(cls):
+        inputs = {
+            "required": {
+                "text": ("STRING", { "multiline": True, "default": "picture of !var_1 in the style of !var_2" }),
+            }
+        }
+
+        inputs["optional"] = {
+            "var_1": ("STRING", { "multiline": False, "default": "", "forceInput": True }),
+            "var_2": ("STRING", { "multiline": False, "default": "", "forceInput": True }),
+            "var_3": ("STRING", { "multiline": False, "default": "", "forceInput": True }),
+            "var_4": ("STRING", { "multiline": False, "default": "", "forceInput": True }),
+            "var_5": ("STRING", { "multiline": False, "default": "", "forceInput": True }),
+        }
+
+        return inputs
+
+    RETURN_TYPES = ("STRING",)
+    RETURN_NAMES = ("STRING",)
+    FUNCTION = "variable_text_multiline"
+    CATEGORY = "unwdef/text"
+
+    def variable_text_multiline(self, text=None, var_1=None, var_2=None, var_3=None, var_4=None, var_5=None):
+        if text is None:
+            return ("", )
+
+        # Create a dictionary to map placeholders to their corresponding variables
+        replacements = {
+            '!var_1': var_1 if var_1 is not None else '',
+            '!var_2': var_2 if var_2 is not None else '',
+            '!var_3': var_3 if var_3 is not None else '',
+            '!var_4': var_4 if var_4 is not None else '',
+            '!var_5': var_5 if var_5 is not None else ''
+        }
+
+        # Replace each placeholder in the text with its corresponding value from the dictionary
+        for key, value in replacements.items():
+            text = text.replace(key, value)
+
+        return (text, )
\ No newline at end of file
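The `!var_x` substitution performed by `TextMultilineWithVariables` can be reproduced outside ComfyUI with plain string replacement; a small sketch using hypothetical variable values:

```python
# Standalone sketch of the !var_x substitution done by TextMultilineWithVariables.
# Variable values here are hypothetical.
def fill_variables(text, **variables):
    for i in range(1, 6):  # the node exposes var_1 .. var_5
        text = text.replace(f"!var_{i}", variables.get(f"var_{i}") or "")
    return text

print(fill_variables("picture of !var_1 in the style of !var_2",
                     var_1="a red fox", var_2="watercolor"))
# -> "picture of a red fox in the style of watercolor"
```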