import json


class ComfyUIDeployExternalTextList:
    """ComfyUI node exposing an external JSON-array text input as a list output.

    The ``text`` input is expected to be a JSON array string (e.g. '["a", "b"]');
    it is parsed and emitted as a list so downstream nodes receive one item per
    element.  On any parse failure the node degrades to an empty list instead of
    aborting the workflow.
    """

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "input_id": (
                    "STRING",
                    {"multiline": False, "default": "input_text_list"},
                ),
                "text": (
                    "STRING",
                    {"multiline": True, "default": "[]"},
                ),
            },
            "optional": {
                "display_name": (
                    "STRING",
                    {"multiline": False, "default": ""},
                ),
                "description": (
                    "STRING",
                    {"multiline": True, "default": ""},
                ),
            },
        }

    RETURN_TYPES = ("STRING",)
    RETURN_NAMES = ("text",)

    # Tell ComfyUI the single output slot carries a list of values.
    OUTPUT_IS_LIST = (True,)

    FUNCTION = "run"

    CATEGORY = "text"

    def run(self, input_id, text=None, display_name=None, description=None):
        """Parse *text* as a JSON array; fall back to [] on bad or missing input.

        Returns a one-element tuple wrapping the parsed list, matching
        RETURN_TYPES / OUTPUT_IS_LIST.
        """
        text_list = []
        try:
            # ValueError covers json.JSONDecodeError; TypeError covers text=None.
            text_list = json.loads(text)
        except (TypeError, ValueError) as e:
            # Fixed: original message said "Error processing images" in a
            # text-list node, and caught every exception via a broad handler.
            print(f"Error processing text list: {e}")
        return ([text_list],)


NODE_CLASS_MAPPINGS = {"ComfyUIDeployExternalTextList": ComfyUIDeployExternalTextList}
NODE_DISPLAY_NAME_MAPPINGS = {"ComfyUIDeployExternalTextList": "External Text List (ComfyUI Deploy)"}
from os import walk


class AnyType(str):
    """String subclass that never compares unequal — ComfyUI wildcard socket type."""

    def __ne__(self, __value: object) -> bool:
        return False


WILDCARD = AnyType("*")

# File extensions recognized as model files.
MODEL_EXTENSIONS = {
    "safetensors": "SafeTensors file format",
    "ckpt": "Checkpoint file",
    "pth": "PyTorch serialized file",
    "pkl": "Pickle file",
    "onnx": "ONNX file",
}


def fetch_files(path):
    """Return the paths of all model files found under *path*.

    Paths are reported relative to the ``./models/`` root (that prefix is
    stripped).  Fixes three defects in the original implementation:

    * the accumulator was re-initialized on every ``os.walk`` iteration, so
      previously collected files were discarded;
    * ``os.walk`` already recurses into subdirectories, but the original ALSO
      recursed manually into every dirname, duplicating results;
    * the emitted path contained a placeholder instead of the actual filename.
    """
    fs = []
    for dirpath, _dirnames, filenames in walk(path):
        # Remove "./models/" from the beginning of dirpath.
        relative_dirpath = dirpath.replace("./models/", "", 1)
        for filename in filenames:
            # Only keep files whose extension is a known model format.
            file_extension = filename.rsplit(".", 1)[-1].lower()
            if file_extension in MODEL_EXTENSIONS:
                fs.append(f"{relative_dirpath}/{filename}")
    return fs


# Enumerated once at import time to populate the node's dropdown choices.
allModels = fetch_files("./models")


class ComfyUIDeployModalList:
    """ComfyUI node listing every available model; outputs the bare file name."""

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "model": (allModels,),
            }
        }

    RETURN_TYPES = (WILDCARD,)
    RETURN_NAMES = ("model",)

    FUNCTION = "run"

    CATEGORY = "model"

    def run(self, model=""):
        # Split the model path by '/' and pass only the final component
        # (the file name) downstream.
        model_name = model.split("/")[-1]
        return [model_name]


NODE_CLASS_MAPPINGS = {"ComfyUIDeployModelList": ComfyUIDeployModalList}
NODE_DISPLAY_NAME_MAPPINGS = {"ComfyUIDeployModelList": "Model List (ComfyUI Deploy)"}
10 insertions(+), 2 deletions(-) diff --git a/custom_routes.py b/custom_routes.py index 68cbe7e..73d0ac6 100644 --- a/custom_routes.py +++ b/custom_routes.py @@ -333,6 +333,7 @@ def apply_inputs_to_workflow(workflow_api: Any, inputs: Any, sid: str = None): def send_prompt(sid: str, inputs: StreamingPrompt): # workflow_api = inputs.workflow_api workflow_api = copy.deepcopy(inputs.workflow_api) + workflow = copy.deepcopy(inputs.workflow) # Random seed apply_random_seed_to_workflow(workflow_api) @@ -348,7 +349,8 @@ def send_prompt(sid: str, inputs: StreamingPrompt): prompt = { "prompt": workflow_api, "client_id": sid, #"comfy_deploy_instance", #api.client_id - "prompt_id": prompt_id + "prompt_id": prompt_id, + "extra_data": {"extra_pnginfo": {"workflow": workflow}}, } try: @@ -387,6 +389,7 @@ async def comfy_deploy_run(request): # The prompt id generated from comfy deploy, can be None prompt_id = data.get("prompt_id") inputs = data.get("inputs") + workflow = data.get("workflow") # Now it handles directly in here apply_random_seed_to_workflow(workflow_api) @@ -396,6 +399,7 @@ async def comfy_deploy_run(request): "prompt": workflow_api, "client_id": "comfy_deploy_instance", #api.client_id "prompt_id": prompt_id, + "extra_data": {"extra_pnginfo": {"workflow": workflow}} } prompt_metadata[prompt_id] = SimplePrompt( @@ -446,6 +450,7 @@ async def stream_prompt(data, token): # The prompt id generated from comfy deploy, can be None prompt_id = data.get("prompt_id") inputs = data.get("inputs") + workflow = data.get("workflow") # Now it handles directly in here apply_random_seed_to_workflow(workflow_api) @@ -454,7 +459,8 @@ async def stream_prompt(data, token): prompt = { "prompt": workflow_api, "client_id": "comfy_deploy_instance", #api.client_id - "prompt_id": prompt_id + "prompt_id": prompt_id, + "extra_data": {"extra_pnginfo": {"workflow": workflow}}, } prompt_metadata[prompt_id] = SimplePrompt( @@ -788,6 +794,7 @@ async def websocket_handler(request): inputs={}, 
status_endpoint=status_endpoint, file_upload_endpoint=request.rel_url.query.get('file_upload_endpoint', None), + workflow=workflow["workflow"], ) await update_realtime_run_status(realtime_id, status_endpoint, Status.RUNNING) diff --git a/globals.py b/globals.py index 3ee2658..ea705ee 100644 --- a/globals.py +++ b/globals.py @@ -24,6 +24,7 @@ class StreamingPrompt(BaseModel): running_prompt_ids: set[str] = set() status_endpoint: Optional[str] file_upload_endpoint: Optional[str] + workflow: Any class SimplePrompt(BaseModel): status_endpoint: Optional[str]