merge
commit 101b6cca57

@@ -1,52 +0,0 @@
import folder_paths
from PIL import Image, ImageOps
import numpy as np
import torch
import json


class ComfyUIDeployExternalTextList:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "input_id": (
                    "STRING",
                    {"multiline": False, "default": 'input_text_list'},
                ),
                "text": (
                    "STRING",
                    {"multiline": True, "default": "[]"},
                ),
            },
            "optional": {
                "display_name": (
                    "STRING",
                    {"multiline": False, "default": ""},
                ),
                "description": (
                    "STRING",
                    {"multiline": True, "default": ""},
                ),
            }
        }

    RETURN_TYPES = ("STRING",)
    RETURN_NAMES = ("text",)

    OUTPUT_IS_LIST = (True,)

    FUNCTION = "run"

    CATEGORY = "text"

    def run(self, input_id, text=None, display_name=None, description=None):
        text_list = []
        try:
            text_list = json.loads(text)  # "text" is expected to be a JSON array string
        except Exception as e:
            print(f"Error processing text list: {e}")
        return ([text_list],)


NODE_CLASS_MAPPINGS = {"ComfyUIDeployExternalTextList": ComfyUIDeployExternalTextList}
NODE_DISPLAY_NAME_MAPPINGS = {"ComfyUIDeployExternalTextList": "External Text List (ComfyUI Deploy)"}
comfy-nodes/model_list.py (new file, 60 lines added)
@@ -0,0 +1,60 @@
import folder_paths
from os import walk


class AnyType(str):
    # Comparison trick used for ComfyUI wildcard types: this string never
    # reports "not equal", so it matches any declared input/output type.
    def __ne__(self, __value: object) -> bool:
        return False


WILDCARD = AnyType("*")

MODEL_EXTENSIONS = {
    "safetensors": "SafeTensors file format",
    "ckpt": "Checkpoint file",
    "pth": "PyTorch serialized file",
    "pkl": "Pickle file",
    "onnx": "ONNX file",
}


def fetch_files(path):
    # Recursively collect model files under "path". "fs" is initialised before
    # the loop so the function returns an empty list (not None) when the
    # directory is missing or empty.
    fs = []
    for (dirpath, dirnames, filenames) in walk(path):
        for dirname in dirnames:
            fs.extend(fetch_files(f"{dirpath}/{dirname}"))
        for filename in filenames:
            # Remove "./models/" from the beginning of dirpath
            relative_dirpath = dirpath.replace("./models/", "", 1)
            file_path = f"{relative_dirpath}/{filename}"

            # Only add files with a known model extension
            file_extension = filename.split('.')[-1].lower()
            if file_extension in MODEL_EXTENSIONS:
                fs.append(file_path)
        # Only the top level of walk() is consumed here; subdirectories are
        # handled by the recursive calls above.
        break
    return fs


allModels = fetch_files("./models")


class ComfyUIDeployModelList:
    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "model": (allModels, ),
            }
        }

    RETURN_TYPES = (WILDCARD,)
    RETURN_NAMES = ("model",)

    FUNCTION = "run"

    CATEGORY = "model"

    def run(self, model=""):
        # Split the model path by '/' and keep only the file name
        model_name = model.split('/')[-1]
        return [model_name]


NODE_CLASS_MAPPINGS = {"ComfyUIDeployModelList": ComfyUIDeployModelList}
NODE_DISPLAY_NAME_MAPPINGS = {"ComfyUIDeployModelList": "Model List (ComfyUI Deploy)"}
@@ -383,6 +383,7 @@ def apply_inputs_to_workflow(workflow_api: Any, inputs: Any, sid: str = None):
def send_prompt(sid: str, inputs: StreamingPrompt):
    # workflow_api = inputs.workflow_api
    workflow_api = copy.deepcopy(inputs.workflow_api)
    workflow = copy.deepcopy(inputs.workflow)

    # Random seed
    apply_random_seed_to_workflow(workflow_api)

@@ -399,6 +400,7 @@ def send_prompt(sid: str, inputs: StreamingPrompt):
        "prompt": workflow_api,
        "client_id": sid,  # "comfy_deploy_instance", #api.client_id
        "prompt_id": prompt_id,
        "extra_data": {"extra_pnginfo": {"workflow": workflow}},
    }

    try:

@@ -439,6 +441,7 @@ async def comfy_deploy_run(request):
    prompt_id = data.get("prompt_id")
    inputs = data.get("inputs")
    gpu_event_id = data.get("gpu_event_id", None)
    workflow = data.get("workflow")

    # Now it handles directly in here
    apply_random_seed_to_workflow(workflow_api)
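The new gpu_event_id value is read straight from the request body via data.get("gpu_event_id", None), so a caller would pass it alongside the existing keys, roughly as sketched below (every value is a placeholder; workflow_api is assumed to be part of the same body even though that read sits outside this hunk).

# Hypothetical request body for the run endpoint above; only the key names
# come from the diff, all values are placeholders.
payload = {
    "workflow_api": {},               # assumed key, read earlier in the handler
    "workflow": {},                   # full workflow JSON, later embedded as extra_pnginfo
    "prompt_id": "example-prompt-id",
    "inputs": {"input_text": "hello"},
    "gpu_event_id": "example-gpu-event-id",  # new field; omitted -> defaults to None
}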
@@ -448,6 +451,7 @@ async def comfy_deploy_run(request):
        "prompt": workflow_api,
        "client_id": "comfy_deploy_instance",  # api.client_id
        "prompt_id": prompt_id,
        "extra_data": {"extra_pnginfo": {"workflow": workflow}},
    }

    prompt_metadata[prompt_id] = SimplePrompt(

@@ -503,6 +507,7 @@ async def stream_prompt(data, token):
    # The prompt id generated from comfy deploy, can be None
    prompt_id = data.get("prompt_id")
    inputs = data.get("inputs")
    workflow = data.get("workflow")

    # Now it handles directly in here
    apply_random_seed_to_workflow(workflow_api)

@@ -512,6 +517,7 @@ async def stream_prompt(data, token):
        "prompt": workflow_api,
        "client_id": "comfy_deploy_instance",  # api.client_id
        "prompt_id": prompt_id,
        "extra_data": {"extra_pnginfo": {"workflow": workflow}},
    }

    prompt_metadata[prompt_id] = SimplePrompt(

@@ -886,6 +892,7 @@ async def websocket_handler(request):
                file_upload_endpoint=request.rel_url.query.get(
                    "file_upload_endpoint", None
                ),
                workflow=workflow["workflow"],
            )

            await update_realtime_run_status(
@@ -24,6 +24,7 @@ class StreamingPrompt(BaseModel):
    running_prompt_ids: set[str] = set()
    status_endpoint: Optional[str]
    file_upload_endpoint: Optional[str]
    workflow: Any

class SimplePrompt(BaseModel):
    status_endpoint: Optional[str]
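For context, the hunk above only adds the workflow field; a trimmed-down stand-in for the two models (the real classes in the repository carry additional fields not visible in this diff) would look roughly like this:

from typing import Any, Optional
from pydantic import BaseModel

class StreamingPromptSketch(BaseModel):
    # Stand-in for illustration only, limited to the fields shown in the hunk.
    running_prompt_ids: set[str] = set()
    status_endpoint: Optional[str]
    file_upload_endpoint: Optional[str]
    workflow: Any  # newly added: full workflow JSON, later embedded as extra_pnginfo

class SimplePromptSketch(BaseModel):
    status_endpoint: Optional[str]

prompt = StreamingPromptSketch(
    status_endpoint=None,
    file_upload_endpoint=None,
    workflow={"nodes": [], "links": []},  # hypothetical payload
)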