nick 2024-08-07 20:43:38 -07:00
commit 0582d1d869
6 changed files with 1290 additions and 1039 deletions

View File

@@ -49,7 +49,8 @@ class ComfyUIDeployExternalLora:
         existing_loras = folder_paths.get_filename_list("loras")
         # Check if lora_save_name exists in the list
         if lora_save_name in existing_loras:
-            raise "LoRA file '{lora_save_name}' already exists."
+            print(f"using lora: {lora_save_name}")
+            return (lora_save_name,)
         else:
             lora_save_name = str(uuid.uuid4()) + ".safetensors"
             print(lora_save_name)
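Effect of this hunk: a LoRA name that is already present in the loras folder is now reused and returned, where the old code raised (with a bare string, which itself would have been a TypeError at runtime). A standalone sketch of the resulting resolution logic; the function name and signature are invented for illustration and are not in the repo:

import uuid

def resolve_lora_name(lora_save_name, existing_loras):
    # New behavior: an existing file is reused instead of treated as an error.
    if lora_save_name in existing_loras:
        print(f"using lora: {lora_save_name}")
        return lora_save_name
    # Unchanged fallback: generate a fresh random filename to save into.
    lora_save_name = str(uuid.uuid4()) + ".safetensors"
    print(lora_save_name)
    return lora_save_name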

View File

@@ -0,0 +1,43 @@
+import folder_paths
+from PIL import Image, ImageOps
+import numpy as np
+import torch
+import json
+
+
+class ComfyUIDeployExternalTextList:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "input_id": (
+                    "STRING",
+                    {"multiline": False, "default": 'input_text_list'},
+                ),
+                "text": (
+                    "STRING",
+                    {"multiline": True, "default": "[]"},
+                ),
+            }
+        }
+
+    RETURN_TYPES = ("STRING",)
+    RETURN_NAMES = ("text",)
+    OUTPUT_IS_LIST = (True,)
+
+    FUNCTION = "run"
+    CATEGORY = "text"
+
+    def run(self, input_id, text=None):
+        text_list = []
+        try:
+            text_list = json.loads(text)  # Assuming text is a JSON array string
+        except Exception as e:
+            print(f"Error processing images: {e}")
+            pass
+        return [text_list]
+
+
+NODE_CLASS_MAPPINGS = {"ComfyUIDeployExternalTextList": ComfyUIDeployExternalTextList}
+NODE_DISPLAY_NAME_MAPPINGS = {"ComfyUIDeployExternalTextList": "External Text List (ComfyUI Deploy)"}
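How the new node behaves at runtime: the text input is expected to be a JSON array encoded as a string, and because OUTPUT_IS_LIST is (True,), the parsed list is emitted as a list output (one item per downstream invocation). A minimal sketch of the same parsing, not from the commit:

import json

def parse_text_list(text="[]"):
    # Mirrors ComfyUIDeployExternalTextList.run: parse a JSON array string,
    # fall back to an empty list on malformed input.
    try:
        return [json.loads(text)]
    except Exception as e:
        print(f"Invalid JSON array: {e}")
        return [[]]

print(parse_text_list('["a photo of a cat", "a photo of a dog"]'))
# -> [['a photo of a cat', 'a photo of a dog']]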

View File

@@ -22,17 +22,98 @@ from typing import Dict, List, Union, Any, Optional
 from PIL import Image
 import copy
 import struct
+from aiohttp import ClientError
+import atexit
+
+# Global session
+client_session = None
+
+# def create_client_session():
+#     global client_session
+#     if client_session is None:
+#         client_session = aiohttp.ClientSession()
+
+async def ensure_client_session():
+    global client_session
+    if client_session is None:
+        client_session = aiohttp.ClientSession()
+
+async def cleanup():
+    global client_session
+    if client_session:
+        await client_session.close()
+
+def exit_handler():
+    print("Exiting the application. Initiating cleanup...")
+    loop = asyncio.get_event_loop()
+    loop.run_until_complete(cleanup())
+
+atexit.register(exit_handler)
+
+max_retries = int(os.environ.get('MAX_RETRIES', '3'))
+retry_delay_multiplier = float(os.environ.get('RETRY_DELAY_MULTIPLIER', '2'))
+
+print(f"max_retries: {max_retries}, retry_delay_multiplier: {retry_delay_multiplier}")
+
+async def async_request_with_retry(method, url, **kwargs):
+    global client_session
+    await ensure_client_session()
+    retry_delay = 1  # Start with 1 second delay
+
+    for attempt in range(max_retries):
+        try:
+            async with client_session.request(method, url, **kwargs) as response:
+                response.raise_for_status()
+                return response
+        except ClientError as e:
+            if attempt == max_retries - 1:
+                logger.error(f"Request failed after {max_retries} attempts: {e}")
+                # raise
+            logger.warning(f"Request failed (attempt {attempt + 1}/{max_retries}): {e}")
+            await asyncio.sleep(retry_delay)
+            retry_delay *= retry_delay_multiplier  # Exponential backoff
+
 from logging import basicConfig, getLogger
-import logfire
-# if os.environ.get('LOGFIRE_TOKEN', None) is not None:
-logfire.configure(
-    send_to_logfire="if-token-present"
-)
-# basicConfig(handlers=[logfire.LogfireLoggingHandler()])
-logfire_handler = logfire.LogfireLoggingHandler()
-logger = getLogger("comfy-deploy")
-logger.addHandler(logfire_handler)
+
+# Check for an environment variable to enable/disable Logfire
+use_logfire = os.environ.get('USE_LOGFIRE', 'false').lower() == 'true'
+
+if use_logfire:
+    try:
+        import logfire
+        logfire.configure(
+            send_to_logfire="if-token-present"
+        )
+        logger = logfire
+    except ImportError:
+        print("Logfire not installed or disabled. Using standard Python logger.")
+        use_logfire = False
+
+if not use_logfire:
+    # Use a standard Python logger when Logfire is disabled or not available
+    logger = getLogger("comfy-deploy")
+    basicConfig(level="INFO")  # You can adjust the logging level as needed
+
+def log(level, message, **kwargs):
+    if use_logfire:
+        getattr(logger, level)(message, **kwargs)
+    else:
+        getattr(logger, level)(f"{message} {kwargs}")
+
+# For a span, you might need to create a context manager
+from contextlib import contextmanager
+
+@contextmanager
+def log_span(name):
+    if use_logfire:
+        with logger.span(name):
+            yield
+    else:
+        yield
+        # logger.info(f"Start: {name}")
+        # yield
+        # logger.info(f"End: {name}")
+
 from globals import StreamingPrompt, Status, sockets, SimplePrompt, streaming_prompt_metadata, prompt_metadata
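A quick sketch of how the new helper is meant to be called; this is not code from the commit. The endpoint URL and payload are invented, it assumes async_request_with_retry from the block above is in scope, and since the final raise is commented out, the helper returns None when every attempt fails, so callers that need the response should guard for that:

import asyncio

async def report_status():
    # Hypothetical status endpoint and body, mirroring the POST call sites below.
    body = {"run_id": "some-prompt-id", "status": "running"}
    response = await async_request_with_retry(
        'POST', "https://example.com/api/status", json=body
    )
    if response is not None and response.status == 200:
        print("status update delivered")

# asyncio.run(report_status())  # in practice this runs inside the server's event loop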
@@ -306,7 +387,7 @@ async def stream_prompt(data):
         workflow_api=workflow_api
     )

-    logfire.info("Begin prompt", prompt=prompt)
+    # log('info', "Begin prompt", prompt=prompt)

     try:
         res = post_prompt(prompt)
@@ -359,8 +440,8 @@ async def stream_response(request):
     prompt_id = data.get("prompt_id")
     comfy_message_queues[prompt_id] = asyncio.Queue()

-    with logfire.span('Streaming Run'):
-        logfire.info('Streaming prompt')
+    with log_span('Streaming Run'):
+        log('info', 'Streaming prompt')

         try:
             result = await stream_prompt(data=data)
@@ -373,7 +454,7 @@ async def stream_response(request):
                 if not comfy_message_queues[prompt_id].empty():
                     data = await comfy_message_queues[prompt_id].get()

-                    logfire.info(data["event"], data=json.dumps(data))
+                    # log('info', data["event"], data=json.dumps(data))
                     # logger.info("listener", data)
                     await response.write(f"event: event_update\ndata: {json.dumps(data)}\n\n".encode('utf-8'))
                     await response.drain()  # Ensure the buffer is flushed
@@ -384,10 +465,10 @@ async def stream_response(request):
                 await asyncio.sleep(0.1)  # Adjust the sleep duration as needed
         except asyncio.CancelledError:
-            logfire.info("Streaming was cancelled")
+            log('info', "Streaming was cancelled")
             raise
         except Exception as e:
-            logfire.error("Streaming error", error=e)
+            log('error', "Streaming error", error=e)
         finally:
             # event_emitter.off("send_json", task)
             await response.write_eof()
@@ -482,10 +563,9 @@ async def upload_file_endpoint(request):
     if get_url:
         try:
-            async with aiohttp.ClientSession() as session:
                 headers = {'Authorization': f'Bearer {token}'}
                 params = {'file_size': file_size, 'type': file_type}
-                async with session.get(get_url, params=params, headers=headers) as response:
+                response = await async_request_with_retry('GET', get_url, params=params, headers=headers)
                     if response.status == 200:
                         content = await response.json()
                         upload_url = content["upload_url"]
@@ -496,7 +576,7 @@ async def upload_file_endpoint(request):
                         # "x-amz-acl": "public-read",
                         "Content-Length": str(file_size)
                     }
-                    async with session.put(upload_url, data=f, headers=headers) as upload_response:
+                    upload_response = await async_request_with_retry('PUT', upload_url, data=f, headers=headers)
                         if upload_response.status == 200:
                             return web.json_response({
                                 "message": "File uploaded successfully",
@@ -588,9 +668,7 @@ async def update_realtime_run_status(realtime_id: str, status_endpoint: str, sta
     if (status_endpoint is None):
         return
     # requests.post(status_endpoint, json=body)
-    async with aiohttp.ClientSession() as session:
-        async with session.post(status_endpoint, json=body) as response:
-            pass
+    await async_request_with_retry('POST', status_endpoint, json=body)

 @server.PromptServer.instance.routes.get('/comfyui-deploy/ws')
 async def websocket_handler(request):
@@ -611,9 +689,8 @@ async def websocket_handler(request):
     status_endpoint = request.rel_url.query.get('status_endpoint', None)

     if auth_token is not None and get_workflow_endpoint_url is not None:
-        async with aiohttp.ClientSession() as session:
             headers = {'Authorization': f'Bearer {auth_token}'}
-            async with session.get(get_workflow_endpoint_url, headers=headers) as response:
+            response = await async_request_with_retry('GET', get_workflow_endpoint_url, headers=headers)
                 if response.status == 200:
                     workflow = await response.json()
@@ -805,13 +882,14 @@ async def send_json_override(self, event, data, sid=None):
             prompt_metadata[prompt_id].progress.add(node)
             calculated_progress = len(prompt_metadata[prompt_id].progress) / len(prompt_metadata[prompt_id].workflow_api)
+            calculated_progress = round(calculated_progress, 2)
             # logger.info("calculated_progress", calculated_progress)

             if prompt_metadata[prompt_id].last_updated_node is not None and prompt_metadata[prompt_id].last_updated_node == node:
                 return
             prompt_metadata[prompt_id].last_updated_node = node
             class_type = prompt_metadata[prompt_id].workflow_api[node]['class_type']
-            logger.info(f"updating run live status {class_type}")
+            logger.info(f"At: {calculated_progress * 100}% - {class_type}")
             await send("live_status", {
                 "prompt_id": prompt_id,
                 "current_node": class_type,
@@ -836,14 +914,15 @@ async def send_json_override(self, event, data, sid=None):
         # await update_run_with_output(prompt_id, data)

     if event == 'executed' and 'node' in data and 'output' in data:
-        logger.info(f"executed {data}")
         if prompt_id in prompt_metadata:
             node = data.get('node')
             class_type = prompt_metadata[prompt_id].workflow_api[node]['class_type']
-            logger.info(f"executed {class_type}")
+            logger.info(f"Executed {class_type} {data}")
             if class_type == "PreviewImage":
-                logger.info("skipping preview image")
+                logger.info("Skipping preview image")
                 return
+        else:
+            logger.info(f"Executed {data}")

         await update_run_with_output(prompt_id, data.get('output'), node_id=data.get('node'))
         # await update_run_with_output(prompt_id, data.get('output'), node_id=data.get('node'))
@@ -864,7 +943,7 @@ async def update_run_live_status(prompt_id, live_status, calculated_progress: fl
     if (status_endpoint is None):
         return

-    logger.info(f"progress {calculated_progress}")
+    # logger.info(f"progress {calculated_progress}")

     body = {
         "run_id": prompt_id,
@@ -883,9 +962,7 @@ async def update_run_live_status(prompt_id, live_status, calculated_progress: fl
     })

     # requests.post(status_endpoint, json=body)
-    async with aiohttp.ClientSession() as session:
-        async with session.post(status_endpoint, json=body) as response:
-            pass
+    await async_request_with_retry('POST', status_endpoint, json=body)


 async def update_run(prompt_id: str, status: Status):
@@ -916,9 +993,7 @@ async def update_run(prompt_id: str, status: Status):
         try:
             # requests.post(status_endpoint, json=body)
             if (status_endpoint is not None):
-                async with aiohttp.ClientSession() as session:
-                    async with session.post(status_endpoint, json=body) as response:
-                        pass
+                await async_request_with_retry('POST', status_endpoint, json=body)

             if (status_endpoint is not None) and cd_enable_run_log and (status == Status.SUCCESS or status == Status.FAILED):
                 try:
@@ -948,9 +1023,7 @@ async def update_run(prompt_id: str, status: Status):
                         ]
                     }

-                    async with aiohttp.ClientSession() as session:
-                        async with session.post(status_endpoint, json=body) as response:
-                            pass
+                    await async_request_with_retry('POST', status_endpoint, json=body)
                     # requests.post(status_endpoint, json=body)

                 except Exception as log_error:
                     logger.info(f"Error reading log file: {log_error}")
@@ -998,7 +1071,7 @@ async def upload_file(prompt_id, filename, subfolder=None, content_type="image/p
     filename = os.path.basename(filename)
     file = os.path.join(output_dir, filename)

-    logger.info(f"uploading file {file}")
+    logger.info(f"Uploading file {file}")

     file_upload_endpoint = prompt_metadata[prompt_id].file_upload_endpoint
@@ -1024,18 +1097,17 @@ async def upload_file(prompt_id, filename, subfolder=None, content_type="image/p
             "Content-Length": str(len(data)),
         }
         # response = requests.put(ok.get("url"), headers=headers, data=data)
-        async with aiohttp.ClientSession() as session:
-            async with session.put(ok.get("url"), headers=headers, data=data) as response:
-                logger.info(f"Upload file response status: {response.status}, status text: {response.reason}")
+        response = await async_request_with_retry('PUT', ok.get("url"), headers=headers, data=data)
+        logger.info(f"Upload file response status: {response.status}, status text: {response.reason}")
         end_time = time.time()  # End timing after the request is complete
         logger.info("Upload time: {:.2f} seconds".format(end_time - start_time))

 def have_pending_upload(prompt_id):
     if prompt_id in prompt_metadata and len(prompt_metadata[prompt_id].uploading_nodes) > 0:
-        logger.info(f"have pending upload {len(prompt_metadata[prompt_id].uploading_nodes)}")
+        logger.info(f"Have pending upload {len(prompt_metadata[prompt_id].uploading_nodes)}")
         return True

-    logger.info("no pending upload")
+    logger.info("No pending upload")
     return False

 def mark_prompt_done(prompt_id):
@@ -1093,7 +1165,7 @@ async def update_file_status(prompt_id: str, data, uploading, have_error=False,
     else:
         prompt_metadata[prompt_id].uploading_nodes.discard(node_id)

-    logger.info(prompt_metadata[prompt_id].uploading_nodes)
+    logger.info(f"Remaining uploads: {prompt_metadata[prompt_id].uploading_nodes}")
     # Update the remote status

     if have_error:
@@ -1177,7 +1249,7 @@ async def update_run_with_output(prompt_id, data, node_id=None):
     if have_upload_media:
         try:
-            logger.info(f"\nhave_upload {have_upload_media} {node_id}")
+            logger.info(f"\nHave_upload {have_upload_media} Node Id: {node_id}")

             if have_upload_media:
                 await update_file_status(prompt_id, data, True, node_id=node_id)
@@ -1190,9 +1262,7 @@ async def update_run_with_output(prompt_id, data, node_id=None):
     # requests.post(status_endpoint, json=body)
     if status_endpoint is not None:
-        async with aiohttp.ClientSession() as session:
-            async with session.post(status_endpoint, json=body) as response:
-                pass
+        await async_request_with_retry('POST', status_endpoint, json=body)

     await send('outputs_uploaded', {
         "prompt_id": prompt_id

View File

@@ -2,4 +2,4 @@ aiofiles
 pydantic
 opencv-python
 imageio-ffmpeg
-logfire
+# logfire

View File

@@ -24,7 +24,10 @@ function dispatchAPIEventData(data) {
           message += "\n" + nodeError.class_type + ":";
           for (const errorReason of nodeError.errors) {
             message +=
-              "\n - " + errorReason.message + ": " + errorReason.details;
+              "\n - " +
+              errorReason.message +
+              ": " +
+              errorReason.details;
           }
         }
@@ -44,32 +47,38 @@ function dispatchAPIEventData(data) {
         // window.name = this.clientId; // use window name so it isnt reused when duplicating tabs
         // sessionStorage.setItem("clientId", this.clientId); // store in session storage so duplicate tab can load correct workflow
       }
-      api.dispatchEvent(new CustomEvent("status", { detail: msg.data.status }));
+      api.dispatchEvent(
+        new CustomEvent("status", { detail: msg.data.status })
+      );
       break;
     case "progress":
-      api.dispatchEvent(new CustomEvent("progress", { detail: msg.data }));
+      api.dispatchEvent(
+        new CustomEvent("progress", { detail: msg.data })
+      );
       break;
     case "executing":
       api.dispatchEvent(
-        new CustomEvent("executing", { detail: msg.data.node }),
+        new CustomEvent("executing", { detail: msg.data.node })
       );
       break;
     case "executed":
-      api.dispatchEvent(new CustomEvent("executed", { detail: msg.data }));
+      api.dispatchEvent(
+        new CustomEvent("executed", { detail: msg.data })
+      );
       break;
     case "execution_start":
       api.dispatchEvent(
-        new CustomEvent("execution_start", { detail: msg.data }),
+        new CustomEvent("execution_start", { detail: msg.data })
      );
      break;
    case "execution_error":
      api.dispatchEvent(
-        new CustomEvent("execution_error", { detail: msg.data }),
+        new CustomEvent("execution_error", { detail: msg.data })
      );
      break;
    case "execution_cached":
      api.dispatchEvent(
-        new CustomEvent("execution_cached", { detail: msg.data }),
+        new CustomEvent("execution_cached", { detail: msg.data })
      );
      break;
    default:
@@ -143,11 +152,13 @@ const ext = {
       }

       if (!workflow_version_id) {
-        console.error("No workflow_version_id provided in query parameters.");
+        console.error(
+          "No workflow_version_id provided in query parameters."
+        );
       } else {
         loadingDialog.showLoading(
           "Loading workflow from " + org_display,
-          "Please wait...",
+          "Please wait..."
         );
         fetch(endpoint + "/api/workflow-version/" + workflow_version_id, {
           method: "GET",
@@ -160,7 +171,10 @@ const ext = {
             const data = await res.json();
             const { workflow, workflow_id, error } = data;
             if (error) {
-              infoDialog.showMessage("Unable to load this workflow", error);
+              infoDialog.showMessage(
+                "Unable to load this workflow",
+                error
+              );
               return;
             }
@@ -183,7 +197,7 @@ const ext = {
             window.history.replaceState(
               {},
               document.title,
-              window.location.pathname,
+              window.location.pathname
             );
           });
       }
@@ -206,22 +220,37 @@ const ext = {
       ComfyWidgets.STRING(
         this,
         "workflow_name",
-        ["", { default: this.properties.workflow_name, multiline: false }],
-        app,
+        [
+          "",
+          {
+            default: this.properties.workflow_name,
+            multiline: false,
+          },
+        ],
+        app
       );
       ComfyWidgets.STRING(
         this,
         "workflow_id",
-        ["", { default: this.properties.workflow_id, multiline: false }],
-        app,
+        [
+          "",
+          {
+            default: this.properties.workflow_id,
+            multiline: false,
+          },
+        ],
+        app
       );
       ComfyWidgets.STRING(
         this,
         "version",
-        ["", { default: this.properties.version, multiline: false }],
-        app,
+        [
+          "",
+          { default: this.properties.version, multiline: false },
+        ],
+        app
       );

       // this.widgets.forEach((w) => {
@@ -248,7 +277,7 @@ const ext = {
         title_mode: LiteGraph.NORMAL_TITLE,
         title: "Comfy Deploy",
         collapsable: true,
-      }),
+      })
     );

     ComfyDeploy.category = "deploy";
@@ -278,7 +307,13 @@ const ext = {
         sendEventToCD("cd_plugin_onDeployChanges", prompt);
       } else if (message.type === "queue_prompt") {
         const prompt = await app.graphToPrompt();
-        api.handlePromptGenerated(prompt);
+        if (typeof api.handlePromptGenerated === "function") {
+          api.handlePromptGenerated(prompt);
+        } else {
+          console.warn(
+            "api.handlePromptGenerated is not a function"
+          );
+        }
         sendEventToCD("cd_plugin_onQueuePrompt", prompt);
       } else if (message.type === "get_prompt") {
         const prompt = await app.graphToPrompt();
@@ -301,6 +336,72 @@
         app.graph.add(node);
         app.graph.afterChange();
+      } else if (message.type === "zoom_to_node") {
+        const nodeId = message.data.nodeId;
+        const position = message.data.position;
+        const node = app.graph.getNodeById(nodeId);
+        if (!node) return;
+
+        const canvas = app.canvas;
+
+        const targetScale = 1;
+        const targetOffsetX =
+          canvas.canvas.width / 4 -
+          position[0] -
+          node.size[0] / 2;
+        const targetOffsetY =
+          canvas.canvas.height / 4 -
+          position[1] -
+          node.size[1] / 2;
+
+        const startScale = canvas.ds.scale;
+        const startOffsetX = canvas.ds.offset[0];
+        const startOffsetY = canvas.ds.offset[1];
+
+        const duration = 400; // Animation duration in milliseconds
+        const startTime = Date.now();
+
+        function easeOutCubic(t) {
+          return 1 - Math.pow(1 - t, 3);
+        }
+
+        function lerp(start, end, t) {
+          return start * (1 - t) + end * t;
+        }
+
+        function animate() {
+          const currentTime = Date.now();
+          const elapsedTime = currentTime - startTime;
+          const t = Math.min(elapsedTime / duration, 1);
+          const easedT = easeOutCubic(t);
+
+          const currentScale = lerp(
+            startScale,
+            targetScale,
+            easedT
+          );
+          const currentOffsetX = lerp(
+            startOffsetX,
+            targetOffsetX,
+            easedT
+          );
+          const currentOffsetY = lerp(
+            startOffsetY,
+            targetOffsetY,
+            easedT
+          );
+
+          canvas.setZoom(currentScale);
+          canvas.ds.offset = [currentOffsetX, currentOffsetY];
+          canvas.draw(true, true);
+
+          if (t < 1) {
+            requestAnimationFrame(animate);
+          }
+        }
+
+        animate();
       }
       // else if (message.type === "refresh") {
       //   sendEventToCD("cd_plugin_onRefresh");
@@ -341,7 +442,7 @@
 function showError(title, message) {
   infoDialog.show(
-    `<h3 style="margin: 0px; color: red;">${title}</h3><br><span>${message}</span> `,
+    `<h3 style="margin: 0px; color: red;">${title}</h3><br><span>${message}</span> `
   );
 }
@@ -375,11 +476,14 @@ function createDynamicUIHtml(data) {
   Object.values(data.custom_nodes).forEach((node) => {
     html += `
         <div style="border-bottom: 1px solid #e2e8f0; padding-top: 16px;">
-            <a href="${node.url
-      }" target="_blank" style="font-size: 18px; font-weight: semibold; color: white; text-decoration: none;">${node.name
-      }</a>
+            <a href="${
+              node.url
+            }" target="_blank" style="font-size: 18px; font-weight: semibold; color: white; text-decoration: none;">${
+              node.name
+            }</a>
             <p style="font-size: 14px; color: #4b5563;">${node.hash}</p>
-            ${node.warning
+            ${
+              node.warning
               ? `<p style="font-size: 14px; color: #d69e2e;">${node.warning}</p>`
               : ""
             }
@@ -396,7 +500,8 @@ function createDynamicUIHtml(data) {
   Object.entries(data.models).forEach(([section, items]) => {
     html += `
       <div style="border-bottom: 1px solid #e2e8f0; padding-top: 8px; padding-bottom: 8px;">
-          <h3 style="font-size: 18px; font-weight: semibold; margin-bottom: 8px;">${section.charAt(0).toUpperCase() + section.slice(1)
+          <h3 style="font-size: 18px; font-weight: semibold; margin-bottom: 8px;">${
+            section.charAt(0).toUpperCase() + section.slice(1)
           }</h3>`;
     items.forEach((item) => {
       html += `<p style="font-size: 14px; color: ${textColor};">${item.name}</p>`;
@@ -413,7 +518,8 @@ function createDynamicUIHtml(data) {
   Object.entries(data.files).forEach(([section, items]) => {
     html += `
       <div style="border-bottom: 1px solid #e2e8f0; padding-top: 8px; padding-bottom: 8px;">
-          <h3 style="font-size: 18px; font-weight: semibold; margin-bottom: 8px;">${section.charAt(0).toUpperCase() + section.slice(1)
+          <h3 style="font-size: 18px; font-weight: semibold; margin-bottom: 8px;">${
+            section.charAt(0).toUpperCase() + section.slice(1)
          }</h3>`;
     items.forEach((item) => {
       html += `<p style="font-size: 14px; color: ${textColor};">${item.name}</p>`;
@@ -444,7 +550,7 @@ async function deployWorkflow() {
   if (deployMeta.length == 0) {
     const text = await inputDialog.input(
       "Create your deployment",
-      "Workflow name",
+      "Workflow name"
     );
     if (!text) return;
     console.log(text);
@@ -485,7 +591,7 @@ async function deployWorkflow() {
       <input id="reuse-hash" type="checkbox" checked>Reuse hash from last version</input>
       </label>
     </div>
-    `,
+    `
   );
   if (!ok) return;
@@ -504,7 +610,7 @@ async function deployWorkflow() {
   if (!snapshot) {
     showError(
       "Error when deploying",
-      "Unable to generate snapshot, please install ComfyUI Manager",
+      "Unable to generate snapshot, please install ComfyUI Manager"
     );
     return;
   }
@@ -525,7 +631,7 @@ async function deployWorkflow() {
         "Content-Type": "application/json",
         Authorization: "Bearer " + apiKey,
       },
-    },
+    }
   )
     .then((x) => x.json())
     .catch(() => {
@@ -544,7 +650,7 @@ async function deployWorkflow() {
   // Match previous hash for models
   if (reuseHash && existing_workflow?.dependencies?.models) {
     const previousModelHash = Object.entries(
-      existing_workflow?.dependencies?.models,
+      existing_workflow?.dependencies?.models
     ).flatMap(([key, value]) => {
       return Object.values(value).map((x) => ({
         ...x,
@@ -566,7 +672,9 @@ async function deployWorkflow() {
     console.log(file);
     loadingDialog.showLoading("Generating hash", file);
     const hash = await fetch(
-      `/comfyui-deploy/get-file-hash?file_path=${encodeURIComponent(file)}`,
+      `/comfyui-deploy/get-file-hash?file_path=${encodeURIComponent(
+        file
+      )}`
     ).then((x) => x.json());
     loadingDialog.showLoading("Generating hash", file);
     console.log(hash);
@@ -576,18 +684,24 @@ async function deployWorkflow() {
     console.log("Uploading ", file);
     loadingDialog.showLoading("Uploading file", file);
     try {
-      const { download_url } = await fetch(`/comfyui-deploy/upload-file`, {
+      const { download_url } = await fetch(
+        `/comfyui-deploy/upload-file`,
+        {
           method: "POST",
           body: JSON.stringify({
             file_path: file,
            token: apiKey,
            url: endpoint + "/api/upload-url",
          }),
-      })
+        }
+      )
        .then((x) => x.json())
        .catch(() => {
          loadingDialog.close();
-          confirmDialog.confirm("Error", "Unable to upload file " + file);
+          confirmDialog.confirm(
+            "Error",
+            "Unable to upload file " + file
+          );
        });

      loadingDialog.showLoading("Uploaded file", file);
      console.log(download_url);
@@ -624,8 +738,8 @@ async function deployWorkflow() {
     <iframe
     style="z-index: 10; min-width: 600px; max-width: 1024px; min-height: 600px; border: none; background-color: transparent;"
     src="https://www.comfydeploy.com/dependency-graph?deps=${encodeURIComponent(
-      JSON.stringify(deps),
-    )}" />`,
+      JSON.stringify(deps)
+    )}" />`
     // createDynamicUIHtml(deps),
   );
   if (!depsOk) return;
@@ -686,7 +800,7 @@ async function deployWorkflow() {
     graph.change();

     infoDialog.show(
-      `<span style="color:green;">Deployed successfully!</span> <a style="color:white;" target="_blank" href=${endpoint}/workflows/${data.workflow_id}>-> View here</a> <br/> <br/> Workflow ID: ${data.workflow_id} <br/> Workflow Name: ${workflow_name} <br/> Workflow Version: ${data.version} <br/>`,
+      `<span style="color:green;">Deployed successfully!</span> <a style="color:white;" target="_blank" href=${endpoint}/workflows/${data.workflow_id}>-> View here</a> <br/> <br/> Workflow ID: ${data.workflow_id} <br/> Workflow Name: ${workflow_name} <br/> Workflow Version: ${data.version} <br/>`
     );

     setTimeout(() => {
@@ -837,9 +951,11 @@ export class LoadingDialog extends ComfyDialog {
   showLoading(title, message) {
     this.show(`
       <div style="width: 400px; display: flex; gap: 18px; flex-direction: column; overflow: unset">
-        <h3 style="margin: 0px; display: flex; align-items: center; justify-content: center; gap: 12px;">${title} ${this.loadingIcon
+        <h3 style="margin: 0px; display: flex; align-items: center; justify-content: center; gap: 12px;">${title} ${
+          this.loadingIcon
         }</h3>
-        ${message
+        ${
+          message
           ? `<label style="max-width: 100%; white-space: pre-wrap; word-wrap: break-word;">${message}</label>`
           : ""
         }
@@ -881,17 +997,22 @@ export class InputDialog extends InfoDialog {
           type: "button",
           textContent: "Save",
           onclick: () => {
-            const input = this.textElement.querySelector("#input").value;
+            const input =
+              this.textElement.querySelector("#input").value;
             if (input.trim() === "") {
-              showError("Input validation", "Input cannot be empty");
+              showError(
+                "Input validation",
+                "Input cannot be empty"
+              );
             } else {
               this.callback?.(input);
               this.close();
-              this.textElement.querySelector("#input").value = "";
+              this.textElement.querySelector("#input").value =
+                "";
             }
           },
         }),
-      ],
+      ]
     ),
   ];
 }
@@ -952,7 +1073,7 @@ export class ConfirmDialog extends InfoDialog {
           this.close();
         },
       }),
-      ],
+      ]
     ),
   ];
 }
@@ -1009,7 +1130,7 @@ function getData(environment) {
 function saveData(data) {
   localStorage.setItem(
     "comfy_deploy_env_data_" + data.environment,
-    JSON.stringify(data),
+    JSON.stringify(data)
   );
 }
@@ -1024,7 +1145,9 @@ export class ConfigDialog extends ComfyDialog {
     this.element.style.paddingBottom = "20px";

     this.container = document.createElement("div");
-    this.element.querySelector(".comfy-modal-content").prepend(this.container);
+    this.element
+      .querySelector(".comfy-modal-content")
+      .prepend(this.container);
   }

   createButtons() {
@@ -1058,7 +1181,7 @@ export class ConfigDialog extends ComfyDialog {
           this.close();
         },
       }),
-      ],
+      ]
     ),
   ];
 }
@@ -1070,7 +1193,8 @@ export class ConfigDialog extends ComfyDialog {
   }

   save(api_key, displayName) {
-    const deployOption = this.container.querySelector("#deployOption").value;
+    const deployOption =
+      this.container.querySelector("#deployOption").value;
     localStorage.setItem("comfy_deploy_env", deployOption);

     const endpoint = this.container.querySelector("#endpoint").value;
@@ -1102,22 +1226,32 @@ export class ConfigDialog extends ComfyDialog {
       <h3 style="margin: 0px;">Comfy Deploy Config</h3>
       <label style="color: white; width: 100%;">
         <select id="deployOption" style="margin: 8px 0px; width: 100%; height:30px; box-sizing: border-box;" >
-          <option value="cloud" ${data.environment === "cloud" ? "selected" : ""}>Cloud</option>
-          <option value="local" ${data.environment === "local" ? "selected" : ""}>Local</option>
+          <option value="cloud" ${
+            data.environment === "cloud" ? "selected" : ""
+          }>Cloud</option>
+          <option value="local" ${
+            data.environment === "local" ? "selected" : ""
+          }>Local</option>
         </select>
       </label>
       <label style="color: white; width: 100%;">
         Endpoint:
-        <input id="endpoint" style="margin-top: 8px; width: 100%; height:40px; box-sizing: border-box; padding: 0px 6px;" type="text" value="${data.endpoint
+        <input id="endpoint" style="margin-top: 8px; width: 100%; height:40px; box-sizing: border-box; padding: 0px 6px;" type="text" value="${
+          data.endpoint
         }">
       </label>
       <div style="color: white;">
-        API Key: User / Org <button style="font-size: 18px;">${data.displayName ?? ""
+        API Key: User / Org <button style="font-size: 18px;">${
+          data.displayName ?? ""
        }</button>
-        <input id="apiKey" style="margin-top: 8px; width: 100%; height:40px; box-sizing: border-box; padding: 0px 6px;" type="password" value="${data.apiKey
+        <input id="apiKey" style="margin-top: 8px; width: 100%; height:40px; box-sizing: border-box; padding: 0px 6px;" type="password" value="${
+          data.apiKey
        }">
        <button id="loginButton" style="margin-top: 8px; width: 100%; height:40px; box-sizing: border-box; padding: 0px 6px;">
-          ${data.apiKey ? "Re-login with ComfyDeploy" : "Login with ComfyDeploy"
+          ${
+            data.apiKey
+              ? "Re-login with ComfyDeploy"
+              : "Login with ComfyDeploy"
          }
        </button>
      </div>
@@ -1138,7 +1272,7 @@ export class ConfigDialog extends ComfyDialog {
         clearInterval(poll);
         infoDialog.showMessage(
           "Timeout",
-          "Wait too long for the response, please try re-login",
+          "Wait too long for the response, please try re-login"
         );
       }, 30000); // Stop polling after 30 seconds
@@ -1149,14 +1283,15 @@ export class ConfigDialog extends ComfyDialog {
           if (json.api_key) {
             this.save(json.api_key, json.name);
             this.close();
-            this.container.querySelector("#apiKey").value = json.api_key;
+            this.container.querySelector("#apiKey").value =
+              json.api_key;
             // infoDialog.show();
             clearInterval(this.poll);
             clearTimeout(this.timeout);
             // Refresh dialog
             const a = await confirmDialog.confirm(
               "Authenticated",
-              `<div>You will be able to upload workflow to <button style="font-size: 18px; width: fit;">${json.name}</button></div>`,
+              `<div>You will be able to upload workflow to <button style="font-size: 18px; width: fit;">${json.name}</button></div>`
             );
             configDialog.show();
           }

View File

@@ -51,7 +51,9 @@ const createRunRoute = createRoute({
 export const registerCreateRunRoute = (app: App) => {
   app.openapi(createRunRoute, async (c) => {
     const data = c.req.valid("json");
-    const origin = new URL(c.req.url).origin;
+    const proto = c.req.headers.get('x-forwarded-proto') || "http";
+    const host = c.req.headers.get('x-forwarded-host') || c.req.headers.get('host');
+    const origin = `${proto}://${host}` || new URL(c.req.url).origin;
     const apiKeyTokenData = c.get("apiKeyTokenData")!;

     const { deployment_id, inputs } = data;