From f3370c1bb1c438a26be5c9d4b69c02e659ac0c7f Mon Sep 17 00:00:00 2001 From: bennykok Date: Sat, 17 Feb 2024 14:45:48 +0800 Subject: [PATCH] feat: add new dependency viewer, reused previous cache for faster deploy --- custom_routes.py | 75 +++++++++++++++++++++++++++++++++++--- requirement.txt | 1 + web-plugin/index.js | 88 ++++++++++++++++++++++++++++++++++----------- 3 files changed, 138 insertions(+), 26 deletions(-) create mode 100644 requirement.txt diff --git a/custom_routes.py b/custom_routes.py index 190ccce..80c54e3 100644 --- a/custom_routes.py +++ b/custom_routes.py @@ -24,6 +24,8 @@ from urllib.parse import quote import threading import hashlib import aiohttp +import aiofiles +import concurrent.futures api = None api_task = None @@ -163,15 +165,74 @@ def get_comfyui_path_from_file_path(file_path): return file_path # Form ComfyUI Manager -def compute_sha256_checksum(filepath): +async def compute_sha256_checksum(filepath): + print("computing sha256 checksum") + chunk_size = 1024 * 256 # Example: 256KB filepath = get_comfyui_path_from_file_path(filepath) - """Compute the SHA256 checksum of a file, in chunks""" + """Compute the SHA256 checksum of a file, in chunks, asynchronously""" sha256 = hashlib.sha256() - with open(filepath, 'rb') as f: - for chunk in iter(lambda: f.read(4096), b''): + async with aiofiles.open(filepath, 'rb') as f: + while True: + chunk = await f.read(chunk_size) + if not chunk: + break sha256.update(chunk) return sha256.hexdigest() +# def hash_chunk(start_end, filepath): +# """Hash a specific chunk of the file.""" +# start, end = start_end +# sha256 = hashlib.sha256() +# with open(filepath, 'rb') as f: +# f.seek(start) +# chunk = f.read(end - start) +# sha256.update(chunk) +# return sha256.digest() # Return the digest of the chunk + +# async def compute_sha256_checksum(filepath): +# file_size = os.path.getsize(filepath) +# parts = 1 # Or any other division based on file size or desired concurrency +# part_size = file_size // parts +# 
start_end_ranges = [(i * part_size, min((i + 1) * part_size, file_size)) for i in range(parts)] + +# print(start_end_ranges, file_size) + +# loop = asyncio.get_running_loop() + +# # Use ThreadPoolExecutor to process chunks in parallel +# with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor: +# futures = [loop.run_in_executor(executor, hash_chunk, start_end, filepath) for start_end in start_end_ranges] +# chunk_hashes = await asyncio.gather(*futures) + +# # Combine the hashes sequentially +# final_sha256 = hashlib.sha256() +# for chunk_hash in chunk_hashes: +# final_sha256.update(chunk_hash) + +# return final_sha256.hexdigest() + +# def hash_chunk(filepath): +# chunk_size = 1024 * 256 # 256KB per chunk +# sha256 = hashlib.sha256() +# with open(filepath, 'rb') as f: +# while True: +# chunk = f.read(chunk_size) +# if not chunk: +# break # End of file +# sha256.update(chunk) +# return sha256.hexdigest() + +# async def compute_sha256_checksum(filepath): +# print("computing sha256 checksum") +# filepath = get_comfyui_path_from_file_path(filepath) + +# loop = asyncio.get_running_loop() + +# with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: +# task = loop.run_in_executor(executor, hash_chunk, filepath) + +# return await task + # This is start uploading the files to Comfy Deploy @server.PromptServer.instance.routes.post('/comfyui-deploy/upload-file') async def upload_file(request): @@ -269,9 +330,13 @@ async def get_file_hash(request): base = folder_paths.base_path file_path = os.path.join(base, file_path) # print("file_path", file_path) - file_hash = compute_sha256_checksum( + start_time = time.time() # Capture the start time + file_hash = await compute_sha256_checksum( file_path ) + end_time = time.time() # Capture the end time after the code execution + elapsed_time = end_time - start_time # Calculate the elapsed time + print(f"Execution time: {elapsed_time} seconds") return web.json_response({ "file_hash": file_hash }) diff --git 
a/requirement.txt b/requirement.txt new file mode 100644 index 0000000..f72db6f --- /dev/null +++ b/requirement.txt @@ -0,0 +1 @@ +aiofiles \ No newline at end of file diff --git a/web-plugin/index.js b/web-plugin/index.js index 72e3217..beb40bd 100644 --- a/web-plugin/index.js +++ b/web-plugin/index.js @@ -3,6 +3,8 @@ import { api } from "./api.js"; import { ComfyWidgets, LGraphNode } from "./widgets.js"; import { generateDependencyGraph } from "https://esm.sh/comfyui-json@0.1.19"; +const loadingIcon = ``; + /** @typedef {import('../../../web/types/comfy.js').ComfyExtension} ComfyExtension*/ /** @type {ComfyExtension} */ const ext = { @@ -284,21 +286,37 @@ function addButton() { } const ok = await confirmDialog.confirm( - "Confirm deployment -> " + - displayName + - "
<br/><br/>
" + - endpoint + - "
<br/><br/>
", - `A new version will be deployed, are you confirm?
<br/><br/>
Include dependence`, + `Confirm deployment`, + ` +
+      <div style="display: flex; flex-direction: column; gap: 8px;">
+        <span>
+          A new version will be deployed, do you confirm?
+        </span>
+        <label style="display: flex; align-items: center; gap: 8px;">
+          <input id="include-deps" type="checkbox" checked>
+          Include dependencies
+        </label>
+        <label style="display: flex; align-items: center; gap: 8px;">
+          <input id="reuse-hash" type="checkbox" checked>
+          Reuse hash from previous version
+        </label>
+      </div>
+ `, ); if (!ok) return; const includeDeps = document.getElementById("include-deps").checked; + const reuseHash = document.getElementById("reuse-hash").checked; if (endpoint.endsWith("/")) { endpoint = endpoint.slice(0, -1); } - loadingDialog.showLoading("Generating snapshot", "Please wait..."); + loadingDialog.showLoading("Generating snapshot"); const snapshot = await fetch("/snapshot/get_current").then((x) => x.json()); // console.log(snapshot); @@ -343,7 +361,7 @@ function addButton() { let deps = undefined; if (includeDeps) { - loadingDialog.showLoading("Fetching existing version", "Please wait..."); + loadingDialog.showLoading("Fetching existing version"); const existing_workflow = await fetch( endpoint + "/api/workflow/" + workflow_id, @@ -362,14 +380,35 @@ function addButton() { loadingDialog.close(); - loadingDialog.showLoading( - "Generating dependency graph", - "Please wait...", - ); + loadingDialog.showLoading("Generating dependency graph"); deps = await generateDependencyGraph({ workflow_api: prompt.output, snapshot: snapshot, computeFileHash: async (file) => { + console.log(existing_workflow?.dependencies?.models); + + // Match previous hash for models + if (reuseHash && existing_workflow?.dependencies?.models) { + const previousModelHash = Object.entries( + existing_workflow?.dependencies?.models, + ).flatMap(([key, value]) => { + return Object.values(value).map((x) => ({ + ...x, + name: "models/" + key + "/" + x.name, + })); + }); + console.log(previousModelHash); + + const match = previousModelHash.find((x) => { + console.log(file, x.name); + return file == x.name; + }); + console.log(match); + if (match && match.hash) { + console.log("cached hash used"); + return match.hash; + } + } console.log(file); loadingDialog.showLoading("Generating hash", file); const hash = await fetch( @@ -416,7 +455,14 @@ function addButton() { const depsOk = await confirmDialog.confirm( "Check dependencies", // JSON.stringify(deps, null, 2), - createDynamicUIHtml(deps), 
+ ` +
${loadingIcon}
+