fix: noise seed
parent 894d8e1503
commit dfee31f0ed
@@ -33,7 +33,7 @@ client_session = None
 #     global client_session
 #     if client_session is None:
 #         client_session = aiohttp.ClientSession()
-        
+
 async def ensure_client_session():
     global client_session
     if client_session is None:
@@ -43,7 +43,7 @@ async def cleanup():
     global client_session
     if client_session:
         await client_session.close()
-        
+
 def exit_handler():
     print("Exiting the application. Initiating cleanup...")
     loop = asyncio.get_event_loop()
@@ -82,7 +82,7 @@ async def async_request_with_retry(method, url, disable_timeout=False, **kwargs)
                 logger.error(f"Request failed after {max_retries} attempts: {e}")
                 # raise
             logger.warning(f"Request failed (attempt {attempt + 1}/{max_retries}): {e}")
-        
+
         # Wait before retrying
         await asyncio.sleep(retry_delay)
         retry_delay *= retry_delay_multiplier  # Exponential backoff
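Aside (not part of the diff): the context around this hunk comes from async_request_with_retry, which retries failed requests with exponential backoff. A minimal sketch of that pattern follows; the max_retries and retry_delay_multiplier names mirror the context lines above, while the do_request callable and default values are illustrative stand-ins, not the project's actual signature.

    import asyncio
    import logging

    logger = logging.getLogger(__name__)

    # Illustrative retry loop with exponential backoff; `do_request` stands in
    # for the actual aiohttp call made by the real helper.
    async def request_with_retry(do_request, max_retries=3, retry_delay=1.0, retry_delay_multiplier=2.0):
        for attempt in range(max_retries):
            try:
                return await do_request()
            except Exception as e:
                if attempt == max_retries - 1:
                    logger.error(f"Request failed after {max_retries} attempts: {e}")
                    raise
                logger.warning(f"Request failed (attempt {attempt + 1}/{max_retries}): {e}")
            # Wait before retrying, growing the delay each attempt
            await asyncio.sleep(retry_delay)
            retry_delay *= retry_delay_multiplier  # Exponential backoff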
@@ -116,7 +116,7 @@ def log(level, message, **kwargs):
         getattr(logger, level)(message, **kwargs)
     else:
         getattr(logger, level)(f"{message} {kwargs}")
-        
+
 # For a span, you might need to create a context manager
 from contextlib import contextmanager
 
@@ -234,29 +234,32 @@ def apply_random_seed_to_workflow(workflow_api):
         workflow_api (dict): The workflow API dictionary to modify.
     """
     for key in workflow_api:
-        if 'inputs' in workflow_api[key] and 'seed' in workflow_api[key]['inputs']:
-            if isinstance(workflow_api[key]['inputs']['seed'], list):
-                continue
-            if workflow_api[key]['class_type'] == "PromptExpansion":
-                workflow_api[key]['inputs']['seed'] = randomSeed(8)
-                logger.info(f"Applied random seed {workflow_api[key]['inputs']['seed']} to PromptExpansion")
-                continue
-            if workflow_api[key]['class_type'] == "RandomNoise":
-                workflow_api[key]['inputs']['noise_seed'] = randomSeed()
-                logger.info(f"Applied random noise_seed {workflow_api[key]['inputs']['noise_seed']} to RandomNoise")
-                continue
-            if workflow_api[key]['class_type'] == "KSamplerAdvanced":
-                workflow_api[key]['inputs']['noise_seed'] = randomSeed()
-                logger.info(f"Applied random noise_seed {workflow_api[key]['inputs']['noise_seed']} to KSamplerAdvanced")
-                continue
-            if workflow_api[key]['class_type'] == "SamplerCustom":
-                workflow_api[key]['inputs']['noise_seed'] = randomSeed()
-                logger.info(f"Applied random noise_seed {workflow_api[key]['inputs']['noise_seed']} to SamplerCustom")
-                continue
-            workflow_api[key]['inputs']['seed'] = randomSeed()
-            logger.info(f"Applied random seed {workflow_api[key]['inputs']['seed']} to {workflow_api[key]['class_type']}")
+        if 'inputs' in workflow_api[key]:
+            if 'seed' in workflow_api[key]['inputs']:
+                if isinstance(workflow_api[key]['inputs']['seed'], list):
+                    continue
+                if workflow_api[key]['class_type'] == "PromptExpansion":
+                    workflow_api[key]['inputs']['seed'] = randomSeed(8)
+                    logger.info(f"Applied random seed {workflow_api[key]['inputs']['seed']} to PromptExpansion")
+                    continue
+                workflow_api[key]['inputs']['seed'] = randomSeed()
+                logger.info(f"Applied random seed {workflow_api[key]['inputs']['seed']} to {workflow_api[key]['class_type']}")
 
-def apply_inputs_to_workflow(workflow_api: Any, inputs: Any, sid: str = None):
+            if 'noise_seed' in workflow_api[key]['inputs']:
+                if workflow_api[key]['class_type'] == "RandomNoise":
+                    workflow_api[key]['inputs']['noise_seed'] = randomSeed()
+                    logger.info(f"Applied random noise_seed {workflow_api[key]['inputs']['noise_seed']} to RandomNoise")
+                    continue
+                if workflow_api[key]['class_type'] == "KSamplerAdvanced":
+                    workflow_api[key]['inputs']['noise_seed'] = randomSeed()
+                    logger.info(f"Applied random noise_seed {workflow_api[key]['inputs']['noise_seed']} to KSamplerAdvanced")
+                    continue
+                if workflow_api[key]['class_type'] == "SamplerCustom":
+                    workflow_api[key]['inputs']['noise_seed'] = randomSeed()
+                    logger.info(f"Applied random noise_seed {workflow_api[key]['inputs']['noise_seed']} to SamplerCustom")
+                    continue
+
+def apply_inputs_to_workflow(workflow_api: Any, inputs: Any, sid: str | None = None):
     # Loop through each of the inputs and replace them
     for key, value in workflow_api.items():
         if 'inputs' in value:
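For illustration only (not part of the diff): a minimal, self-contained sketch of how the reworked seed logic behaves on a toy workflow dict. Here randomSeed is a stand-in for the project's helper and is assumed to return a random integer with at most the given number of digits; the node ids and input values are made up.

    import random

    # Stand-in for the project's randomSeed helper (assumption: it returns a
    # random non-negative integer with at most `num_digits` digits).
    def randomSeed(num_digits=15):
        return random.randint(0, 10 ** num_digits - 1)

    # Toy workflow in ComfyUI "workflow API" shape: node id -> {class_type, inputs}.
    workflow_api = {
        "3": {"class_type": "KSampler", "inputs": {"seed": 0}},
        "7": {"class_type": "RandomNoise", "inputs": {"noise_seed": 0}},
        "9": {"class_type": "PromptExpansion", "inputs": {"seed": 0}},
    }

    # Mirrors the new control flow: 'seed' and 'noise_seed' are handled as
    # separate checks under 'inputs', so noise_seed-only nodes (RandomNoise,
    # KSamplerAdvanced, SamplerCustom) also receive a fresh value on every run.
    for key in workflow_api:
        node = workflow_api[key]
        if 'inputs' not in node:
            continue
        if 'seed' in node['inputs'] and not isinstance(node['inputs']['seed'], list):
            node['inputs']['seed'] = randomSeed(8 if node['class_type'] == "PromptExpansion" else 15)
        if 'noise_seed' in node['inputs'] and node['class_type'] in ("RandomNoise", "KSamplerAdvanced", "SamplerCustom"):
            node['inputs']['noise_seed'] = randomSeed()

    print(workflow_api)  # every seed / noise_seed is now a fresh random value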
@@ -852,19 +855,19 @@ async def send(event, data, sid=None):
     except Exception as e:
         logger.info(f"Exception: {e}")
         traceback.print_exc()
-        
+
 @server.PromptServer.instance.routes.get('/comfydeploy/{tail:.*}')
 @server.PromptServer.instance.routes.post('/comfydeploy/{tail:.*}')
 async def proxy_to_comfydeploy(request):
     # Get the base URL
     base_url = f'https://www.comfydeploy.com/{request.match_info["tail"]}'
-    
+
     # Get all query parameters
     query_params = request.query_string
-    
+
     # Construct the full target URL with query parameters
     target_url = f"{base_url}?{query_params}" if query_params else base_url
-    
+
     # print(f"Proxying request to: {target_url}")
 
     try:
@@ -998,7 +1001,7 @@ async def send_json_override(self, event, data, sid=None):
                 return
         else:
             logger.info(f"Executed {data}")
-            
+
         await update_run_with_output(prompt_id, data.get('output'), node_id=data.get('node'))
         # await update_run_with_output(prompt_id, data.get('output'), node_id=data.get('node'))
         # update_run_with_output(prompt_id, data.get('output'))
@@ -1404,4 +1407,4 @@ if cd_enable_log:
 @server.PromptServer.instance.routes.get("/comfyui-deploy/filename_list_cache")
 async def get_filename_list_cache(_):
     from folder_paths import filename_list_cache
     return web.json_response({'filename_list': filename_list_cache})