diff --git "a/app.py" "b/app.py" --- "a/app.py" +++ "b/app.py" @@ -1,27 +1,59 @@ import os import random import sys -from typing import Sequence, Mapping, Any, Union -import torch +from typing import Any, Mapping, Sequence, Union + import gradio as gr -from PIL import Image import numpy as np - import spaces +import torch +from huggingface_hub import hf_hub_download +from PIL import Image # Import FreeU for quality improvements from comfy_extras.nodes_freelunch import FreeU_V2 -from huggingface_hub import hf_hub_download +hf_hub_download( + repo_id="stable-diffusion-v1-5/stable-diffusion-v1-5", + filename="v1-5-pruned-emaonly.ckpt", + local_dir="models/checkpoints", +) +hf_hub_download( + repo_id="Lykon/DreamShaper", + filename="DreamShaper_3.32_baked_vae_clip_fix_half.safetensors", + local_dir="models/checkpoints", +) +hf_hub_download( + repo_id="Lykon/DreamShaper", + filename="DreamShaper_6.31_BakedVae_pruned.safetensors", + local_dir="models/checkpoints", +) +hf_hub_download( + repo_id="latentcat/latentcat-controlnet", + filename="models/control_v1p_sd15_brightness.safetensors", + local_dir="models/controlnet", +) +hf_hub_download( + repo_id="comfyanonymous/ControlNet-v1-1_fp16_safetensors", + filename="control_v11f1e_sd15_tile_fp16.safetensors", + local_dir="models/controlnet", +) +hf_hub_download( + repo_id="Lykon/dreamshaper-7", + filename="vae/diffusion_pytorch_model.fp16.safetensors", + local_dir="models", +) +hf_hub_download( + repo_id="stabilityai/sd-vae-ft-mse-original", + filename="vae-ft-mse-840000-ema-pruned.safetensors", + local_dir="models/vae", +) +hf_hub_download( + repo_id="lllyasviel/Annotators", + filename="RealESRGAN_x4plus.pth", + local_dir="models/upscale_models", +) -hf_hub_download(repo_id="stable-diffusion-v1-5/stable-diffusion-v1-5", filename="v1-5-pruned-emaonly.ckpt", local_dir="models/checkpoints") -hf_hub_download(repo_id="Lykon/DreamShaper", filename="DreamShaper_3.32_baked_vae_clip_fix_half.safetensors", local_dir="models/checkpoints") -hf_hub_download(repo_id="Lykon/DreamShaper", filename="DreamShaper_6.31_BakedVae_pruned.safetensors", local_dir="models/checkpoints") -hf_hub_download(repo_id="latentcat/latentcat-controlnet", filename="models/control_v1p_sd15_brightness.safetensors", local_dir="models/controlnet") -hf_hub_download(repo_id="comfyanonymous/ControlNet-v1-1_fp16_safetensors", filename="control_v11f1e_sd15_tile_fp16.safetensors", local_dir="models/controlnet") -hf_hub_download(repo_id="Lykon/dreamshaper-7", filename="vae/diffusion_pytorch_model.fp16.safetensors", local_dir="models") -hf_hub_download(repo_id="stabilityai/sd-vae-ft-mse-original", filename="vae-ft-mse-840000-ema-pruned.safetensors", local_dir="models/vae") -hf_hub_download(repo_id="lllyasviel/Annotators", filename="RealESRGAN_x4plus.pth", local_dir="models/upscale_models") def get_value_at_index(obj: Union[Sequence, Mapping], index: int) -> Any: """Returns the value at the given index of a sequence or mapping. @@ -114,9 +146,10 @@ def import_custom_nodes() -> None: creates a PromptQueue, and initializes the custom nodes. 
""" import asyncio + import execution - from nodes import init_extra_nodes import server + from nodes import init_extra_nodes # Creating a new event loop and setting it as the default loop loop = asyncio.new_event_loop() @@ -147,35 +180,75 @@ controlnetloader = NODE_CLASS_MAPPINGS["ControlNetLoader"]() controlnetapplyadvanced = NODE_CLASS_MAPPINGS["ControlNetApplyAdvanced"]() ksampler = NODE_CLASS_MAPPINGS["KSampler"]() vaedecode = NODE_CLASS_MAPPINGS["VAEDecode"]() +vaedecodetiled = NODE_CLASS_MAPPINGS["VAEDecodeTiled"]() import_custom_nodes() comfy_qr_by_module_size = NODE_CLASS_MAPPINGS["comfy-qr-by-module-size"]() tilepreprocessor = NODE_CLASS_MAPPINGS["TilePreprocessor"]() -# Load upscale model and additional nodes for artistic pipeline -upscalemodelloader = NODE_CLASS_MAPPINGS["UpscaleModelLoader"]() -upscalemodelloader_30 = upscalemodelloader.load_model( - model_name="RealESRGAN_x4plus.pth" -) +# Load additional nodes for artistic pipeline (upscale model loaded lazily when needed) imageupscalewithmodel = NODE_CLASS_MAPPINGS["ImageUpscaleWithModel"]() imagescale = NODE_CLASS_MAPPINGS["ImageScale"]() latentupscaleby = NODE_CLASS_MAPPINGS["LatentUpscaleBy"]() -from comfy import model_management - # MPS (Apple Silicon) comprehensive workaround for black QR code bug # Issue: PyTorch 2.6+ FP16 handling on MPS causes black images in samplers # Additional issue: MPS tensor operations can produce NaN/inf values (PyTorch bug #84364) # Solution: Monkey-patch dtype functions to force fp32, enable MPS fallback # References: https://civitai.com/articles/11106, https://github.com/pytorch/pytorch/issues/84364 - import os -os.environ['PYTORCH_ENABLE_MPS_FALLBACK'] = '1' +from comfy import model_management from comfy.cli_args import args +# Lazy upscale model loading - only load when needed +# This is safe for ZeroGPU since upscaling happens inside @spaces.GPU function +_upscale_model_cache = None + + +def get_upscale_model(): + """Load upscale model on-demand and cache it within GPU context""" + global _upscale_model_cache + if _upscale_model_cache is None: + upscalemodelloader = NODE_CLASS_MAPPINGS["UpscaleModelLoader"]() + _upscale_model_cache = upscalemodelloader.load_model( + model_name="RealESRGAN_x4plus.pth" + ) + return _upscale_model_cache + + +def calculate_vae_tile_size(image_size): + """ + Calculate optimal VAE tile size based on image dimensions. + + Args: + image_size: Width/height of square image in pixels + + Returns: + tuple: (tile_size, overlap) or (None, None) for no tiling + """ + # No tiling for small images (fits in memory easily) + if image_size <= 512: + return None, None + + # Medium images: 512px tiles + elif image_size <= 1024: + return 512, 64 + + # Large images: 768px tiles (reduces tile count) + elif image_size <= 2048: + return 768, 96 + + # XL images: 1024px tiles + else: + return 1024, 128 + + if torch.backends.mps.is_available(): print(f"MPS device detected (PyTorch {torch.__version__})") + os.environ["PYTORCH_ENABLE_MPS_FALLBACK"] = ( + "1" # Enable MPS fallback for unsupported ops + ) # Store original dtype functions _original_unet_dtype = model_management.unet_dtype @@ -217,11 +290,15 @@ if torch.backends.mps.is_available(): # Performance settings: Tune these for speed vs stability # Try uncommenting these one at a time for better speed: - args.lowvram = False # Set to False for FASTER (try this first!) 
- args.use_split_cross_attention = False # Set to False for even FASTER (might cause black images) + args.lowvram = False # Set to False for FASTER (try this first!) + args.use_split_cross_attention = ( + False # Set to False for even FASTER (might cause black images) + ) lowvram_status = "enabled" if args.lowvram else "disabled (faster)" - split_attn_status = "enabled" if args.use_split_cross_attention else "disabled (faster)" + split_attn_status = ( + "enabled" if args.use_split_cross_attention else "disabled (faster)" + ) print(" ✓ Enabled global fp32 dtype enforcement (monkey-patched)") print(" ✓ Enabled MPS fallback mode") print(f" ✓ lowvram: {lowvram_status}, split-cross-attention: {split_attn_status}") @@ -231,15 +308,41 @@ model_loaders = [checkpointloadersimple_4, checkpointloadersimple_artistic] # Check which models are valid and how to best load them valid_models = [ - getattr(loader[0], 'patcher', loader[0]) + getattr(loader[0], "patcher", loader[0]) for loader in model_loaders - if not isinstance(loader[0], dict) and not isinstance(getattr(loader[0], 'patcher', None), dict) + if not isinstance(loader[0], dict) + and not isinstance(getattr(loader[0], "patcher", None), dict) ] model_management.load_models_gpu(valid_models) + @spaces.GPU(duration=30) -def generate_qr_code_unified(prompt: str, text_input: str, input_type: str = "URL", image_size: int = 512, border_size: int = 4, error_correction: str = "Medium (15%)", module_size: int = 12, module_drawer: str = "Square", use_custom_seed: bool = False, seed: int = 0, pipeline: str = "standard", enable_upscale: bool = False, freeu_b1: float = 1.4, freeu_b2: float = 1.3, freeu_s1: float = 0.0, freeu_s2: float = 1.3, enable_sag: bool = True, sag_scale: float = 0.5, sag_blur_sigma: float = 1.5, controlnet_strength_first: float = 0.45, controlnet_strength_final: float = 0.7, controlnet_strength_standard_first: float = 0.45, controlnet_strength_standard_final: float = 1.0): +def generate_qr_code_unified( + prompt: str, + text_input: str, + input_type: str = "URL", + image_size: int = 512, + border_size: int = 4, + error_correction: str = "Medium (15%)", + module_size: int = 12, + module_drawer: str = "Square", + use_custom_seed: bool = False, + seed: int = 0, + pipeline: str = "standard", + enable_upscale: bool = False, + freeu_b1: float = 1.4, + freeu_b2: float = 1.3, + freeu_s1: float = 0.0, + freeu_s2: float = 1.3, + enable_sag: bool = True, + sag_scale: float = 0.5, + sag_blur_sigma: float = 1.5, + controlnet_strength_first: float = 0.45, + controlnet_strength_final: float = 0.7, + controlnet_strength_standard_first: float = 0.45, + controlnet_strength_standard_final: float = 1.0, +): # Only manipulate the text if it's a URL input type qr_text = text_input if input_type == "URL": @@ -253,11 +356,60 @@ def generate_qr_code_unified(prompt: str, text_input: str, input_type: str = "UR with torch.inference_mode(): if pipeline == "standard": - yield from _pipeline_standard(prompt, qr_text, input_type, image_size, border_size, error_correction, module_size, module_drawer, actual_seed, enable_upscale, controlnet_strength_standard_first, controlnet_strength_standard_final) + yield from _pipeline_standard( + prompt, + qr_text, + input_type, + image_size, + border_size, + error_correction, + module_size, + module_drawer, + actual_seed, + enable_upscale, + controlnet_strength_standard_first, + controlnet_strength_standard_final, + ) else: # artistic - yield from _pipeline_artistic(prompt, qr_text, input_type, image_size, border_size, 
error_correction, module_size, module_drawer, actual_seed, enable_upscale, freeu_b1, freeu_b2, freeu_s1, freeu_s2, enable_sag, sag_scale, sag_blur_sigma, controlnet_strength_first, controlnet_strength_final) + yield from _pipeline_artistic( + prompt, + qr_text, + input_type, + image_size, + border_size, + error_correction, + module_size, + module_drawer, + actual_seed, + enable_upscale, + freeu_b1, + freeu_b2, + freeu_s1, + freeu_s2, + enable_sag, + sag_scale, + sag_blur_sigma, + controlnet_strength_first, + controlnet_strength_final, + ) + -def generate_standard_qr(prompt: str, text_input: str, input_type: str = "URL", image_size: int = 512, border_size: int = 4, error_correction: str = "Medium (15%)", module_size: int = 12, module_drawer: str = "Square", use_custom_seed: bool = False, seed: int = 0, enable_upscale: bool = False, enable_freeu: bool = False, controlnet_strength_standard_first: float = 0.45, controlnet_strength_standard_final: float = 1.0): +def generate_standard_qr( + prompt: str, + text_input: str, + input_type: str = "URL", + image_size: int = 512, + border_size: int = 4, + error_correction: str = "Medium (15%)", + module_size: int = 12, + module_drawer: str = "Square", + use_custom_seed: bool = False, + seed: int = 0, + enable_upscale: bool = False, + enable_freeu: bool = False, + controlnet_strength_standard_first: float = 0.45, + controlnet_strength_standard_final: float = 1.0, +): """Wrapper function for standard QR generation""" # Get actual seed used (custom or random) actual_seed = seed if use_custom_seed else random.randint(1, 2**64) @@ -278,12 +430,27 @@ def generate_standard_qr(prompt: str, text_input: str, input_type: str = "URL", "enable_upscale": enable_upscale, "enable_freeu": enable_freeu, "controlnet_strength_standard_first": controlnet_strength_standard_first, - "controlnet_strength_standard_final": controlnet_strength_standard_final + "controlnet_strength_standard_final": controlnet_strength_standard_final, } settings_json = generate_settings_json(settings_dict) # Generate QR and yield progressive results - generator = generate_qr_code_unified(prompt, text_input, input_type, image_size, border_size, error_correction, module_size, module_drawer, use_custom_seed, seed, pipeline="standard", enable_upscale=enable_upscale, controlnet_strength_standard_first=controlnet_strength_standard_first, controlnet_strength_standard_final=controlnet_strength_standard_final) + generator = generate_qr_code_unified( + prompt, + text_input, + input_type, + image_size, + border_size, + error_correction, + module_size, + module_drawer, + use_custom_seed, + seed, + pipeline="standard", + enable_upscale=enable_upscale, + controlnet_strength_standard_first=controlnet_strength_standard_first, + controlnet_strength_standard_final=controlnet_strength_standard_final, + ) final_image = None final_status = None @@ -300,10 +467,33 @@ def generate_standard_qr(prompt: str, text_input: str, input_type: str = "URL", final_image, final_status, gr.update(value=settings_json), # Update textbox content - gr.update(visible=True) # Make accordion visible only at the end + gr.update(visible=True), # Make accordion visible only at the end ) -def generate_artistic_qr(prompt: str, text_input: str, input_type: str = "URL", image_size: int = 512, border_size: int = 4, error_correction: str = "Medium (15%)", module_size: int = 12, module_drawer: str = "Square", use_custom_seed: bool = False, seed: int = 0, enable_upscale: bool = True, enable_freeu: bool = True, freeu_b1: float = 1.4, freeu_b2: float = 
1.3, freeu_s1: float = 0.0, freeu_s2: float = 1.3, enable_sag: bool = True, sag_scale: float = 0.5, sag_blur_sigma: float = 0.5, controlnet_strength_first: float = 0.45, controlnet_strength_final: float = 0.70): + +def generate_artistic_qr( + prompt: str, + text_input: str, + input_type: str = "URL", + image_size: int = 512, + border_size: int = 4, + error_correction: str = "Medium (15%)", + module_size: int = 12, + module_drawer: str = "Square", + use_custom_seed: bool = False, + seed: int = 0, + enable_upscale: bool = True, + enable_freeu: bool = True, + freeu_b1: float = 1.4, + freeu_b2: float = 1.3, + freeu_s1: float = 0.0, + freeu_s2: float = 1.3, + enable_sag: bool = True, + sag_scale: float = 0.5, + sag_blur_sigma: float = 0.5, + controlnet_strength_first: float = 0.45, + controlnet_strength_final: float = 0.70, +): """Wrapper function for artistic QR generation with FreeU and SAG parameters""" # Get actual seed used (custom or random) actual_seed = seed if use_custom_seed else random.randint(1, 2**64) @@ -331,12 +521,34 @@ def generate_artistic_qr(prompt: str, text_input: str, input_type: str = "URL", "sag_scale": sag_scale, "sag_blur_sigma": sag_blur_sigma, "controlnet_strength_first": controlnet_strength_first, - "controlnet_strength_final": controlnet_strength_final + "controlnet_strength_final": controlnet_strength_final, } settings_json = generate_settings_json(settings_dict) # Generate QR and yield progressive results - generator = generate_qr_code_unified(prompt, text_input, input_type, image_size, border_size, error_correction, module_size, module_drawer, use_custom_seed, seed, pipeline="artistic", enable_upscale=enable_upscale, freeu_b1=freeu_b1, freeu_b2=freeu_b2, freeu_s1=freeu_s1, freeu_s2=freeu_s2, enable_sag=enable_sag, sag_scale=sag_scale, sag_blur_sigma=sag_blur_sigma, controlnet_strength_first=controlnet_strength_first, controlnet_strength_final=controlnet_strength_final) + generator = generate_qr_code_unified( + prompt, + text_input, + input_type, + image_size, + border_size, + error_correction, + module_size, + module_drawer, + use_custom_seed, + seed, + pipeline="artistic", + enable_upscale=enable_upscale, + freeu_b1=freeu_b1, + freeu_b2=freeu_b2, + freeu_s1=freeu_s1, + freeu_s2=freeu_s2, + enable_sag=enable_sag, + sag_scale=sag_scale, + sag_blur_sigma=sag_blur_sigma, + controlnet_strength_first=controlnet_strength_first, + controlnet_strength_final=controlnet_strength_final, + ) final_image = None final_status = None @@ -353,12 +565,14 @@ def generate_artistic_qr(prompt: str, text_input: str, input_type: str = "URL", final_image, final_status, gr.update(value=settings_json), # Update textbox content - gr.update(visible=True) # Make accordion visible only at the end + gr.update(visible=True), # Make accordion visible only at the end ) + # Helper functions for shareable settings JSON import json + def generate_settings_json(params_dict: dict) -> str: """Generate a formatted JSON string from parameters dictionary""" try: @@ -366,6 +580,7 @@ def generate_settings_json(params_dict: dict) -> str: except Exception as e: return json.dumps({"error": f"Failed to generate JSON: {str(e)}"}, indent=2) + def parse_settings_json(json_string: str) -> dict: """Parse JSON string and return parameters dictionary with validation""" try: @@ -382,21 +597,35 @@ def parse_settings_json(json_string: str) -> dict: except Exception as e: return {"error": f"Failed to parse JSON: {str(e)}"} + def load_settings_from_json_standard(json_string: str): """Load settings from JSON for Standard 
pipeline""" try: params = json.loads(json_string) # Validate pipeline type - pipeline = params.get("pipeline", "standard") # Default to standard for backward compatibility + pipeline = params.get( + "pipeline", "standard" + ) # Default to standard for backward compatibility if pipeline != "standard": error_msg = f"❌ Error: You're trying to load {pipeline.upper()} pipeline settings into the STANDARD pipeline. Please use the correct tab." # Return empty updates for all fields + error message + make status visible return ( - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(value=error_msg, visible=True) + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(value=error_msg, visible=True), ) # Extract parameters with defaults @@ -412,50 +641,107 @@ def load_settings_from_json_standard(json_string: str): seed = params.get("seed", 718313) enable_upscale = params.get("enable_upscale", False) enable_freeu = params.get("enable_freeu", False) - controlnet_strength_standard_first = params.get("controlnet_strength_standard_first", 0.45) - controlnet_strength_standard_final = params.get("controlnet_strength_standard_final", 1.0) + controlnet_strength_standard_first = params.get( + "controlnet_strength_standard_first", 0.45 + ) + controlnet_strength_standard_final = params.get( + "controlnet_strength_standard_final", 1.0 + ) success_msg = "✅ Settings loaded successfully!" return ( - prompt, text_input, input_type, image_size, border_size, - error_correction, module_size, module_drawer, use_custom_seed, - seed, enable_upscale, enable_freeu, controlnet_strength_standard_first, - controlnet_strength_standard_final, gr.update(value=success_msg, visible=True) + prompt, + text_input, + input_type, + image_size, + border_size, + error_correction, + module_size, + module_drawer, + use_custom_seed, + seed, + enable_upscale, + enable_freeu, + controlnet_strength_standard_first, + controlnet_strength_standard_final, + gr.update(value=success_msg, visible=True), ) except json.JSONDecodeError as e: error_msg = f"❌ Invalid JSON format: {str(e)}" return ( - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(value=error_msg, visible=True) + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(value=error_msg, visible=True), ) except Exception as e: error_msg = f"❌ Error loading settings: {str(e)}" return ( - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(value=error_msg, visible=True) + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(value=error_msg, visible=True), ) + def load_settings_from_json_artistic(json_string: str): """Load settings from JSON for Artistic pipeline""" try: 
params = json.loads(json_string) # Validate pipeline type - pipeline = params.get("pipeline", "artistic") # Default to artistic for backward compatibility + pipeline = params.get( + "pipeline", "artistic" + ) # Default to artistic for backward compatibility if pipeline != "artistic": error_msg = f"❌ Error: You're trying to load {pipeline.upper()} pipeline settings into the ARTISTIC pipeline. Please use the correct tab." # Return empty updates for all fields + error message + make status visible return ( - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(value=error_msg, visible=True) + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(value=error_msg, visible=True), ) # Extract parameters with defaults @@ -483,33 +769,87 @@ def load_settings_from_json_artistic(json_string: str): success_msg = "✅ Settings loaded successfully!" return ( - prompt, text_input, input_type, image_size, border_size, - error_correction, module_size, module_drawer, use_custom_seed, - seed, enable_upscale, enable_freeu, freeu_b1, freeu_b2, freeu_s1, - freeu_s2, enable_sag, sag_scale, sag_blur_sigma, controlnet_strength_first, - controlnet_strength_final, gr.update(value=success_msg, visible=True) + prompt, + text_input, + input_type, + image_size, + border_size, + error_correction, + module_size, + module_drawer, + use_custom_seed, + seed, + enable_upscale, + enable_freeu, + freeu_b1, + freeu_b2, + freeu_s1, + freeu_s2, + enable_sag, + sag_scale, + sag_blur_sigma, + controlnet_strength_first, + controlnet_strength_final, + gr.update(value=success_msg, visible=True), ) except json.JSONDecodeError as e: error_msg = f"❌ Invalid JSON format: {str(e)}" return ( - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(value=error_msg, visible=True) + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(value=error_msg, visible=True), ) except Exception as e: error_msg = f"❌ Error loading settings: {str(e)}" return ( - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(), gr.update(), gr.update(), gr.update(), - gr.update(), gr.update(value=error_msg, visible=True) + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + gr.update(), + 
gr.update(), + gr.update(), + gr.update(value=error_msg, visible=True), ) -def add_noise_to_border_only(image_tensor, seed: int, border_size: int, image_size: int, module_size: int = 12): + +def add_noise_to_border_only( + image_tensor, seed: int, border_size: int, image_size: int, module_size: int = 12 +): """ Add QR-like cubic patterns ONLY to the border region of a QR code image. Creates black squares that resemble QR modules for a smooth transition. @@ -548,11 +888,11 @@ def add_noise_to_border_only(image_tensor, seed: int, border_size: int, image_si # Top border border_mask[0:border_thickness, :] = True # Bottom border - border_mask[height-border_thickness:height, :] = True + border_mask[height - border_thickness : height, :] = True # Left border border_mask[:, 0:border_thickness] = True # Right border - border_mask[:, width-border_thickness:width] = True + border_mask[:, width - border_thickness : width] = True # Only apply to white/light areas in the border (threshold > 240) img_255 = (img * 255).astype(np.uint8) @@ -596,7 +936,21 @@ def add_noise_to_border_only(image_tensor, seed: int, border_size: int, image_si # Convert back to tensor return torch.from_numpy(img_np).to(image_tensor.device) -def _pipeline_standard(prompt: str, qr_text: str, input_type: str, image_size: int, border_size: int, error_correction: str, module_size: int, module_drawer: str, seed: int, enable_upscale: bool = False, controlnet_strength_first: float = 0.45, controlnet_strength_final: float = 1.0): + +def _pipeline_standard( + prompt: str, + qr_text: str, + input_type: str, + image_size: int, + border_size: int, + error_correction: str, + module_size: int, + module_drawer: str, + seed: int, + enable_upscale: bool = False, + controlnet_strength_first: float = 0.45, + controlnet_strength_final: float = 1.0, +): emptylatentimage_5 = emptylatentimage.generate( width=image_size, height=image_size, batch_size=1 ) @@ -650,7 +1004,7 @@ def _pipeline_standard(prompt: str, qr_text: str, input_type: str, image_size: i yield base_qr_pil, "Generated base QR pattern… enhancing with AI (step 1/3)" emptylatentimage_17 = emptylatentimage.generate( - width=image_size*2, height=image_size*2, batch_size=1 + width=image_size * 2, height=image_size * 2, batch_size=1 ) controlnetloader_19 = controlnetloader.load_controlnet( @@ -699,10 +1053,23 @@ def _pipeline_standard(prompt: str, qr_text: str, input_type: str, image_size: i latent_image=get_value_at_index(emptylatentimage_5, 0), ) - vaedecode_8 = vaedecode.decode( - samples=get_value_at_index(ksampler_3, 0), - vae=get_value_at_index(checkpointloadersimple_4, 2), - ) + # Calculate optimal tile size for this image + tile_size, overlap = calculate_vae_tile_size(image_size) + + if tile_size is not None: + # Use tiled decode for larger images + vaedecode_8 = vaedecodetiled.decode( + samples=get_value_at_index(ksampler_3, 0), + vae=get_value_at_index(checkpointloadersimple_4, 2), + tile_size=tile_size, + overlap=overlap, + ) + else: + # Small image, use standard decode (faster) + vaedecode_8 = vaedecode.decode( + samples=get_value_at_index(ksampler_3, 0), + vae=get_value_at_index(checkpointloadersimple_4, 2), + ) # 2) Yield the first decoded image as a second intermediate result mid_tensor = get_value_at_index(vaedecode_8, 0) @@ -711,6 +1078,9 @@ def _pipeline_standard(prompt: str, qr_text: str, input_type: str, image_size: i mid_pil = Image.fromarray(mid_np) yield mid_pil, "First enhancement pass complete (step 2/3)… refining details" + # Clear cache before second pass to free 
memory + model_management.soft_empty_cache() + controlnetapplyadvanced_20 = controlnetapplyadvanced.apply_controlnet( strength=controlnet_strength_final, start_percent=0, @@ -735,10 +1105,21 @@ def _pipeline_standard(prompt: str, qr_text: str, input_type: str, image_size: i latent_image=get_value_at_index(emptylatentimage_17, 0), ) - vaedecode_21 = vaedecode.decode( - samples=get_value_at_index(ksampler_18, 0), - vae=get_value_at_index(checkpointloadersimple_4, 2), - ) + # Second pass is always 2x original, calculate based on doubled size + tile_size_2x, overlap_2x = calculate_vae_tile_size(image_size * 2) + + if tile_size_2x is not None: + vaedecode_21 = vaedecodetiled.decode( + samples=get_value_at_index(ksampler_18, 0), + vae=get_value_at_index(checkpointloadersimple_4, 2), + tile_size=tile_size_2x, + overlap=overlap_2x, + ) + else: + vaedecode_21 = vaedecode.decode( + samples=get_value_at_index(ksampler_18, 0), + vae=get_value_at_index(checkpointloadersimple_4, 2), + ) # 3) Optionally upscale if enabled if enable_upscale: @@ -749,9 +1130,10 @@ def _pipeline_standard(prompt: str, qr_text: str, input_type: str, image_size: i pre_upscale_pil = Image.fromarray(pre_upscale_np) yield pre_upscale_pil, "Enhancement complete (step 3/4)... upscaling image" - # Upscale the final image + # Upscale the final image (load model on-demand) + upscale_model = get_upscale_model() upscaled = imageupscalewithmodel.upscale( - upscale_model=get_value_at_index(upscalemodelloader_30, 0), + upscale_model=get_value_at_index(upscale_model, 0), image=get_value_at_index(vaedecode_21, 0), ) @@ -759,7 +1141,10 @@ def _pipeline_standard(prompt: str, qr_text: str, input_type: str, image_size: i image_np = (image_tensor.cpu().numpy() * 255).astype(np.uint8) image_np = image_np[0] pil_image = Image.fromarray(image_np) - yield pil_image, "No errors, all good! Final QR art generated and upscaled. (step 4/4)" + yield ( + pil_image, + "No errors, all good! Final QR art generated and upscaled. (step 4/4)", + ) else: # No upscaling image_tensor = get_value_at_index(vaedecode_21, 0) @@ -768,7 +1153,28 @@ def _pipeline_standard(prompt: str, qr_text: str, input_type: str, image_size: i pil_image = Image.fromarray(image_np) yield pil_image, "No errors, all good! Final QR art generated." -def _pipeline_artistic(prompt: str, qr_text: str, input_type: str, image_size: int, border_size: int, error_correction: str, module_size: int, module_drawer: str, seed: int, enable_upscale: bool = True, freeu_b1: float = 1.4, freeu_b2: float = 1.3, freeu_s1: float = 0.0, freeu_s2: float = 1.3, enable_sag: bool = True, sag_scale: float = 0.5, sag_blur_sigma: float = 0.5, controlnet_strength_first: float = 0.45, controlnet_strength_final: float = 0.7): + +def _pipeline_artistic( + prompt: str, + qr_text: str, + input_type: str, + image_size: int, + border_size: int, + error_correction: str, + module_size: int, + module_drawer: str, + seed: int, + enable_upscale: bool = True, + freeu_b1: float = 1.4, + freeu_b2: float = 1.3, + freeu_s1: float = 0.0, + freeu_s2: float = 1.3, + enable_sag: bool = True, + sag_scale: float = 0.5, + sag_blur_sigma: float = 0.5, + controlnet_strength_first: float = 0.45, + controlnet_strength_final: float = 0.7, +): # Generate QR code qr_protocol = "None" if input_type == "Plain Text" else "Https" @@ -809,7 +1215,10 @@ def _pipeline_artistic(prompt: str, qr_text: str, input_type: str, image_size: i # Only add noise if there's a border (border_size > 0) if border_size > 0: - yield base_qr_pil, f"Generated base QR pattern... 
adding QR-like cubics to border (step {current_step}/{total_steps})" + yield ( + base_qr_pil, + f"Generated base QR pattern... adding QR-like cubics to border (step {current_step}/{total_steps})", + ) current_step += 1 # Add QR-like cubic patterns ONLY to border region (extends QR structure into border) @@ -826,12 +1235,18 @@ def _pipeline_artistic(prompt: str, qr_text: str, input_type: str, image_size: i noisy_qr_np = (qr_with_border_noise.cpu().numpy() * 255).astype(np.uint8) noisy_qr_np = noisy_qr_np[0] noisy_qr_pil = Image.fromarray(noisy_qr_np) - yield noisy_qr_pil, f"Added QR-like cubics to border... enhancing with AI (step {current_step}/{total_steps})" + yield ( + noisy_qr_pil, + f"Added QR-like cubics to border... enhancing with AI (step {current_step}/{total_steps})", + ) current_step += 1 else: # No border, skip noise qr_with_border_noise = get_value_at_index(comfy_qr, 0) - yield base_qr_pil, f"Generated base QR pattern (no border)... enhancing with AI (step {current_step}/{total_steps})" + yield ( + base_qr_pil, + f"Generated base QR pattern (no border)... enhancing with AI (step {current_step}/{total_steps})", + ) current_step += 1 # Generate latent image @@ -899,7 +1314,7 @@ def _pipeline_artistic(prompt: str, qr_text: str, input_type: str, image_size: i b1=freeu_b1, # Backbone feature enhancement - customizable b2=freeu_b2, # Backbone feature enhancement (layer 2) - customizable s1=freeu_s1, # Skip connection dampening - customizable structure hiding - s2=freeu_s2 # Skip connection dampening (layer 2) - customizable scannability balance + s2=freeu_s2, # Skip connection dampening (layer 2) - customizable scannability balance )[0] # Apply SAG (Self-Attention Guidance) for improved structural coherence (if enabled) @@ -927,20 +1342,36 @@ def _pipeline_artistic(prompt: str, qr_text: str, input_type: str, image_size: i latent_image=get_value_at_index(latent_image, 0), ) - # First decode - decoded = vaedecode.decode( - samples=get_value_at_index(samples, 0), - vae=get_value_at_index(checkpointloadersimple_artistic, 2), - ) + # First decode with dynamic tiling + tile_size, overlap = calculate_vae_tile_size(image_size) + + if tile_size is not None: + decoded = vaedecodetiled.decode( + samples=get_value_at_index(samples, 0), + vae=get_value_at_index(checkpointloadersimple_artistic, 2), + tile_size=tile_size, + overlap=overlap, + ) + else: + decoded = vaedecode.decode( + samples=get_value_at_index(samples, 0), + vae=get_value_at_index(checkpointloadersimple_artistic, 2), + ) # Show first pass result first_pass_tensor = get_value_at_index(decoded, 0) first_pass_np = (first_pass_tensor.cpu().numpy() * 255).astype(np.uint8) first_pass_np = first_pass_np[0] first_pass_pil = Image.fromarray(first_pass_np) - yield first_pass_pil, f"First enhancement pass complete (step {current_step}/{total_steps})... final refinement pass" + yield ( + first_pass_pil, + f"First enhancement pass complete (step {current_step}/{total_steps})... 
final refinement pass", + ) current_step += 1 + # Clear cache before second pass to free memory + model_management.soft_empty_cache() + # Final ControlNet pass (second pass - refinement) controlnet_apply_final = controlnetapplyadvanced.apply_controlnet( strength=controlnet_strength_final, @@ -974,11 +1405,21 @@ def _pipeline_artistic(prompt: str, qr_text: str, input_type: str, image_size: i latent_image=get_value_at_index(upscaled_latent, 0), ) - # Final decode - final_decoded = vaedecode.decode( - samples=get_value_at_index(final_samples, 0), - vae=get_value_at_index(checkpointloadersimple_artistic, 2), - ) + # Final decode with dynamic tiling + tile_size, overlap = calculate_vae_tile_size(image_size) + + if tile_size is not None: + final_decoded = vaedecodetiled.decode( + samples=get_value_at_index(final_samples, 0), + vae=get_value_at_index(checkpointloadersimple_artistic, 2), + tile_size=tile_size, + overlap=overlap, + ) + else: + final_decoded = vaedecode.decode( + samples=get_value_at_index(final_samples, 0), + vae=get_value_at_index(checkpointloadersimple_artistic, 2), + ) # Optionally upscale if enabled if enable_upscale: @@ -987,12 +1428,16 @@ def _pipeline_artistic(prompt: str, qr_text: str, input_type: str, image_size: i pre_upscale_np = (pre_upscale_tensor.cpu().numpy() * 255).astype(np.uint8) pre_upscale_np = pre_upscale_np[0] pre_upscale_pil = Image.fromarray(pre_upscale_np) - yield pre_upscale_pil, f"Final refinement complete (step {current_step}/{total_steps})... upscaling image" + yield ( + pre_upscale_pil, + f"Final refinement complete (step {current_step}/{total_steps})... upscaling image", + ) current_step += 1 - # Upscale image with model (after final samples, before returning) + # Upscale image with model (load model on-demand) + upscale_model = get_upscale_model() upscaled = imageupscalewithmodel.upscale( - upscale_model=get_value_at_index(upscalemodelloader_30, 0), + upscale_model=get_value_at_index(upscale_model, 0), image=get_value_at_index(final_decoded, 0), ) @@ -1001,18 +1446,23 @@ def _pipeline_artistic(prompt: str, qr_text: str, input_type: str, image_size: i image_np = (image_tensor.cpu().numpy() * 255).astype(np.uint8) image_np = image_np[0] final_image = Image.fromarray(image_np) - yield final_image, f"No errors, all good! Final artistic QR code generated and upscaled. (step {current_step}/{total_steps})" + yield ( + final_image, + f"No errors, all good! Final artistic QR code generated and upscaled. (step {current_step}/{total_steps})", + ) else: # No upscaling image_tensor = get_value_at_index(final_decoded, 0) image_np = (image_tensor.cpu().numpy() * 255).astype(np.uint8) image_np = image_np[0] final_image = Image.fromarray(image_np) - yield final_image, f"No errors, all good! Final artistic QR code generated. (step {current_step}/{total_steps})" - + yield ( + final_image, + f"No errors, all good! Final artistic QR code generated. (step {current_step}/{total_steps})", + ) -if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): +if __name__ == "__main__" and not os.environ.get("QR_TESTING_MODE"): # Start your Gradio app with gr.Blocks() as app: # Add a title and description @@ -1021,6 +1471,9 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): This is an AI-powered QR code generator that creates artistic QR codes using Stable Diffusion 1.5 and ControlNet models. The application uses a custom ComfyUI workflow to generate QR codes. + **Privacy Notice:** Generated images are temporarily cached during your session. 
+ Files are cleared when the server restarts. Download your QR codes immediately after generation. + ### Tips: - Use detailed prompts for better results - Include style keywords like 'photorealistic', 'detailed', '8k' @@ -1049,7 +1502,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): choices=["URL", "Plain Text"], value="URL", label="Input Type", - info="URL: For web links (auto-removes https://). Plain Text: For VCARD, WiFi, calendar, location, etc. (no manipulation)" + info="URL: For web links (auto-removes https://). Plain Text: For VCARD, WiFi, calendar, location, etc. (no manipulation)", ) # Add inputs for artistic QR @@ -1057,32 +1510,38 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): label="Prompt", placeholder="Describe the image you want to generate (check examples below for inspiration)", value="Enter your prompt here... For example: 'a beautiful sunset over mountains, photorealistic, detailed landscape'", - lines=3 + lines=3, ) artistic_text_input = gr.Textbox( label="QR Code Content", placeholder="Enter URL or plain text", value="Enter your URL or text here... For example: https://github.com", - lines=3 + lines=3, ) # Import Settings section - separate accordion with gr.Accordion("Import Settings from JSON", open=False): - gr.Markdown("Paste a settings JSON string (copied from a previous generation) to load all parameters at once.") + gr.Markdown( + "Paste a settings JSON string (copied from a previous generation) to load all parameters at once." + ) import_json_input_artistic = gr.Textbox( label="Paste Settings JSON", placeholder='{"pipeline": "artistic", "prompt": "...", "seed": 718313, ...}', - lines=3 + lines=3, ) import_status_artistic = gr.Textbox( label="Import Status", interactive=False, visible=False, - lines=2 + lines=2, ) with gr.Row(): - load_settings_btn_artistic = gr.Button("Load Settings", variant="primary") - clear_json_btn_artistic = gr.Button("Clear", variant="secondary") + load_settings_btn_artistic = gr.Button( + "Load Settings", variant="primary" + ) + clear_json_btn_artistic = gr.Button( + "Clear", variant="secondary" + ) # Change Settings Manually - separate accordion with gr.Accordion("Change Settings Manually", open=False): @@ -1093,7 +1552,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=64, value=704, label="Image Size", - info="Base size of the generated image. Final output will be 2x this size (e.g., 704 → 1408) due to the two-step enhancement process. Higher values use more VRAM and take longer to process." + info="Base size of the generated image. Final output will be 2x this size (e.g., 704 → 1408) due to the two-step enhancement process. Higher values use more VRAM and take longer to process.", ) # Add border size slider for artistic QR @@ -1103,15 +1562,20 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=1, value=6, label="QR Code Border Size", - info="Number of modules (squares) to use as border around the QR code. Higher values add more whitespace." + info="Number of modules (squares) to use as border around the QR code. 
Higher values add more whitespace.", ) # Add error correction dropdown for artistic QR artistic_error_correction = gr.Dropdown( - choices=["Low (7%)", "Medium (15%)", "Quartile (25%)", "High (30%)"], + choices=[ + "Low (7%)", + "Medium (15%)", + "Quartile (25%)", + "High (30%)", + ], value="High (30%)", label="Error Correction Level", - info="Higher error correction makes the QR code more scannable when damaged or obscured, but increases its size and complexity. High (30%) is recommended for artistic QR codes." + info="Higher error correction makes the QR code more scannable when damaged or obscured, but increases its size and complexity. High (30%) is recommended for artistic QR codes.", ) # Add module size slider for artistic QR @@ -1121,15 +1585,22 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=1, value=16, label="QR Module Size", - info="Pixel width of the smallest QR code unit. Larger values improve readability but require a larger image size. 16 is a good starting point." + info="Pixel width of the smallest QR code unit. Larger values improve readability but require a larger image size. 16 is a good starting point.", ) # Add module drawer dropdown with style examples for artistic QR artistic_module_drawer = gr.Dropdown( - choices=["Square", "Gapped Square", "Circle", "Rounded", "Vertical bars", "Horizontal bars"], + choices=[ + "Square", + "Gapped Square", + "Circle", + "Rounded", + "Vertical bars", + "Horizontal bars", + ], value="Square", label="QR Code Style", - info="Select the style of the QR code modules (squares). See examples below. Different styles can give your QR code a unique look while maintaining scannability." + info="Select the style of the QR code modules (squares). See examples below. Different styles can give your QR code a unique look while maintaining scannability.", ) # Add style examples with labels @@ -1139,38 +1610,68 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): with gr.Row(): with gr.Column(scale=1, min_width=0): gr.Markdown("**Square**", show_label=False) - gr.Image("custom_nodes/ComfyQR/img/square.png", width=100, show_label=False, show_download_button=False) + gr.Image( + "custom_nodes/ComfyQR/img/square.png", + width=100, + show_label=False, + show_download_button=False, + ) with gr.Column(scale=1, min_width=0): gr.Markdown("**Gapped Square**", show_label=False) - gr.Image("custom_nodes/ComfyQR/img/gapped_square.png", width=100, show_label=False, show_download_button=False) + gr.Image( + "custom_nodes/ComfyQR/img/gapped_square.png", + width=100, + show_label=False, + show_download_button=False, + ) with gr.Column(scale=1, min_width=0): gr.Markdown("**Circle**", show_label=False) - gr.Image("custom_nodes/ComfyQR/img/circle.png", width=100, show_label=False, show_download_button=False) + gr.Image( + "custom_nodes/ComfyQR/img/circle.png", + width=100, + show_label=False, + show_download_button=False, + ) # Second row of examples with gr.Row(): with gr.Column(scale=1, min_width=0): gr.Markdown("**Rounded**", show_label=False) - gr.Image("custom_nodes/ComfyQR/img/rounded.png", width=100, show_label=False, show_download_button=False) + gr.Image( + "custom_nodes/ComfyQR/img/rounded.png", + width=100, + show_label=False, + show_download_button=False, + ) with gr.Column(scale=1, min_width=0): gr.Markdown("**Vertical Bars**", show_label=False) - gr.Image("custom_nodes/ComfyQR/img/vertical-bars.png", width=100, show_label=False, show_download_button=False) + gr.Image( + 
"custom_nodes/ComfyQR/img/vertical-bars.png", + width=100, + show_label=False, + show_download_button=False, + ) with gr.Column(scale=1, min_width=0): gr.Markdown("**Horizontal Bars**", show_label=False) - gr.Image("custom_nodes/ComfyQR/img/horizontal-bars.png", width=100, show_label=False, show_download_button=False) + gr.Image( + "custom_nodes/ComfyQR/img/horizontal-bars.png", + width=100, + show_label=False, + show_download_button=False, + ) # Add upscale checkbox artistic_enable_upscale = gr.Checkbox( label="Enable Upscaling", value=True, - info="Enable upscaling with RealESRGAN for higher quality output (enabled by default for artistic pipeline)" + info="Enable upscaling with RealESRGAN for higher quality output (enabled by default for artistic pipeline)", ) # Add seed controls for artistic QR artistic_use_custom_seed = gr.Checkbox( label="Use Custom Seed", value=True, - info="Enable to use a specific seed for reproducible results" + info="Enable to use a specific seed for reproducible results", ) artistic_seed = gr.Slider( minimum=0, @@ -1179,7 +1680,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): value=718313, label="Seed", visible=True, # Initially visible since artistic_use_custom_seed=True - info="Seed value for reproducibility. Same seed with same settings will produce the same result." + info="Seed value for reproducibility. Same seed with same settings will produce the same result.", ) # FreeU Parameters @@ -1187,7 +1688,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): enable_freeu_artistic = gr.Checkbox( label="Enable FreeU", value=True, - info="Enable FreeU quality enhancement (enabled by default for artistic pipeline)" + info="Enable FreeU quality enhancement (enabled by default for artistic pipeline)", ) freeu_b1 = gr.Slider( minimum=1.0, @@ -1195,7 +1696,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=0.01, value=1.4, label="FreeU B1 (Backbone 1)", - info="Backbone feature enhancement for first layer. Higher values improve detail but may reduce blending. Range: 1.0-1.6, Default: 1.4" + info="Backbone feature enhancement for first layer. Higher values improve detail but may reduce blending. Range: 1.0-1.6, Default: 1.4", ) freeu_b2 = gr.Slider( minimum=1.0, @@ -1203,7 +1704,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=0.01, value=1.3, label="FreeU B2 (Backbone 2)", - info="Backbone feature enhancement for second layer. Higher values improve texture. Range: 1.0-1.6, Default: 1.3" + info="Backbone feature enhancement for second layer. Higher values improve texture. Range: 1.0-1.6, Default: 1.3", ) freeu_s1 = gr.Slider( minimum=0.0, @@ -1211,7 +1712,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=0.01, value=0.0, label="FreeU S1 (Skip 1)", - info="Skip connection dampening for first layer. Lower values hide QR structure more. Range: 0.0-1.5, Default: 0.0" + info="Skip connection dampening for first layer. Lower values hide QR structure more. Range: 0.0-1.5, Default: 0.0", ) freeu_s2 = gr.Slider( minimum=0.0, @@ -1219,7 +1720,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=0.01, value=1.3, label="FreeU S2 (Skip 2)", - info="Skip connection dampening for second layer. Balances scannability. Range: 0.0-1.5, Default: 1.3" + info="Skip connection dampening for second layer. Balances scannability. 
Range: 0.0-1.5, Default: 1.3", ) # SAG (Self-Attention Guidance) Parameters @@ -1227,7 +1728,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): enable_sag = gr.Checkbox( label="Enable SAG", value=True, - info="Enable Self-Attention Guidance for improved structural coherence and artistic blending" + info="Enable Self-Attention Guidance for improved structural coherence and artistic blending", ) sag_scale = gr.Slider( minimum=0.0, @@ -1235,7 +1736,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=0.1, value=0.5, label="SAG Scale", - info="Guidance strength. Higher values provide more structural coherence. Range: 0.0-3.0, Default: 0.5" + info="Guidance strength. Higher values provide more structural coherence. Range: 0.0-3.0, Default: 0.5", ) sag_blur_sigma = gr.Slider( minimum=0.0, @@ -1243,19 +1744,23 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=0.1, value=0.5, label="SAG Blur Sigma", - info="Blur amount for artistic blending. Higher values create softer, more artistic effects. Range: 0.0-5.0, Default: 0.5" + info="Blur amount for artistic blending. Higher values create softer, more artistic effects. Range: 0.0-5.0, Default: 0.5", ) # ControlNet Strength Parameters - gr.Markdown("### ControlNet Strength (QR Code Preservation)") - gr.Markdown("**IMPORTANT:** Lower values preserve QR structure better (more scannable). Higher values create more artistic effects but may reduce scannability.") + gr.Markdown( + "### ControlNet Strength (QR Code Preservation)" + ) + gr.Markdown( + "**IMPORTANT:** Lower values preserve QR structure better (more scannable). Higher values create more artistic effects but may reduce scannability." + ) controlnet_strength_first = gr.Slider( minimum=0.0, maximum=1.0, step=0.05, value=0.45, label="First Pass Strength", - info="Controls how much the AI modifies the QR in the first pass. LOWER = more scannable, HIGHER = more artistic. Try 0.30-0.40 for better scannability. Default: 0.45" + info="Controls how much the AI modifies the QR in the first pass. LOWER = more scannable, HIGHER = more artistic. Try 0.30-0.40 for better scannability. Default: 0.45", ) controlnet_strength_final = gr.Slider( minimum=0.0, @@ -1263,34 +1768,67 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=0.05, value=0.7, label="Final Pass Strength", - info="Controls how much the AI modifies the QR in the refinement pass. LOWER = preserves QR structure, HIGHER = more creative. Try 0.55-0.65 for balance. Default: 0.70" + info="Controls how much the AI modifies the QR in the refinement pass. LOWER = preserves QR structure, HIGHER = more creative. Try 0.55-0.65 for balance. 
Default: 0.70", ) # The generate button for artistic QR - artistic_generate_btn = gr.Button("Generate Artistic QR", variant="primary") + artistic_generate_btn = gr.Button( + "Generate Artistic QR", variant="primary" + ) with gr.Column(): # The output image for artistic QR - artistic_output_image = gr.Image(label="Generated Artistic QR Code") + artistic_output_image = gr.Image( + label="Generated Artistic QR Code" + ) artistic_error_message = gr.Textbox( label="Status / Errors", interactive=False, lines=3, ) # Wrap settings output in accordion (initially hidden) - with gr.Accordion("Shareable Settings (JSON)", open=True, visible=False) as settings_accordion_artistic: + with gr.Accordion( + "Shareable Settings (JSON)", open=True, visible=False + ) as settings_accordion_artistic: settings_output_artistic = gr.Textbox( label="Copy this JSON to share your exact settings", interactive=True, lines=5, - show_copy_button=True + show_copy_button=True, ) # When clicking the button, it will trigger the artistic function artistic_generate_btn.click( fn=generate_artistic_qr, - inputs=[artistic_prompt_input, artistic_text_input, artistic_input_type, artistic_image_size, artistic_border_size, artistic_error_correction, artistic_module_size, artistic_module_drawer, artistic_use_custom_seed, artistic_seed, artistic_enable_upscale, enable_freeu_artistic, freeu_b1, freeu_b2, freeu_s1, freeu_s2, enable_sag, sag_scale, sag_blur_sigma, controlnet_strength_first, controlnet_strength_final], - outputs=[artistic_output_image, artistic_error_message, settings_output_artistic, settings_accordion_artistic] + inputs=[ + artistic_prompt_input, + artistic_text_input, + artistic_input_type, + artistic_image_size, + artistic_border_size, + artistic_error_correction, + artistic_module_size, + artistic_module_drawer, + artistic_use_custom_seed, + artistic_seed, + artistic_enable_upscale, + enable_freeu_artistic, + freeu_b1, + freeu_b2, + freeu_s1, + freeu_s2, + enable_sag, + sag_scale, + sag_blur_sigma, + controlnet_strength_first, + controlnet_strength_final, + ], + outputs=[ + artistic_output_image, + artistic_error_message, + settings_output_artistic, + settings_accordion_artistic, + ], ) # Load Settings button event handler @@ -1319,124 +1857,423 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): sag_blur_sigma, controlnet_strength_first, controlnet_strength_final, - import_status_artistic - ] + import_status_artistic, + ], ) # Clear button event handler for artistic tab clear_json_btn_artistic.click( fn=lambda: ("", gr.update(visible=False)), inputs=[], - outputs=[import_json_input_artistic, import_status_artistic] + outputs=[import_json_input_artistic, import_status_artistic], ) # Seed slider visibility toggle for artistic tab artistic_use_custom_seed.change( fn=lambda x: gr.update(visible=x), inputs=[artistic_use_custom_seed], - outputs=[artistic_seed] + outputs=[artistic_seed], ) # Custom Examples Gallery with Images gr.Markdown("### Featured Examples") - gr.Markdown("Click 'Load Settings' under any example to populate the form with those exact settings") + gr.Markdown( + "Click 'Load Settings' under any example to populate the form with those exact settings" + ) # First row (3 images) with gr.Row(): # Example 1: Japanese Temple with gr.Column(scale=1): - ex1_img = gr.Image("examples/artistic/japanese_temple.jpg", label="Japanese Temple", show_label=True, interactive=False, show_download_button=False, height=280) - ex1_btn = gr.Button("Load Settings", size="sm", variant="secondary") + ex1_img = 
gr.Image( + "examples/artistic/japanese_temple.jpg", + label="Japanese Temple", + show_label=True, + interactive=False, + show_download_button=False, + height=280, + ) + ex1_btn = gr.Button( + "Load Settings", size="sm", variant="secondary" + ) # Example 2: Sunset Mountains with gr.Column(scale=1): - ex2_img = gr.Image("examples/artistic/sunset_mountains.jpg", label="Sunset Mountains", show_label=True, interactive=False, show_download_button=False, height=280) - ex2_btn = gr.Button("Load Settings", size="sm", variant="secondary") + ex2_img = gr.Image( + "examples/artistic/sunset_mountains.jpg", + label="Sunset Mountains", + show_label=True, + interactive=False, + show_download_button=False, + height=280, + ) + ex2_btn = gr.Button( + "Load Settings", size="sm", variant="secondary" + ) # Example 3: Roman City with gr.Column(scale=1): - ex3_img = gr.Image("examples/artistic/roman_city.jpg", label="Roman City", show_label=True, interactive=False, show_download_button=False, height=280) - ex3_btn = gr.Button("Load Settings", size="sm", variant="secondary") + ex3_img = gr.Image( + "examples/artistic/roman_city.jpg", + label="Roman City", + show_label=True, + interactive=False, + show_download_button=False, + height=280, + ) + ex3_btn = gr.Button( + "Load Settings", size="sm", variant="secondary" + ) # Second row (3 images) with gr.Row(): # Example 4: Neapolitan Pizza with gr.Column(scale=1): - ex4_img = gr.Image("examples/artistic/neapolitan_pizza.webp", label="Neapolitan Pizza", show_label=True, interactive=False, show_download_button=False, height=280) - ex4_btn = gr.Button("Load Settings", size="sm", variant="secondary") + ex4_img = gr.Image( + "examples/artistic/neapolitan_pizza.webp", + label="Neapolitan Pizza", + show_label=True, + interactive=False, + show_download_button=False, + height=280, + ) + ex4_btn = gr.Button( + "Load Settings", size="sm", variant="secondary" + ) # Example 5: Poker Chips with gr.Column(scale=1): - ex5_img = gr.Image("examples/artistic/poker_chips.webp", label="Poker Chips", show_label=True, interactive=False, show_download_button=False, height=280) - ex5_btn = gr.Button("Load Settings", size="sm", variant="secondary") + ex5_img = gr.Image( + "examples/artistic/poker_chips.webp", + label="Poker Chips", + show_label=True, + interactive=False, + show_download_button=False, + height=280, + ) + ex5_btn = gr.Button( + "Load Settings", size="sm", variant="secondary" + ) # Example 6: Underwater Fish with gr.Column(scale=1): - ex6_img = gr.Image("examples/artistic/underwater_fish.webp", label="Underwater Fish", show_label=True, interactive=False, show_download_button=False, height=280) - ex6_btn = gr.Button("Load Settings", size="sm", variant="secondary") + ex6_img = gr.Image( + "examples/artistic/underwater_fish.webp", + label="Underwater Fish", + show_label=True, + interactive=False, + show_download_button=False, + height=280, + ) + ex6_btn = gr.Button( + "Load Settings", size="sm", variant="secondary" + ) # Third row (3 images) with gr.Row(): # Example 7: Mediterranean Garden with gr.Column(scale=1): - ex7_img = gr.Image("examples/artistic/mediterranean_garden.jpg", label="Mediterranean Garden", show_label=True, interactive=False, show_download_button=False, height=280) - ex7_btn = gr.Button("Load Settings", size="sm", variant="secondary") + ex7_img = gr.Image( + "examples/artistic/mediterranean_garden.jpg", + label="Mediterranean Garden", + show_label=True, + interactive=False, + show_download_button=False, + height=280, + ) + ex7_btn = gr.Button( + "Load Settings", 
size="sm", variant="secondary" + ) # Example 8: Rice Fields with gr.Column(scale=1): - ex8_img = gr.Image("examples/artistic/rice_fields.jpg", label="Rice Fields", show_label=True, interactive=False, show_download_button=False, height=280) - ex8_btn = gr.Button("Load Settings", size="sm", variant="secondary") + ex8_img = gr.Image( + "examples/artistic/rice_fields.jpg", + label="Rice Fields", + show_label=True, + interactive=False, + show_download_button=False, + height=280, + ) + ex8_btn = gr.Button( + "Load Settings", size="sm", variant="secondary" + ) # Example 9: Cyberpunk City with gr.Column(scale=1): - ex9_img = gr.Image("examples/artistic/cyberpunk_city.webp", label="Cyberpunk City", show_label=True, interactive=False, show_download_button=False, height=280) - ex9_btn = gr.Button("Load Settings", size="sm", variant="secondary") + ex9_img = gr.Image( + "examples/artistic/cyberpunk_city.webp", + label="Cyberpunk City", + show_label=True, + interactive=False, + show_download_button=False, + height=280, + ) + ex9_btn = gr.Button( + "Load Settings", size="sm", variant="secondary" + ) # Load settings button handlers # Ex1: Japanese Temple ex1_btn.click( - fn=lambda: ("some clothes spread on ropes, Japanese girl sits inside in the middle of the image, few sakura flowers, realistic, great details, out in the open air sunny day realistic, great details, absence of people, Detailed and Intricate, CGI, Photoshoot, rim light, 8k, 16k, ultra detail", "https://www.google.com", "URL", 640, 6, "Medium (15%)", 14, "Square", True, 718313, 0.5), - outputs=[artistic_prompt_input, artistic_text_input, artistic_input_type, artistic_image_size, artistic_border_size, artistic_error_correction, artistic_module_size, artistic_module_drawer, artistic_use_custom_seed, artistic_seed, sag_blur_sigma] + fn=lambda: ( + "some clothes spread on ropes, Japanese girl sits inside in the middle of the image, few sakura flowers, realistic, great details, out in the open air sunny day realistic, great details, absence of people, Detailed and Intricate, CGI, Photoshoot, rim light, 8k, 16k, ultra detail", + "https://www.google.com", + "URL", + 640, + 6, + "Medium (15%)", + 14, + "Square", + True, + 718313, + 0.5, + ), + outputs=[ + artistic_prompt_input, + artistic_text_input, + artistic_input_type, + artistic_image_size, + artistic_border_size, + artistic_error_correction, + artistic_module_size, + artistic_module_drawer, + artistic_use_custom_seed, + artistic_seed, + sag_blur_sigma, + ], ) # Ex2: Sunset Mountains ex2_btn.click( - fn=lambda: ("a beautiful sunset over mountains, photorealistic, detailed landscape, golden hour, dramatic lighting, 8k, ultra detailed", "https://github.com", "URL", 704, 6, "High (30%)", 16, "Square", True, 718313, 0.5), - outputs=[artistic_prompt_input, artistic_text_input, artistic_input_type, artistic_image_size, artistic_border_size, artistic_error_correction, artistic_module_size, artistic_module_drawer, artistic_use_custom_seed, artistic_seed, sag_blur_sigma] + fn=lambda: ( + "a beautiful sunset over mountains, photorealistic, detailed landscape, golden hour, dramatic lighting, 8k, ultra detailed", + "https://github.com", + "URL", + 704, + 6, + "High (30%)", + 16, + "Square", + True, + 718313, + 0.5, + ), + outputs=[ + artistic_prompt_input, + artistic_text_input, + artistic_input_type, + artistic_image_size, + artistic_border_size, + artistic_error_correction, + artistic_module_size, + artistic_module_drawer, + artistic_use_custom_seed, + artistic_seed, + sag_blur_sigma, + ], ) # Ex3: 
Roman City ex3_btn.click( - fn=lambda: ("aerial bird view of ancient Roman city, cobblestone streets and pathways forming intricate patterns, vintage illustration style, sepia tones, aged parchment look, detailed architecture, 8k, ultra detailed", "WIFI:T:WPA;S:MyNetwork;P:MyPassword123;;", "Plain Text", 832, 6, "High (30%)", 16, "Square", True, 718313, 0.5), - outputs=[artistic_prompt_input, artistic_text_input, artistic_input_type, artistic_image_size, artistic_border_size, artistic_error_correction, artistic_module_size, artistic_module_drawer, artistic_use_custom_seed, artistic_seed, sag_blur_sigma] + fn=lambda: ( + "aerial bird view of ancient Roman city, cobblestone streets and pathways forming intricate patterns, vintage illustration style, sepia tones, aged parchment look, detailed architecture, 8k, ultra detailed", + "WIFI:T:WPA;S:MyNetwork;P:MyPassword123;;", + "Plain Text", + 832, + 6, + "High (30%)", + 16, + "Square", + True, + 718313, + 0.5, + ), + outputs=[ + artistic_prompt_input, + artistic_text_input, + artistic_input_type, + artistic_image_size, + artistic_border_size, + artistic_error_correction, + artistic_module_size, + artistic_module_drawer, + artistic_use_custom_seed, + artistic_seed, + sag_blur_sigma, + ], ) # Ex4: Neapolitan Pizza ex4_btn.click( - fn=lambda: ("artisan Neapolitan pizza on rustic wooden board, fresh basil leaves scattered on top and around, oregano sprinkled, flour dust particles floating in air, melted mozzarella with char marks, traditional Italian pizzeria ambiance, warm brick oven glow in background, detailed food photography, photorealistic, 8k, ultra detailed", "https://www.pizzamaking.com", "URL", 704, 6, "High (30%)", 16, "Square", True, 856749, 2.0), - outputs=[artistic_prompt_input, artistic_text_input, artistic_input_type, artistic_image_size, artistic_border_size, artistic_error_correction, artistic_module_size, artistic_module_drawer, artistic_use_custom_seed, artistic_seed, sag_blur_sigma] + fn=lambda: ( + "artisan Neapolitan pizza on rustic wooden board, fresh basil leaves scattered on top and around, oregano sprinkled, flour dust particles floating in air, melted mozzarella with char marks, traditional Italian pizzeria ambiance, warm brick oven glow in background, detailed food photography, photorealistic, 8k, ultra detailed", + "https://www.pizzamaking.com", + "URL", + 704, + 6, + "High (30%)", + 16, + "Square", + True, + 856749, + 2.0, + ), + outputs=[ + artistic_prompt_input, + artistic_text_input, + artistic_input_type, + artistic_image_size, + artistic_border_size, + artistic_error_correction, + artistic_module_size, + artistic_module_drawer, + artistic_use_custom_seed, + artistic_seed, + sag_blur_sigma, + ], ) # Ex5: Poker Chips ex5_btn.click( - fn=lambda: ("some cards on poker tale, realistic, great details, realistic, great details,absence of people, Detailed and Intricate, CGI, Photoshoot,rim light, 8k, 16k, ultra detail", "https://store.steampowered.com", "URL", 768, 6, "High (30%)", 16, "Square", True, 718313, 2.5), - outputs=[artistic_prompt_input, artistic_text_input, artistic_input_type, artistic_image_size, artistic_border_size, artistic_error_correction, artistic_module_size, artistic_module_drawer, artistic_use_custom_seed, artistic_seed, sag_blur_sigma] + fn=lambda: ( + "some cards on poker tale, realistic, great details, realistic, great details,absence of people, Detailed and Intricate, CGI, Photoshoot,rim light, 8k, 16k, ultra detail", + "https://store.steampowered.com", + "URL", + 768, + 6, + "High (30%)", + 16, 
+ "Square", + True, + 718313, + 2.5, + ), + outputs=[ + artistic_prompt_input, + artistic_text_input, + artistic_input_type, + artistic_image_size, + artistic_border_size, + artistic_error_correction, + artistic_module_size, + artistic_module_drawer, + artistic_use_custom_seed, + artistic_seed, + sag_blur_sigma, + ], ) # Ex6: Underwater Fish ex6_btn.click( - fn=lambda: ("underwater scene with tropical fish, coral reef, rays of sunlight penetrating water, vibrant colors, detailed marine life, photorealistic, 8k, ultra detailed", "https://www.reddit.com", "URL", 704, 6, "High (30%)", 16, "Square", True, 718313, 0.5), - outputs=[artistic_prompt_input, artistic_text_input, artistic_input_type, artistic_image_size, artistic_border_size, artistic_error_correction, artistic_module_size, artistic_module_drawer, artistic_use_custom_seed, artistic_seed, sag_blur_sigma] + fn=lambda: ( + "underwater scene with tropical fish, coral reef, rays of sunlight penetrating water, vibrant colors, detailed marine life, photorealistic, 8k, ultra detailed", + "https://www.reddit.com", + "URL", + 704, + 6, + "High (30%)", + 16, + "Square", + True, + 718313, + 0.5, + ), + outputs=[ + artistic_prompt_input, + artistic_text_input, + artistic_input_type, + artistic_image_size, + artistic_border_size, + artistic_error_correction, + artistic_module_size, + artistic_module_drawer, + artistic_use_custom_seed, + artistic_seed, + sag_blur_sigma, + ], ) # Ex7: Mediterranean Garden ex7_btn.click( - fn=lambda: ("ancient stone sundial in Mediterranean garden, olive trees, dappled sunlight through leaves, weathered stone texture, peaceful afternoon scene, photorealistic, detailed, 8k, ultra detailed", "BEGIN:VEVENT\\nSUMMARY:Team Meeting\\nDTSTART:20251115T140000Z\\nDTEND:20251115T150000Z\\nLOCATION:Conference Room A\\nEND:VEVENT", "Plain Text", 1024, 6, "High (30%)", 14, "Square", True, 413468, 0.5), - outputs=[artistic_prompt_input, artistic_text_input, artistic_input_type, artistic_image_size, artistic_border_size, artistic_error_correction, artistic_module_size, artistic_module_drawer, artistic_use_custom_seed, artistic_seed, sag_blur_sigma] + fn=lambda: ( + "ancient stone sundial in Mediterranean garden, olive trees, dappled sunlight through leaves, weathered stone texture, peaceful afternoon scene, photorealistic, detailed, 8k, ultra detailed", + "BEGIN:VEVENT\\nSUMMARY:Team Meeting\\nDTSTART:20251115T140000Z\\nDTEND:20251115T150000Z\\nLOCATION:Conference Room A\\nEND:VEVENT", + "Plain Text", + 1024, + 6, + "High (30%)", + 14, + "Square", + True, + 413468, + 0.5, + ), + outputs=[ + artistic_prompt_input, + artistic_text_input, + artistic_input_type, + artistic_image_size, + artistic_border_size, + artistic_error_correction, + artistic_module_size, + artistic_module_drawer, + artistic_use_custom_seed, + artistic_seed, + sag_blur_sigma, + ], ) # Ex8: Rice Fields ex8_btn.click( - fn=lambda: ("aerial view of terraced rice fields on mountainside, winding pathways between green paddies, Asian countryside, bird's eye perspective, detailed landscape, golden hour lighting, photorealistic, 8k, ultra detailed", "geo:37.7749,-122.4194", "Plain Text", 704, 6, "High (30%)", 16, "Square", True, 962359, 0.5), - outputs=[artistic_prompt_input, artistic_text_input, artistic_input_type, artistic_image_size, artistic_border_size, artistic_error_correction, artistic_module_size, artistic_module_drawer, artistic_use_custom_seed, artistic_seed, sag_blur_sigma] + fn=lambda: ( + "aerial view of terraced rice fields on mountainside, winding 
pathways between green paddies, Asian countryside, bird's eye perspective, detailed landscape, golden hour lighting, photorealistic, 8k, ultra detailed", + "geo:37.7749,-122.4194", + "Plain Text", + 704, + 6, + "High (30%)", + 16, + "Square", + True, + 962359, + 0.5, + ), + outputs=[ + artistic_prompt_input, + artistic_text_input, + artistic_input_type, + artistic_image_size, + artistic_border_size, + artistic_error_correction, + artistic_module_size, + artistic_module_drawer, + artistic_use_custom_seed, + artistic_seed, + sag_blur_sigma, + ], ) # Ex9: Cyberpunk City ex9_btn.click( - fn=lambda: ("futuristic cityscape with flying cars and neon lights, cyberpunk style, detailed architecture, night scene, 8k, ultra detailed", "https://linkedin.com", "URL", 704, 6, "High (30%)", 16, "Square", True, 718313, 1.5), - outputs=[artistic_prompt_input, artistic_text_input, artistic_input_type, artistic_image_size, artistic_border_size, artistic_error_correction, artistic_module_size, artistic_module_drawer, artistic_use_custom_seed, artistic_seed, sag_blur_sigma] + fn=lambda: ( + "futuristic cityscape with flying cars and neon lights, cyberpunk style, detailed architecture, night scene, 8k, ultra detailed", + "https://linkedin.com", + "URL", + 704, + 6, + "High (30%)", + 16, + "Square", + True, + 718313, + 1.5, + ), + outputs=[ + artistic_prompt_input, + artistic_text_input, + artistic_input_type, + artistic_image_size, + artistic_border_size, + artistic_error_correction, + artistic_module_size, + artistic_module_drawer, + artistic_use_custom_seed, + artistic_seed, + sag_blur_sigma, + ], ) # STANDARD QR TAB @@ -1448,7 +2285,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): choices=["URL", "Plain Text"], value="URL", label="Input Type", - info="URL: For web links (auto-removes https://). Plain Text: For VCARD, WiFi, calendar, location, etc. (no manipulation)" + info="URL: For web links (auto-removes https://). Plain Text: For VCARD, WiFi, calendar, location, etc. (no manipulation)", ) # Add inputs @@ -1456,32 +2293,38 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): label="Prompt", placeholder="Describe the image you want to generate (check examples below for inspiration)", value="Enter your prompt here... For example: 'a beautiful sunset over mountains, photorealistic, detailed landscape'", - lines=3 + lines=3, ) text_input = gr.Textbox( label="QR Code Content", placeholder="Enter URL or plain text", value="Enter your URL or text here... For example: https://github.com", - lines=3 + lines=3, ) # Import Settings section - separate accordion with gr.Accordion("Import Settings from JSON", open=False): - gr.Markdown("Paste a settings JSON string (copied from a previous generation) to load all parameters at once.") + gr.Markdown( + "Paste a settings JSON string (copied from a previous generation) to load all parameters at once." 
+ ) import_json_input_standard = gr.Textbox( label="Paste Settings JSON", placeholder='{"pipeline": "standard", "prompt": "...", "seed": 718313, ...}', - lines=3 + lines=3, ) import_status_standard = gr.Textbox( label="Import Status", interactive=False, visible=False, - lines=2 + lines=2, ) with gr.Row(): - load_settings_btn_standard = gr.Button("Load Settings", variant="primary") - clear_json_btn_standard = gr.Button("Clear", variant="secondary") + load_settings_btn_standard = gr.Button( + "Load Settings", variant="primary" + ) + clear_json_btn_standard = gr.Button( + "Clear", variant="secondary" + ) # Change Settings Manually - separate accordion with gr.Accordion("Change Settings Manually", open=False): @@ -1492,7 +2335,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=64, value=512, label="Image Size", - info="Base size of the generated image. Final output will be 2x this size (e.g., 512 → 1024) due to the two-step enhancement process. Higher values use more VRAM and take longer to process." + info="Base size of the generated image. Final output will be 2x this size (e.g., 512 → 1024) due to the two-step enhancement process. Higher values use more VRAM and take longer to process.", ) # Add border size slider @@ -1502,15 +2345,20 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=1, value=4, label="QR Code Border Size", - info="Number of modules (squares) to use as border around the QR code. Higher values add more whitespace." + info="Number of modules (squares) to use as border around the QR code. Higher values add more whitespace.", ) # Add error correction dropdown error_correction = gr.Dropdown( - choices=["Low (7%)", "Medium (15%)", "Quartile (25%)", "High (30%)"], + choices=[ + "Low (7%)", + "Medium (15%)", + "Quartile (25%)", + "High (30%)", + ], value="Medium (15%)", label="Error Correction Level", - info="Higher error correction makes the QR code more scannable when damaged or obscured, but increases its size and complexity. Medium (15%) is a good starting point for most uses." + info="Higher error correction makes the QR code more scannable when damaged or obscured, but increases its size and complexity. Medium (15%) is a good starting point for most uses.", ) # Add module size slider @@ -1520,15 +2368,22 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=1, value=12, label="QR Module Size", - info="Pixel width of the smallest QR code unit. Larger values improve readability but require a larger image size. 12 is a good starting point." + info="Pixel width of the smallest QR code unit. Larger values improve readability but require a larger image size. 12 is a good starting point.", ) # Add module drawer dropdown with style examples module_drawer = gr.Dropdown( - choices=["Square", "Gapped Square", "Circle", "Rounded", "Vertical bars", "Horizontal bars"], + choices=[ + "Square", + "Gapped Square", + "Circle", + "Rounded", + "Vertical bars", + "Horizontal bars", + ], value="Square", label="QR Code Style", - info="Select the style of the QR code modules (squares). See examples below. Different styles can give your QR code a unique look while maintaining scannability." + info="Select the style of the QR code modules (squares). See examples below. 
Different styles can give your QR code a unique look while maintaining scannability.", ) # Add style examples with labels @@ -1538,45 +2393,75 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): with gr.Row(): with gr.Column(scale=1, min_width=0): gr.Markdown("**Square**", show_label=False) - gr.Image("custom_nodes/ComfyQR/img/square.png", width=100, show_label=False, show_download_button=False) + gr.Image( + "custom_nodes/ComfyQR/img/square.png", + width=100, + show_label=False, + show_download_button=False, + ) with gr.Column(scale=1, min_width=0): gr.Markdown("**Gapped Square**", show_label=False) - gr.Image("custom_nodes/ComfyQR/img/gapped_square.png", width=100, show_label=False, show_download_button=False) + gr.Image( + "custom_nodes/ComfyQR/img/gapped_square.png", + width=100, + show_label=False, + show_download_button=False, + ) with gr.Column(scale=1, min_width=0): gr.Markdown("**Circle**", show_label=False) - gr.Image("custom_nodes/ComfyQR/img/circle.png", width=100, show_label=False, show_download_button=False) + gr.Image( + "custom_nodes/ComfyQR/img/circle.png", + width=100, + show_label=False, + show_download_button=False, + ) # Second row of examples with gr.Row(): with gr.Column(scale=1, min_width=0): gr.Markdown("**Rounded**", show_label=False) - gr.Image("custom_nodes/ComfyQR/img/rounded.png", width=100, show_label=False, show_download_button=False) + gr.Image( + "custom_nodes/ComfyQR/img/rounded.png", + width=100, + show_label=False, + show_download_button=False, + ) with gr.Column(scale=1, min_width=0): gr.Markdown("**Vertical Bars**", show_label=False) - gr.Image("custom_nodes/ComfyQR/img/vertical-bars.png", width=100, show_label=False, show_download_button=False) + gr.Image( + "custom_nodes/ComfyQR/img/vertical-bars.png", + width=100, + show_label=False, + show_download_button=False, + ) with gr.Column(scale=1, min_width=0): gr.Markdown("**Horizontal Bars**", show_label=False) - gr.Image("custom_nodes/ComfyQR/img/horizontal-bars.png", width=100, show_label=False, show_download_button=False) + gr.Image( + "custom_nodes/ComfyQR/img/horizontal-bars.png", + width=100, + show_label=False, + show_download_button=False, + ) # Add upscale checkbox enable_upscale = gr.Checkbox( label="Enable Upscaling", value=False, - info="Enable upscaling with RealESRGAN for higher quality output (disabled by default for standard pipeline)" + info="Enable upscaling with RealESRGAN for higher quality output (disabled by default for standard pipeline)", ) # Add FreeU checkbox enable_freeu_standard = gr.Checkbox( label="Enable FreeU", value=False, - info="Enable FreeU quality enhancement (disabled by default for standard pipeline)" + info="Enable FreeU quality enhancement (disabled by default for standard pipeline)", ) # Add seed controls use_custom_seed = gr.Checkbox( label="Use Custom Seed", value=True, - info="Enable to use a specific seed for reproducible results" + info="Enable to use a specific seed for reproducible results", ) seed = gr.Slider( minimum=0, @@ -1585,19 +2470,23 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): value=718313, label="Seed", visible=True, # Initially visible since use_custom_seed=True - info="Seed value for reproducibility. Same seed with same settings will produce the same result." + info="Seed value for reproducibility. 
Same seed with same settings will produce the same result.", ) # ControlNet Strength Parameters - gr.Markdown("### ControlNet Strength (QR Code Preservation)") - gr.Markdown("**IMPORTANT:** Lower values preserve QR structure better (more scannable). Higher values create more artistic effects but may reduce scannability.") + gr.Markdown( + "### ControlNet Strength (QR Code Preservation)" + ) + gr.Markdown( + "**IMPORTANT:** Lower values preserve QR structure better (more scannable). Higher values create more artistic effects but may reduce scannability." + ) controlnet_strength_standard_first = gr.Slider( minimum=0.0, maximum=1.0, step=0.05, value=0.45, label="First Pass Strength (Brightness + Tile)", - info="Controls how much the AI modifies the QR in both ControlNet passes. LOWER = more scannable, HIGHER = more artistic. Try 0.35-0.50 for good balance. Default: 0.45" + info="Controls how much the AI modifies the QR in both ControlNet passes. LOWER = more scannable, HIGHER = more artistic. Try 0.35-0.50 for good balance. Default: 0.45", ) controlnet_strength_standard_final = gr.Slider( minimum=0.0, @@ -1605,11 +2494,13 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): step=0.05, value=1.0, label="Final Pass Strength (Tile Refinement)", - info="Controls the final tile ControlNet pass strength. Usually kept at 1.0 for clarity. Default: 1.0" + info="Controls the final tile ControlNet pass strength. Usually kept at 1.0 for clarity. Default: 1.0", ) # The generate button - generate_btn = gr.Button("Generate Standard QR", variant="primary") + generate_btn = gr.Button( + "Generate Standard QR", variant="primary" + ) with gr.Column(): # The output image @@ -1620,19 +2511,41 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): lines=3, ) # Wrap settings output in accordion (initially hidden) - with gr.Accordion("Shareable Settings (JSON)", open=True, visible=False) as settings_accordion_standard: + with gr.Accordion( + "Shareable Settings (JSON)", open=True, visible=False + ) as settings_accordion_standard: settings_output_standard = gr.Textbox( label="Copy this JSON to share your exact settings", interactive=True, lines=5, - show_copy_button=True + show_copy_button=True, ) # When clicking the button, it will trigger the main function generate_btn.click( fn=generate_standard_qr, - inputs=[prompt_input, text_input, input_type, image_size, border_size, error_correction, module_size, module_drawer, use_custom_seed, seed, enable_upscale, enable_freeu_standard, controlnet_strength_standard_first, controlnet_strength_standard_final], - outputs=[output_image, error_message, settings_output_standard, settings_accordion_standard] + inputs=[ + prompt_input, + text_input, + input_type, + image_size, + border_size, + error_correction, + module_size, + module_drawer, + use_custom_seed, + seed, + enable_upscale, + enable_freeu_standard, + controlnet_strength_standard_first, + controlnet_strength_standard_final, + ], + outputs=[ + output_image, + error_message, + settings_output_standard, + settings_accordion_standard, + ], ) # Load Settings button event handler @@ -1654,22 +2567,22 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): enable_freeu_standard, controlnet_strength_standard_first, controlnet_strength_standard_final, - import_status_standard - ] + import_status_standard, + ], ) # Clear button event handler clear_json_btn_standard.click( fn=lambda: ("", gr.update(visible=False)), inputs=[], - outputs=[import_json_input_standard, 
import_status_standard] + outputs=[import_json_input_standard, import_status_standard], ) # Seed slider visibility toggle use_custom_seed.change( fn=lambda x: gr.update(visible=x), inputs=[use_custom_seed], - outputs=[seed] + outputs=[seed], ) # Add examples @@ -1682,7 +2595,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): 4, "Medium (15%)", 12, - "Square" + "Square", ], [ "some cards on poker tale, realistic, great details, realistic, great details,absence of people, Detailed and Intricate, CGI, Photoshoot,rim light, 8k, 16k, ultra detail", @@ -1692,7 +2605,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): 4, "Medium (15%)", 12, - "Square" + "Square", ], [ "a beautiful sunset over mountains, photorealistic, detailed landscape, golden hour, dramatic lighting, 8k, ultra detailed", @@ -1702,7 +2615,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): 4, "Medium (15%)", 12, - "Square" + "Square", ], [ "underwater scene with coral reef and tropical fish, photorealistic, detailed, crystal clear water, sunlight rays, 8k, ultra detailed", @@ -1712,7 +2625,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): 4, "Medium (15%)", 12, - "Square" + "Square", ], [ "futuristic cityscape with flying cars and neon lights, cyberpunk style, detailed architecture, night scene, 8k, ultra detailed", @@ -1722,7 +2635,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): 4, "Medium (15%)", 12, - "Square" + "Square", ], [ "vintage camera on wooden table, photorealistic, detailed textures, soft lighting, bokeh background, 8k, ultra detailed", @@ -1732,7 +2645,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): 4, "Medium (15%)", 12, - "Square" + "Square", ], [ "business card design, professional, modern, clean layout, corporate style, detailed, 8k, ultra detailed", @@ -1742,7 +2655,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): 4, "Medium (15%)", 12, - "Square" + "Square", ], [ "wifi network symbol, modern tech, digital art, glowing blue, detailed, 8k, ultra detailed", @@ -1752,7 +2665,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): 4, "Medium (15%)", 12, - "Square" + "Square", ], [ "calendar appointment reminder, organized planner, professional office, detailed, 8k, ultra detailed", @@ -1762,7 +2675,7 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): 4, "Medium (15%)", 12, - "Square" + "Square", ], [ "location pin on map, travel destination, scenic view, detailed cartography, 8k, ultra detailed", @@ -1772,8 +2685,8 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): 4, "Medium (15%)", 12, - "Square" - ] + "Square", + ], ] gr.Examples( @@ -1786,12 +2699,14 @@ if __name__ == "__main__" and not os.environ.get('QR_TESTING_MODE'): border_size, error_correction, module_size, - module_drawer + module_drawer, ], outputs=[output_image, error_message], fn=generate_standard_qr, - cache_examples=False + cache_examples=False, ) # ARTISTIC QR TAB app.launch(share=False, mcp_server=True) + # Note: Automatic file cleanup via delete_cache not available in Gradio 5.49.1 + # Files will be cleaned up when the server is restarted
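
A note on the settings round trip wired up above: the "Shareable Settings (JSON)" boxes emit a payload that the "Import Settings from JSON" accordions accept back via the Load Settings handlers defined earlier in the file. The placeholder text only confirms the "pipeline", "prompt", and "seed" keys, so the sketch below is an illustrative, hypothetical loader under that assumption, not the app's actual handler; it only shows how a parsed payload could be mapped onto gr.update() objects like the ones the clear-button lambdas already return.

import json

import gradio as gr


def load_settings_sketch(raw_json: str):
    """Illustrative only: parse a shareable-settings payload and map it onto
    Gradio component updates. Field names beyond "pipeline", "prompt", and
    "seed" are assumptions, not the app's confirmed schema."""
    try:
        settings = json.loads(raw_json)
    except json.JSONDecodeError as exc:
        # Surface the parse error in the (hidden-by-default) status textbox.
        return (
            gr.update(),
            gr.update(),
            gr.update(visible=True, value=f"Invalid JSON: {exc}"),
        )
    return (
        gr.update(value=settings.get("prompt", "")),
        gr.update(value=int(settings.get("seed", 718313))),
        gr.update(
            visible=True,
            value=f"Loaded settings for the {settings.get('pipeline', 'standard')} pipeline",
        ),
    )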