From 582bb052f73dc090bf4fb75d2ccdd05114a995b2 Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 07:12:19 -0700
Subject: [PATCH 01/24] feat: add LoRA utility module for LoraManager
integration (#52)
Detection, metadata reading, trigger word cache, and prompt injection
for ComfyUI-Lora-Manager. All functions return empty results when
LoraManager is not installed.
---
py/lora_utils.py | 303 +++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 303 insertions(+)
create mode 100644 py/lora_utils.py
diff --git a/py/lora_utils.py b/py/lora_utils.py
new file mode 100644
index 0000000..9e8c329
--- /dev/null
+++ b/py/lora_utils.py
@@ -0,0 +1,303 @@
+"""Utilities for LoraManager integration.
+
+Provides detection, metadata reading, and trigger word lookup for
+ComfyUI-Lora-Manager (https://github.com/willmiao/ComfyUI-Lora-Manager).
+
+All functions are safe to call when LoraManager is not installed — they
+return empty results rather than raising.
+"""
+
+import json
+import os
+import re
+import threading
+from pathlib import Path
+from typing import Dict, List, Optional, Tuple
+
+try:
+ from ..utils.logging_config import get_logger
+except ImportError:
+ import sys
+
+ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+ from utils.logging_config import get_logger
+
+logger = get_logger("prompt_manager.lora_utils")
+
+# ── LoraManager detection ────────────────────────────────────────────
+
+_LORA_MANAGER_DIR_NAME = "ComfyUI-Lora-Manager"
+
+
+def find_comfyui_root() -> Optional[Path]:
+ """Walk upward from this file to find the ComfyUI root (contains main.py)."""
+ current = Path(__file__).resolve().parent
+ for _ in range(10):
+ if (current / "main.py").exists() and (current / "custom_nodes").exists():
+ return current
+ parent = current.parent
+ if parent == current:
+ break
+ current = parent
+ return None
+
+
+def detect_lora_manager(custom_path: str = "") -> Optional[str]:
+ """Return the absolute path to ComfyUI-Lora-Manager if installed.
+
+ Args:
+ custom_path: User-provided override path. Checked first.
+
+ Returns:
+ Absolute path string, or None if not found.
+ """
+ # 1. User override
+ if custom_path:
+ p = Path(custom_path)
+ if p.is_dir() and _looks_like_lora_manager(p):
+ return str(p.resolve())
+
+ # 2. Auto-detect via custom_nodes
+ root = find_comfyui_root()
+ if root:
+ candidate = root / "custom_nodes" / _LORA_MANAGER_DIR_NAME
+ if candidate.is_dir() and _looks_like_lora_manager(candidate):
+ return str(candidate.resolve())
+
+ return None
+
+
+def _looks_like_lora_manager(path: Path) -> bool:
+ """Heuristic: does this directory look like a LoraManager install?"""
+ # Check for characteristic files
+ markers = ["__init__.py", "README.md"]
+ has_marker = any((path / m).exists() for m in markers)
+ # Check for characteristic subdirectories or module name
+ has_lora_ref = (
+ (path / "lora_manager").is_dir()
+ or (path / "py").is_dir()
+ or "lora" in path.name.lower()
+ )
+ return has_marker and has_lora_ref
+
+
+# ── Metadata reading ─────────────────────────────────────────────────
+
+
+def find_lora_directories(lora_manager_path: str) -> List[str]:
+ """Find directories that contain LoRA models (with .metadata.json files).
+
+ Searches common locations: the LoraManager extension dir itself,
+ and the ComfyUI models/loras directory.
+ """
+ dirs = set()
+ lm_path = Path(lora_manager_path)
+
+ # Check ComfyUI models/loras
+ root = find_comfyui_root()
+ if root:
+ models_loras = root / "models" / "loras"
+ if models_loras.is_dir():
+ dirs.add(str(models_loras.resolve()))
+
+ # Check for any .metadata.json in the LoraManager dir tree
+ # (some users store loras inside the extension)
+ for meta in lm_path.rglob("*.metadata.json"):
+ dirs.add(str(meta.parent.resolve()))
+
+ return sorted(dirs)
+
+
+def read_lora_metadata(metadata_path: Path) -> Optional[Dict]:
+ """Read and parse a single .metadata.json file.
+
+ Returns:
+ Parsed dict, or None on failure.
+ """
+ try:
+ with open(metadata_path, "r", encoding="utf-8") as f:
+ return json.load(f)
+ except (json.JSONDecodeError, OSError) as e:
+ logger.debug(f"Failed to read {metadata_path}: {e}")
+ return None
+
+
+def get_trigger_words_from_metadata(metadata: Dict) -> List[str]:
+ """Extract trigger words from a parsed LoraManager metadata dict."""
+ civitai = metadata.get("civitai", {})
+ if not civitai:
+ return []
+ words = civitai.get("trainedWords", [])
+ if isinstance(words, list):
+ return [w.strip() for w in words if isinstance(w, str) and w.strip()]
+ return []
+
+
+def get_model_name_from_metadata(metadata: Dict) -> str:
+ """Extract the model display name from metadata."""
+ # Try civitai model name first, then file_name, then fallback
+ name = metadata.get("model_name", "")
+ if not name:
+ civitai = metadata.get("civitai", {})
+ model = civitai.get("model", {})
+ name = model.get("name", "")
+ if not name:
+ name = metadata.get("file_name", "unknown")
+ return name
+
+
+def get_preview_image_from_metadata(
+ metadata: Dict, metadata_path: Path
+) -> Optional[str]:
+ """Find the preview image path for a LoRA from its metadata.
+
+ Returns:
+ Absolute path string to the preview image, or None.
+ """
+ lora_dir = metadata_path.parent
+ file_name = metadata.get("file_name", "")
+ if not file_name:
+ # Derive from metadata filename: "foo.metadata.json" -> "foo"
+ stem = metadata_path.name.replace(".metadata.json", "")
+ file_name = stem
+
+ base_name = Path(file_name).stem
+
+ # Check standard preview naming conventions
+ for ext in (".png", ".jpg", ".jpeg", ".preview.png", ".preview.jpg"):
+ candidate = lora_dir / f"{base_name}{ext}"
+ if candidate.exists():
+ return str(candidate.resolve())
+
+ return None
+
+
+def get_example_images_dir(lora_manager_path: str) -> Optional[str]:
+ """Find the LoraManager example_images directory."""
+ lm_path = Path(lora_manager_path)
+
+ # Direct subdirectory
+ candidate = lm_path / "example_images"
+ if candidate.is_dir():
+ return str(candidate.resolve())
+
+ # Search one level in user data dirs
+ for child in lm_path.iterdir():
+ if child.is_dir():
+ sub = child / "example_images"
+ if sub.is_dir():
+ return str(sub.resolve())
+
+ return None
+
+
+# ── Trigger word cache & injection ───────────────────────────────────
+
+_LORA_PATTERN = re.compile(r"<lora:([^>]+):[^>]+>", re.IGNORECASE)
+
+
+class TriggerWordCache:
+ """Thread-safe cache mapping LoRA names to their trigger words.
+
+ Built lazily on first access, refreshable on demand.
+ """
+
+ def __init__(self):
+ self._cache: Dict[str, List[str]] = {}
+ self._lock = threading.Lock()
+ self._loaded = False
+
+ def load(self, lora_manager_path: str) -> int:
+ """Scan LoRA metadata files and build the trigger word mapping.
+
+ Returns:
+ Number of LoRAs with trigger words found.
+ """
+ new_cache: Dict[str, List[str]] = {}
+
+ lora_dirs = find_lora_directories(lora_manager_path)
+ for lora_dir in lora_dirs:
+ dir_path = Path(lora_dir)
+ for meta_file in dir_path.glob("*.metadata.json"):
+ metadata = read_lora_metadata(meta_file)
+ if not metadata:
+ continue
+
+ words = get_trigger_words_from_metadata(metadata)
+ if not words:
+ continue
+
+ # Key by filename stem (what appears in <lora:name:weight> tags)
+ file_name = metadata.get("file_name", "")
+ if file_name:
+ stem = Path(file_name).stem
+ new_cache[stem.lower()] = words
+
+ # Also key by the metadata file stem
+ meta_stem = meta_file.name.replace(".metadata.json", "")
+ if meta_stem.lower() not in new_cache:
+ new_cache[meta_stem.lower()] = words
+
+ with self._lock:
+ self._cache = new_cache
+ self._loaded = True
+
+ logger.info(
+ f"Trigger word cache loaded: {len(new_cache)} LoRAs with trigger words"
+ )
+ return len(new_cache)
+
+ def get_trigger_words(self, lora_name: str) -> List[str]:
+ """Look up trigger words for a LoRA by name (case-insensitive)."""
+ with self._lock:
+ return self._cache.get(lora_name.lower(), [])
+
+ @property
+ def is_loaded(self) -> bool:
+ with self._lock:
+ return self._loaded
+
+ def clear(self):
+ with self._lock:
+ self._cache.clear()
+ self._loaded = False
+
+
+# Module-level singleton
+_trigger_cache = TriggerWordCache()
+
+
+def get_trigger_cache() -> TriggerWordCache:
+ return _trigger_cache
+
+
+def inject_trigger_words(text: str, cache: TriggerWordCache) -> Tuple[str, List[str]]:
+ """Scan text for <lora:name:weight> tags and append trigger words.
+
+ Args:
+ text: The prompt text potentially containing lora tags.
+ cache: Populated TriggerWordCache instance.
+
+ Returns:
+ Tuple of (modified_text, list_of_injected_words).
+ If no trigger words found, returns the original text unchanged.
+ """
+ if not cache.is_loaded:
+ return text, []
+
+ matches = _LORA_PATTERN.findall(text)
+ if not matches:
+ return text, []
+
+ all_words = []
+ for lora_name in matches:
+ words = cache.get_trigger_words(lora_name)
+ for w in words:
+ if w.lower() not in text.lower() and w not in all_words:
+ all_words.append(w)
+
+ if not all_words:
+ return text, []
+
+ injected = ", ".join(all_words)
+ return f"{text}, {injected}", all_words
From 4930d1fae7b83952ba8a139ada14b6a5ab2d2d80 Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 07:12:24 -0700
Subject: [PATCH 02/24] feat: add IntegrationConfig for opt-in third-party
integrations (#52)
---
py/config.py | 38 ++++++++++++++++++++++++++++++++++++++
1 file changed, 38 insertions(+)
diff --git a/py/config.py b/py/config.py
index bea4b0d..360b2e2 100644
--- a/py/config.py
+++ b/py/config.py
@@ -201,6 +201,39 @@ def update_config(cls, new_config: Dict[str, Any]):
cls.METADATA_EXTRACTION_TIMEOUT = performance["metadata_extraction_timeout"]
+class IntegrationConfig:
+ """Configuration for third-party extension integrations.
+
+ Manages opt-in integration settings for extensions like LoraManager.
+ All integrations are disabled by default so PromptManager works standalone.
+ """
+
+ # LoraManager integration
+ LORA_MANAGER_ENABLED = False
+ LORA_MANAGER_PATH = "" # Auto-detected if empty
+ LORA_TRIGGER_WORDS_ENABLED = False # Auto-inject trigger words into prompts
+
+ @classmethod
+ def get_config(cls) -> Dict[str, Any]:
+ return {
+ "lora_manager": {
+ "enabled": cls.LORA_MANAGER_ENABLED,
+ "path": cls.LORA_MANAGER_PATH,
+ "trigger_words_enabled": cls.LORA_TRIGGER_WORDS_ENABLED,
+ },
+ }
+
+ @classmethod
+ def update_config(cls, new_config: Dict[str, Any]):
+ lora = new_config.get("lora_manager", {})
+ if "enabled" in lora:
+ cls.LORA_MANAGER_ENABLED = lora["enabled"]
+ if "path" in lora:
+ cls.LORA_MANAGER_PATH = lora["path"]
+ if "trigger_words_enabled" in lora:
+ cls.LORA_TRIGGER_WORDS_ENABLED = lora["trigger_words_enabled"]
+
+
class PromptManagerConfig:
"""Main configuration class for PromptManager core functionality.
@@ -274,6 +307,7 @@ def get_config(cls) -> Dict[str, Any]:
"auto_backup_interval": cls.AUTO_BACKUP_INTERVAL,
},
"gallery": GalleryConfig.get_config(),
+ "integrations": IntegrationConfig.get_config(),
}
@classmethod
@@ -389,6 +423,10 @@ def update_config(cls, new_config: Dict[str, Any]):
if "gallery" in new_config:
GalleryConfig.update_config(new_config["gallery"])
+ # Update integration config
+ if "integrations" in new_config:
+ IntegrationConfig.update_config(new_config["integrations"])
+
# Load configuration on import
try:
From c0bcce5af197309f880aa8d0bfb43394d463fffc Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 07:12:29 -0700
Subject: [PATCH 03/24] feat(api): add LoraManager integration endpoints (#52)
Detection, enable/disable, scan/import, trigger word lookup,
and cache refresh endpoints under /prompt_manager/lora/*.
---
py/api/__init__.py | 3 +
py/api/lora_integration.py | 378 +++++++++++++++++++++++++++++++++++++
2 files changed, 381 insertions(+)
create mode 100644 py/api/lora_integration.py
diff --git a/py/api/__init__.py b/py/api/__init__.py
index 8d7af45..90d4695 100644
--- a/py/api/__init__.py
+++ b/py/api/__init__.py
@@ -24,6 +24,7 @@
from .admin import AdminRoutesMixin
from .logging_routes import LoggingRoutesMixin
from .autotag_routes import AutotagRoutesMixin
+from .lora_integration import LoraIntegrationMixin
try:
from ...database.operations import PromptDatabase
@@ -99,6 +100,7 @@ class PromptManagerAPI(
AdminRoutesMixin,
LoggingRoutesMixin,
AutotagRoutesMixin,
+ LoraIntegrationMixin,
):
"""REST API handler for PromptManager operations and web interface.
@@ -372,6 +374,7 @@ async def serve_js_static(request):
self._register_admin_routes(routes)
self._register_logging_routes(routes)
self._register_autotag_routes(routes)
+ self._register_lora_routes(routes)
# Register gzip compression middleware (once)
global _gzip_registered
diff --git a/py/api/lora_integration.py b/py/api/lora_integration.py
new file mode 100644
index 0000000..6557057
--- /dev/null
+++ b/py/api/lora_integration.py
@@ -0,0 +1,378 @@
+"""LoraManager integration API routes for PromptManager."""
+
+import json
+import os
+from pathlib import Path
+
+from aiohttp import web
+
+
+class LoraIntegrationMixin:
+ """Mixin providing LoraManager detection, scanning, and trigger word endpoints."""
+
+ def _register_lora_routes(self, routes):
+ @routes.get("/prompt_manager/lora/detect")
+ async def lora_detect_route(request):
+ return await self.lora_detect(request)
+
+ @routes.get("/prompt_manager/lora/status")
+ async def lora_status_route(request):
+ return await self.lora_status(request)
+
+ @routes.post("/prompt_manager/lora/enable")
+ async def lora_enable_route(request):
+ return await self.lora_enable(request)
+
+ @routes.post("/prompt_manager/lora/scan")
+ async def lora_scan_route(request):
+ return await self.lora_scan(request)
+
+ @routes.get("/prompt_manager/lora/trigger-words")
+ async def lora_trigger_words_route(request):
+ return await self.lora_trigger_words(request)
+
+ @routes.post("/prompt_manager/lora/refresh-cache")
+ async def lora_refresh_cache_route(request):
+ return await self.lora_refresh_cache(request)
+
+ # ── Detection ────────────────────────────────────────────────────
+
+ async def lora_detect(self, request):
+ """Auto-detect LoraManager installation."""
+ try:
+ from ..lora_utils import detect_lora_manager
+
+ path = await self._run_in_executor(detect_lora_manager)
+ return web.json_response(
+ {
+ "success": True,
+ "detected": path is not None,
+ "path": path or "",
+ }
+ )
+ except Exception as e:
+ self.logger.error(f"LoraManager detection failed: {e}")
+ return web.json_response({"success": False, "error": str(e)}, status=500)
+
+ # ── Status ───────────────────────────────────────────────────────
+
+ async def lora_status(self, request):
+ """Get current LoraManager integration status."""
+ try:
+ from ..config import IntegrationConfig
+ from ..lora_utils import detect_lora_manager, get_trigger_cache
+
+ config = IntegrationConfig.get_config()["lora_manager"]
+ cache = get_trigger_cache()
+
+ # Check if the configured path is still valid
+ detected_path = await self._run_in_executor(
+ detect_lora_manager, config.get("path", "")
+ )
+
+ return web.json_response(
+ {
+ "success": True,
+ "enabled": config["enabled"],
+ "path": config["path"],
+ "trigger_words_enabled": config["trigger_words_enabled"],
+ "detected": detected_path is not None,
+ "detected_path": detected_path or "",
+ "trigger_cache_loaded": cache.is_loaded,
+ }
+ )
+ except Exception as e:
+ self.logger.error(f"LoraManager status check failed: {e}")
+ return web.json_response({"success": False, "error": str(e)}, status=500)
+
+ # ── Enable / Disable ─────────────────────────────────────────────
+
+ async def lora_enable(self, request):
+ """Enable or disable LoraManager integration and save to config.json."""
+ try:
+ data = await request.json()
+ enabled = data.get("enabled", False)
+ path = data.get("path", "")
+ trigger_words = data.get("trigger_words_enabled", False)
+
+ from ..config import IntegrationConfig, PromptManagerConfig
+ from ..lora_utils import detect_lora_manager, get_trigger_cache
+
+ # If enabling, validate the path
+ if enabled:
+ resolved = await self._run_in_executor(detect_lora_manager, path)
+ if not resolved:
+ return web.json_response(
+ {
+ "success": False,
+ "error": "LoraManager not found at the specified path",
+ },
+ status=400,
+ )
+ path = resolved
+
+ # Update in-memory config
+ IntegrationConfig.LORA_MANAGER_ENABLED = enabled
+ IntegrationConfig.LORA_MANAGER_PATH = path
+ IntegrationConfig.LORA_TRIGGER_WORDS_ENABLED = trigger_words
+
+ # Persist to config.json
+ config_dir = os.path.dirname(
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ )
+ config_file = os.path.join(config_dir, "config.json")
+ PromptManagerConfig.save_to_file(config_file)
+
+ # Load trigger word cache if enabling
+ cache = get_trigger_cache()
+ if enabled and trigger_words and path:
+ count = await self._run_in_executor(cache.load, path)
+ self.logger.info(f"Trigger word cache loaded: {count} LoRAs")
+ elif not enabled:
+ cache.clear()
+
+ return web.json_response(
+ {
+ "success": True,
+ "enabled": enabled,
+ "path": path,
+ "trigger_words_enabled": trigger_words,
+ }
+ )
+ except Exception as e:
+ self.logger.error(f"LoraManager enable/disable failed: {e}")
+ return web.json_response({"success": False, "error": str(e)}, status=500)
+
+ # ── Scan LoRA example images ─────────────────────────────────────
+
+ async def lora_scan(self, request):
+ """Scan LoraManager metadata and import LoRA info + preview images.
+
+ Streams progress as SSE, matching the existing scan pattern.
+ """
+ try:
+ from ..config import IntegrationConfig
+ from ..lora_utils import (
+ find_lora_directories,
+ get_preview_image_from_metadata,
+ get_trigger_words_from_metadata,
+ get_model_name_from_metadata,
+ read_lora_metadata,
+ )
+
+ if not IntegrationConfig.LORA_MANAGER_ENABLED:
+ return web.json_response(
+ {
+ "success": False,
+ "error": "LoraManager integration is not enabled",
+ },
+ status=400,
+ )
+
+ lm_path = IntegrationConfig.LORA_MANAGER_PATH
+ if not lm_path:
+ return web.json_response(
+ {"success": False, "error": "LoraManager path not configured"},
+ status=400,
+ )
+
+ response = web.StreamResponse(
+ status=200,
+ reason="OK",
+ headers={"Content-Type": "text/event-stream"},
+ )
+ await response.prepare(request)
+
+ async def send_progress(data):
+ line = f"data: {json.dumps(data)}\n\n"
+ await response.write(line.encode("utf-8"))
+
+ await send_progress(
+ {
+ "type": "progress",
+ "status": "Finding LoRA directories...",
+ "progress": 0,
+ }
+ )
+
+ lora_dirs = await self._run_in_executor(find_lora_directories, lm_path)
+
+ # Collect all metadata files
+ meta_files = []
+ for d in lora_dirs:
+ dir_path = Path(d)
+ meta_files.extend(dir_path.glob("*.metadata.json"))
+
+ total = len(meta_files)
+ imported = 0
+ skipped = 0
+
+ await send_progress(
+ {
+ "type": "progress",
+ "status": f"Found {total} LoRA metadata files",
+ "progress": 5,
+ "total": total,
+ }
+ )
+
+ for i, meta_file in enumerate(meta_files):
+ metadata = await self._run_in_executor(read_lora_metadata, meta_file)
+ if not metadata:
+ skipped += 1
+ continue
+
+ model_name = get_model_name_from_metadata(metadata)
+ trigger_words = get_trigger_words_from_metadata(metadata)
+ preview_path = await self._run_in_executor(
+ get_preview_image_from_metadata, metadata, meta_file
+ )
+
+ # Build prompt text from trigger words or model name
+ prompt_text = ", ".join(trigger_words) if trigger_words else model_name
+
+ # Build tags
+ tags = ["lora", f"lora:{model_name}"]
+ tags.extend(trigger_words)
+
+ # Save to database via existing mechanism
+ try:
+ import hashlib
+
+ prompt_hash = hashlib.sha256(
+ prompt_text.strip().lower().encode("utf-8")
+ ).hexdigest()
+
+ existing = await self._run_in_executor(
+ self.db.get_prompt_by_hash, prompt_hash
+ )
+
+ if existing:
+ # Link preview image if we have one and it's not already linked
+ if preview_path:
+ await self._run_in_executor(
+ self.db.link_image_to_prompt,
+ existing["id"],
+ preview_path,
+ )
+ skipped += 1
+ else:
+ prompt_id = await self._run_in_executor(
+ self.db.save_prompt,
+ prompt_text,
+ "lora", # category
+ tags,
+ None, # rating
+ None, # notes
+ prompt_hash,
+ )
+
+ if prompt_id and preview_path:
+ await self._run_in_executor(
+ self.db.link_image_to_prompt,
+ prompt_id,
+ preview_path,
+ )
+
+ if prompt_id:
+ imported += 1
+ else:
+ skipped += 1
+
+ except Exception as e:
+ self.logger.warning(f"Failed to import LoRA {model_name}: {e}")
+ skipped += 1
+
+ # Progress update every 5 items or at the end
+ if (i + 1) % 5 == 0 or i == total - 1:
+ progress = int(5 + (90 * (i + 1) / max(total, 1)))
+ await send_progress(
+ {
+ "type": "progress",
+ "status": f"Processing: {model_name}",
+ "progress": progress,
+ "processed": i + 1,
+ "imported": imported,
+ "skipped": skipped,
+ }
+ )
+
+ await send_progress(
+ {
+ "type": "complete",
+ "progress": 100,
+ "total": total,
+ "imported": imported,
+ "skipped": skipped,
+ }
+ )
+
+ await response.write_eof()
+ return response
+
+ except Exception as e:
+ self.logger.error(f"LoRA scan failed: {e}")
+ return web.json_response({"success": False, "error": str(e)}, status=500)
+
+ # ── Trigger word endpoints ───────────────────────────────────────
+
+ async def lora_trigger_words(self, request):
+ """Look up trigger words for a specific LoRA name."""
+ try:
+ from ..config import IntegrationConfig
+ from ..lora_utils import get_trigger_cache
+
+ if not IntegrationConfig.LORA_MANAGER_ENABLED:
+ return web.json_response(
+ {"success": False, "error": "LoraManager integration not enabled"},
+ status=400,
+ )
+
+ lora_name = request.query.get("name", "")
+ if not lora_name:
+ return web.json_response(
+ {"success": False, "error": "Missing 'name' query parameter"},
+ status=400,
+ )
+
+ cache = get_trigger_cache()
+ if not cache.is_loaded:
+ lm_path = IntegrationConfig.LORA_MANAGER_PATH
+ if lm_path:
+ await self._run_in_executor(cache.load, lm_path)
+
+ words = cache.get_trigger_words(lora_name)
+ return web.json_response(
+ {"success": True, "lora": lora_name, "trigger_words": words}
+ )
+ except Exception as e:
+ self.logger.error(f"Trigger word lookup failed: {e}")
+ return web.json_response({"success": False, "error": str(e)}, status=500)
+
+ async def lora_refresh_cache(self, request):
+ """Force-refresh the trigger word cache from disk."""
+ try:
+ from ..config import IntegrationConfig
+ from ..lora_utils import get_trigger_cache
+
+ if not IntegrationConfig.LORA_MANAGER_ENABLED:
+ return web.json_response(
+ {"success": False, "error": "LoraManager integration not enabled"},
+ status=400,
+ )
+
+ lm_path = IntegrationConfig.LORA_MANAGER_PATH
+ if not lm_path:
+ return web.json_response(
+ {"success": False, "error": "LoraManager path not configured"},
+ status=400,
+ )
+
+ cache = get_trigger_cache()
+ count = await self._run_in_executor(cache.load, lm_path)
+ return web.json_response(
+ {"success": True, "loras_with_trigger_words": count}
+ )
+ except Exception as e:
+ self.logger.error(f"Trigger cache refresh failed: {e}")
+ return web.json_response({"success": False, "error": str(e)}, status=500)
From 171a571a0d1908b839ccd03e674a9494d1e5ca5e Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 07:12:35 -0700
Subject: [PATCH 04/24] feat: inject LoRA trigger words into prompts at
encoding time (#52)
When enabled, scans for <lora:name:weight> tags in prompt text and
appends trigger words from LoraManager metadata. Behind config toggle.
---
prompt_manager.py | 3 +++
prompt_manager_base.py | 39 +++++++++++++++++++++++++++++++++++++++
prompt_manager_text.py | 3 +++
3 files changed, 45 insertions(+)
diff --git a/prompt_manager.py b/prompt_manager.py
index e8d904a..a1eceff 100644
--- a/prompt_manager.py
+++ b/prompt_manager.py
@@ -148,6 +148,9 @@ def encode_prompt(
parts.append(append_text.strip())
final_text = " ".join(parts)
+ # Inject LoRA trigger words if integration is enabled
+ final_text = self._inject_lora_trigger_words(final_text)
+
# Use the combined text for encoding
encoding_text = final_text
diff --git a/prompt_manager_base.py b/prompt_manager_base.py
index 9a393fc..43a6326 100644
--- a/prompt_manager_base.py
+++ b/prompt_manager_base.py
@@ -102,6 +102,45 @@ def _save_prompt_to_database(
self.logger.error(f"Error saving prompt to database: {e}")
return None
+ def _inject_lora_trigger_words(self, text: str) -> str:
+ """Append LoRA trigger words if integration is enabled.
+
+ Returns the text unchanged if the integration is disabled or
+ LoraManager is not installed.
+ """
+ try:
+ from .py.config import IntegrationConfig
+ except ImportError:
+ try:
+ from py.config import IntegrationConfig
+ except ImportError:
+ return text
+
+ if (
+ not IntegrationConfig.LORA_MANAGER_ENABLED
+ or not IntegrationConfig.LORA_TRIGGER_WORDS_ENABLED
+ ):
+ return text
+
+ try:
+ from .py.lora_utils import get_trigger_cache, inject_trigger_words
+ except ImportError:
+ try:
+ from py.lora_utils import get_trigger_cache, inject_trigger_words
+ except ImportError:
+ return text
+
+ cache = get_trigger_cache()
+
+ # Lazy-load cache on first use
+ if not cache.is_loaded and IntegrationConfig.LORA_MANAGER_PATH:
+ cache.load(IntegrationConfig.LORA_MANAGER_PATH)
+
+ modified, injected = inject_trigger_words(text, cache)
+ if injected:
+ self.logger.info(f"Injected trigger words: {', '.join(injected)}")
+ return modified
+
def _generate_hash(self, text: str) -> str:
"""Generate SHA256 hash for the prompt text.
diff --git a/prompt_manager_text.py b/prompt_manager_text.py
index e10d146..6e0736c 100644
--- a/prompt_manager_text.py
+++ b/prompt_manager_text.py
@@ -142,6 +142,9 @@ def process_text(
parts.append(append_text.strip())
final_text = " ".join(parts)
+ # Inject LoRA trigger words if integration is enabled
+ final_text = self._inject_lora_trigger_words(final_text)
+
# For database storage, save the original main text with metadata about prepend/append
storage_text = text
From 062f430852dc18bd2b9420402944ce46101043f6 Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 07:12:44 -0700
Subject: [PATCH 05/24] feat(ui): add LoraManager integration settings and
import UI (#52)
Integrations section in settings with auto-detection badge, enable
toggle, trigger word toggle, and Import LoRA Data button.
---
web/admin.html | 43 +++++++++++++++
web/js/admin.js | 141 ++++++++++++++++++++++++++++++++++++++++++++++++
2 files changed, 184 insertions(+)
diff --git a/web/admin.html b/web/admin.html
index 05282d4..3fcb643 100644
--- a/web/admin.html
+++ b/web/admin.html
@@ -406,6 +406,49 @@ Choose how the Web UI opens from ComfyUI nodes.
+
+
+
+
Integrations
+
+
+
+
+
+ checking...
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Automatically append trigger words when <lora:name:weight> is detected in prompts.
+
+
+
+
+
diff --git a/web/js/admin.js b/web/js/admin.js
index 6437764..6e4cb9b 100644
--- a/web/js/admin.js
+++ b/web/js/admin.js
@@ -755,6 +755,7 @@
document.getElementById("webuiDisplayMode").value = this.settings.webuiDisplayMode;
this.renderScanPaths();
this.updateMonitoringStatus();
+ this.detectLoraManager();
this.showModal("settingsModal");
}
@@ -776,6 +777,142 @@
}
}
+ // ── LoraManager integration ──────────────────────────────
+
+ async detectLoraManager() {
+ const badge = document.getElementById("loraDetectionBadge");
+ const toggle = document.getElementById("loraEnabled");
+ const settings = document.getElementById("loraSettings");
+
+ try {
+ // First check current status
+ const statusRes = await fetch("/prompt_manager/lora/status");
+ if (statusRes.ok) {
+ const status = await statusRes.json();
+ if (status.success && status.enabled) {
+ badge.textContent = "enabled";
+ badge.className = "text-[10px] px-1.5 py-0.5 rounded-full bg-green-500/20 text-green-400";
+ toggle.checked = true;
+ toggle.disabled = false;
+ document.getElementById("loraManagerPath").value = status.path;
+ document.getElementById("loraTriggerWords").checked = status.trigger_words_enabled;
+ settings.classList.remove("hidden");
+ this._loraPath = status.path;
+ this._bindLoraEvents();
+ return;
+ }
+ }
+
+ // Try auto-detection
+ const detectRes = await fetch("/prompt_manager/lora/detect");
+ if (!detectRes.ok) return;
+ const data = await detectRes.json();
+
+ if (data.detected) {
+ badge.textContent = "detected";
+ badge.className = "text-[10px] px-1.5 py-0.5 rounded-full bg-blue-500/20 text-blue-400";
+ toggle.disabled = false;
+ document.getElementById("loraManagerPath").value = data.path;
+ this._loraPath = data.path;
+ } else {
+ badge.textContent = "not installed";
+ badge.className = "text-[10px] px-1.5 py-0.5 rounded-full bg-pm-input text-pm-muted";
+ toggle.disabled = true;
+ }
+
+ this._bindLoraEvents();
+ } catch (e) {
+ badge.textContent = "error";
+ badge.className = "text-[10px] px-1.5 py-0.5 rounded-full bg-red-500/20 text-red-400";
+ }
+ }
+
+ _bindLoraEvents() {
+ if (this._loraBound) return;
+ this._loraBound = true;
+
+ const toggle = document.getElementById("loraEnabled");
+ const settings = document.getElementById("loraSettings");
+
+ toggle.addEventListener("change", () => {
+ if (toggle.checked) {
+ settings.classList.remove("hidden");
+ } else {
+ settings.classList.add("hidden");
+ }
+ });
+
+ document.getElementById("loraImportBtn").addEventListener("click", () => {
+ this.runLoraImport();
+ });
+ }
+
+ async saveLoraSettings() {
+ const enabled = document.getElementById("loraEnabled").checked;
+ const triggerWords = document.getElementById("loraTriggerWords").checked;
+ const path = this._loraPath || "";
+
+ try {
+ const res = await fetch("/prompt_manager/lora/enable", {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({
+ enabled,
+ path,
+ trigger_words_enabled: triggerWords,
+ }),
+ });
+ const data = await res.json();
+ if (!data.success) {
+ this.showNotification(data.error || "Failed to save LoRA settings", "error");
+ return false;
+ }
+ return true;
+ } catch (e) {
+ this.showNotification("Failed to save LoRA settings", "error");
+ return false;
+ }
+ }
+
+ async runLoraImport() {
+ const saved = await this.saveLoraSettings();
+ if (!saved) return;
+
+ this.showNotification("Scanning LoRA metadata...", "info");
+
+ try {
+ const res = await fetch("/prompt_manager/lora/scan", { method: "POST" });
+ const reader = res.body.getReader();
+ const decoder = new TextDecoder();
+ let buffer = "";
+
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+
+ buffer += decoder.decode(value, { stream: true });
+ const lines = buffer.split("\n\n");
+ buffer = lines.pop();
+
+ for (const line of lines) {
+ if (!line.startsWith("data: ")) continue;
+ try {
+ const data = JSON.parse(line.slice(6));
+ if (data.type === "complete") {
+ this.showNotification(
+ `LoRA import complete: ${data.imported} imported, ${data.skipped} skipped`,
+ data.imported > 0 ? "success" : "info"
+ );
+ this.loadStatistics();
+ }
+ } catch (e) { /* skip malformed SSE */ }
+ }
+ }
+ } catch (e) {
+ this.showNotification("LoRA import failed: " + e.message, "error");
+ }
+ }
+
async saveSettings() {
const timeout = parseInt(document.getElementById("resultTimeout").value);
const displayMode = document.getElementById("webuiDisplayMode").value;
@@ -798,6 +935,10 @@
if (response.ok) {
const data = await response.json();
+
+ // Save LoRA integration settings (fire-and-forget)
+ await this.saveLoraSettings();
+
if (data.restart_required) {
this.showNotification("Settings saved. Restart ComfyUI for gallery path changes to take effect.", "warning");
} else {
From a2061e29777b53544aab50d2a76aadce622f4ef1 Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 07:31:29 -0700
Subject: [PATCH 06/24] fix: detect LoraManager with case-insensitive directory
scan (#52)
---
py/lora_utils.py | 15 +++++++++++----
1 file changed, 11 insertions(+), 4 deletions(-)
diff --git a/py/lora_utils.py b/py/lora_utils.py
index 9e8c329..19023ab 100644
--- a/py/lora_utils.py
+++ b/py/lora_utils.py
@@ -57,12 +57,19 @@ def detect_lora_manager(custom_path: str = "") -> Optional[str]:
if p.is_dir() and _looks_like_lora_manager(p):
return str(p.resolve())
- # 2. Auto-detect via custom_nodes
+ # 2. Auto-detect via custom_nodes (case-insensitive scan)
root = find_comfyui_root()
if root:
- candidate = root / "custom_nodes" / _LORA_MANAGER_DIR_NAME
- if candidate.is_dir() and _looks_like_lora_manager(candidate):
- return str(candidate.resolve())
+ custom_nodes = root / "custom_nodes"
+ if custom_nodes.is_dir():
+ for entry in custom_nodes.iterdir():
+ if (
+ entry.is_dir()
+ and "lora" in entry.name.lower()
+ and "manager" in entry.name.lower()
+ and _looks_like_lora_manager(entry)
+ ):
+ return str(entry.resolve())
return None
From 1a6cad42cbbcbb5b4520b8e2df48ef3a43d2313d Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 07:42:05 -0700
Subject: [PATCH 07/24] fix: simplify LoraManager heuristic to match real
install structure (#52)
---
py/lora_utils.py | 16 ++++++----------
1 file changed, 6 insertions(+), 10 deletions(-)
diff --git a/py/lora_utils.py b/py/lora_utils.py
index 19023ab..93fb32f 100644
--- a/py/lora_utils.py
+++ b/py/lora_utils.py
@@ -76,16 +76,12 @@ def detect_lora_manager(custom_path: str = "") -> Optional[str]:
def _looks_like_lora_manager(path: Path) -> bool:
"""Heuristic: does this directory look like a LoraManager install?"""
- # Check for characteristic files
- markers = ["__init__.py", "README.md"]
- has_marker = any((path / m).exists() for m in markers)
- # Check for characteristic subdirectories or module name
- has_lora_ref = (
- (path / "lora_manager").is_dir()
- or (path / "py").is_dir()
- or "lora" in path.name.lower()
- )
- return has_marker and has_lora_ref
+ # Must have __init__.py (ComfyUI extension) or README.md
+ has_init = (path / "__init__.py").exists()
+ if not has_init:
+ return False
+ # Check for characteristic structure: py/ dir, or any .metadata.json nearby
+ return (path / "py").is_dir() or (path / "lora_manager").is_dir()
# ── Metadata reading ─────────────────────────────────────────────────
From bff70b6323a7e511ff35019eaa08442763bea191 Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 07:46:08 -0700
Subject: [PATCH 08/24] fix: find ComfyUI root via folder_paths and handle
symlinked installs (#52)
---
py/lora_utils.py | 41 ++++++++++++++++++++++++++++++++---------
1 file changed, 32 insertions(+), 9 deletions(-)
diff --git a/py/lora_utils.py b/py/lora_utils.py
index 93fb32f..1c10f70 100644
--- a/py/lora_utils.py
+++ b/py/lora_utils.py
@@ -30,15 +30,38 @@
def find_comfyui_root() -> Optional[Path]:
- """Walk upward from this file to find the ComfyUI root (contains main.py)."""
- current = Path(__file__).resolve().parent
- for _ in range(10):
- if (current / "main.py").exists() and (current / "custom_nodes").exists():
- return current
- parent = current.parent
- if parent == current:
- break
- current = parent
+ """Walk upward from this file to find the ComfyUI root (contains main.py).
+
+ Tries both the resolved (real) path and the unresolved path to handle
+ symlinked custom_nodes installations.
+ """
+ start_paths = [Path(__file__).resolve().parent]
+
+ # If installed via symlink, the unresolved path leads through custom_nodes/
+ raw_path = Path(__file__).parent
+ if raw_path.resolve() != raw_path:
+ start_paths.append(raw_path)
+
+ # Also try via folder_paths if available (ComfyUI runtime)
+ try:
+ import folder_paths
+
+ base = Path(folder_paths.base_path)
+ if base.is_dir():
+ return base
+ except (ImportError, AttributeError):
+ pass
+
+ for start in start_paths:
+ current = start
+ for _ in range(10):
+ if (current / "main.py").exists() and (current / "custom_nodes").exists():
+ return current
+ parent = current.parent
+ if parent == current:
+ break
+ current = parent
+
return None
From 553d5c8dba95d09d678eb8f2864c1fb8ee8f2035 Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 08:01:19 -0700
Subject: [PATCH 09/24] fix(ui): add pointer-events-none to toggle switch
overlays (#52)
The styled div was intercepting clicks meant for the sr-only checkbox
input, preventing toggle switches from being clickable.
---
web/admin.html | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/web/admin.html b/web/admin.html
index 3fcb643..ea58c47 100644
--- a/web/admin.html
+++ b/web/admin.html
@@ -419,7 +419,7 @@
@@ -434,7 +434,7 @@ Auto-inject trigger words
Automatically append trigger words when <lora:name:weight> is detected in prompts.
From d42fe477c32ff26e1e56d9f66e21232a69100107 Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 08:03:51 -0700
Subject: [PATCH 10/24] fix: use lora-manager category and tag for imported
LoRA data (#52)
---
py/api/lora_integration.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/py/api/lora_integration.py b/py/api/lora_integration.py
index 6557057..1f215ab 100644
--- a/py/api/lora_integration.py
+++ b/py/api/lora_integration.py
@@ -232,7 +232,7 @@ async def send_progress(data):
prompt_text = ", ".join(trigger_words) if trigger_words else model_name
# Build tags
- tags = ["lora", f"lora:{model_name}"]
+ tags = ["lora-manager", f"lora:{model_name}"]
tags.extend(trigger_words)
# Save to database via existing mechanism
@@ -260,7 +260,7 @@ async def send_progress(data):
prompt_id = await self._run_in_executor(
self.db.save_prompt,
prompt_text,
- "lora", # category
+ "lora-manager", # category
tags,
None, # rating
None, # notes
From 8c93acb2cece966b45090e4ca346d9dee2ce8b52 Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 08:08:01 -0700
Subject: [PATCH 11/24] fix: use rglob to find metadata in lora subdirectories
(#52)
---
py/api/lora_integration.py | 2 +-
py/lora_utils.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/py/api/lora_integration.py b/py/api/lora_integration.py
index 1f215ab..f9d0e4f 100644
--- a/py/api/lora_integration.py
+++ b/py/api/lora_integration.py
@@ -201,7 +201,7 @@ async def send_progress(data):
meta_files = []
for d in lora_dirs:
dir_path = Path(d)
- meta_files.extend(dir_path.glob("*.metadata.json"))
+ meta_files.extend(dir_path.rglob("*.metadata.json"))
total = len(meta_files)
imported = 0
diff --git a/py/lora_utils.py b/py/lora_utils.py
index 1c10f70..0811cd8 100644
--- a/py/lora_utils.py
+++ b/py/lora_utils.py
@@ -244,7 +244,7 @@ def load(self, lora_manager_path: str) -> int:
lora_dirs = find_lora_directories(lora_manager_path)
for lora_dir in lora_dirs:
dir_path = Path(lora_dir)
- for meta_file in dir_path.glob("*.metadata.json"):
+ for meta_file in dir_path.rglob("*.metadata.json"):
metadata = read_lora_metadata(meta_file)
if not metadata:
continue
From b71e4780e99b36dcfccfd6a886b721480c53acea Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 08:10:37 -0700
Subject: [PATCH 12/24] feat: discover LoRA dirs from extra_model_paths.yaml
and folder_paths (#52)
Only models/loras under the ComfyUI root was checked. Now also parses
extra_model_paths.yaml and uses folder_paths.get_folder_paths('loras')
at runtime to find all configured LoRA directories.
---
py/lora_utils.py | 57 ++++++++++++++++++++++++++++++++++++++++++++----
1 file changed, 53 insertions(+), 4 deletions(-)
diff --git a/py/lora_utils.py b/py/lora_utils.py
index 0811cd8..feafb17 100644
--- a/py/lora_utils.py
+++ b/py/lora_utils.py
@@ -113,27 +113,76 @@ def _looks_like_lora_manager(path: Path) -> bool:
def find_lora_directories(lora_manager_path: str) -> List[str]:
"""Find directories that contain LoRA models (with .metadata.json files).
- Searches common locations: the LoraManager extension dir itself,
- and the ComfyUI models/loras directory.
+ Searches: ComfyUI models/loras, extra_model_paths.yaml lora dirs,
+ and the LoraManager extension dir itself.
"""
dirs = set()
lm_path = Path(lora_manager_path)
- # Check ComfyUI models/loras
root = find_comfyui_root()
if root:
+ # Default models/loras
models_loras = root / "models" / "loras"
if models_loras.is_dir():
dirs.add(str(models_loras.resolve()))
+ # Extra model paths from ComfyUI config
+ for extra_dir in _get_extra_lora_paths(root):
+ if extra_dir.is_dir():
+ dirs.add(str(extra_dir.resolve()))
+
+ # Also try folder_paths at runtime (catches all configured paths)
+ try:
+ import folder_paths
+
+ for p in folder_paths.get_folder_paths("loras"):
+ pp = Path(p)
+ if pp.is_dir():
+ dirs.add(str(pp.resolve()))
+ except (ImportError, AttributeError):
+ pass
+
# Check for any .metadata.json in the LoraManager dir tree
- # (some users store loras inside the extension)
for meta in lm_path.rglob("*.metadata.json"):
dirs.add(str(meta.parent.resolve()))
return sorted(dirs)
+def _get_extra_lora_paths(comfyui_root: Path) -> List[Path]:
+ """Parse extra_model_paths.yaml for additional LoRA directories."""
+ results = []
+ for name in ("extra_model_paths.yaml", "extra_model_paths.yml"):
+ config_file = comfyui_root / name
+ if not config_file.exists():
+ continue
+ try:
+ import yaml
+
+ config = yaml.safe_load(config_file.read_text())
+ if not isinstance(config, dict):
+ continue
+ for section in config.values():
+ if not isinstance(section, dict):
+ continue
+ base = Path(section.get("base_path", ""))
+ loras_val = section.get("loras", "")
+ if not loras_val:
+ continue
+ for line in str(loras_val).strip().splitlines():
+ line = line.strip()
+ if not line:
+ continue
+ p = Path(line)
+ if not p.is_absolute():
+ p = base / line
+ if p.is_dir():
+ results.append(p)
+ except Exception as e:
+ logger.debug(f"Failed to parse {config_file}: {e}")
+ return results
+
+
def read_lora_metadata(metadata_path: Path) -> Optional[Dict]:
"""Read and parse a single .metadata.json file.
From fea745deb9848fb073822236f1adf771066763e6 Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 08:20:20 -0700
Subject: [PATCH 13/24] fix: use example prompts instead of trigger words,
serve LoRA preview images (#52)
- Prompt text now uses civitai example prompts (images[].meta.prompt)
when available, falling back to model name instead of trigger words
- Image serving now allows paths within LoRA directories when the
integration is enabled, fixing 403 errors on preview images
---
py/api/images.py | 23 +++++++++++++++++++----
py/api/lora_integration.py | 6 ++++--
py/lora_utils.py | 18 ++++++++++++++++++
3 files changed, 41 insertions(+), 6 deletions(-)
diff --git a/py/api/images.py b/py/api/images.py
index 0acdb4e..47c3fca 100644
--- a/py/api/images.py
+++ b/py/api/images.py
@@ -303,11 +303,26 @@ async def serve_image(self, request):
image_path = Path(image["image_path"]).resolve()
- # Validate path is within any configured output directory
- output_dirs = self._get_all_output_dirs()
- if output_dirs:
+ # Validate path is within any allowed directory
+ allowed_dirs = list(self._get_all_output_dirs())
+
+ # Also allow LoRA directories when integration is enabled
+ try:
+ from ..config import IntegrationConfig
+
+ if IntegrationConfig.LORA_MANAGER_ENABLED:
+ from ..lora_utils import find_lora_directories
+
+ lora_dirs = find_lora_directories(
+ IntegrationConfig.LORA_MANAGER_PATH
+ )
+ allowed_dirs.extend(Path(d) for d in lora_dirs)
+ except Exception:
+ pass
+
+ if allowed_dirs:
allowed = any(
- image_path.is_relative_to(d.resolve()) for d in output_dirs
+ image_path.is_relative_to(d.resolve()) for d in allowed_dirs
)
if not allowed:
return web.json_response(
diff --git a/py/api/lora_integration.py b/py/api/lora_integration.py
index f9d0e4f..942012c 100644
--- a/py/api/lora_integration.py
+++ b/py/api/lora_integration.py
@@ -154,6 +154,7 @@ async def lora_scan(self, request):
from ..config import IntegrationConfig
from ..lora_utils import (
find_lora_directories,
+ get_example_prompt_from_metadata,
get_preview_image_from_metadata,
get_trigger_words_from_metadata,
get_model_name_from_metadata,
@@ -228,8 +229,9 @@ async def send_progress(data):
get_preview_image_from_metadata, metadata, meta_file
)
- # Build prompt text from trigger words or model name
- prompt_text = ", ".join(trigger_words) if trigger_words else model_name
+ # Build prompt text: prefer example prompt, then model name
+ example_prompt = get_example_prompt_from_metadata(metadata)
+ prompt_text = example_prompt or model_name
# Build tags
tags = ["lora-manager", f"lora:{model_name}"]
diff --git a/py/lora_utils.py b/py/lora_utils.py
index feafb17..bdc2b4a 100644
--- a/py/lora_utils.py
+++ b/py/lora_utils.py
@@ -208,6 +208,24 @@ def get_trigger_words_from_metadata(metadata: Dict) -> List[str]:
return []
+def get_example_prompt_from_metadata(metadata: Dict) -> Optional[str]:
+ """Extract an example prompt from civitai image metadata.
+
+ Looks at civitai.images[].meta.prompt for the first available example.
+ """
+ civitai = metadata.get("civitai", {})
+ images = civitai.get("images", [])
+ for img in images:
+ if not isinstance(img, dict):
+ continue
+ meta = img.get("meta")
+ if isinstance(meta, dict):
+ prompt = meta.get("prompt", "")
+ if isinstance(prompt, str) and prompt.strip():
+ return prompt.strip()
+ return None
+
+
def get_model_name_from_metadata(metadata: Dict) -> str:
"""Extract the model display name from metadata."""
# Try civitai model name first, then file_name, then fallback
From 7713cecba6336dd930b04c4c3ef174e8f1a43836 Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 08:24:45 -0700
Subject: [PATCH 14/24] fix: handle null civitai field, link all preview images
(#52)
- Guard against civitai: null in metadata (was crashing the scan)
- Link all local preview images per LoRA, not just the first
- Add get_civitai_image_urls() for future remote image support
---
py/api/lora_integration.py | 25 ++++++++-------
py/lora_utils.py | 63 ++++++++++++++++++++++++++++----------
2 files changed, 58 insertions(+), 30 deletions(-)
diff --git a/py/api/lora_integration.py b/py/api/lora_integration.py
index 942012c..f5f910f 100644
--- a/py/api/lora_integration.py
+++ b/py/api/lora_integration.py
@@ -155,7 +155,7 @@ async def lora_scan(self, request):
from ..lora_utils import (
find_lora_directories,
get_example_prompt_from_metadata,
- get_preview_image_from_metadata,
+ get_preview_images_from_metadata,
get_trigger_words_from_metadata,
get_model_name_from_metadata,
read_lora_metadata,
@@ -225,8 +225,8 @@ async def send_progress(data):
model_name = get_model_name_from_metadata(metadata)
trigger_words = get_trigger_words_from_metadata(metadata)
- preview_path = await self._run_in_executor(
- get_preview_image_from_metadata, metadata, meta_file
+ preview_paths = await self._run_in_executor(
+ get_preview_images_from_metadata, metadata, meta_file
)
# Build prompt text: prefer example prompt, then model name
@@ -250,12 +250,12 @@ async def send_progress(data):
)
if existing:
- # Link preview image if we have one and it's not already linked
- if preview_path:
+ # Link all preview images
+ for pp in preview_paths:
await self._run_in_executor(
self.db.link_image_to_prompt,
existing["id"],
- preview_path,
+ pp,
)
skipped += 1
else:
@@ -269,14 +269,13 @@ async def send_progress(data):
prompt_hash,
)
- if prompt_id and preview_path:
- await self._run_in_executor(
- self.db.link_image_to_prompt,
- prompt_id,
- preview_path,
- )
-
if prompt_id:
+ for pp in preview_paths:
+ await self._run_in_executor(
+ self.db.link_image_to_prompt,
+ prompt_id,
+ pp,
+ )
imported += 1
else:
skipped += 1
diff --git a/py/lora_utils.py b/py/lora_utils.py
index bdc2b4a..6bd4e80 100644
--- a/py/lora_utils.py
+++ b/py/lora_utils.py
@@ -197,11 +197,14 @@ def read_lora_metadata(metadata_path: Path) -> Optional[Dict]:
return None
+def _get_civitai(metadata: Dict) -> Dict:
+ """Safely get the civitai dict, handling None values."""
+ return metadata.get("civitai") or {}
+
+
def get_trigger_words_from_metadata(metadata: Dict) -> List[str]:
"""Extract trigger words from a parsed LoraManager metadata dict."""
- civitai = metadata.get("civitai", {})
- if not civitai:
- return []
+ civitai = _get_civitai(metadata)
words = civitai.get("trainedWords", [])
if isinstance(words, list):
return [w.strip() for w in words if isinstance(w, str) and w.strip()]
@@ -213,8 +216,8 @@ def get_example_prompt_from_metadata(metadata: Dict) -> Optional[str]:
Looks at civitai.images[].meta.prompt for the first available example.
"""
- civitai = metadata.get("civitai", {})
- images = civitai.get("images", [])
+ civitai = _get_civitai(metadata)
+ images = civitai.get("images", []) or []
for img in images:
if not isinstance(img, dict):
continue
@@ -226,43 +229,69 @@ def get_example_prompt_from_metadata(metadata: Dict) -> Optional[str]:
return None
+def get_civitai_image_urls(metadata: Dict) -> List[str]:
+ """Extract all civitai example image URLs from metadata."""
+ civitai = _get_civitai(metadata)
+ urls = []
+ for img in civitai.get("images", []) or []:
+ if not isinstance(img, dict):
+ continue
+ url = img.get("url", "")
+ if isinstance(url, str) and url.strip():
+ urls.append(url.strip())
+ return urls
+
+
def get_model_name_from_metadata(metadata: Dict) -> str:
"""Extract the model display name from metadata."""
- # Try civitai model name first, then file_name, then fallback
name = metadata.get("model_name", "")
if not name:
- civitai = metadata.get("civitai", {})
- model = civitai.get("model", {})
+ civitai = _get_civitai(metadata)
+ model = civitai.get("model") or {}
name = model.get("name", "")
if not name:
name = metadata.get("file_name", "unknown")
return name
-def get_preview_image_from_metadata(
- metadata: Dict, metadata_path: Path
-) -> Optional[str]:
- """Find the preview image path for a LoRA from its metadata.
+def get_preview_images_from_metadata(metadata: Dict, metadata_path: Path) -> List[str]:
+ """Find all local preview/example image paths for a LoRA.
Returns:
- Absolute path string to the preview image, or None.
+ List of absolute path strings to image files.
"""
+ results = []
lora_dir = metadata_path.parent
file_name = metadata.get("file_name", "")
if not file_name:
- # Derive from metadata filename: "foo.metadata.json" -> "foo"
stem = metadata_path.name.replace(".metadata.json", "")
file_name = stem
base_name = Path(file_name).stem
# Check standard preview naming conventions
- for ext in (".png", ".jpg", ".jpeg", ".preview.png", ".preview.jpg"):
+ for ext in (
+ ".png",
+ ".jpg",
+ ".jpeg",
+ ".webp",
+ ".preview.png",
+ ".preview.jpg",
+ ".preview.jpeg",
+ ):
candidate = lora_dir / f"{base_name}{ext}"
if candidate.exists():
- return str(candidate.resolve())
+ results.append(str(candidate.resolve()))
- return None
+ return results
+
+
+def get_preview_image_from_metadata(
+ metadata: Dict, metadata_path: Path
+) -> Optional[str]:
+ """Find the first preview image path for a LoRA (backward compat)."""
+ images = get_preview_images_from_metadata(metadata, metadata_path)
+ return images[0] if images else None
def get_example_images_dir(lora_manager_path: str) -> Optional[str]:
From 890fd70ac59cb033918af73a367dcd6edea5958b Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 08:26:15 -0700
Subject: [PATCH 15/24] feat: reimport clears previous lora-manager data first
(#52)
Clicking Import LoRA Data now deletes all existing lora-manager
category prompts before scanning, ensuring a clean reimport.
---
database/operations.py | 32 ++++++++++++++++++++++++++++++++
py/api/lora_integration.py | 15 ++++++++++++++-
2 files changed, 46 insertions(+), 1 deletion(-)
diff --git a/database/operations.py b/database/operations.py
index ad9b6ae..ce48a29 100644
--- a/database/operations.py
+++ b/database/operations.py
@@ -444,6 +444,38 @@ def delete_prompt(self, prompt_id: int) -> bool:
conn.commit()
return cursor.rowcount > 0
+ def delete_prompts_by_category(self, category: str) -> int:
+ """Delete all prompts with the given category.
+
+ Returns:
+ Number of prompts deleted.
+ """
+ with self.model.get_connection() as conn:
+ # Get IDs first for cascade cleanup
+ ids = [
+ r[0]
+ for r in conn.execute(
+ "SELECT id FROM prompts WHERE category = ?", (category,)
+ ).fetchall()
+ ]
+ if not ids:
+ return 0
+ placeholders = ",".join("?" * len(ids))
+ conn.execute(
+ f"DELETE FROM generated_images WHERE prompt_id IN ({placeholders})",
+ ids,
+ )
+ conn.execute(
+ f"DELETE FROM prompt_tags WHERE prompt_id IN ({placeholders})",
+ ids,
+ )
+ cursor = conn.execute(
+ f"DELETE FROM prompts WHERE id IN ({placeholders})",
+ ids,
+ )
+ conn.commit()
+ return cursor.rowcount
+
def get_all_categories(self) -> List[str]:
"""
Get all unique categories from the database.
diff --git a/py/api/lora_integration.py b/py/api/lora_integration.py
index f5f910f..c2ca9d6 100644
--- a/py/api/lora_integration.py
+++ b/py/api/lora_integration.py
@@ -191,11 +191,24 @@ async def send_progress(data):
await send_progress(
{
"type": "progress",
- "status": "Finding LoRA directories...",
+ "status": "Clearing previous lora-manager imports...",
"progress": 0,
}
)
+ # Clear previous imports so reimport is always clean
+ await self._run_in_executor(
+ self.db.delete_prompts_by_category, "lora-manager"
+ )
+
+ await send_progress(
+ {
+ "type": "progress",
+ "status": "Finding LoRA directories...",
+ "progress": 2,
+ }
+ )
+
lora_dirs = await self._run_in_executor(find_lora_directories, lm_path)
# Collect all metadata files
From 451596fab3fa977e289ddc1b5e656848f4a451c6 Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 08:33:47 -0700
Subject: [PATCH 16/24] feat: download civitai example images during LoRA
import (#52)
Images from civitai.images[] are downloaded to data/lora_images/ cache
and linked to prompts alongside local preview files. Cached files are
reused on reimport. Image serving allows the cache directory.
---
.gitignore | 1 +
py/api/images.py | 6 +++-
py/api/lora_integration.py | 23 +++++++++++--
py/lora_utils.py | 67 ++++++++++++++++++++++++++++++++++++++
4 files changed, 93 insertions(+), 4 deletions(-)
diff --git a/.gitignore b/.gitignore
index 9248013..59734d7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,6 +9,7 @@ AGENTS.md
CLAUDE.md
# Local files
+data/
docs/
logs/
standalone_tagger.py
diff --git a/py/api/images.py b/py/api/images.py
index 47c3fca..95877f7 100644
--- a/py/api/images.py
+++ b/py/api/images.py
@@ -311,12 +311,16 @@ async def serve_image(self, request):
from ..config import IntegrationConfig
if IntegrationConfig.LORA_MANAGER_ENABLED:
- from ..lora_utils import find_lora_directories
+ from ..lora_utils import (
+ find_lora_directories,
+ get_lora_image_cache_dir,
+ )
lora_dirs = find_lora_directories(
IntegrationConfig.LORA_MANAGER_PATH
)
allowed_dirs.extend(Path(d) for d in lora_dirs)
+ allowed_dirs.append(get_lora_image_cache_dir())
except Exception:
pass
diff --git a/py/api/lora_integration.py b/py/api/lora_integration.py
index c2ca9d6..82171ed 100644
--- a/py/api/lora_integration.py
+++ b/py/api/lora_integration.py
@@ -153,8 +153,10 @@ async def lora_scan(self, request):
try:
from ..config import IntegrationConfig
from ..lora_utils import (
+ download_civitai_images,
find_lora_directories,
get_example_prompt_from_metadata,
+ get_lora_image_cache_dir,
get_preview_images_from_metadata,
get_trigger_words_from_metadata,
get_model_name_from_metadata,
@@ -230,6 +232,8 @@ async def send_progress(data):
}
)
+ cache_dir = get_lora_image_cache_dir()
+
for i, meta_file in enumerate(meta_files):
metadata = await self._run_in_executor(read_lora_metadata, meta_file)
if not metadata:
@@ -238,9 +242,22 @@ async def send_progress(data):
model_name = get_model_name_from_metadata(metadata)
trigger_words = get_trigger_words_from_metadata(metadata)
+
+ # Collect all images: local previews + downloaded civitai examples
preview_paths = await self._run_in_executor(
get_preview_images_from_metadata, metadata, meta_file
)
+ civitai_paths = await self._run_in_executor(
+ download_civitai_images, metadata, meta_file, cache_dir
+ )
+
+ # Merge, local first, dedup
+ seen = set(preview_paths)
+ all_images = list(preview_paths)
+ for cp in civitai_paths:
+ if cp not in seen:
+ all_images.append(cp)
+ seen.add(cp)
# Build prompt text: prefer example prompt, then model name
example_prompt = get_example_prompt_from_metadata(metadata)
@@ -263,8 +280,8 @@ async def send_progress(data):
)
if existing:
- # Link all preview images
- for pp in preview_paths:
+ # Link all images
+ for pp in all_images:
await self._run_in_executor(
self.db.link_image_to_prompt,
existing["id"],
@@ -283,7 +300,7 @@ async def send_progress(data):
)
if prompt_id:
- for pp in preview_paths:
+ for pp in all_images:
await self._run_in_executor(
self.db.link_image_to_prompt,
prompt_id,
diff --git a/py/lora_utils.py b/py/lora_utils.py
index 6bd4e80..dab96f6 100644
--- a/py/lora_utils.py
+++ b/py/lora_utils.py
@@ -7,10 +7,12 @@
return empty results rather than raising.
"""
+import hashlib
import json
import os
import re
import threading
+import urllib.request
from pathlib import Path
from typing import Dict, List, Optional, Tuple
@@ -294,6 +296,71 @@ def get_preview_image_from_metadata(
return images[0] if images else None
+def download_civitai_images(
+ metadata: Dict, metadata_path: Path, cache_dir: Path
+) -> List[str]:
+ """Download civitai example images to a local cache directory.
+
+ Images are stored as ``<cache_dir>/<lora_stem>/<url_hash>.jpg``.
+ Already-downloaded files are skipped.
+
+ Returns:
+ List of absolute paths to downloaded image files.
+ """
+ civitai = _get_civitai(metadata)
+ images = civitai.get("images", []) or []
+ if not images:
+ return []
+
+ file_name = metadata.get("file_name", "")
+ if not file_name:
+ file_name = metadata_path.name.replace(".metadata.json", "")
+ lora_stem = Path(file_name).stem
+
+ lora_cache = cache_dir / lora_stem
+ lora_cache.mkdir(parents=True, exist_ok=True)
+
+ downloaded = []
+ for img in images:
+ if not isinstance(img, dict):
+ continue
+ url = img.get("url", "")
+ if not isinstance(url, str) or not url.startswith("http"):
+ continue
+
+ # Deterministic filename from URL
+ url_hash = hashlib.md5(url.encode()).hexdigest()[:12]
+ ext = ".jpg" # civitai images are typically jpeg
+ if ".png" in url.lower():
+ ext = ".png"
+ local_path = lora_cache / f"{url_hash}{ext}"
+
+ if local_path.exists():
+ downloaded.append(str(local_path.resolve()))
+ continue
+
+ try:
+ req = urllib.request.Request(
+ url, headers={"User-Agent": "ComfyUI-PromptManager/1.0"}
+ )
+ with urllib.request.urlopen(req, timeout=15) as resp:
+ local_path.write_bytes(resp.read())
+ downloaded.append(str(local_path.resolve()))
+ except Exception as e:
+ logger.debug(f"Failed to download {url}: {e}")
+
+ return downloaded
+
+
+def get_lora_image_cache_dir() -> Path:
+ """Get the directory used to cache downloaded LoRA example images."""
+ # Store in the extension's own directory
+ ext_root = Path(__file__).resolve().parent.parent
+ cache = ext_root / "data" / "lora_images"
+ cache.mkdir(parents=True, exist_ok=True)
+ return cache
+
+
def get_example_images_dir(lora_manager_path: str) -> Optional[str]:
"""Find the LoraManager example_images directory."""
lm_path = Path(lora_manager_path)
From 0e41e6d65c2d989781b07567270fdd31c815934a Mon Sep 17 00:00:00 2001
From: Vito Sansevero
Date: Fri, 3 Apr 2026 08:41:41 -0700
Subject: [PATCH 17/24] feat: add CivitAI API key setting for authenticated
image downloads (#52)
Most civitai example images (especially NSFW) require authentication.
Adds API key field to the Integrations settings panel, passed as
Bearer token when downloading example images.
---
py/api/lora_integration.py | 9 ++++++++-
py/config.py | 4 ++++
py/lora_utils.py | 12 ++++++++----
web/admin.html | 7 +++++++
web/js/admin.js | 3 +++
5 files changed, 30 insertions(+), 5 deletions(-)
diff --git a/py/api/lora_integration.py b/py/api/lora_integration.py
index 82171ed..20d7ee4 100644
--- a/py/api/lora_integration.py
+++ b/py/api/lora_integration.py
@@ -76,6 +76,7 @@ async def lora_status(self, request):
"enabled": config["enabled"],
"path": config["path"],
"trigger_words_enabled": config["trigger_words_enabled"],
+ "civitai_api_key": config.get("civitai_api_key", ""),
"detected": detected_path is not None,
"detected_path": detected_path or "",
"trigger_cache_loaded": cache.is_loaded,
@@ -94,6 +95,7 @@ async def lora_enable(self, request):
enabled = data.get("enabled", False)
path = data.get("path", "")
trigger_words = data.get("trigger_words_enabled", False)
+ civitai_key = data.get("civitai_api_key", "")
from ..config import IntegrationConfig, PromptManagerConfig
from ..lora_utils import detect_lora_manager, get_trigger_cache
@@ -115,6 +117,7 @@ async def lora_enable(self, request):
IntegrationConfig.LORA_MANAGER_ENABLED = enabled
IntegrationConfig.LORA_MANAGER_PATH = path
IntegrationConfig.LORA_TRIGGER_WORDS_ENABLED = trigger_words
+ IntegrationConfig.CIVITAI_API_KEY = civitai_key
# Persist to config.json
config_dir = os.path.dirname(
@@ -248,7 +251,11 @@ async def send_progress(data):
get_preview_images_from_metadata, metadata, meta_file
)
civitai_paths = await self._run_in_executor(
- download_civitai_images, metadata, meta_file, cache_dir
+ download_civitai_images,
+ metadata,
+ meta_file,
+ cache_dir,
+ IntegrationConfig.CIVITAI_API_KEY,
)
# Merge, local first, dedup
diff --git a/py/config.py b/py/config.py
index 360b2e2..88ef6b9 100644
--- a/py/config.py
+++ b/py/config.py
@@ -212,6 +212,7 @@ class IntegrationConfig:
LORA_MANAGER_ENABLED = False
LORA_MANAGER_PATH = "" # Auto-detected if empty
LORA_TRIGGER_WORDS_ENABLED = False # Auto-inject trigger words into prompts
+ CIVITAI_API_KEY = "" # Required to download NSFW example images
@classmethod
def get_config(cls) -> Dict[str, Any]:
@@ -220,6 +221,7 @@ def get_config(cls) -> Dict[str, Any]:
"enabled": cls.LORA_MANAGER_ENABLED,
"path": cls.LORA_MANAGER_PATH,
"trigger_words_enabled": cls.LORA_TRIGGER_WORDS_ENABLED,
+ "civitai_api_key": cls.CIVITAI_API_KEY,
},
}
@@ -232,6 +234,8 @@ def update_config(cls, new_config: Dict[str, Any]):
cls.LORA_MANAGER_PATH = lora["path"]
if "trigger_words_enabled" in lora:
cls.LORA_TRIGGER_WORDS_ENABLED = lora["trigger_words_enabled"]
+ if "civitai_api_key" in lora:
+ cls.CIVITAI_API_KEY = lora["civitai_api_key"]
class PromptManagerConfig:
diff --git a/py/lora_utils.py b/py/lora_utils.py
index dab96f6..0eb90d9 100644
--- a/py/lora_utils.py
+++ b/py/lora_utils.py
@@ -297,13 +297,16 @@ def get_preview_image_from_metadata(
def download_civitai_images(
- metadata: Dict, metadata_path: Path, cache_dir: Path
+ metadata: Dict, metadata_path: Path, cache_dir: Path, api_key: str = ""
) -> List[str]:
"""Download civitai example images to a local cache directory.
Images are stored as ``<cache_dir>/<lora_stem>/<url_hash>.jpg``.
Already-downloaded files are skipped.
+ Args:
+ api_key: CivitAI API key for authenticated downloads (NSFW content).
+
Returns:
List of absolute paths to downloaded image files.
"""
@@ -340,9 +343,10 @@ def download_civitai_images(
continue
try:
- req = urllib.request.Request(
- url, headers={"User-Agent": "ComfyUI-PromptManager/1.0"}
- )
+ headers = {"User-Agent": "ComfyUI-PromptManager/1.0"}
+ if api_key:
+ headers["Authorization"] = f"Bearer {api_key}"
+ req = urllib.request.Request(url, headers=headers)
with urllib.request.urlopen(req, timeout=15) as resp:
local_path.write_bytes(resp.read())
downloaded.append(str(local_path.resolve()))
diff --git a/web/admin.html b/web/admin.html
index ea58c47..a810f20 100644
--- a/web/admin.html
+++ b/web/admin.html
@@ -439,6 +439,13 @@
Automatically append trigger words when <lora:name:weight> is detected in prompts.
+
+