perf:优化webui界面,增加prompt模板元信息

This commit is contained in:
SengokuCola
2026-05-05 17:57:19 +08:00
parent 0d43d3ec05
commit a5e4ac8531
42 changed files with 826 additions and 410 deletions

View File

@@ -1,8 +1,12 @@
from __future__ import annotations
from dataclasses import dataclass
from functools import lru_cache
from pathlib import Path
from typing import Any
from tomlkit import parse as parse_toml
import json
import logging
import os
import re
@@ -22,6 +26,19 @@ STRICT_ENV_VALUES = {"1", "true", "yes", "on"}
extract_prompt_placeholders = extract_placeholders
@dataclass(frozen=True)
class PromptMetadata:
    """Optional, UI-oriented metadata attached to a prompt template.

    Every field has a neutral default so a template with no metadata
    file still gets a usable (empty) metadata object.
    """

    # Human-readable name for the template; empty means "no custom name".
    display_name: str = ""
    # Marks the template as advanced -- presumably hidden/collapsed in the
    # web UI by default; TODO confirm against the UI code.
    advanced: bool = False
    # Free-text description of what the template is for.
    description: str = ""
@dataclass(frozen=True)
class PromptTemplateInfo:
    """A discovered prompt template: its file location plus its metadata."""

    # Location of the template file on disk.
    path: Path
    # Metadata already coerced to safe defaults (never None).
    metadata: PromptMetadata
def get_prompts_root(prompts_root: Path | None = None) -> Path:
    """Return the resolved prompts root directory.

    Falls back to the module-level PROMPTS_ROOT when no explicit root
    is supplied.
    """
    chosen_root = prompts_root or PROMPTS_ROOT
    return chosen_root.resolve()
@@ -80,24 +97,86 @@ def iter_prompt_files(directory: Path, recursive: bool = True) -> list[Path]:
def _raise_duplicate_prompt_name(name: str, first_path: Path, second_path: Path, prompts_root: Path) -> None:
    """Raise a ValueError for two templates that share the same stem.

    Fix: the merge left BOTH the old ``relative_to(...)`` keyword arguments
    and the new ``path_a=path_a`` / ``path_b=path_b`` ones in the ``t(...)``
    call, which is a SyntaxError (duplicate keyword arguments). Keep only
    the POSIX-normalized relative paths so the error message is stable
    across platforms.

    Args:
        name: The duplicated template stem.
        first_path: Path of the template seen first.
        second_path: Path of the conflicting template.
        prompts_root: Root directory used to relativize both paths.

    Raises:
        ValueError: Always; the message is localized via ``t``.
    """
    path_a = first_path.relative_to(prompts_root).as_posix()
    path_b = second_path.relative_to(prompts_root).as_posix()
    raise ValueError(
        t(
            "prompt.duplicate_template_name",
            name=name,
            path_a=path_a,
            path_b=path_b,
        )
    )
def _scan_prompt_directory(directory: Path, prompts_root: Path) -> dict[str, Path]:
prompt_paths: dict[str, Path] = {}
def _coerce_metadata(raw_metadata: Any) -> PromptMetadata:
    """Convert an untyped mapping into a PromptMetadata.

    Anything that is not a dict yields the all-defaults object; individual
    values of the wrong type are silently replaced by their defaults so a
    malformed metadata file can never break template loading.
    """
    if not isinstance(raw_metadata, dict):
        return PromptMetadata()

    def typed_or_default(key: str, expected_type: type, default: Any) -> Any:
        # Keep the value only when it has the expected type.
        candidate = raw_metadata.get(key, default)
        return candidate if isinstance(candidate, expected_type) else default

    return PromptMetadata(
        display_name=typed_or_default("display_name", str, ""),
        advanced=typed_or_default("advanced", bool, False),
        description=typed_or_default("description", str, ""),
    )
def _read_metadata_file(metadata_path: Path) -> dict[str, Any]:
    """Load one metadata file as a plain dict.

    ``.json`` files go through ``json``; every other suffix is parsed as
    TOML. A missing file, a parse failure, or a non-dict top level all
    degrade to an empty dict (parse failures are logged, not raised).
    """
    if not metadata_path.is_file():
        return {}
    try:
        raw_text = metadata_path.read_text(encoding="utf-8")
        if metadata_path.suffix == ".json":
            parsed = json.loads(raw_text)
        else:
            parsed = parse_toml(raw_text)
    except Exception as exc:
        # Best effort: a broken metadata file must not break prompt loading.
        logger.warning("读取 Prompt 元信息文件 %s 失败:%s", metadata_path, exc)
        return {}
    if isinstance(parsed, dict):
        return dict(parsed)
    return {}
def _extract_template_metadata(metadata: dict[str, Any], prompt_name: str) -> dict[str, Any]:
    """Pick the metadata section relevant to ``prompt_name``.

    Lookup order:
      1. ``metadata["templates"][prompt_name]`` (shared directory file),
      2. ``metadata[prompt_name]`` (per-template section),
      3. the top-level mapping itself, if it carries any known metadata key.

    Fix: the third branch previously returned ``metadata`` by reference
    while the first two returned copies; a caller mutating the result
    (e.g. via ``dict.update`` merging) could then corrupt the cached
    source mapping. All branches now return a fresh dict.

    Returns:
        A copy of the matching section, or ``{}`` when nothing applies.
    """
    templates = metadata.get("templates")
    if isinstance(templates, dict) and isinstance(templates.get(prompt_name), dict):
        return dict(templates[prompt_name])
    prompt_metadata = metadata.get(prompt_name)
    if isinstance(prompt_metadata, dict):
        return dict(prompt_metadata)
    if any(key in metadata for key in ("display_name", "advanced", "description")):
        return dict(metadata)
    return {}
def _load_prompt_metadata(prompt_path: Path) -> PromptMetadata:
    """Assemble metadata for one template from its candidate files.

    Sources are merged lowest-priority first, so later updates win.
    Precedence (low to high): directory ``.meta.json``, directory
    ``.meta.toml``, ``<name>.meta.json``, ``<name>.meta.toml``.
    """
    stem = prompt_path.stem
    # Ordered from lowest to highest priority.
    layered_sources = [
        prompt_path.parent / ".meta.json",
        prompt_path.parent / ".meta.toml",
        prompt_path.with_name(f"{stem}.meta.json"),
        prompt_path.with_name(f"{stem}.meta.toml"),
    ]
    combined: dict[str, Any] = {}
    for source_path in layered_sources:
        file_metadata = _read_metadata_file(source_path)
        combined.update(_extract_template_metadata(file_metadata, stem))
    return _coerce_metadata(combined)
def _scan_prompt_directory(directory: Path, prompts_root: Path) -> dict[str, PromptTemplateInfo]:
    """Index every template under ``directory`` by its stem.

    Fix: the merge left the pre-refactor body (``existing_path`` check and
    the bare ``prompt_paths[prompt_name] = prompt_path`` Path assignment)
    interleaved with the new ``PromptTemplateInfo`` version, so each name
    was checked and assigned twice per iteration, with the stale Path
    assignment immediately overwritten. Only the new version is kept.

    Args:
        directory: Directory to scan (recursively, via ``iter_prompt_files``).
        prompts_root: Root used to relativize paths in error messages.

    Returns:
        Mapping of template stem to its ``PromptTemplateInfo``.

    Raises:
        ValueError: When two files in this directory share a stem
            (via ``_raise_duplicate_prompt_name``).
    """
    templates: dict[str, PromptTemplateInfo] = {}
    for prompt_path in iter_prompt_files(directory):
        prompt_name = prompt_path.stem
        existing_info = templates.get(prompt_name)
        if existing_info is not None:
            _raise_duplicate_prompt_name(prompt_name, existing_info.path, prompt_path, prompts_root)
        templates[prompt_name] = PromptTemplateInfo(path=prompt_path, metadata=_load_prompt_metadata(prompt_path))
    return templates
@@ -115,11 +194,11 @@ def _iter_locale_candidates(requested_locale: str) -> list[str]:
return locale_candidates
def list_prompt_templates(locale: str | None = None, prompts_root: Path | None = None) -> dict[str, Path]:
def list_prompt_templates(locale: str | None = None, prompts_root: Path | None = None) -> dict[str, PromptTemplateInfo]:
resolved_prompts_root = get_prompts_root(prompts_root)
requested_locale = normalize_locale(locale or get_locale())
prompt_paths: dict[str, Path] = {}
prompt_paths: dict[str, PromptTemplateInfo] = {}
for directory in _iter_prompt_template_layers(resolved_prompts_root, requested_locale):
prompt_paths.update(_scan_prompt_directory(directory, resolved_prompts_root))
@@ -149,7 +228,7 @@ def resolve_prompt_path(
else:
prompt_paths = list_prompt_templates(locale=requested_locale, prompts_root=resolved_prompts_root)
if normalized_name in prompt_paths:
return prompt_paths[normalized_name]
return prompt_paths[normalized_name].path
raise FileNotFoundError(t("prompt.template_not_found", locale=requested_locale, name=normalized_name))