feat:实际应用自定义prompt,修复docker同级目录问题

This commit is contained in:
SengokuCola
2026-05-08 13:05:39 +08:00
parent 2c14fd8d49
commit fb3f4c28ef
8 changed files with 216 additions and 18 deletions

View File

@@ -2,6 +2,7 @@ import { useEffect, useState } from 'react'
import {
Clock,
Code2,
Copy,
FileCode2,
FileText,
RefreshCw,
@@ -10,6 +11,7 @@ import {
import { Badge } from '@/components/ui/badge'
import { Button } from '@/components/ui/button'
import { useToast } from '@/hooks/use-toast'
import { Input } from '@/components/ui/input'
import { ScrollArea } from '@/components/ui/scroll-area'
import {
@@ -49,6 +51,7 @@ function formatSize(size: number): string {
}
export function ReasoningProcessPage() {
const { toast } = useToast()
const [items, setItems] = useState<ReasoningPromptFile[]>([])
const [stages, setStages] = useState<string[]>([])
const [sessions, setSessions] = useState<string[]>([])
@@ -165,6 +168,31 @@ export function ReasoningProcessPage() {
setPage(1)
}
async function handleCopyPrompt() {
  // Guard: copying only makes sense once a txt-backed record is loaded.
  const hasCopyableContent = Boolean(textContent) && !contentLoading
  if (!hasCopyableContent) {
    toast({
      title: '暂无可复制内容',
      description: '请先选择一条包含 txt 的 prompt 记录',
      variant: 'destructive',
    })
    return
  }
  try {
    // Clipboard API is async and may reject (permissions / insecure context);
    // a missing navigator.clipboard throws synchronously and is caught below.
    await navigator.clipboard.writeText(textContent)
    const copiedFrom = selected
      ? `${selected.stage}/${selected.session_id}/${selected.stem}`
      : undefined
    toast({
      title: '已复制完整 Prompt',
      description: copiedFrom,
    })
  } catch (err) {
    const reason = err instanceof Error ? err.message : '请手动选择文本复制'
    toast({
      title: '复制失败',
      description: reason,
      variant: 'destructive',
    })
  }
}
return (
<div className="flex h-full min-h-0 flex-col gap-3 overflow-hidden p-3 lg:p-4">
<div className="flex flex-col gap-2 lg:flex-row lg:items-center lg:justify-between">
@@ -328,6 +356,17 @@ export function ReasoningProcessPage() {
</div>
{selected && (
<div className="flex items-center gap-2 text-xs text-muted-foreground">
<Button
variant="outline"
size="sm"
className="h-8 gap-1.5"
onClick={handleCopyPrompt}
disabled={!selected.text_path || contentLoading || !textContent}
title="复制完整 Prompt"
>
<Copy className="h-3.5 w-3.5" />
</Button>
{selected.text_path && (
<span className="inline-flex items-center gap-1">
<FileText className="h-3.5 w-3.5" />

View File

@@ -12,6 +12,7 @@ services:
- EULA_AGREE=1b662741904d7155d1ce1c00b3530d0d
- PRIVACY_AGREE=9943b855e72199d0f5016ea39052f1b6
- MAIBOT_LEGACY_0X_UPGRADE_CONFIRMED=1 # Docker 无法交互确认旧版升级迁移,默认跳过确认提示
- MAIBOT_STATISTICS_REPORT_PATH=/MaiMBot/data/maibot_statistics.html # 统计数据输出到共享目录,首次运行可自动创建文件
# - EULA_AGREE=1b662741904d7155d1ce1c00b3530d0d # 同意EULA
# - PRIVACY_AGREE=9943b855e72199d0f5016ea39052f1b6 # 同意隐私协议
ports:
@@ -20,7 +21,6 @@ services:
volumes:
# 监听地址和端口已迁移到 ./docker-config/mmc/bot_config.toml 的 maim_message 与 webui 配置段
- ./docker-config/mmc:/MaiMBot/config # 持久化bot配置文件
- ./data/MaiMBot/maibot_statistics.html:/MaiMBot/maibot_statistics.html #统计数据输出
- ./data/MaiMBot:/MaiMBot/data # 共享目录
- ./data/MaiMBot/emoji:/data/emoji # 持久化表情包
- ./data/MaiMBot/plugins:/MaiMBot/plugins # 插件目录

View File

@@ -60,6 +60,60 @@ def test_load_prompt_with_category_falls_back_to_default_locale_root(tmp_path: P
assert rendered == "你好Mai"
def test_load_prompt_prefers_custom_prompt_override(tmp_path: Path) -> None:
    """A user override in data/custom_prompts must shadow the base template."""
    base_root = tmp_path / "prompts"
    override_root = tmp_path / "data" / "custom_prompts"
    write_prompt(base_root, "zh-CN", "replyer", "Base {user_name}")
    write_prompt(override_root, "zh-CN", "replyer", "Custom {user_name}")

    result = load_prompt(
        "replyer",
        locale="zh-CN",
        prompts_root=base_root,
        custom_prompts_root=override_root,
        user_name="Mai",
    )

    # The custom template, not the base one, must be rendered.
    assert result == "Custom Mai"
def test_load_prompt_prefers_custom_prompt_requested_locale(tmp_path: Path) -> None:
    """When overrides exist for several locales, the requested locale wins."""
    base_root = tmp_path / "prompts"
    override_root = tmp_path / "data" / "custom_prompts"
    # Base and custom templates for both locales.
    write_prompt(base_root, "zh-CN", "replyer", "Base zh {user_name}")
    write_prompt(base_root, "en-US", "replyer", "Base en {user_name}")
    write_prompt(override_root, "zh-CN", "replyer", "Custom zh {user_name}")
    write_prompt(override_root, "en-US", "replyer", "Custom en {user_name}")

    result = load_prompt(
        "replyer",
        locale="en-US",
        prompts_root=base_root,
        custom_prompts_root=override_root,
        user_name="Mai",
    )

    assert result == "Custom en Mai"
def test_load_prompt_uses_requested_locale_source_before_default_custom(tmp_path: Path) -> None:
    """A base template in the requested locale beats a custom one in another locale."""
    base_root = tmp_path / "prompts"
    override_root = tmp_path / "data" / "custom_prompts"
    # Only zh-CN has a custom override; en-US is requested below.
    write_prompt(base_root, "zh-CN", "replyer", "Base zh {user_name}")
    write_prompt(base_root, "en-US", "replyer", "Base en {user_name}")
    write_prompt(override_root, "zh-CN", "replyer", "Custom zh {user_name}")

    result = load_prompt(
        "replyer",
        locale="en-US",
        prompts_root=base_root,
        custom_prompts_root=override_root,
        user_name="Mai",
    )

    # The en-US base template must win over the zh-CN custom override.
    assert result == "Base en Mai"
def test_load_prompt_strict_mode_raises_on_missing_placeholder(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
prompts_root = tmp_path / "prompts"
write_prompt(prompts_root, "zh-CN", "replyer", "你好,{user_name},现在是 {current_time}")

View File

@@ -1,11 +1,13 @@
from collections import defaultdict
from datetime import datetime, timedelta
from os import getenv
from pathlib import Path
from typing import cast
import asyncio
import concurrent.futures
import json
from collections import defaultdict
from datetime import datetime, timedelta
from typing import cast
from typing_extensions import TypedDict
from sqlmodel import col, select
@@ -26,6 +28,17 @@ from src.services.statistics_service import (
logger = get_logger("maibot_statistic")
STATISTICS_REPORT_PATH_ENV = "MAIBOT_STATISTICS_REPORT_PATH"
DEFAULT_STATISTICS_REPORT_PATH = "maibot_statistics.html"
def _resolve_statistics_report_path(record_file_path: str | None = None) -> str:
if record_file_path:
return record_file_path
configured_path = getenv(STATISTICS_REPORT_PATH_ENV, "").strip()
return configured_path or DEFAULT_STATISTICS_REPORT_PATH
class StatPeriodData(TypedDict):
total_requests: int
@@ -233,7 +246,7 @@ class StatisticOutputTask(AsyncTask):
SEP_LINE = "-" * 84
def __init__(self, record_file_path: str = "maibot_statistics.html"):
def __init__(self, record_file_path: str | None = None):
# 延迟300秒启动运行间隔300秒
super().__init__(task_name="Statistics Data Output Task", wait_before_start=0, run_interval=300)
@@ -243,7 +256,7 @@ class StatisticOutputTask(AsyncTask):
注:设计记录时间的目的是方便更新名称,使联系人/群聊名称保持最新
"""
self.record_file_path: str = record_file_path
self.record_file_path: str = _resolve_statistics_report_path(record_file_path)
"""
记录文件路径
"""
@@ -1730,7 +1743,11 @@ class StatisticOutputTask(AsyncTask):
"""
)
with open(self.record_file_path, "w", encoding="utf-8") as f:
record_file = Path(self.record_file_path)
if record_file.parent != Path("."):
record_file.parent.mkdir(parents=True, exist_ok=True)
with open(record_file, "w", encoding="utf-8") as f:
f.write(html_template)
def _generate_chart_data(self, stat: StatPeriodMapping) -> dict[str, dict[str, object]]:
@@ -2431,7 +2448,7 @@ class StatisticOutputTask(AsyncTask):
class AsyncStatisticOutputTask(AsyncTask):
"""完全异步的统计输出任务 - 更高性能版本"""
def __init__(self, record_file_path: str = "maibot_statistics.html"):
def __init__(self, record_file_path: str | None = None):
# 延迟0秒启动运行间隔300秒
super().__init__(task_name="Async Statistics Data Output Task", wait_before_start=0, run_interval=300)

View File

@@ -18,10 +18,12 @@ logger = logging.getLogger("maibot.prompt_i18n")
PROJECT_ROOT = Path(__file__).resolve().parents[2]
PROMPTS_ROOT = (PROJECT_ROOT / "prompts").resolve()
CUSTOM_PROMPTS_ROOT = (PROJECT_ROOT / "data" / "custom_prompts").resolve()
PROMPT_EXTENSIONS = (".prompt",)
SAFE_SEGMENT_PATTERN = re.compile(r"^[A-Za-z0-9_.-]+$")
STRICT_ENV_KEYS = ("MAIBOT_PROMPT_I18N_STRICT", "MAIBOT_I18N_STRICT")
STRICT_ENV_VALUES = {"1", "true", "yes", "on"}
_PROMPT_CACHE_REVISION = 0
extract_prompt_placeholders = extract_placeholders
@@ -43,6 +45,17 @@ def get_prompts_root(prompts_root: Path | None = None) -> Path:
return (prompts_root or PROMPTS_ROOT).resolve()
def get_custom_prompts_root(
custom_prompts_root: Path | None = None,
prompts_root: Path | None = None,
) -> Path:
if custom_prompts_root is not None:
return custom_prompts_root.resolve()
if prompts_root is not None:
return (prompts_root.resolve().parent / "data" / "custom_prompts").resolve()
return CUSTOM_PROMPTS_ROOT
def normalize_prompt_name(name: str) -> str:
candidate_name = name.strip()
for suffix in PROMPT_EXTENSIONS:
@@ -194,6 +207,28 @@ def _iter_locale_candidates(requested_locale: str) -> list[str]:
return locale_candidates
def _iter_prompt_path_candidates(base_dir: Path, name: str, category: str | None = None) -> list[Path]:
    """Build the ordered list of file paths where a prompt may live.

    For each supported extension the category subdirectory (when given)
    is tried before the locale directory itself.
    """
    paths: list[Path] = []
    for ext in PROMPT_EXTENSIONS:
        search_dirs = [base_dir / category, base_dir] if category is not None else [base_dir]
        for directory in search_dirs:
            paths.append((directory / f"{name}{ext}").resolve())
    return paths
def _resolve_custom_prompt_path(
    name: str,
    locale: str,
    category: str | None,
    custom_prompts_root: Path,
) -> Path | None:
    """Return the first existing custom override for ``name``, or ``None``."""
    locale_dir = custom_prompts_root / locale
    candidates = _iter_prompt_path_candidates(locale_dir, name, category)
    return next((path for path in candidates if path.is_file()), None)
def list_prompt_templates(locale: str | None = None, prompts_root: Path | None = None) -> dict[str, PromptTemplateInfo]:
resolved_prompts_root = get_prompts_root(prompts_root)
requested_locale = normalize_locale(locale or get_locale())
@@ -206,15 +241,29 @@ def list_prompt_templates(locale: str | None = None, prompts_root: Path | None =
def resolve_prompt_path(
name: str, locale: str | None = None, category: str | None = None, prompts_root: Path | None = None
name: str,
locale: str | None = None,
category: str | None = None,
prompts_root: Path | None = None,
custom_prompts_root: Path | None = None,
) -> Path:
resolved_prompts_root = get_prompts_root(prompts_root)
resolved_custom_prompts_root = get_custom_prompts_root(custom_prompts_root, prompts_root)
normalized_name = normalize_prompt_name(name)
normalized_category = normalize_prompt_category(category)
requested_locale = normalize_locale(locale or get_locale())
if normalized_category is not None:
for locale_candidate in _iter_locale_candidates(requested_locale):
custom_path = _resolve_custom_prompt_path(
normalized_name,
locale_candidate,
normalized_category,
resolved_custom_prompts_root,
)
if custom_path is not None:
return custom_path
base_dir = resolved_prompts_root / locale_candidate
for suffix in PROMPT_EXTENSIONS:
candidate_path = (base_dir / normalized_category / f"{normalized_name}{suffix}").resolve()
@@ -226,9 +275,20 @@ def resolve_prompt_path(
if fallback_path.is_file():
return fallback_path
else:
prompt_paths = list_prompt_templates(locale=requested_locale, prompts_root=resolved_prompts_root)
if normalized_name in prompt_paths:
return prompt_paths[normalized_name].path
for locale_candidate in _iter_locale_candidates(requested_locale):
custom_path = _resolve_custom_prompt_path(
normalized_name,
locale_candidate,
None,
resolved_custom_prompts_root,
)
if custom_path is not None:
return custom_path
base_dir = resolved_prompts_root / locale_candidate
for candidate_path in _iter_prompt_path_candidates(base_dir, normalized_name):
if candidate_path.is_file():
return candidate_path
raise FileNotFoundError(t("prompt.template_not_found", locale=requested_locale, name=normalized_name))
@@ -263,13 +323,26 @@ def load_prompt(
locale: str | None = None,
category: str | None = None,
prompts_root: Path | None = None,
custom_prompts_root: Path | None = None,
**kwargs: object,
) -> str:
normalized_name = normalize_prompt_name(name)
prompt_path = resolve_prompt_path(name=normalized_name, locale=locale, category=category, prompts_root=prompts_root)
prompt_path = resolve_prompt_path(
name=normalized_name,
locale=locale,
category=category,
prompts_root=prompts_root,
custom_prompts_root=custom_prompts_root,
)
template = _read_prompt_template(prompt_path)
return _format_prompt_template(normalized_name, template, **kwargs)
def clear_prompt_cache() -> None:
    """Invalidate all memoized prompt templates.

    Bumps the module-level revision counter first so that long-lived
    consumers which captured get_prompt_cache_revision() can detect
    staleness, then drops the cached template contents so subsequent
    loads re-read the files from disk.
    """
    global _PROMPT_CACHE_REVISION
    _PROMPT_CACHE_REVISION += 1
    # _read_prompt_template is presumably an lru_cache-decorated reader —
    # cache_clear() forces the next load to hit the filesystem again.
    _read_prompt_template.cache_clear()
def get_prompt_cache_revision() -> int:
    """Return the current cache revision; it changes on every clear_prompt_cache() call."""
    return _PROMPT_CACHE_REVISION

View File

@@ -10,7 +10,7 @@ from rich.console import RenderableType
from src.common.data_models.llm_service_data_models import LLMGenerationOptions
from src.common.i18n import get_locale
from src.common.logger import get_logger
from src.common.prompt_i18n import load_prompt
from src.common.prompt_i18n import get_prompt_cache_revision, load_prompt
from src.common.utils.utils_config import ChatConfigUtils
from src.config.config import global_config
from src.core.tooling import ToolAvailabilityContext, ToolRegistry
@@ -219,6 +219,7 @@ class MaisakaChatLoopService:
self._interrupt_flag: asyncio.Event | None = None
self._tool_registry: ToolRegistry | None = None
self._prompts_loaded = chat_system_prompt is not None
self._prompt_cache_revision = get_prompt_cache_revision()
self._prompt_load_lock = asyncio.Lock()
self._personality_prompt = self._build_personality_prompt()
if chat_system_prompt is None:
@@ -354,6 +355,7 @@ class MaisakaChatLoopService:
self._chat_system_prompt = f"{self._personality_prompt}\n\nYou are a helpful AI assistant."
self._prompts_loaded = True
self._prompt_cache_revision = get_prompt_cache_revision()
def build_prompt_template_context(self, tools_section: str = "") -> dict[str, str]:
"""构造 Maisaka prompt 模板的公共渲染参数。"""
@@ -519,7 +521,7 @@ class MaisakaChatLoopService:
ChatResponse: 本轮规划器返回结果。
"""
if not self._prompts_loaded:
if not self._prompts_loaded or self._prompt_cache_revision != get_prompt_cache_revision():
await self.ensure_chat_prompt_loaded()
enable_visual_message = self._resolve_enable_visual_message(request_kind)
selected_history, selection_reason = self.select_llm_context_messages(

View File

@@ -18,6 +18,8 @@ logger = get_logger("webui.app")
_DASHBOARD_PACKAGE_NAME = "maibot-dashboard"
_LOCAL_DASHBOARD_ENV = "MAIBOT_WEBUI_USE_LOCAL_DASHBOARD"
_STATISTICS_REPORT_PATH_ENV = "MAIBOT_STATISTICS_REPORT_PATH"
_DEFAULT_STATISTICS_REPORT_PATH = "maibot_statistics.html"
_MANUAL_INSTALL_COMMAND = f"pip install {_DASHBOARD_PACKAGE_NAME}"
@@ -38,6 +40,15 @@ def _get_project_root() -> Path:
return Path(__file__).resolve().parents[2]
def _resolve_statistics_report_path() -> Path:
    """Resolve the statistics report location to an absolute path.

    Honors the MAIBOT_STATISTICS_REPORT_PATH environment variable; a
    relative value (or the default file name) is anchored at the
    project root.
    """
    configured = getenv(_STATISTICS_REPORT_PATH_ENV, "").strip()
    candidate = Path(configured) if configured else Path(_DEFAULT_STATISTICS_REPORT_PATH)
    if not candidate.is_absolute():
        candidate = _get_project_root() / candidate
    return candidate.resolve()
def _is_local_dashboard_enabled() -> bool:
    """Whether the env flag requests serving the locally built dashboard."""
    flag = getenv(_LOCAL_DASHBOARD_ENV, "")
    return flag.strip().lower() in ("1", "true", "yes", "on")
@@ -187,7 +198,7 @@ def _setup_static_files(app: FastAPI):
@app.get("/maibot_statistics.html", include_in_schema=False)
async def serve_statistics_report():
report_path = (_get_project_root() / "maibot_statistics.html").resolve()
report_path = _resolve_statistics_report_path()
if not report_path.exists() or not report_path.is_file():
raise HTTPException(status_code=404, detail=t("core.not_found"))

View File

@@ -14,7 +14,7 @@ from pydantic import BaseModel, Field
import tomlkit
from src.common.logger import get_logger
from src.common.prompt_i18n import list_prompt_templates
from src.common.prompt_i18n import clear_prompt_cache, list_prompt_templates
from src.config.config import CONFIG_DIR, PROJECT_ROOT, Config, ModelConfig
from src.config.config_base import AttributeData, ConfigBase
from src.config.model_configs import (
@@ -323,6 +323,7 @@ async def update_prompt_file(language: str, filename: str, content: PromptConten
try:
custom_prompt_path.parent.mkdir(parents=True, exist_ok=True)
custom_prompt_path.write_text(content, encoding="utf-8", newline="\n")
clear_prompt_cache()
return PromptFileResponse(language=language, filename=filename, content=content, customized=True)
except Exception as e:
logger.error(f"保存 Prompt 文件失败: {prompt_path} {e}", exc_info=True)
@@ -341,6 +342,7 @@ async def reset_prompt_file(language: str, filename: str):
try:
if custom_prompt_path.exists():
custom_prompt_path.unlink()
clear_prompt_cache()
content = prompt_path.read_text(encoding="utf-8")
return PromptFileResponse(language=language, filename=filename, content=content, customized=False)
except Exception as e: