对接skill平台接口,获取skill.yaml并创建文件、目录结构
This commit is contained in:
parent
e2fdfa75d7
commit
ef9a071aa1
|
|
@ -9,6 +9,10 @@ class GatewayConfig(BaseModel):
|
||||||
host: str = Field(default="0.0.0.0", description="Host to bind the gateway server")
|
host: str = Field(default="0.0.0.0", description="Host to bind the gateway server")
|
||||||
port: int = Field(default=8001, description="Port to bind the gateway server")
|
port: int = Field(default=8001, description="Port to bind the gateway server")
|
||||||
cors_origins: list[str] = Field(default_factory=lambda: ["http://localhost:3000"], description="Allowed CORS origins")
|
cors_origins: list[str] = Field(default_factory=lambda: ["http://localhost:3000"], description="Allowed CORS origins")
|
||||||
|
skill_content_api_url: str = Field(
|
||||||
|
default="https://skills.xueai.art/api/cmsContent/getContent",
|
||||||
|
description="Remote API URL used to fetch skill YAML content by contentId",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
_gateway_config: GatewayConfig | None = None
|
_gateway_config: GatewayConfig | None = None
|
||||||
|
|
@ -23,5 +27,9 @@ def get_gateway_config() -> GatewayConfig:
|
||||||
host=os.getenv("GATEWAY_HOST", "0.0.0.0"),
|
host=os.getenv("GATEWAY_HOST", "0.0.0.0"),
|
||||||
port=int(os.getenv("GATEWAY_PORT", "8001")),
|
port=int(os.getenv("GATEWAY_PORT", "8001")),
|
||||||
cors_origins=cors_origins_str.split(","),
|
cors_origins=cors_origins_str.split(","),
|
||||||
|
skill_content_api_url=os.getenv(
|
||||||
|
"SKILL_CONTENT_API_URL",
|
||||||
|
"https://skills.xueai.art/api/cmsContent/getContent",
|
||||||
|
),
|
||||||
)
|
)
|
||||||
return _gateway_config
|
return _gateway_config
|
||||||
|
|
|
||||||
|
|
@ -6,12 +6,16 @@ import tempfile
|
||||||
import zipfile
|
import zipfile
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
import httpx
|
||||||
import yaml
|
import yaml
|
||||||
from fastapi import APIRouter, HTTPException
|
from fastapi import APIRouter, HTTPException
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
from src.config.extensions_config import ExtensionsConfig, SkillStateConfig, get_extensions_config, reload_extensions_config
|
from src.config.extensions_config import ExtensionsConfig, SkillStateConfig, get_extensions_config, reload_extensions_config
|
||||||
|
from src.gateway.config import get_gateway_config
|
||||||
from src.gateway.path_utils import resolve_thread_virtual_path
|
from src.gateway.path_utils import resolve_thread_virtual_path
|
||||||
|
from src.gateway.skill_yaml_importer import materialize_skill_tree, parse_skill_yaml_spec
|
||||||
|
from src.sandbox.sandbox_provider import get_sandbox_provider
|
||||||
from src.skills import Skill, load_skills
|
from src.skills import Skill, load_skills
|
||||||
from src.skills.loader import get_skills_root_path
|
from src.skills.loader import get_skills_root_path
|
||||||
|
|
||||||
|
|
@ -56,6 +60,58 @@ class SkillInstallResponse(BaseModel):
|
||||||
message: str = Field(..., description="Installation result message")
|
message: str = Field(..., description="Installation result message")
|
||||||
|
|
||||||
|
|
||||||
|
class SkillYamlMaterializeRequest(BaseModel):
|
||||||
|
"""Request model for creating a skill directory tree from YAML."""
|
||||||
|
|
||||||
|
thread_id: str = Field(..., description="Thread ID where target virtual path is resolved")
|
||||||
|
path: str = Field(..., description="Virtual path to YAML file, e.g. /mnt/user-data/uploads/skill-package.yaml")
|
||||||
|
target_dir: str = Field(
|
||||||
|
default="/mnt/user-data/uploads/skill",
|
||||||
|
description="Virtual target directory where files/directories will be created",
|
||||||
|
)
|
||||||
|
clear_target: bool = Field(
|
||||||
|
default=True,
|
||||||
|
description="Whether to clear target directory before creating parsed structure",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class SkillYamlMaterializeResponse(BaseModel):
|
||||||
|
"""Response model for YAML skill materialization."""
|
||||||
|
|
||||||
|
success: bool = Field(..., description="Whether the operation succeeded")
|
||||||
|
target_dir: str = Field(..., description="Virtual target directory")
|
||||||
|
created_directories: int = Field(..., description="Number of created directories")
|
||||||
|
created_files: int = Field(..., description="Number of created files")
|
||||||
|
message: str = Field(..., description="Operation result message")
|
||||||
|
|
||||||
|
|
||||||
|
class RemoteSkillBootstrapRequest(BaseModel):
|
||||||
|
"""Request model for bootstrapping skill files from remote content API."""
|
||||||
|
|
||||||
|
thread_id: str = Field(..., description="Thread ID used for sandbox and user-data path binding")
|
||||||
|
content_id: int = Field(..., description="Remote content ID (maps from frontend query param skill_id)")
|
||||||
|
language_type: int = Field(default=0, description="Language type for remote API request body")
|
||||||
|
target_dir: str = Field(
|
||||||
|
default="/mnt/user-data/uploads/skill",
|
||||||
|
description="Virtual target directory where parsed files/directories are created",
|
||||||
|
)
|
||||||
|
clear_target: bool = Field(
|
||||||
|
default=True,
|
||||||
|
description="Whether to clear target directory before writing parsed files",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class RemoteSkillBootstrapResponse(BaseModel):
|
||||||
|
"""Response model for remote bootstrap endpoint."""
|
||||||
|
|
||||||
|
success: bool = Field(..., description="Whether bootstrap succeeded")
|
||||||
|
target_dir: str = Field(..., description="Virtual target directory")
|
||||||
|
created_directories: int = Field(..., description="Number of created directories")
|
||||||
|
created_files: int = Field(..., description="Number of created files")
|
||||||
|
sandbox_id: str = Field(..., description="Acquired sandbox ID")
|
||||||
|
message: str = Field(..., description="Operation result message")
|
||||||
|
|
||||||
|
|
||||||
# Allowed properties in SKILL.md frontmatter
|
# Allowed properties in SKILL.md frontmatter
|
||||||
ALLOWED_FRONTMATTER_PROPERTIES = {"name", "description", "license", "allowed-tools", "metadata"}
|
ALLOWED_FRONTMATTER_PROPERTIES = {"name", "description", "license", "allowed-tools", "metadata"}
|
||||||
|
|
||||||
|
|
@ -440,3 +496,144 @@ async def install_skill(request: SkillInstallRequest) -> SkillInstallResponse:
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Failed to install skill: {e}", exc_info=True)
|
logger.error(f"Failed to install skill: {e}", exc_info=True)
|
||||||
raise HTTPException(status_code=500, detail=f"Failed to install skill: {str(e)}")
|
raise HTTPException(status_code=500, detail=f"Failed to install skill: {str(e)}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/skills/materialize-yaml",
    response_model=SkillYamlMaterializeResponse,
    summary="Materialize Skill YAML",
    description=(
        "Parse a YAML file that describes files/directories and create the described "
        "structure under a target virtual directory (default: /mnt/user-data/uploads/skill)."
    ),
)
async def materialize_skill_yaml(request: SkillYamlMaterializeRequest) -> SkillYamlMaterializeResponse:
    """Create skill package files from a YAML specification.

    Supported YAML formats include:
    - entries (tree objects)
    - files + directories
    - tree/structure nested maps

    Raises:
        HTTPException: 404 when the source YAML file is missing, 400 when the
            YAML/spec is invalid (ValueError from the parser), 500 for any
            unexpected failure.
    """
    try:
        # Resolve the virtual source path inside the thread's workspace.
        yaml_path = resolve_thread_virtual_path(request.thread_id, request.path)
        if not yaml_path.exists() or not yaml_path.is_file():
            raise HTTPException(status_code=404, detail=f"YAML file not found: {request.path}")

        target_path = resolve_thread_virtual_path(request.thread_id, request.target_dir)
        yaml_text = yaml_path.read_text(encoding="utf-8")

        parsed = parse_skill_yaml_spec(yaml_text)
        materialize_skill_tree(parsed, target_path, clear_target=request.clear_target)

        logger.info(
            "Materialized skill YAML for thread %s: source=%s target=%s dirs=%d files=%d",
            request.thread_id,
            request.path,
            request.target_dir,
            len(parsed.directories),
            len(parsed.files),
        )

        return SkillYamlMaterializeResponse(
            success=True,
            target_dir=request.target_dir,
            created_directories=len(parsed.directories),
            created_files=len(parsed.files),
            message=(
                f"Created {len(parsed.files)} files and {len(parsed.directories)} directories "
                f"under '{request.target_dir}'"
            ),
        )
    except HTTPException:
        # Deliberate HTTP errors pass through unchanged.
        raise
    except ValueError as e:
        # Parser/validation errors are client errors; chain to keep the traceback.
        raise HTTPException(status_code=400, detail=str(e)) from e
    except Exception as e:
        logger.error(f"Failed to materialize skill YAML: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to materialize skill YAML: {str(e)}") from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
    "/skills/bootstrap-remote",
    response_model=RemoteSkillBootstrapResponse,
    summary="Bootstrap Skill Files From Remote API",
    description=(
        "Fetch YAML text from configured remote API by content_id/language_type, "
        "acquire sandbox for the thread, and materialize files into "
        "/mnt/user-data/uploads/skill before first thread submit."
    ),
)
async def bootstrap_skill_from_remote(request: RemoteSkillBootstrapRequest) -> RemoteSkillBootstrapResponse:
    """Initialize thread skill directory from remote YAML content service.

    Flow: acquire the thread sandbox, POST contentId/languageType to the
    configured remote endpoint, validate its JSON envelope, then parse the
    returned YAML and write the described tree into the target directory.

    Raises:
        HTTPException: 502 for any remote-API failure (HTTP error, bad JSON,
            non-success status, empty data), 400 for invalid YAML/spec
            content, 500 for unexpected failures.
    """
    try:
        # 1) Ensure sandbox and thread personal dirs are initialized first.
        sandbox_provider = get_sandbox_provider()
        sandbox_id = sandbox_provider.acquire(request.thread_id)

        # 2) Fetch YAML content from configured remote endpoint.
        cfg = get_gateway_config()
        api_url = cfg.skill_content_api_url
        payload = {
            "contentId": request.content_id,
            "languageType": request.language_type,
        }

        async with httpx.AsyncClient(timeout=20.0) as client:
            response = await client.post(api_url, json=payload)

        if response.status_code >= 400:
            raise HTTPException(
                status_code=502,
                detail=f"Remote skill content API failed with HTTP {response.status_code}",
            )

        try:
            response_json = response.json()
        except ValueError as e:
            raise HTTPException(status_code=502, detail=f"Remote API did not return valid JSON: {e}") from e

        # The remote service signals success with status == 1000.
        status = response_json.get("status")
        if status != 1000:
            raise HTTPException(
                status_code=502,
                detail=f"Remote API returned non-success status: {status}, message: {response_json.get('message')}",
            )

        yaml_text = response_json.get("data")
        if not isinstance(yaml_text, str) or not yaml_text.strip():
            raise HTTPException(status_code=502, detail="Remote API returned empty or invalid YAML content")

        # 3) Parse and write into thread uploads/skill.
        target_path = resolve_thread_virtual_path(request.thread_id, request.target_dir)
        parsed = parse_skill_yaml_spec(yaml_text)
        materialize_skill_tree(parsed, target_path, clear_target=request.clear_target)

        logger.info(
            "Bootstrapped remote skill YAML for thread %s (content_id=%s, language_type=%s) to %s: dirs=%d files=%d",
            request.thread_id,
            request.content_id,
            request.language_type,
            request.target_dir,
            len(parsed.directories),
            len(parsed.files),
        )

        return RemoteSkillBootstrapResponse(
            success=True,
            target_dir=request.target_dir,
            created_directories=len(parsed.directories),
            created_files=len(parsed.files),
            sandbox_id=sandbox_id,
            message=(
                f"Bootstrapped {len(parsed.files)} files and {len(parsed.directories)} directories "
                f"under '{request.target_dir}'"
            ),
        )
    except HTTPException:
        # Deliberate HTTP errors pass through unchanged.
        raise
    except ValueError as e:
        # Parser/validation errors are client errors; chain to keep the traceback.
        raise HTTPException(status_code=400, detail=str(e)) from e
    except Exception as e:
        logger.error(f"Failed to bootstrap skill from remote API: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to bootstrap skill from remote API: {str(e)}") from e
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,331 @@
|
||||||
|
"""Utilities for parsing YAML-defined skill package structures.
|
||||||
|
|
||||||
|
This module supports turning a YAML document describing files/directories into
|
||||||
|
real filesystem content under a thread's virtual path (for example,
|
||||||
|
``/mnt/user-data/uploads/skill``).
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class ParsedSkillTree:
    """Immutable result of parsing a skill YAML specification.

    Produced by ``parse_skill_yaml_spec`` and consumed by
    ``materialize_skill_tree``.
    """

    # Normalized relative directory paths ('/'-separated, no leading slash).
    directories: set[str]
    # Normalized relative file path -> file text content.
    files: dict[str, str]
|
||||||
|
|
||||||
|
|
||||||
|
def _pick_first_existing(data: dict, keys: tuple[str, ...]):
|
||||||
|
for key in keys:
|
||||||
|
if key in data:
|
||||||
|
return data[key]
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_spec_root(data: dict) -> dict:
|
||||||
|
"""Extract the effective spec root.
|
||||||
|
|
||||||
|
Supports nested wrappers like:
|
||||||
|
- skill: { ... }
|
||||||
|
- package: { ... }
|
||||||
|
- spec: { ... }
|
||||||
|
"""
|
||||||
|
if not isinstance(data, dict):
|
||||||
|
raise ValueError("YAML root must be an object")
|
||||||
|
|
||||||
|
known_keys = {
|
||||||
|
"entries",
|
||||||
|
"files",
|
||||||
|
"directories",
|
||||||
|
"dirs",
|
||||||
|
"tree",
|
||||||
|
"structure",
|
||||||
|
"file_tree",
|
||||||
|
"fileTree",
|
||||||
|
"file_structure",
|
||||||
|
"paths",
|
||||||
|
}
|
||||||
|
if any(k in data for k in known_keys):
|
||||||
|
return data
|
||||||
|
|
||||||
|
wrapper_candidates = ("skill", "package", "spec", "data", "content", "payload")
|
||||||
|
for wrapper in wrapper_candidates:
|
||||||
|
candidate = data.get(wrapper)
|
||||||
|
if isinstance(candidate, dict) and any(k in candidate for k in known_keys):
|
||||||
|
return candidate
|
||||||
|
|
||||||
|
# Fallback: if exactly one nested object exists, try it as spec root.
|
||||||
|
nested_dicts = [v for v in data.values() if isinstance(v, dict)]
|
||||||
|
if len(nested_dicts) == 1:
|
||||||
|
return nested_dicts[0]
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize_relative_path(path: str) -> str:
|
||||||
|
"""Normalize and validate a relative path.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If path is unsafe or invalid.
|
||||||
|
"""
|
||||||
|
if not isinstance(path, str):
|
||||||
|
raise ValueError("Path must be a string")
|
||||||
|
|
||||||
|
normalized = path.strip().replace("\\", "/")
|
||||||
|
if normalized in {"/", ".", "./"}:
|
||||||
|
return ""
|
||||||
|
if not normalized:
|
||||||
|
raise ValueError("Path cannot be empty")
|
||||||
|
|
||||||
|
if normalized.startswith("/"):
|
||||||
|
raise ValueError(f"Path must be relative, got absolute path: {path}")
|
||||||
|
|
||||||
|
if ":" in normalized:
|
||||||
|
raise ValueError(f"Path cannot contain ':' (possible drive path): {path}")
|
||||||
|
|
||||||
|
parts = [part for part in normalized.split("/") if part]
|
||||||
|
if not parts:
|
||||||
|
raise ValueError("Path cannot be empty")
|
||||||
|
|
||||||
|
if any(part in {".", ".."} for part in parts):
|
||||||
|
raise ValueError(f"Path traversal is not allowed: {path}")
|
||||||
|
|
||||||
|
return "/".join(parts)
|
||||||
|
|
||||||
|
|
||||||
|
def _add_directory(path: str, directories: set[str]) -> None:
    """Register *path* as a directory after normalization; the root ('') is skipped."""
    rel = _normalize_relative_path(path)
    if rel:
        directories.add(rel)
|
||||||
|
|
||||||
|
|
||||||
|
def _add_file(path: str, content: str, files: dict[str, str], directories: set[str]) -> None:
    """Register a file and implicitly register its parent directory.

    Raises:
        ValueError: If the path normalizes to the root or *content* is not a string.
    """
    rel = _normalize_relative_path(path)
    if not rel:
        raise ValueError("File path cannot be root ('/')")
    if not isinstance(content, str):
        raise ValueError(f"File content must be a string for '{rel}'")

    # Ensure the containing directory is created even when the spec only
    # declares the file itself.
    parent_dir = Path(rel).parent
    if str(parent_dir) != ".":
        directories.add(str(parent_dir).replace("\\", "/"))

    files[rel] = content
|
||||||
|
|
||||||
|
|
||||||
|
def _walk_tree_dict(tree: dict, base: str, files: dict[str, str], directories: set[str]) -> None:
    """Recursively flatten a nested mapping.

    Mapping values are directories, string values are file contents; a key
    of '/', '.' or './' simply unwraps one level of nesting at *base*.
    """
    for key, value in tree.items():
        if not isinstance(key, str):
            raise ValueError("Tree keys must be strings")

        # Root sentinel keys only make sense for object (directory) nodes.
        if key.strip() in {"/", ".", "./"}:
            if not isinstance(value, dict):
                raise ValueError("Root sentinel '/' can only be used for directory/object nodes")
            _walk_tree_dict(value, base, files, directories)
            continue

        child_path = f"{base}/{key}" if base else key

        if isinstance(value, dict):
            _add_directory(child_path, directories)
            _walk_tree_dict(value, _normalize_relative_path(child_path), files, directories)
        elif isinstance(value, str):
            _add_file(child_path, value, files, directories)
        else:
            raise ValueError(
                f"Unsupported tree node type for '{child_path}': {type(value).__name__}. "
                "Use object (directory) or string (file content)."
            )
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_entries_node(
    node: dict,
    base: str,
    files: dict[str, str],
    directories: set[str],
) -> None:
    """Recursively register one 'entries' node (and its children).

    An entry is a mapping with optional keys:
    - ``path``: parent directory (or the full relative path when ``name``
      is absent)
    - ``name``: this node's name, appended to the parent
    - ``type``: 'file' or 'directory'; inferred from ``children``/``content``
      when absent
    - ``content``: file text (file entries only)
    - ``children``: list of child entry mappings (directory entries only)

    Results are accumulated into *files* and *directories* relative to *base*.

    Raises:
        ValueError: On missing/invalid keys or when the entry type cannot
            be inferred.
    """
    raw_path = node.get("path")
    raw_name = node.get("name")

    if raw_path is None and raw_name is None:
        raise ValueError("Each entry must have at least one of: 'path' or 'name'")

    if raw_path is not None and not isinstance(raw_path, str):
        raise ValueError("Entry 'path' must be a string")
    if raw_name is not None and not isinstance(raw_name, str):
        raise ValueError("Entry 'name' must be a string")

    # Common schema compatibility:
    # - `path` is parent directory (e.g. "/")
    # - `name` is current node name (e.g. "README.md")
    # Build parent then append name when both are present.
    parent = base
    if isinstance(raw_path, str) and raw_path.strip():
        rp = raw_path.strip()
        if rp not in {"/", ".", "./"}:
            parent = _normalize_relative_path(f"{base}/{rp}" if base else rp)

    if isinstance(raw_name, str) and raw_name.strip():
        if parent:
            node_path = _normalize_relative_path(f"{parent}/{raw_name.strip()}")
        else:
            node_path = _normalize_relative_path(raw_name.strip())
    else:
        # Fallback: only path provided
        if not isinstance(raw_path, str) or not raw_path.strip():
            raise ValueError("Each entry must have a non-empty 'path' or 'name'")
        rp = raw_path.strip()
        if rp in {"/", ".", "./"}:
            node_path = base
        else:
            node_path = _normalize_relative_path(f"{base}/{rp}" if base else rp)

    node_type = node.get("type")
    content = node.get("content")
    children = node.get("children")

    # Explicit 'type' wins; otherwise a children list implies a directory
    # and a present 'content' implies a file.
    inferred_type = "directory" if isinstance(children, list) else "file" if content is not None else None
    final_type = node_type or inferred_type

    if final_type == "directory":
        _add_directory(node_path, directories)
        if children is None:
            return
        if not isinstance(children, list):
            raise ValueError(f"Entry '{node_path}' children must be a list")
        for child in children:
            if not isinstance(child, dict):
                raise ValueError(f"Entry '{node_path}' children must be objects")
            _parse_entries_node(child, node_path, files, directories)
        return

    if final_type == "file":
        if content is None:
            raise ValueError(f"File entry '{node_path}' is missing 'content'")
        _add_file(node_path, content, files, directories)
        return

    raise ValueError(
        f"Unable to infer entry type for '{node_path}'. Set 'type' to 'file' or 'directory'."
    )
|
||||||
|
|
||||||
|
|
||||||
|
def parse_skill_yaml_spec(yaml_text: str) -> ParsedSkillTree:
    """Parse YAML text into normalized directories and files.

    Supported forms (all may be combined in one document):
    - entries: [{type, path/name, content, children}, ...]
    - files: {"path/to/file": "text"} (or a list of file objects),
      plus optional directories/dirs list
    - tree/structure: nested dict where dict=directory and string=file content
    - fallback: a flat root mapping of path -> string content

    Returns:
        ParsedSkillTree with normalized relative paths.

    Raises:
        ValueError: If the YAML is invalid/empty, the root is not an object,
            a form is malformed, or no content could be extracted.
    """
    try:
        data = yaml.safe_load(yaml_text)
    except yaml.YAMLError as e:
        raise ValueError(f"Invalid YAML: {e}") from e

    if data is None:
        raise ValueError("YAML is empty")
    if not isinstance(data, dict):
        raise ValueError("YAML root must be an object")

    # Unwrap a 'skill'/'package'/'spec'-style wrapper if present.
    data = _extract_spec_root(data)

    directories: set[str] = set()
    files: dict[str, str] = {}

    # Form 1: explicit entries list
    entries = _pick_first_existing(data, ("entries", "nodes", "items"))
    if entries is not None:
        if not isinstance(entries, list):
            raise ValueError("'entries' must be a list")
        for entry in entries:
            if not isinstance(entry, dict):
                raise ValueError("Each item in 'entries' must be an object")
            _parse_entries_node(entry, "", files, directories)

    # Form 2: files + directories
    file_map = _pick_first_existing(data, ("files", "paths", "file_map", "fileMap", "documents"))
    if file_map is not None:
        if isinstance(file_map, dict):
            for path, content in file_map.items():
                _add_file(path, content, files, directories)
        elif isinstance(file_map, list):
            for item in file_map:
                if not isinstance(item, dict):
                    raise ValueError("Each item in 'files' list must be an object")
                # Accept several aliases for the path and content fields.
                path = item.get("path") or item.get("name") or item.get("file")
                content = item.get("content")
                if content is None:
                    content = item.get("text")
                if content is None:
                    content = item.get("body")
                if path is None or content is None:
                    raise ValueError("Each file item needs 'path' and 'content'")
                _add_file(path, content, files, directories)
        else:
            raise ValueError("'files' must be a map or list")

    directory_list = _pick_first_existing(data, ("directories", "dirs", "folders", "folder_paths"))
    if directory_list is not None:
        if not isinstance(directory_list, list):
            raise ValueError("'directories'/'dirs' must be a list")
        for path in directory_list:
            _add_directory(path, directories)

    # Form 3: nested tree
    tree = _pick_first_existing(data, ("tree", "structure", "file_tree", "fileTree", "file_structure"))
    if tree is not None:
        if isinstance(tree, dict):
            _walk_tree_dict(tree, "", files, directories)
        elif isinstance(tree, list):
            for item in tree:
                if not isinstance(item, dict):
                    raise ValueError("Items in 'tree' list must be objects")
                _parse_entries_node(item, "", files, directories)
        else:
            raise ValueError("'tree'/'structure' must be an object or list")

    # Heuristic fallback: treat root as path->content map when possible.
    if not files and not directories:
        candidate_keys = [k for k in data.keys() if isinstance(k, str)]
        if candidate_keys and all(isinstance(data[k], str) for k in candidate_keys):
            for path, content in data.items():
                _add_file(path, content, files, directories)

    if not files and not directories:
        raise ValueError(
            "No content found. Provide at least one of: entries, files, directories/dirs, tree/structure"
        )

    # Ensure parent directories exist for every file
    for rel_file in files:
        parent = Path(rel_file).parent
        if str(parent) != ".":
            directories.add(str(parent).replace("\\", "/"))

    return ParsedSkillTree(directories=directories, files=files)
|
||||||
|
|
||||||
|
|
||||||
|
def materialize_skill_tree(parsed: ParsedSkillTree, target_root: Path, clear_target: bool = True) -> None:
    """Write the parsed directories and files beneath *target_root*.

    When *clear_target* is true, an existing target tree is removed first so
    the result exactly matches the parsed specification.
    """
    if clear_target and target_root.exists():
        import shutil

        shutil.rmtree(target_root)

    target_root.mkdir(parents=True, exist_ok=True)

    # Create shallow directories before deeper ones.
    for relative_dir in sorted(parsed.directories, key=lambda d: (d.count("/"), d)):
        (target_root / relative_dir).mkdir(parents=True, exist_ok=True)

    for relative_file, text in parsed.files.items():
        destination = target_root / relative_file
        destination.parent.mkdir(parents=True, exist_ok=True)
        destination.write_text(text, encoding="utf-8")
|
||||||
|
|
@ -30,6 +30,7 @@ import { Welcome } from "@/components/workspace/welcome";
|
||||||
import { useI18n } from "@/core/i18n/hooks";
|
import { useI18n } from "@/core/i18n/hooks";
|
||||||
import { useNotification } from "@/core/notification/hooks";
|
import { useNotification } from "@/core/notification/hooks";
|
||||||
import { useLocalSettings } from "@/core/settings";
|
import { useLocalSettings } from "@/core/settings";
|
||||||
|
import { bootstrapRemoteSkill } from "@/core/skills";
|
||||||
import { type AgentThread, type AgentThreadState } from "@/core/threads";
|
import { type AgentThread, type AgentThreadState } from "@/core/threads";
|
||||||
import { useSubmitThread, useThreadStream } from "@/core/threads/hooks";
|
import { useSubmitThread, useThreadStream } from "@/core/threads/hooks";
|
||||||
import {
|
import {
|
||||||
|
|
@ -99,6 +100,26 @@ export default function ChatPage() {
|
||||||
return target === "skill" ? "skill" : undefined;
|
return target === "skill" ? "skill" : undefined;
|
||||||
}, [searchParams]);
|
}, [searchParams]);
|
||||||
|
|
||||||
|
const skillBootstrap = useMemo(() => {
|
||||||
|
const skillIdRaw = searchParams.get("skill_id")?.trim();
|
||||||
|
if (!skillIdRaw) return undefined;
|
||||||
|
|
||||||
|
const contentId = Number(skillIdRaw);
|
||||||
|
if (!Number.isFinite(contentId)) return undefined;
|
||||||
|
|
||||||
|
const languageTypeRaw =
|
||||||
|
searchParams.get("languageType")?.trim() ??
|
||||||
|
searchParams.get("language_type")?.trim();
|
||||||
|
const languageType = languageTypeRaw
|
||||||
|
? Number(languageTypeRaw)
|
||||||
|
: 0;
|
||||||
|
|
||||||
|
return {
|
||||||
|
contentId,
|
||||||
|
languageType: Number.isFinite(languageType) ? languageType : 0,
|
||||||
|
};
|
||||||
|
}, [threadIdFromPath, searchParams]);
|
||||||
|
|
||||||
const [threadId, setThreadId] = useState<string | null>(null);
|
const [threadId, setThreadId] = useState<string | null>(null);
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (threadIdFromPath !== "new") {
|
if (threadIdFromPath !== "new") {
|
||||||
|
|
@ -118,6 +139,11 @@ export default function ChatPage() {
|
||||||
);
|
);
|
||||||
|
|
||||||
const { showNotification } = useNotification();
|
const { showNotification } = useNotification();
|
||||||
|
const [isSkillBootstrapping, setIsSkillBootstrapping] = useState(false);
|
||||||
|
const [skillBootstrapError, setSkillBootstrapError] = useState<string | null>(
|
||||||
|
null,
|
||||||
|
);
|
||||||
|
const skillBootstrappedKeyRef = useRef<string | null>(null);
|
||||||
const [finalState, setFinalState] = useState<AgentThreadState | null>(null);
|
const [finalState, setFinalState] = useState<AgentThreadState | null>(null);
|
||||||
const thread = useThreadStream({
|
const thread = useThreadStream({
|
||||||
// Keep UI in new-page mode, but runtime may reuse existing thread
|
// Keep UI in new-page mode, but runtime may reuse existing thread
|
||||||
|
|
@ -211,6 +237,54 @@ export default function ChatPage() {
|
||||||
|
|
||||||
const [todoListCollapsed, setTodoListCollapsed] = useState(true);
|
const [todoListCollapsed, setTodoListCollapsed] = useState(true);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!threadId || !skillBootstrap?.contentId) {
|
||||||
|
setIsSkillBootstrapping(false);
|
||||||
|
setSkillBootstrapError(null);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const languageType = skillBootstrap.languageType ?? 0;
|
||||||
|
const initKey = `${threadId}:${skillBootstrap.contentId}:${languageType}`;
|
||||||
|
if (skillBootstrappedKeyRef.current === initKey) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let cancelled = false;
|
||||||
|
|
||||||
|
const runBootstrap = async () => {
|
||||||
|
setIsSkillBootstrapping(true);
|
||||||
|
setSkillBootstrapError(null);
|
||||||
|
try {
|
||||||
|
await bootstrapRemoteSkill({
|
||||||
|
thread_id: threadId,
|
||||||
|
content_id: skillBootstrap.contentId,
|
||||||
|
language_type: languageType,
|
||||||
|
target_dir: "/mnt/user-data/uploads/skill",
|
||||||
|
clear_target: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!cancelled) {
|
||||||
|
skillBootstrappedKeyRef.current = initKey;
|
||||||
|
setIsSkillBootstrapping(false);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
if (!cancelled) {
|
||||||
|
const message = error instanceof Error ? error.message : "Skill 初始化失败";
|
||||||
|
setSkillBootstrapError(message);
|
||||||
|
setIsSkillBootstrapping(false);
|
||||||
|
showNotification("Skill 初始化失败", { body: message });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
void runBootstrap();
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
cancelled = true;
|
||||||
|
};
|
||||||
|
}, [threadId, skillBootstrap, showNotification]);
|
||||||
|
|
||||||
const submitThread = useSubmitThread({
|
const submitThread = useSubmitThread({
|
||||||
isNewThread,
|
isNewThread,
|
||||||
createNewSession,
|
createNewSession,
|
||||||
|
|
@ -230,10 +304,13 @@ export default function ChatPage() {
|
||||||
});
|
});
|
||||||
const handleSubmit = useCallback(
|
const handleSubmit = useCallback(
|
||||||
(message: Parameters<typeof submitThread>[0]) => {
|
(message: Parameters<typeof submitThread>[0]) => {
|
||||||
|
if (isSkillBootstrapping) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
setHasSubmitted(true);
|
setHasSubmitted(true);
|
||||||
void submitThread(message);
|
void submitThread(message);
|
||||||
},
|
},
|
||||||
[submitThread],
|
[isSkillBootstrapping, submitThread],
|
||||||
);
|
);
|
||||||
const handleStop = useCallback(async () => {
|
const handleStop = useCallback(async () => {
|
||||||
await thread.stop();
|
await thread.stop();
|
||||||
|
|
@ -341,13 +418,23 @@ export default function ChatPage() {
|
||||||
extraHeader={
|
extraHeader={
|
||||||
isNewThread && <Welcome mode={settings.context.mode} />
|
isNewThread && <Welcome mode={settings.context.mode} />
|
||||||
}
|
}
|
||||||
disabled={env.NEXT_PUBLIC_STATIC_WEBSITE_ONLY === "true"}
|
disabled={
|
||||||
|
env.NEXT_PUBLIC_STATIC_WEBSITE_ONLY === "true" ||
|
||||||
|
isSkillBootstrapping
|
||||||
|
}
|
||||||
onContextChange={(context) =>
|
onContextChange={(context) =>
|
||||||
setSettings("context", context)
|
setSettings("context", context)
|
||||||
}
|
}
|
||||||
onSubmit={handleSubmit}
|
onSubmit={handleSubmit}
|
||||||
onStop={handleStop}
|
onStop={handleStop}
|
||||||
/>
|
/>
|
||||||
|
{(isSkillBootstrapping || skillBootstrapError) && (
|
||||||
|
<div className="text-muted-foreground w-full translate-y-8 text-center text-xs">
|
||||||
|
{isSkillBootstrapping
|
||||||
|
? "正在初始化 Skill 文件..."
|
||||||
|
: `Skill 初始化失败:${skillBootstrapError}`}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
{env.NEXT_PUBLIC_STATIC_WEBSITE_ONLY === "true" && (
|
{env.NEXT_PUBLIC_STATIC_WEBSITE_ONLY === "true" && (
|
||||||
<div className="text-muted-foreground/67 w-full translate-y-12 text-center text-xs">
|
<div className="text-muted-foreground/67 w-full translate-y-12 text-center text-xs">
|
||||||
{t.common.notAvailableInDemoMode}
|
{t.common.notAvailableInDemoMode}
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
import type { Message } from "@langchain/langgraph-sdk";
|
import type { Message } from "@langchain/langgraph-sdk";
|
||||||
import { FileIcon } from "lucide-react";
|
import { FileIcon } from "lucide-react";
|
||||||
import { useParams } from "next/navigation";
|
import { useParams } from "next/navigation";
|
||||||
import { memo, useMemo, type ImgHTMLAttributes } from "react";
|
import { memo, useMemo, useState, type ImgHTMLAttributes } from "react";
|
||||||
import rehypeKatex from "rehype-katex";
|
import rehypeKatex from "rehype-katex";
|
||||||
|
|
||||||
import {
|
import {
|
||||||
|
|
@ -11,6 +11,7 @@ import {
|
||||||
MessageToolbar,
|
MessageToolbar,
|
||||||
} from "@/components/ai-elements/message";
|
} from "@/components/ai-elements/message";
|
||||||
import { Badge } from "@/components/ui/badge";
|
import { Badge } from "@/components/ui/badge";
|
||||||
|
import { Button } from "@/components/ui/button";
|
||||||
import { resolveArtifactURL } from "@/core/artifacts/utils";
|
import { resolveArtifactURL } from "@/core/artifacts/utils";
|
||||||
import {
|
import {
|
||||||
extractContentFromMessage,
|
extractContentFromMessage,
|
||||||
|
|
@ -18,6 +19,7 @@ import {
|
||||||
parseUploadedFiles,
|
parseUploadedFiles,
|
||||||
type UploadedFile,
|
type UploadedFile,
|
||||||
} from "@/core/messages/utils";
|
} from "@/core/messages/utils";
|
||||||
|
import { materializeSkillYaml } from "@/core/skills";
|
||||||
import { useRehypeSplitWordsIntoSpans } from "@/core/rehype";
|
import { useRehypeSplitWordsIntoSpans } from "@/core/rehype";
|
||||||
import { humanMessagePlugins } from "@/core/streamdown";
|
import { humanMessagePlugins } from "@/core/streamdown";
|
||||||
import { cn } from "@/lib/utils";
|
import { cn } from "@/lib/utils";
|
||||||
|
|
@ -221,6 +223,11 @@ function isImageFile(filename: string): boolean {
|
||||||
return IMAGE_EXTENSIONS.includes(getFileExt(filename));
|
return IMAGE_EXTENSIONS.includes(getFileExt(filename));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function isYamlFile(filename: string): boolean {
|
||||||
|
const ext = getFileExt(filename);
|
||||||
|
return ext === "yaml" || ext === "yml";
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Uploaded files list component
|
* Uploaded files list component
|
||||||
*/
|
*/
|
||||||
|
|
@ -256,11 +263,39 @@ function UploadedFileCard({
|
||||||
file: UploadedFile;
|
file: UploadedFile;
|
||||||
threadId: string;
|
threadId: string;
|
||||||
}) {
|
}) {
|
||||||
|
const [isMaterializing, setIsMaterializing] = useState(false);
|
||||||
|
const [materializeMessage, setMaterializeMessage] = useState<string | null>(
|
||||||
|
null,
|
||||||
|
);
|
||||||
|
|
||||||
if (!threadId) return null;
|
if (!threadId) return null;
|
||||||
|
|
||||||
const isImage = isImageFile(file.filename);
|
const isImage = isImageFile(file.filename);
|
||||||
|
const isYaml = isYamlFile(file.filename);
|
||||||
const fileUrl = resolveArtifactURL(file.path, threadId);
|
const fileUrl = resolveArtifactURL(file.path, threadId);
|
||||||
|
|
||||||
|
const handleMaterializeYaml = async () => {
|
||||||
|
if (isMaterializing) return;
|
||||||
|
setIsMaterializing(true);
|
||||||
|
setMaterializeMessage(null);
|
||||||
|
try {
|
||||||
|
const result = await materializeSkillYaml({
|
||||||
|
thread_id: threadId,
|
||||||
|
path: file.path,
|
||||||
|
target_dir: "/mnt/user-data/uploads/skill",
|
||||||
|
clear_target: true,
|
||||||
|
});
|
||||||
|
setMaterializeMessage(
|
||||||
|
`已创建 ${result.created_files} 个文件 / ${result.created_directories} 个目录`,
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : "解析失败";
|
||||||
|
setMaterializeMessage(`失败: ${message}`);
|
||||||
|
} finally {
|
||||||
|
setIsMaterializing(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
if (isImage) {
|
if (isImage) {
|
||||||
return (
|
return (
|
||||||
<a
|
<a
|
||||||
|
|
@ -298,6 +333,27 @@ function UploadedFileCard({
|
||||||
</Badge>
|
</Badge>
|
||||||
<span className="text-muted-foreground text-[10px]">{file.size}</span>
|
<span className="text-muted-foreground text-[10px]">{file.size}</span>
|
||||||
</div>
|
</div>
|
||||||
|
{/* 注释掉测试按钮,后续根据需求再决定是否保留 */}
|
||||||
|
{/* {isYaml && (
|
||||||
|
<div className="mt-1 flex flex-col gap-1">
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="secondary"
|
||||||
|
className="h-7 text-xs"
|
||||||
|
onClick={() => {
|
||||||
|
void handleMaterializeYaml();
|
||||||
|
}}
|
||||||
|
disabled={isMaterializing}
|
||||||
|
>
|
||||||
|
{isMaterializing ? "解析中..." : "一键导入为 Skill 目录"}
|
||||||
|
</Button>
|
||||||
|
{materializeMessage && (
|
||||||
|
<span className="text-muted-foreground text-[10px] leading-tight">
|
||||||
|
{materializeMessage}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)} */}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -35,6 +35,38 @@ export interface InstallSkillResponse {
|
||||||
message: string;
|
message: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface MaterializeSkillYamlRequest {
|
||||||
|
thread_id: string;
|
||||||
|
path: string;
|
||||||
|
target_dir?: string;
|
||||||
|
clear_target?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface MaterializeSkillYamlResponse {
|
||||||
|
success: boolean;
|
||||||
|
target_dir: string;
|
||||||
|
created_directories: number;
|
||||||
|
created_files: number;
|
||||||
|
message: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BootstrapRemoteSkillRequest {
|
||||||
|
thread_id: string;
|
||||||
|
content_id: number;
|
||||||
|
language_type?: number;
|
||||||
|
target_dir?: string;
|
||||||
|
clear_target?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BootstrapRemoteSkillResponse {
|
||||||
|
success: boolean;
|
||||||
|
target_dir: string;
|
||||||
|
created_directories: number;
|
||||||
|
created_files: number;
|
||||||
|
sandbox_id: string;
|
||||||
|
message: string;
|
||||||
|
}
|
||||||
|
|
||||||
export async function installSkill(
|
export async function installSkill(
|
||||||
request: InstallSkillRequest,
|
request: InstallSkillRequest,
|
||||||
): Promise<InstallSkillResponse> {
|
): Promise<InstallSkillResponse> {
|
||||||
|
|
@ -60,3 +92,51 @@ export async function installSkill(
|
||||||
|
|
||||||
return response.json();
|
return response.json();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function materializeSkillYaml(
|
||||||
|
request: MaterializeSkillYamlRequest,
|
||||||
|
): Promise<MaterializeSkillYamlResponse> {
|
||||||
|
const response = await fetch(
|
||||||
|
`${getBackendBaseURL()}/api/skills/materialize-yaml`,
|
||||||
|
{
|
||||||
|
method: "POST",
|
||||||
|
headers: {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
},
|
||||||
|
body: JSON.stringify(request),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorData = await response.json().catch(() => ({}));
|
||||||
|
const errorMessage =
|
||||||
|
errorData.detail ?? `HTTP ${response.status}: ${response.statusText}`;
|
||||||
|
throw new Error(errorMessage);
|
||||||
|
}
|
||||||
|
|
||||||
|
return response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function bootstrapRemoteSkill(
|
||||||
|
request: BootstrapRemoteSkillRequest,
|
||||||
|
): Promise<BootstrapRemoteSkillResponse> {
|
||||||
|
const response = await fetch(
|
||||||
|
`${getBackendBaseURL()}/api/skills/bootstrap-remote`,
|
||||||
|
{
|
||||||
|
method: "POST",
|
||||||
|
headers: {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
},
|
||||||
|
body: JSON.stringify(request),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorData = await response.json().catch(() => ({}));
|
||||||
|
const errorMessage =
|
||||||
|
errorData.detail ?? `HTTP ${response.status}: ${response.statusText}`;
|
||||||
|
throw new Error(errorMessage);
|
||||||
|
}
|
||||||
|
|
||||||
|
return response.json();
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -104,6 +104,12 @@ export function useSubmitThread({
|
||||||
async (message: PromptInputMessage) => {
|
async (message: PromptInputMessage) => {
|
||||||
const text = message.text.trim();
|
const text = message.text.trim();
|
||||||
|
|
||||||
|
// Guard: ignore empty submits (avoids unintended side effects during page init).
|
||||||
|
const hasFiles = !!(message.files && message.files.length > 0);
|
||||||
|
if (!text && !hasFiles) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
// For "new session" semantics, ensure the target thread id starts fresh.
|
// For "new session" semantics, ensure the target thread id starts fresh.
|
||||||
// If the same id already exists, delete it first and let submit recreate it.
|
// If the same id already exists, delete it first and let submit recreate it.
|
||||||
if (createNewSession && threadId) {
|
if (createNewSession && threadId) {
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue