feat: 优化skill注入流程 (optimize the skill injection flow)
This commit is contained in:
parent
22eaa3cf86
commit
5242df028b
|
|
@ -108,7 +108,7 @@ class RemoteSkillBootstrapResponse(BaseModel):
|
||||||
target_dir: str = Field(..., description="Virtual target directory")
|
target_dir: str = Field(..., description="Virtual target directory")
|
||||||
created_directories: int = Field(..., description="Number of created directories")
|
created_directories: int = Field(..., description="Number of created directories")
|
||||||
created_files: int = Field(..., description="Number of created files")
|
created_files: int = Field(..., description="Number of created files")
|
||||||
sandbox_id: str = Field(..., description="Acquired sandbox ID")
|
sandbox_id: str | None = Field(default=None, description="Acquired sandbox ID (null when sandbox is not acquired)")
|
||||||
message: str = Field(..., description="Operation result message")
|
message: str = Field(..., description="Operation result message")
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -568,8 +568,8 @@ async def bootstrap_skill_from_remote(request: RemoteSkillBootstrapRequest) -> R
|
||||||
"""Initialize thread skill directory from remote YAML content service."""
|
"""Initialize thread skill directory from remote YAML content service."""
|
||||||
try:
|
try:
|
||||||
# 1) Ensure sandbox and thread personal dirs are initialized first.
|
# 1) Ensure sandbox and thread personal dirs are initialized first.
|
||||||
sandbox_provider = get_sandbox_provider()
|
# sandbox_provider = get_sandbox_provider()
|
||||||
sandbox_id = sandbox_provider.acquire(request.thread_id)
|
# sandbox_id = sandbox_provider.acquire(request.thread_id)
|
||||||
|
|
||||||
# 2) Fetch YAML content from configured remote endpoint.
|
# 2) Fetch YAML content from configured remote endpoint.
|
||||||
cfg = get_gateway_config()
|
cfg = get_gateway_config()
|
||||||
|
|
@ -624,7 +624,7 @@ async def bootstrap_skill_from_remote(request: RemoteSkillBootstrapRequest) -> R
|
||||||
target_dir=request.target_dir,
|
target_dir=request.target_dir,
|
||||||
created_directories=len(parsed.directories),
|
created_directories=len(parsed.directories),
|
||||||
created_files=len(parsed.files),
|
created_files=len(parsed.files),
|
||||||
sandbox_id=sandbox_id,
|
sandbox_id=None,
|
||||||
message=(
|
message=(
|
||||||
f"Bootstrapped {len(parsed.files)} files and {len(parsed.directories)} directories "
|
f"Bootstrapped {len(parsed.files)} files and {len(parsed.directories)} directories "
|
||||||
f"under '{request.target_dir}'"
|
f"under '{request.target_dir}'"
|
||||||
|
|
|
||||||
|
|
@ -7,6 +7,9 @@ real filesystem content under a thread's virtual path (for example,
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
@ -329,3 +332,158 @@ def materialize_skill_tree(parsed: ParsedSkillTree, target_root: Path, clear_tar
|
||||||
file_path = target_root / rel_file
|
file_path = target_root / rel_file
|
||||||
file_path.parent.mkdir(parents=True, exist_ok=True)
|
file_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
file_path.write_text(content, encoding="utf-8")
|
file_path.write_text(content, encoding="utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
def _build_cli_parser() -> argparse.ArgumentParser:
|
||||||
|
"""Build command-line argument parser.
|
||||||
|
|
||||||
|
CLI usage:
|
||||||
|
python skill_yaml_importer.py <input_path> [options]
|
||||||
|
|
||||||
|
Positional arguments:
|
||||||
|
input_path Path to a YAML file, or a directory containing YAML files.
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--show-files Include parsed file paths in output.
|
||||||
|
--show-directories Include parsed directory paths in output.
|
||||||
|
--json Print JSON output instead of plain text.
|
||||||
|
--recursive Recursively scan subdirectories when input is a directory.
|
||||||
|
--log-file <path> Save full report (summary + successes + failures) to JSON file.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
python skill_yaml_importer.py ./sample.yaml --json
|
||||||
|
python skill_yaml_importer.py ./generated_yaml --recursive --log-file ./parse_log.json
|
||||||
|
"""
|
||||||
|
parser = argparse.ArgumentParser(description="Parse and validate a skill YAML spec file")
|
||||||
|
parser.add_argument("input_path", help="Path to a YAML file or a directory containing YAML files")
|
||||||
|
parser.add_argument(
|
||||||
|
"--show-files",
|
||||||
|
action="store_true",
|
||||||
|
help="Print sorted parsed file paths",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--show-directories",
|
||||||
|
action="store_true",
|
||||||
|
help="Print sorted parsed directory paths",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--json",
|
||||||
|
action="store_true",
|
||||||
|
help="Print parsed summary as JSON",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--recursive",
|
||||||
|
action="store_true",
|
||||||
|
help="When input path is a directory, scan YAML files recursively",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--log-file",
|
||||||
|
default=None,
|
||||||
|
help="Optional path to save full execution results and summary as JSON",
|
||||||
|
)
|
||||||
|
return parser
|
||||||
|
|
||||||
|
|
||||||
|
def _collect_yaml_files(input_path: Path, recursive: bool) -> list[Path]:
|
||||||
|
if input_path.is_file():
|
||||||
|
return [input_path]
|
||||||
|
|
||||||
|
if not input_path.is_dir():
|
||||||
|
return []
|
||||||
|
|
||||||
|
patterns = ("*.yaml", "*.yml")
|
||||||
|
files: list[Path] = []
|
||||||
|
for pattern in patterns:
|
||||||
|
iterator = input_path.rglob(pattern) if recursive else input_path.glob(pattern)
|
||||||
|
files.extend(iterator)
|
||||||
|
|
||||||
|
# Stable order for deterministic output
|
||||||
|
return sorted({p.resolve() for p in files})
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_one_yaml_file(yaml_path: Path, show_files: bool, show_directories: bool) -> dict:
    """Parse a single YAML spec file and summarize the result as a plain dict.

    The ``directories`` / ``files`` entries carry the sorted path lists only
    when the corresponding flag is set; otherwise they are ``None`` so that
    JSON output stays compact.

    Raises whatever ``parse_skill_yaml_spec`` raises for invalid specs.
    """
    spec = parse_skill_yaml_spec(yaml_path.read_text(encoding="utf-8"))
    dir_list = sorted(spec.directories)
    file_list = sorted(spec.files.keys())

    summary = {
        "yaml_file": str(yaml_path),
        "directories_count": len(dir_list),
        "files_count": len(file_list),
        "directories": None,
        "files": None,
    }
    if show_directories:
        summary["directories"] = dir_list
    if show_files:
        summary["files"] = file_list
    return summary
|
||||||
|
|
||||||
|
|
||||||
|
def _main() -> int:
    """Run the CLI: parse one YAML file, or every YAML file under a directory.

    Returns the process exit status:
        0 -- every file parsed successfully
        1 -- the input path is invalid or contains no YAML files
        2 -- processing finished but at least one file failed to parse
    """
    opts = _build_cli_parser().parse_args()

    root = Path(opts.input_path)
    if not root.exists():
        print(f"Input path not found: {root}", file=sys.stderr)
        return 1

    targets = _collect_yaml_files(root, recursive=opts.recursive)
    if not targets:
        print(f"No YAML files found under: {root}", file=sys.stderr)
        return 1

    ok_results: list[dict] = []
    errors: list[dict[str, str]] = []

    for target in targets:
        try:
            parsed_info = _parse_one_yaml_file(
                target,
                show_files=opts.show_files,
                show_directories=opts.show_directories,
            )
        except Exception as exc:  # noqa: BLE001
            # Record the failure and keep going: one bad spec must not
            # abort the rest of the batch.
            errors.append({"yaml_file": str(target), "error": str(exc)})
            print(f"ERROR: {target}: {exc}", file=sys.stderr)
            continue
        ok_results.append(parsed_info)
        if not opts.json:
            print(f"OK: {target}")
            print(f"  Directories: {parsed_info['directories_count']}")
            print(f"  Files: {parsed_info['files_count']}")

    summary = {
        "input_path": str(root),
        "total": len(targets),
        "success": len(ok_results),
        "failed": len(errors),
    }
    report = {"summary": summary, "successes": ok_results, "failures": errors}

    if opts.log_file:
        # Log-file problems are reported but never change the exit status.
        try:
            log_path = Path(opts.log_file)
            log_path.parent.mkdir(parents=True, exist_ok=True)
            log_path.write_text(json.dumps(report, ensure_ascii=False, indent=2), encoding="utf-8")
        except Exception as exc:  # noqa: BLE001
            print(f"Failed to write log file '{opts.log_file}': {exc}", file=sys.stderr)
        else:
            print(f"Log saved: {log_path}")

    if opts.json:
        print(json.dumps(report, ensure_ascii=False, indent=2))
    else:
        print("\n[Summary]")
        print(f"Input: {summary['input_path']}")
        print(f"Total: {summary['total']}")
        print(f"Success: {summary['success']}")
        print(f"Failed: {summary['failed']}")

    return 2 if errors else 0
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Propagate the CLI status code (0 ok, 1 bad input, 2 parse failures)
    # to the shell via SystemExit.
    raise SystemExit(_main())
|
||||||
|
|
@ -166,6 +166,10 @@ services:
|
||||||
- CI=true
|
- CI=true
|
||||||
# Docker environment for aio sandbox
|
# Docker environment for aio sandbox
|
||||||
- DOCKER_HOST=unix:///var/run/docker.sock
|
- DOCKER_HOST=unix:///var/run/docker.sock
|
||||||
|
- LOG_LEVEL=DEBUG
|
||||||
|
- LANGGRAPH_DEBUG=true
|
||||||
|
- LANGCHAIN_DEBUG=true
|
||||||
|
- PYTHONUNBUFFERED=1
|
||||||
env_file:
|
env_file:
|
||||||
- ../.env
|
- ../.env
|
||||||
extra_hosts:
|
extra_hosts:
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue