feat(references): 统一引用来源并过滤 uploads/skill
This commit is contained in:
parent
2d50c49369
commit
299d819026
|
|
@ -10,6 +10,7 @@ from fastapi import APIRouter, HTTPException, Request
|
||||||
from fastapi.responses import FileResponse, PlainTextResponse, Response
|
from fastapi.responses import FileResponse, PlainTextResponse, Response
|
||||||
|
|
||||||
from app.gateway.path_utils import resolve_thread_virtual_path
|
from app.gateway.path_utils import resolve_thread_virtual_path
|
||||||
|
from deerflow.config.paths import VIRTUAL_PATH_PREFIX, get_paths
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
@ -62,6 +63,38 @@ def _find_compat_filename_match(missing_path: Path) -> Path | None:
|
||||||
return matches[0] if len(matches) == 1 else None
|
return matches[0] if len(matches) == 1 else None
|
||||||
|
|
||||||
|
|
||||||
|
def _list_reference_files_in_dir(
|
||||||
|
thread_id: str,
|
||||||
|
root_dir: Path,
|
||||||
|
virtual_prefix: str,
|
||||||
|
source: str,
|
||||||
|
) -> list[dict[str, str]]:
|
||||||
|
if not root_dir.is_dir():
|
||||||
|
return []
|
||||||
|
|
||||||
|
files: list[dict[str, str]] = []
|
||||||
|
for file_path in sorted(root_dir.rglob("*")):
|
||||||
|
if not file_path.is_file():
|
||||||
|
continue
|
||||||
|
relative_path = file_path.relative_to(root_dir).as_posix()
|
||||||
|
# Internal uploaded skills are bootstrap assets, not user-facing references.
|
||||||
|
if source == "upload" and relative_path.startswith("skill/"):
|
||||||
|
continue
|
||||||
|
virtual_path = f"{virtual_prefix}/{relative_path}"
|
||||||
|
encoded_virtual_path = quote(virtual_path, safe="/")
|
||||||
|
files.append(
|
||||||
|
{
|
||||||
|
"filename": file_path.name,
|
||||||
|
"size": str(file_path.stat().st_size),
|
||||||
|
"virtual_path": virtual_path,
|
||||||
|
"artifact_url": f"/api/threads/{thread_id}/artifacts{encoded_virtual_path}",
|
||||||
|
"source": source,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
return files
|
||||||
|
|
||||||
|
|
||||||
def is_text_file_by_content(path: Path, sample_size: int = 8192) -> bool:
|
def is_text_file_by_content(path: Path, sample_size: int = 8192) -> bool:
|
||||||
"""Check if file is text by examining content for null bytes."""
|
"""Check if file is text by examining content for null bytes."""
|
||||||
try:
|
try:
|
||||||
|
|
@ -106,6 +139,38 @@ def _extract_file_from_skill_archive(zip_path: Path, internal_path: str) -> byte
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
    "/threads/{thread_id}/artifacts/list",
    summary="List Reference Files",
    description="List current files under outputs and uploads for @ references.",
)
async def list_reference_files(thread_id: str) -> dict:
    """List real files from outputs/uploads so mention candidates stay fresh."""
    paths = get_paths()

    # Scan both sandbox roots; the third element is the source tag the
    # frontend uses to label each mention candidate.
    scan_targets = (
        (paths.sandbox_outputs_dir(thread_id), f"{VIRTUAL_PATH_PREFIX}/outputs", "artifact"),
        (paths.sandbox_uploads_dir(thread_id), f"{VIRTUAL_PATH_PREFIX}/uploads", "upload"),
    )

    files: list[dict[str, str]] = []
    for root_dir, prefix, source in scan_targets:
        files.extend(_list_reference_files_in_dir(thread_id, root_dir, prefix, source))

    return {"files": files, "count": len(files)}
|
||||||
|
|
||||||
|
|
||||||
@router.get(
|
@router.get(
|
||||||
"/threads/{thread_id}/artifacts/{path:path}",
|
"/threads/{thread_id}/artifacts/{path:path}",
|
||||||
summary="Get Artifact File",
|
summary="Get Artifact File",
|
||||||
|
|
|
||||||
|
|
@ -130,3 +130,43 @@ def test_get_artifact_compat_fallback_for_dash_spacing(tmp_path, monkeypatch) ->
|
||||||
|
|
||||||
assert bytes(response.body).decode("utf-8") == "ok"
|
assert bytes(response.body).decode("utf-8") == "ok"
|
||||||
assert response.media_type == "text/markdown"
|
assert response.media_type == "text/markdown"
|
||||||
|
|
||||||
|
|
||||||
|
def test_list_reference_files_returns_outputs_and_uploads(tmp_path, monkeypatch) -> None:
    """The list endpoint merges outputs and uploads and hides uploads/skill."""
    outputs_dir = tmp_path / "outputs"
    uploads_dir = tmp_path / "uploads"
    (outputs_dir / "figures").mkdir(parents=True)
    (uploads_dir / "skill").mkdir(parents=True)

    (outputs_dir / "notes.md").write_text("hello", encoding="utf-8")
    (outputs_dir / "figures" / "plot.png").write_bytes(b"png")
    (uploads_dir / "dataset.csv").write_text("a,b\n1,2\n", encoding="utf-8")
    # Internal skill asset: must NOT appear in the listing.
    (uploads_dir / "skill" / "internal.txt").write_text("hidden", encoding="utf-8")

    class _FakePaths:
        # Redirect sandbox lookups at the tmp fixtures regardless of thread id.
        def sandbox_outputs_dir(self, _thread_id: str) -> Path:
            return outputs_dir

        def sandbox_uploads_dir(self, _thread_id: str) -> Path:
            return uploads_dir

    monkeypatch.setattr(artifacts_router, "get_paths", lambda: _FakePaths())

    app = FastAPI()
    app.include_router(artifacts_router.router)

    with TestClient(app) as client:
        response = client.get("/api/threads/thread-1/artifacts/list")

    assert response.status_code == 200
    payload = response.json()
    assert payload["count"] == 3

    listed = {entry["virtual_path"]: entry for entry in payload["files"]}
    assert "/mnt/user-data/outputs/notes.md" in listed
    assert "/mnt/user-data/outputs/figures/plot.png" in listed
    assert "/mnt/user-data/uploads/dataset.csv" in listed
    assert "/mnt/user-data/uploads/skill/internal.txt" not in listed
    assert listed["/mnt/user-data/outputs/notes.md"]["source"] == "artifact"
    assert listed["/mnt/user-data/uploads/dataset.csv"]["source"] == "upload"
|
||||||
|
|
|
||||||
|
|
@ -70,6 +70,7 @@ import {
|
||||||
DropdownMenuTrigger,
|
DropdownMenuTrigger,
|
||||||
} from "@/components/ui/dropdown-menu";
|
} from "@/components/ui/dropdown-menu";
|
||||||
import { Tag } from "@/components/ui/tag";
|
import { Tag } from "@/components/ui/tag";
|
||||||
|
import { useReferenceFiles } from "@/core/artifacts/references";
|
||||||
import { urlOfArtifact } from "@/core/artifacts/utils";
|
import { urlOfArtifact } from "@/core/artifacts/utils";
|
||||||
import { useI18n } from "@/core/i18n/hooks";
|
import { useI18n } from "@/core/i18n/hooks";
|
||||||
import type { SelectedSkillPayloadItem } from "@/core/i18n/locales/types";
|
import type { SelectedSkillPayloadItem } from "@/core/i18n/locales/types";
|
||||||
|
|
@ -80,7 +81,6 @@ import {
|
||||||
MENTION_REFERENCE_EVENT,
|
MENTION_REFERENCE_EVENT,
|
||||||
type MentionReferenceEventDetail,
|
type MentionReferenceEventDetail,
|
||||||
} from "@/core/threads/reference-events";
|
} from "@/core/threads/reference-events";
|
||||||
import { useUploadedFiles } from "@/core/uploads/hooks";
|
|
||||||
import { useIframeSkill } from "@/hooks/use-iframe-skill";
|
import { useIframeSkill } from "@/hooks/use-iframe-skill";
|
||||||
import { cn } from "@/lib/utils";
|
import { cn } from "@/lib/utils";
|
||||||
|
|
||||||
|
|
@ -96,7 +96,6 @@ import {
|
||||||
import { Suggestion, Suggestions } from "../ai-elements/suggestion";
|
import { Suggestion, Suggestions } from "../ai-elements/suggestion";
|
||||||
import { ScrollArea } from "../ui/scroll-area";
|
import { ScrollArea } from "../ui/scroll-area";
|
||||||
|
|
||||||
import { useThread } from "./messages/context";
|
|
||||||
import { ModeHoverGuide } from "./mode-hover-guide";
|
import { ModeHoverGuide } from "./mode-hover-guide";
|
||||||
import { Tooltip } from "./tooltip";
|
import { Tooltip } from "./tooltip";
|
||||||
|
|
||||||
|
|
@ -260,7 +259,6 @@ export function InputBox({
|
||||||
}),
|
}),
|
||||||
[t],
|
[t],
|
||||||
);
|
);
|
||||||
const { thread } = useThread();
|
|
||||||
const searchParams = useSearchParams();
|
const searchParams = useSearchParams();
|
||||||
const iframeSkill = useIframeSkill({ threadId: threadIdFromProps });
|
const iframeSkill = useIframeSkill({ threadId: threadIdFromProps });
|
||||||
const isInputDisabled = (disabled ?? false) || iframeSkill.isBootstrapping;
|
const isInputDisabled = (disabled ?? false) || iframeSkill.isBootstrapping;
|
||||||
|
|
@ -294,7 +292,7 @@ export function InputBox({
|
||||||
} | null>(null);
|
} | null>(null);
|
||||||
const [isInputToolsTourOpen, setIsInputToolsTourOpen] = useState(false);
|
const [isInputToolsTourOpen, setIsInputToolsTourOpen] = useState(false);
|
||||||
const [isInputToolsTourReady, setIsInputToolsTourReady] = useState(false);
|
const [isInputToolsTourReady, setIsInputToolsTourReady] = useState(false);
|
||||||
const { data: uploadedFilesData } = useUploadedFiles(threadIdFromProps);
|
const { data: referenceFilesData } = useReferenceFiles(threadIdFromProps);
|
||||||
|
|
||||||
// isNewThread 时禁用收缩,始终保持展开(除非已提交消息)
|
// isNewThread 时禁用收缩,始终保持展开(除非已提交消息)
|
||||||
const effectiveIsFocused =
|
const effectiveIsFocused =
|
||||||
|
|
@ -439,49 +437,41 @@ export function InputBox({
|
||||||
);
|
);
|
||||||
|
|
||||||
const mentionCandidates = useMemo<MentionCandidate[]>(() => {
|
const mentionCandidates = useMemo<MentionCandidate[]>(() => {
|
||||||
const artifactCandidates = (thread.values.artifacts ?? []).map((path) => {
|
const deduped = new Map<string, MentionCandidate>();
|
||||||
const filename = path.split("/").pop() ?? path;
|
(referenceFilesData?.files ?? []).forEach((file) => {
|
||||||
return {
|
const path = file.virtual_path || "";
|
||||||
key: `artifact:${path}`,
|
const filename = file.filename ?? path.split("/").pop() ?? path;
|
||||||
filename,
|
const refSource = file.source === "upload" ? "upload" : "artifact";
|
||||||
path,
|
const typeLabel =
|
||||||
pathTail: getPathTail(path),
|
refSource === "upload"
|
||||||
ref_source: "artifact" as const,
|
? referenceSourceLabels.upload
|
||||||
ref_kind: "mention" as const,
|
: referenceSourceLabels.artifact;
|
||||||
typeLabel: referenceSourceLabels.artifact,
|
const previewUrl =
|
||||||
isImage: isImageFilename(filename),
|
file.artifact_url ||
|
||||||
previewUrl: threadId
|
(threadId
|
||||||
? urlOfArtifact({
|
? urlOfArtifact({
|
||||||
filepath: path,
|
filepath: path,
|
||||||
threadId,
|
threadId,
|
||||||
})
|
})
|
||||||
: undefined,
|
: undefined);
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
const uploadCandidates =
|
deduped.set(`${refSource}:${path || filename}`, {
|
||||||
uploadedFilesData?.files.map((file) => ({
|
key: `${refSource}:${path || filename}`,
|
||||||
key: `upload:${file.virtual_path || file.filename}`,
|
filename,
|
||||||
filename: file.filename,
|
path,
|
||||||
path: file.virtual_path,
|
pathTail: getPathTail(path),
|
||||||
pathTail: getPathTail(file.virtual_path),
|
ref_source: refSource,
|
||||||
ref_source: "upload" as const,
|
ref_kind: "mention",
|
||||||
ref_kind: "mention" as const,
|
typeLabel,
|
||||||
typeLabel: referenceSourceLabels.upload,
|
isImage: isImageFilename(filename),
|
||||||
isImage: isImageFilename(file.filename),
|
previewUrl,
|
||||||
previewUrl: file.artifact_url,
|
});
|
||||||
})) ?? [];
|
|
||||||
|
|
||||||
const deduped = new Map<string, MentionCandidate>();
|
|
||||||
[...artifactCandidates, ...uploadCandidates].forEach((candidate) => {
|
|
||||||
deduped.set(candidate.key, candidate);
|
|
||||||
});
|
});
|
||||||
return [...deduped.values()];
|
return [...deduped.values()];
|
||||||
}, [
|
}, [
|
||||||
|
referenceFilesData?.files,
|
||||||
referenceSourceLabels.artifact,
|
referenceSourceLabels.artifact,
|
||||||
referenceSourceLabels.upload,
|
referenceSourceLabels.upload,
|
||||||
thread.values.artifacts,
|
|
||||||
uploadedFilesData?.files,
|
|
||||||
threadId,
|
threadId,
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,38 @@
|
||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
|
||||||
|
import { getBackendBaseURL } from "../config";
|
||||||
|
|
||||||
|
/** Metadata for one file exposed to the @-mention reference picker. */
export type ReferenceFileInfo = {
  /** Base name of the file (no directory component). */
  filename: string;
  /** File size in bytes — the backend serializes it as a decimal string. */
  size: string;
  /** Sandbox-virtual path, e.g. "/mnt/user-data/outputs/notes.md". */
  virtual_path: string;
  /** Backend URL path that serves this file's contents. */
  artifact_url: string;
  /** Which sandbox root the file came from (outputs vs uploads). */
  source: "artifact" | "upload";
};

/** Response shape of GET /api/threads/{id}/artifacts/list. */
type ListReferenceFilesResponse = {
  files: ReferenceFileInfo[];
  count: number;
};
|
||||||
|
|
||||||
|
async function listReferenceFiles(
|
||||||
|
threadId: string,
|
||||||
|
): Promise<ListReferenceFilesResponse> {
|
||||||
|
const response = await fetch(
|
||||||
|
`${getBackendBaseURL()}/api/threads/${threadId}/artifacts/list`,
|
||||||
|
);
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error("Failed to list reference files");
|
||||||
|
}
|
||||||
|
return response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
export function useReferenceFiles(threadId: string | undefined) {
|
||||||
|
return useQuery({
|
||||||
|
queryKey: ["references", "list", threadId],
|
||||||
|
queryFn: () => listReferenceFiles(threadId ?? ""),
|
||||||
|
enabled: Boolean(threadId),
|
||||||
|
refetchInterval: 5000,
|
||||||
|
refetchOnWindowFocus: true,
|
||||||
|
});
|
||||||
|
}
|
||||||
Loading…
Reference in New Issue