feat: 更新了UI组件和Tailwind配置,并同步了依赖项和新增了环境变量文件。新增本地代理服务器和通义百炼平台的对接。

This commit is contained in:
肖应宇 2026-03-01 13:31:54 +08:00
parent 83fbfc2c37
commit c97e227685
51 changed files with 20690 additions and 19414 deletions

2
.gitignore vendored
View File

@ -12,6 +12,8 @@ dist
dist-ssr
*.local
.env
# Editor directories and files
.vscode/*
!.vscode/extensions.json

2
package-lock.json generated
View File

@ -5,7 +5,7 @@
"requires": true,
"packages": {
"": {
"name": "enterprise-chat",
"name": "ai-chat-ui",
"version": "0.0.0",
"dependencies": {
"@microsoft/fetch-event-source": "^2.0.1",

6
server/.env.example Normal file
View File

@ -0,0 +1,6 @@
# 阿里云百炼 API Key
# 请在百炼控制台申请并填入此处
ALIYUN_API_KEY=your_api_key_here
# 本地中转服务器运行端口
PORT=3000

1304
server/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

23
server/package.json Normal file
View File

@ -0,0 +1,23 @@
{
"name": "server",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"start": "node server.js",
"dev": "nodemon server.js"
},
"keywords": [],
"author": "",
"license": "ISC",
"type": "commonjs",
"dependencies": {
"cors": "^2.8.5",
"dotenv": "^17.3.1",
"express": "^5.2.1",
"http-proxy-middleware": "^3.0.5",
"morgan": "^1.10.1",
"multer": "^2.1.0",
"uuid": "^13.0.0"
}
}

165
server/server.js Normal file
View File

@ -0,0 +1,165 @@
const express = require('express');
const cors = require('cors');
const { createProxyMiddleware } = require('http-proxy-middleware');
const multer = require('multer');
const { v4: uuidv4 } = require('uuid');
const path = require('path');
const fs = require('fs');
const morgan = require('morgan');
require('dotenv').config();

// Local relay server for the chat UI: proxies streaming chat calls to Aliyun
// DashScope (injecting the API key server-side) and serves mock endpoints for
// models, conversation history and file upload.
const app = express();
const PORT = process.env.PORT || 3000;

// Global request logging: one line per request (method, URL, status,
// response size, latency) printed to the terminal.
app.use(morgan(':method :url :status :res[content-length] - :response-time ms'));

// Enable CORS so the frontend dev server (a different origin) can call this API.
app.use(cors());

// --- 1. Streaming chat proxy ---
// Runs before the proxy middleware: intercepts every /api/chat-ui/chat request
// and forces a Bearer token onto the Authorization header, so the frontend
// never has to hold the API key.
app.use('/api/chat-ui/chat', (req, res, next) => {
  const apiKey = process.env.ALIYUN_API_KEY;
  if (!apiKey) {
    // Key missing: log loudly but still forward; the upstream will reject the call.
    console.error("【错误】发送代理请求前未配置 ALIYUN_API_KEY !");
  } else {
    req.headers['authorization'] = `Bearer ${apiKey}`;
  }
  next();
});

// NOTE: the proxy must be registered BEFORE express.json(); the body parser
// would otherwise consume the request stream and the proxied body would be lost.
app.use(
  '/api/chat-ui/chat',
  createProxyMiddleware({
    // Aliyun DashScope exposes an OpenAI-compatible endpoint; proxy straight to it.
    target: 'https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions',
    changeOrigin: true,
    // Strip the local prefix so the upstream path is exactly the completions path.
    pathRewrite: {
      '^/api/chat-ui/chat': '',
    }
  })
);

// Everything below is handled by Node itself, so JSON bodies must be parsed.
app.use(express.json());

// --- 2. List available models (static mock data) ---
app.get('/api/chat-ui/models', (req, res) => {
  res.json([
    {
      id: "qwen-max",
      name: "通义千问 Max",
      description: "最强大的模型",
      maxTokens: 8192,
      provider: "Aliyun"
    },
    {
      id: "qwen-plus",
      name: "通义千问 Plus",
      description: "能力均衡",
      maxTokens: 8192,
      provider: "Aliyun"
    }
  ]);
});

// In-memory conversation store for mocking; contents are lost on restart.
const conversationsDB = {};

// --- 3. List all conversations ---
app.get('/api/chat-ui/conversations', (req, res) => {
  res.json(Object.values(conversationsDB));
});

// --- 4. Fetch a single conversation by id ---
app.get('/api/chat-ui/conversations/:id', (req, res) => {
  const { id } = req.params;
  const conversation = conversationsDB[id];
  if (conversation) {
    res.json(conversation);
  } else {
    res.status(404).json({ error: '对话不存在' });
  }
});

// --- 5. Create or update a conversation ---
// A missing id means "create": one is generated server-side. The same POST
// endpoint doubles as the update path (the frontend may alternatively use
// POST/PUT on /api/chat-ui/conversations/:id — TODO confirm against caller).
app.post('/api/chat-ui/conversations', (req, res) => {
  const data = req.body;
  if(!data.id) data.id = uuidv4();
  conversationsDB[data.id] = data;
  res.json(data);
})

// --- 6. Delete a conversation ---
app.delete('/api/chat-ui/conversations/:id', (req, res) => {
  const { id } = req.params;
  if (conversationsDB[id]) {
    delete conversationsDB[id];
    res.json({ success: true, message: "删除成功" });
  } else {
    res.status(404).json({ error: '对话不存在' });
  }
});

// Temporary directory for uploaded files; created lazily on first start.
const uploadDir = path.join(__dirname, 'uploads');
if (!fs.existsSync(uploadDir)) {
  fs.mkdirSync(uploadDir);
}

// Disk storage for uploads: timestamp + random suffix prevents name collisions
// while keeping the original filename visible.
const storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, uploadDir);
  },
  filename: function (req, file, cb) {
    const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1E9);
    cb(null, uniqueSuffix + '-' + file.originalname);
  }
});
const upload = multer({ storage: storage });

// Serve uploaded files back as static assets.
app.use('/uploads', express.static(uploadDir));

// --- 7. File upload ---
app.post('/api/chat-ui/upload', upload.single('file'), (req, res) => {
  if (!req.file) {
    return res.status(400).json({ error: '没有文件上传' });
  }
  // Return a URL the frontend can use to display/download the file.
  // NOTE(review): hard-codes localhost — won't work when accessed from another host.
  res.json({
    url: `http://localhost:${PORT}/uploads/${req.file.filename}`,
    name: req.file.originalname,
    size: req.file.size,
    mimeType: req.file.mimetype
  });
});

// --- 8. Stop generation ---
// A no-op for this proxy: the stream is cancelled client-side via
// AbortController, so we simply acknowledge the request.
app.post(['/api/chat-ui/stop', '/api/chat-ui/stop/:id'], (req, res) => {
  res.json({ success: true, message: "已发出停止指令" });
});

// Fallback: any unmatched route returns 404 as JSON.
app.use((req, res) => {
  res.status(404).json({ error: 'Endpoint not found' });
});

app.listen(PORT, () => {
  console.log('====================================');
  console.log(`本地代理服务器已启动,监听端口: ${PORT}`);
  console.log('====================================');
  if (!process.env.ALIYUN_API_KEY) {
    console.log('⚠️ 警告: 未在 .env 文件中检测到 ALIYUN_API_KEY!');
    console.log('请在 server/.env 中添加您的百炼 API Key。');
  } else {
    console.log('✅ 检测到了 API Key。');
  }
});

View File

@ -53,8 +53,7 @@ import MessageList from "./MessageList.vue";
import ChatInput from "@/components/input/ChatInput.vue";
import { MessageType, MessageRole } from "@/types/chat";
import type { Attachment } from "@/types/chat";
import { chatApi } from "@/services/api.ts";
import { streamAIResponse, generateSuggestions } from "@/services/mockAI";
import { chatApi } from "@/services/api";
defineEmits<{
"toggle-sidebar": [];
@ -144,39 +143,47 @@ async function handleSend(text: string, attachments: Attachment[]) {
// AbortController
abortController.value = new AbortController();
await streamAIResponse(
text,
try {
const stream = chatApi.streamChat(
{
onStart: () => {
message: text,
conversationId: currentConversation.value?.id || "",
model: settings.value.defaultModel,
stream: true,
},
abortController.value.signal,
);
let fullText = "";
isTyping.value = false;
},
onToken: (_token, fullText) => {
for await (const chunk of stream) {
if (abortController.value?.signal.aborted) break;
fullText += chunk;
chatStore.updateMessageContent(aiMessage.id, fullText);
},
onComplete: (fullText) => {
}
if (!abortController.value?.signal.aborted) {
chatStore.updateMessage(aiMessage.id, {
isStreaming: false,
content: {
type: MessageType.TEXT,
text: fullText,
suggestions: generateSuggestions(),
},
});
chatStore.stopStreaming();
currentStreamingMessageId.value = null;
},
onError: (error) => {
}
} catch (error: any) {
if (error.name !== "AbortError") {
chatStore.updateMessage(aiMessage.id, {
isStreaming: false,
isError: true,
errorMessage: error.message,
errorMessage: error.message || "请求失败",
});
}
} finally {
chatStore.stopStreaming();
currentStreamingMessageId.value = null;
},
},
chatStore.streamController?.signal,
);
}
}
//
@ -219,36 +226,47 @@ async function handleRetry(messageId: string) {
currentStreamingMessageId.value = messageId;
chatStore.startStreaming();
abortController.value = new AbortController();
await streamAIResponse(
userMessage.content.text || "",
try {
const stream = chatApi.streamChat(
{
onToken: (_token, fullText) => {
chatStore.updateMessageContent(messageId, fullText);
message: userMessage.content.text || "",
conversationId: currentConversation.value?.id,
model: settings.value.defaultModel,
stream: true,
},
onComplete: (fullText) => {
abortController.value.signal,
);
let fullText = "";
for await (const chunk of stream) {
if (abortController.value?.signal.aborted) break;
fullText += chunk;
chatStore.updateMessageContent(messageId, fullText);
}
if (!abortController.value?.signal.aborted) {
chatStore.updateMessage(messageId, {
isStreaming: false,
content: {
type: MessageType.TEXT,
text: fullText,
suggestions: generateSuggestions(),
},
});
chatStore.stopStreaming();
currentStreamingMessageId.value = null;
},
onError: (error) => {
}
} catch (error: any) {
if (error.name !== "AbortError") {
chatStore.updateMessage(messageId, {
isStreaming: false,
isError: true,
errorMessage: error.message,
errorMessage: error.message || "请求失败",
});
}
} finally {
chatStore.stopStreaming();
currentStreamingMessageId.value = null;
},
},
chatStore.streamController?.signal,
);
}
}
function handleRegenerate(messageId: string) {

View File

@ -89,6 +89,18 @@ class ChatApi {
request: ChatRequest,
signal?: AbortSignal,
): AsyncGenerator<string> {
// 将前端简化的请求翻译为 OpenAI 兼容的规范请求体
const openAiRequest = {
model: request.model || "qwen-plus",
messages: [
{ role: "system", content: request.systemPrompt || "你是一个有用的助手。" },
{ role: "user", content: request.message }
],
stream: true,
temperature: request.temperature,
max_tokens: request.maxTokens
};
const response = await fetch(
`${this.baseUrl}${API_ENDPOINTS.CHAT_STREAM}`,
{
@ -97,7 +109,7 @@ class ChatApi {
"Content-Type": "application/json",
Accept: "text/event-stream",
},
body: JSON.stringify(request),
body: JSON.stringify(openAiRequest),
signal,
},
);
@ -112,15 +124,32 @@ class ChatApi {
throw new Error("Response body is not readable");
}
const decoder = new TextDecoder();
const decoder = new TextDecoder("utf-8");
let buffer = "";
while (true) {
const { done, value } = await reader.read();
if (done) break;
const text = decoder.decode(value, { stream: true });
const match = text.match(/data:\s*(\{.*\})/);
buffer += decoder.decode(value, { stream: true });
const lines = buffer.split("\n");
// 保留最后一行未完整的 JSON
buffer = lines.pop() || "";
for (const line of lines) {
if (line.trim() === "" || line.includes("[DONE]")) continue;
const match = line.match(/^data:\s*(.+)$/);
if (match) {
yield JSON.parse(match[1])["message"];
try {
const data = JSON.parse(match[1]);
const content = data.choices?.[0]?.delta?.content;
if (content) {
yield content;
}
} catch (e) {
console.warn("JSON解析错误", e, line);
}
}
}
}
}
@ -163,18 +192,18 @@ class ChatApi {
async getModels(): Promise<ModelInfo[]> {
return [
{
id: "gpt-4",
name: "GPT-4",
id: "qwen-max",
name: "通义千问 Max",
description: "最强大的模型",
maxTokens: 8192,
provider: "OpenAI",
provider: "Aliyun",
},
{
id: "gpt-3.5-turbo",
name: "GPT-3.5 Turbo",
description: "快速高效",
maxTokens: 16384,
provider: "OpenAI",
id: "qwen-plus",
name: "通义千问 Plus",
description: "能力均衡",
maxTokens: 8192,
provider: "Aliyun",
},
];
}

View File

@ -1,270 +0,0 @@
import { generateId } from '@/utils/helpers'
// 模拟响应数据
const mockResponses: Record<string, string> = {
default: `你好!我是 AI 智能助手,很高兴为你服务。
-
-
-
-
`,
code: `好的,这是一个 Vue 3 组件示例:
\`\`\`vue
<template>
<div class="counter">
<h2>: {{ count }}</h2>
<button @click="increment"></button>
<button @click="decrement"></button>
</div>
</template>
<script setup lang="ts">
import { ref } from 'vue'
const count = ref(0)
function increment() {
count.value++
}
function decrement() {
count.value--
}
</script>
<style scoped>
.counter {
padding: 20px;
text-align: center;
}
button {
margin: 0 8px;
padding: 8px 16px;
}
</style>
\`\`\`
Vue 3
1. **Composition API**: 使 \`<script setup>\` 语法
2. ****: 使 \`ref\` 创建响应式变量
3. ****: 使 \`@click\` 绑定事件
4. ****: 使 \`scoped\` 样式`,
ml: `**机器学习Machine Learning** 是人工智能的一个分支,它使计算机系统能够从数据中学习并改进,而无需进行明确的编程。
##
### 1.
- 使
-
### 2.
- 使
-
### 3.
-
- AI
##
| | |
|------|----------|
| | |
| | |
| | |
> 💡 `,
email: `好的,这是一封商务邮件模板:
---
****
[]
****
1. []
2. []
3. []
[]
[]
[]
---
`,
react: `# React 应用性能优化指南
## 1.
### 使 React.memo
\`\`\`jsx
const MyComponent = React.memo(({ data }) => {
return <div>{data.name}</div>
})
\`\`\`
### 使 useMemo useCallback
\`\`\`javascript
const memoizedValue = useMemo(() => computeExpensiveValue(a, b), [a, b])
const memoizedCallback = useCallback(() => doSomething(a, b), [a, b])
\`\`\`
## 2.
\`\`\`javascript
const LazyComponent = React.lazy(() => import('./LazyComponent'))
function App() {
return (
<Suspense fallback={<Loading />}>
<LazyComponent />
</Suspense>
)
}
\`\`\`
## 3.
使 **react-window** **react-virtualized**
\`\`\`javascript
import { FixedSizeList } from 'react-window'
<FixedSizeList
height={400}
itemCount={1000}
itemSize={35}
>
{Row}
</FixedSizeList>
\`\`\`
## 4.
使 React DevTools Profiler
> 🚀 ****`,
}
// Picks the canned reply whose keyword set matches the (lower-cased) user
// input; rules are checked in order and the first hit wins.
function matchResponse(input: string): string {
  const normalized = input.toLowerCase()
  const rules: Array<[string[], string]> = [
    [['vue', '组件'], mockResponses.code],
    [['机器学习', 'ml', '学习'], mockResponses.ml],
    [['邮件', '商务'], mockResponses.email],
    [['react', '性能', '优化'], mockResponses.react],
  ]
  for (const [keywords, reply] of rules) {
    if (keywords.some(keyword => normalized.includes(keyword))) {
      return reply
    }
  }
  return mockResponses.default
}
// Streaming text generator: emits `text` in chunks of up to 3 characters,
// sleeping a random 5–25 ms between characters to mimic token latency.
// Ends early (dropping any pending chunk) once `signal` is aborted.
async function* streamText(text: string, signal?: AbortSignal): AsyncGenerator<string> {
  const units = text.split('')
  let pending = ''
  for (const [index, ch] of units.entries()) {
    if (signal?.aborted) {
      return
    }
    pending += ch
    // Random per-character pause simulates typing speed.
    const pause = Math.random() * 20 + 5
    await new Promise(resolve => setTimeout(resolve, pause))
    const isLast = index === units.length - 1
    if (pending.length >= 3 || isLast) {
      yield pending
      pending = ''
    }
  }
}
// Callback contract for the mock streaming response.
export interface StreamCallbacks {
  // Fired once before the simulated "thinking" delay.
  onStart?: () => void
  // Fired per chunk with both the chunk and the text accumulated so far.
  onToken?: (token: string, fullText: string) => void
  // Fired with the complete text, unless the stream was aborted.
  onComplete?: (fullText: string) => void
  // Fired for unexpected failures; AbortError is swallowed silently.
  onError?: (error: Error) => void
}

// Simulates a streaming AI reply to `userMessage`, reporting progress through
// `callbacks` and honoring `signal` for cancellation at every stage.
export async function streamAIResponse(
  userMessage: string,
  callbacks: StreamCallbacks,
  signal?: AbortSignal
): Promise<void> {
  try {
    callbacks.onStart?.()
    // Initial pause before the first token, mimicking model "thinking".
    await new Promise(resolve => setTimeout(resolve, 500))
    if (signal?.aborted) return
    const reply = matchResponse(userMessage)
    let accumulated = ''
    for await (const token of streamText(reply, signal)) {
      if (signal?.aborted) break
      accumulated += token
      callbacks.onToken?.(token, accumulated)
    }
    if (!signal?.aborted) {
      callbacks.onComplete?.(accumulated)
    }
  } catch (error) {
    if (error instanceof Error && error.name !== 'AbortError') {
      callbacks.onError?.(error)
    }
  }
}
// Returns the fixed set of follow-up suggestion chips, each paired with a
// freshly generated id.
export function generateSuggestions(): { id: string; text: string }[] {
  const labels = [
    '继续深入讲解',
    '给我一个实际例子',
    '有什么最佳实践吗?',
    '可以用中文解释吗?',
  ]
  const result: { id: string; text: string }[] = []
  for (const text of labels) {
    result.push({ id: generateId(), text })
  }
  return result
}

View File

@ -16,7 +16,7 @@ export const useSettingsStore = defineStore('settings', () => {
compactMode: false,
// AI 默认设置
defaultModel: 'gpt-4',
defaultModel: 'qwen-plus',
defaultTemperature: 0.7,
defaultMaxTokens: 4096,
defaultSystemPrompt: '你是一个有帮助的 AI 助手。',
@ -34,39 +34,32 @@ export const useSettingsStore = defineStore('settings', () => {
// 可用的 AI 模型
const availableModels: AIModel[] = [
{
id: 'gpt-4',
name: 'GPT-4',
id: 'qwen-max',
name: '通义千问 Max',
description: '最强大的模型,适合复杂任务',
maxTokens: 8192,
provider: 'OpenAI',
provider: 'Aliyun',
},
{
id: 'gpt-4-turbo',
name: 'GPT-4 Turbo',
description: '更快的响应速度128K 上下文',
maxTokens: 128000,
provider: 'OpenAI',
id: 'qwen-plus',
name: '通义千问 Plus',
description: '能力均衡,更快的响应速度',
maxTokens: 8192,
provider: 'Aliyun',
},
{
id: 'gpt-3.5-turbo',
name: 'GPT-3.5 Turbo',
id: 'qwen-turbo',
name: '通义千问 Turbo',
description: '快速高效,适合日常对话',
maxTokens: 16384,
provider: 'OpenAI',
maxTokens: 8192,
provider: 'Aliyun',
},
{
id: 'claude-3-opus',
name: 'Claude 3 Opus',
description: '优秀的长文本处理能力',
maxTokens: 200000,
provider: 'Anthropic',
},
{
id: 'claude-3-sonnet',
name: 'Claude 3 Sonnet',
description: '平衡性能与成本',
maxTokens: 200000,
provider: 'Anthropic',
id: 'qwen-vl-max',
name: '通义千问 VL-Max',
description: '强大的视觉理解模型',
maxTokens: 8192,
provider: 'Aliyun',
},
]

View File

@ -1 +1 @@
{"root":["./src/main.ts","./src/components/icons/index.ts","./src/composables/usekeyboard.ts","./src/services/api.ts","./src/services/mockai.ts","./src/stores/chat.ts","./src/stores/settings.ts","./src/types/chat.ts","./src/utils/helpers.ts","./src/app.vue","./src/components/chat/chatheader.vue","./src/components/chat/chatmain.vue","./src/components/chat/messagelist.vue","./src/components/chat/welcomescreen.vue","./src/components/input/attachmentpreview.vue","./src/components/input/chatinput.vue","./src/components/message/codeblock.vue","./src/components/message/messageactions.vue","./src/components/message/messagebubble.vue","./src/components/message/components/echartscontainernode.vue","./src/components/message/components/loading.vue","./src/components/message/components/thinkingnode.vue","./src/components/modals/conversationsettingsmodal.vue","./src/components/modals/searchmodal.vue","./src/components/modals/settingsmodal.vue","./src/components/modals/shortcutsmodal.vue","./src/components/sidebar/chatsidebar.vue","./src/components/sidebar/conversationitem.vue","./src/components/ui/formselect.vue","./src/components/ui/formslider.vue","./src/components/ui/formswitch.vue"],"version":"5.9.3"}
{"root":["./src/main.ts","./src/components/icons/index.ts","./src/composables/useKeyboard.ts","./src/services/api.ts","./src/stores/chat.ts","./src/stores/settings.ts","./src/types/chat.ts","./src/utils/helpers.ts","./src/App.vue","./src/components/chat/ChatHeader.vue","./src/components/chat/ChatMain.vue","./src/components/chat/MessageList.vue","./src/components/chat/WelcomeScreen.vue","./src/components/input/AttachmentPreview.vue","./src/components/input/ChatInput.vue","./src/components/message/CodeBlock.vue","./src/components/message/MessageActions.vue","./src/components/message/MessageBubble.vue","./src/components/message/components/EChartsContainerNode.vue","./src/components/message/components/Loading.vue","./src/components/message/components/ThinkingNode.vue","./src/components/modals/ConversationSettingsModal.vue","./src/components/modals/SearchModal.vue","./src/components/modals/SettingsModal.vue","./src/components/modals/ShortcutsModal.vue","./src/components/sidebar/ChatSidebar.vue","./src/components/sidebar/ConversationItem.vue","./src/components/ui/FormSelect.vue","./src/components/ui/FormSlider.vue","./src/components/ui/FormSwitch.vue"],"version":"5.9.3"}

View File

@ -16,6 +16,12 @@ export default defineConfig({
},
server: {
host: "0.0.0.0",
proxy: {
"/api/chat-ui": {
target: "http://localhost:3000",
changeOrigin: true,
},
},
},
build: {
// 输出目录