Vue3 + Composition API + TypeScript
Pinia state management
TailwindCSS + HeadlessUI
Markdown rendering (marked + highlight.js)
Streaming transport (WebSocket/SSE)
FastAPI (Python)
WebSocket real-time communication
Code highlighting: highlight.js
Icon library: Lucide
Animation library: Animate.css
src/
├─ assets/
├─ components/
│  ├─ ChatMessage.vue    # single message component (sketch below)
│  ├─ MessageInput.vue   # input box component
│  └─ Sidebar.vue        # conversation history sidebar
├─ stores/
│  └─ chat.ts            # Pinia store
├─ types/
│  └─ chat.d.ts          # TypeScript type definitions
├─ utils/
│  ├─ markdown.ts        # Markdown processor
│  └─ stream.ts          # streaming data handling
├─ App.vue
└─ main.ts
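The components listed in the tree are not shown in full in this article. As a starting point, here is a minimal ChatMessage.vue sketch; it assumes the Message type defined below and that assistant content has already been converted to HTML by parseMarkdown in the store, so treat it as an illustration rather than the finished component.

<!-- ChatMessage.vue — minimal sketch, not the full component -->
<script setup lang="ts">
import type { Message } from '@/types/chat'

// The message to render is passed in by the parent chat view
defineProps<{ message: Message }>()
</script>

<template>
  <div :class="message.role === 'user' ? 'text-right' : 'text-left'">
    <!-- Assistant content is already HTML produced by parseMarkdown in the store -->
    <div v-if="message.role === 'assistant'" v-html="message.content" />
    <p v-else>{{ message.content }}</p>
    <span v-if="message.status === 'pending'" class="text-gray-400">Thinking…</span>
  </div>
</template>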
// types/chat.d.ts
export interface Message {
  id: string
  content: string
  role: 'user' | 'assistant'
  timestamp: number
  status: 'pending' | 'complete' | 'error'
  isCode?: boolean
}

export interface Conversation {
  id: string
  title: string
  messages: Message[]
  lastUpdated: number
}
// stores/chat.ts
import { ref, watch } from 'vue'
import { defineStore } from 'pinia'
import type { Conversation, Message } from '@/types/chat'
import { parseMarkdown } from '@/utils/markdown'

export const useChatStore = defineStore('chat', () => {
  const currentConversation = ref<Conversation | null>(null)
  const conversations = ref<Conversation[]>([])
  const apiEndpoint = import.meta.env.VITE_API_URL

  // Create a new conversation and make it the active one
  const createNewConversation = () => {
    const newConv: Conversation = {
      id: crypto.randomUUID(),
      title: 'New Chat',
      messages: [],
      lastUpdated: Date.now()
    }
    conversations.value.unshift(newConv)
    currentConversation.value = newConv
  }

  // Send a message (with streaming response handling)
  const sendMessage = async (content: string) => {
    if (!currentConversation.value) return

    const userMessage: Message = {
      id: crypto.randomUUID(),
      content,
      role: 'user',
      timestamp: Date.now(),
      status: 'complete'
    }
    const botMessage: Message = {
      id: crypto.randomUUID(),
      content: '',
      role: 'assistant',
      timestamp: Date.now(),
      status: 'pending'
    }
    currentConversation.value.messages.push(userMessage, botMessage)

    try {
      const response = await fetch(`${apiEndpoint}/chat`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          message: content,
          conversation_id: currentConversation.value.id
        })
      })
      const reader = response.body?.getReader()
      if (!reader) throw new Error('No response body')
      const decoder = new TextDecoder()

      // Accumulate the raw text separately so Markdown is always re-parsed
      // from the full source, never from already-rendered HTML
      let rawContent = ''
      while (true) {
        const { done, value } = await reader.read()
        if (done) break
        rawContent += decoder.decode(value, { stream: true })
        botMessage.content = parseMarkdown(rawContent)
      }
      botMessage.status = 'complete'
    } catch (error) {
      botMessage.content = 'Request failed, please try again'
      botMessage.status = 'error'
    }
  }

  return {
    currentConversation,
    conversations,
    createNewConversation,
    sendMessage
  }
})
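For reference, a component would consume the store roughly like this. The handler and ref names are illustrative, not taken from the original project; this is just a usage sketch for MessageInput.vue's script setup block.

// MessageInput.vue <script setup lang="ts"> — illustrative usage sketch
import { ref } from 'vue'
import { useChatStore } from '@/stores/chat'

const chat = useChatStore()
const draft = ref('')

const onSend = async () => {
  if (!draft.value.trim()) return
  if (!chat.currentConversation) chat.createNewConversation()
  const text = draft.value
  draft.value = ''
  await chat.sendMessage(text)
}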
While the assistant's reply is still streaming, the UI shows a "Thinking…" placeholder based on the message's pending status.
// utils/stream.ts — read a fetch() response body chunk by chunk
export async function handleStreamResponse(
  response: Response,
  callback: (chunk: string) => void
) {
  const reader = response.body?.getReader()
  if (!reader) return
  const decoder = new TextDecoder('utf-8')

  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    const chunk = decoder.decode(value, { stream: true })
    callback(chunk)
  }
}
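The reader above handles the streaming POST endpoint. If the backend also exposes a GET SSE endpoint (an assumption; the original article only shows the /chat POST route), the browser's built-in EventSource could be used instead, as in this sketch:

// SSE alternative — sketch only; assumes a GET endpoint emitting text/event-stream
export function subscribeSSE(
  url: string,
  onChunk: (chunk: string) => void,
  onDone?: () => void
): () => void {
  const source = new EventSource(url)

  // Each SSE "data:" line arrives as a message event
  source.onmessage = (event) => onChunk(event.data)

  // Close on error; real code may want to reconnect instead
  source.onerror = () => {
    source.close()
    onDone?.()
  }

  // Return an unsubscribe function so callers can stop streaming
  return () => source.close()
}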
// utils/markdown.ts
import { marked } from 'marked'
import hljs from 'highlight.js'

// Configure the Markdown parser
// (the highlight option targets the marked v4 API; newer releases of marked
// move it into the separate marked-highlight package)
marked.setOptions({
  highlight: (code, lang) => {
    const language = hljs.getLanguage(lang) ? lang : 'plaintext'
    return hljs.highlight(code, { language }).value
  },
  breaks: true,
  gfm: true
})

// Basic safety filter: strip <script> blocks from the rendered HTML
const sanitizeHtml = (html: string) => {
  return html.replace(/<script[^>]*>[\s\S]*?<\/script>/gi, '')
}

export const parseMarkdown = (content: string): string => {
  return sanitizeHtml(marked.parse(content))
}
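The regex filter only removes script tags. If an extra dependency is acceptable, DOMPurify (not part of the original stack, so this is an optional swap) gives much more thorough sanitization:

// Drop-in replacement for the regex-based sanitizeHtml above
// (requires installing the dompurify package)
import DOMPurify from 'dompurify'

const sanitizeHtml = (html: string): string => DOMPurify.sanitize(html)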
// Revised response handling inside sendMessage, using the shared stream utility
// (requires: import { handleStreamResponse } from '@/utils/stream')
const response = await fetch(`${apiEndpoint}/chat`, {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json'
  },
  body: JSON.stringify({
    message: content,
    conversation_id: currentConversation.value.id
  })
})

if (!response.body) return

// Keep the raw text in a buffer so Markdown is re-parsed from the source
// on every chunk instead of from already-rendered HTML
let rawContent = ''
await handleStreamResponse(response, (chunk) => {
  rawContent += chunk
  botMessage.content = parseMarkdown(rawContent)
})
// Conversation persistence — add inside the Pinia store
const loadConversations = async () => {
  const saved = localStorage.getItem('conversations')
  if (saved) {
    conversations.value = JSON.parse(saved)
  }
}

const saveConversations = () => {
  localStorage.setItem(
    'conversations',
    JSON.stringify(conversations.value)
  )
}

// Auto-save whenever the conversation list changes (watch is imported from 'vue')
watch(conversations, saveConversations, { deep: true })
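loadConversations still has to run once at startup. One option, assuming loadConversations is also added to the store's return object and App.vue is the mounting point, is a small hook like this sketch:

// App.vue <script setup lang="ts"> — restore saved conversations on startup (sketch)
import { onMounted } from 'vue'
import { useChatStore } from '@/stores/chat'

const chat = useChatStore()
onMounted(() => {
  chat.loadConversations()
})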
# Frontend Dockerfile
FROM node:18-alpine AS builder
WORKDIR /app
COPY package*.json ./
RUN npm ci
COPY . .
RUN npm run build

FROM nginx:alpine
COPY --from=builder /app/dist /usr/share/nginx/html
COPY nginx.conf /etc/nginx/conf.d/default.conf
EXPOSE 80
server {
    listen 80;
    server_name your-domain.com;

    location / {
        root /usr/share/nginx/html;
        try_files $uri $uri/ /index.html;
    }

    location /api {
        proxy_pass http://backend:8000;
        proxy_set_header Host $host;
        # Streaming responses require buffering to be disabled
        proxy_buffering off;
    }

    location /websocket {
        proxy_pass http://backend:8000;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "Upgrade";
    }
}
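The /websocket location assumes the frontend opens a WebSocket connection on that same path. A minimal client-side sketch might look like this; the path and the plain-text message shape are assumptions, since the article does not show the backend's WebSocket handler.

// Minimal WebSocket client sketch — path and payload shape are assumptions
export function connectChatSocket(
  onChunk: (chunk: string) => void
): WebSocket {
  const protocol = location.protocol === 'https:' ? 'wss' : 'ws'
  const socket = new WebSocket(`${protocol}://${location.host}/websocket`)

  // Each server frame is treated as a plain text chunk of the reply
  socket.onmessage = (event) => onChunk(event.data)

  socket.onclose = () => {
    // Real code would surface this to the UI and possibly reconnect
    console.warn('Chat WebSocket closed')
  }

  return socket
}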