import { currentHost } from '@/utils/location'
/**
 * Parses JSON strings from responses whose payload lines are prefixed with "data: ",
 * e.g. the streaming responses of iFlytek Spark, Kimi (Moonshot) and similar LLMs
 */
export const createParser = () => {
let keepAliveShown = false
const resetKeepAliveParser = () => {
keepAliveShown = false
}
const parseJsonLikeData = (content: string) => {
// A '[DONE]' termination signal ends the stream immediately
if (content === '[DONE]') {
// Reset the keep-alive flag
keepAliveShown = false
return {
done: true
}
}
if (content.startsWith('data: ')) {
keepAliveShown = false
const dataString = content.substring(6).trim()
if (dataString === '[DONE]') {
return {
done: true
}
}
try {
return JSON.parse(dataString)
} catch (error) {
console.error('JSON parse error:', error)
}
}
// Fall back to parsing the content as a raw JSON string
try {
const trimmedContent = content.trim()
if (trimmedContent === ': keep-alive') {
// Surface the keep-alive hint only once per stream
if (!keepAliveShown) {
keepAliveShown = true
return {
isWaitQueuing: true
}
} else {
return null
}
}
if (!trimmedContent) {
return null
}
if (trimmedContent.startsWith('{') && trimmedContent.endsWith('}')) {
return JSON.parse(trimmedContent)
}
if (trimmedContent.startsWith('[') && trimmedContent.endsWith(']')) {
return JSON.parse(trimmedContent)
}
} catch (error) {
console.error('Direct JSON parse attempt failed:', error)
}
return null
}
return {
resetKeepAliveParser,
parseJsonLikeData
}
}
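/*
 * Rough illustration of the shapes parseJsonLikeData distinguishes. The inputs
 * below are hypothetical SSE lines, not tied to any particular provider:
 *
 *   parseJsonLikeData('data: {"choices":[{"delta":{"content":"Hi"}}]}')
 *     // -> { choices: [{ delta: { content: 'Hi' } }] }
 *   parseJsonLikeData('data: [DONE]')  // -> { done: true }
 *   parseJsonLikeData(': keep-alive')  // -> { isWaitQueuing: true } (only the first time)
 *   parseJsonLikeData('')              // -> null
 */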
export const createStreamThinkTransformer = () => {
let isThinking = false
const resetThinkTransformer = () => {
isThinking = false
}
const transformStreamThinkData = (content) => {
const stream = parseJsonLikeData(content)
if (stream && stream.done) {
return {
done: true
}
}
// DeepSeek applies rate limiting, which can leave a request waiting in a queue; handle that case with a simple flag
// https://api-docs.deepseek.com/zh-cn/quick_start/rate_limit
if (stream && stream.isWaitQueuing) {
return {
isWaitQueuing: stream.isWaitQueuing
}
}
if (!stream || !stream.choices || stream.choices.length === 0) {
return {
content: ''
}
}
const delta = stream.choices[0].delta
const contentText = delta.content || ''
const reasoningText = delta.reasoning_content || ''
let transformedContent = ''
// Reasoning phase: content stays null while reasoning_content streams in
if (delta.content === null && delta.reasoning_content !== null) {
if (!isThinking) {
// Placeholder for an opening marker of the reasoning block (currently empty)
transformedContent += ''
isThinking = true
}
transformedContent += reasoningText
}
// Once content appears, the reasoning phase is over
else if (delta.content !== null && delta.reasoning_content === null) {
if (isThinking) {
transformedContent += '\n\n'
isThinking = false
}
transformedContent += contentText
}
// Plain models carry no reasoning field (reasoning_content is undefined); append content directly
else if (delta.content !== null && delta.reasoning_content === undefined) {
isThinking = false
transformedContent += contentText
}
return {
content: transformedContent
}
}
return {
resetThinkTransformer,
transformStreamThinkData
}
}
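/*
 * Sketch of how transformStreamThinkData folds DeepSeek-style chunks into plain
 * text. The chunks below are hypothetical and only illustrate the branches above:
 *
 *   transformStreamThinkData('data: {"choices":[{"delta":{"content":null,"reasoning_content":"thinking"}}]}')
 *     // -> { content: 'thinking' }   (reasoning phase)
 *   transformStreamThinkData('data: {"choices":[{"delta":{"content":"Answer","reasoning_content":null}}]}')
 *     // -> { content: '\n\nAnswer' } (first content chunk closes the reasoning block)
 *   transformStreamThinkData('data: [DONE]')  // -> { done: true }
 */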
const { resetKeepAliveParser, parseJsonLikeData } = createParser()
const { resetThinkTransformer, transformStreamThinkData } = createStreamThinkTransformer()
/**
 * Handles the cleanup triggered when an LLM call is paused, fails, or finishes
 */
export const triggerModelTermination = () => {
resetKeepAliveParser()
resetThinkTransformer()
}
type ContentResult = {
content: any
} | {
done: boolean
}
type DoneResult = {
content: any
isWaitQueuing?: any
} & {
done: boolean
}
export type CrossTransformFunction = (readValue: Uint8Array | string, textDecoder: TextDecoder) => DoneResult
export type TransformFunction = (readValue: Uint8Array | string, textDecoder: TextDecoder) => ContentResult
interface TypesModelLLM {
// Display name of the model
label: string
// Model identifier
modelName: string
// Transformer applied to each stream chunk
transformStreamValue: TransformFunction
// API request issued for each LLM call
chatFetch: (text: string) => Promise<Response>
}
/** ---------------- LLM mapping list & response transformers that handle the different stream value formats ---------------- */
/**
 * Name of the mock model
 */
export const defaultMockModelName = 'standard'
/**
 * Default model used by the project; change this field as needed
 */
// export const defaultModelName = 'spark'
export const defaultModelName = defaultMockModelName
export const modelMappingList: TypesModelLLM[] = [
{
label: '🧪 Mock data model',
modelName: 'standard',
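// The mock endpoint streams plain UTF-8 text, so chunks are decoded and passed through unchanged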
transformStreamValue(readValue, textDecoder) {
let content = ''
if (readValue instanceof Uint8Array) {
content = textDecoder.decode(readValue, {
stream: true
})
} else {
content = readValue
}
return {
content
}
},
// Mock event stream used to simulate reading LLM response data from a mock endpoint
async chatFetch(text): Promise<Response> {
const formData = new FormData()
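// Conversation history persisted under the 'kaConversationsChatList' localStorage key is forwarded alongside the prompt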
const history = localStorage.getItem('kaConversationsChatList') || '[]'
const payload = {
client_id: `${ Date.now() + 10 }`,
prompt: text,
history
}
Object.entries(payload).forEach(([key, value]) => {
formData.append(key, value)
})
const res = await fetch(`${ currentHost.baseApi }/chat`, {
method: 'POST',
body: formData
})
return res
}
}
]
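/**
 * Illustrative sketch only (not referenced elsewhere in this module): one way a
 * caller could drive a model entry end to end. The helper name `consumeModelStream`
 * and the `onChunk` callback are assumptions made for this example, not an existing API.
 */
export const consumeModelStream = async (
  model: TypesModelLLM,
  text: string,
  onChunk: (chunk: string) => void
) => {
  const response = await model.chatFetch(text)
  if (!response.body) return
  const reader = response.body.getReader()
  const textDecoder = new TextDecoder('utf-8')
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    const result = model.transformStreamValue(value, textDecoder)
    // ContentResult is a union, so only forward chunks that actually carry content
    if ('content' in result && result.content) {
      onChunk(result.content)
    }
    if ('done' in result && result.done) break
  }
  // Reset parser state once the stream has ended
  triggerModelTermination()
}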