mirror of https://github.com/ikechan8370/chatgpt-plugin.git
synced 2025-12-16 21:37:11 +00:00
fix: WIP openai rewrite
parent 30f9c82d73
commit a77a0e430f
5 changed files with 101 additions and 54 deletions
@@ -0,0 +1,7 @@
+import OpenAI from 'openai';
+import { BaseClient } from './BaseClient.js'
+
+export default class OpenAILikeClient extends BaseClient {
+
+
+}
@@ -59,6 +59,7 @@ import { ChatGPTAPI } from '../utils/openai/chatgpt-api.js'
 import { newFetch } from '../utils/proxy.js'
 import { ChatGLM4Client } from '../client/ChatGLM4Client.js'
 import { QwenApi } from '../utils/alibaba/qwen-api.js'
+import OpenAI from 'openai';
 
 const roleMap = {
   owner: 'group owner',
@@ -872,7 +873,13 @@ class Core {
       // If a proxy is configured (or we are outside mainland China) and a reverse proxy exists, but forced reverse proxy is not enabled, drop the baseurl
       delete opts.apiBaseUrl
     }
-    this.chatGPTApi = new ChatGPTAPI(opts)
+    const client = new OpenAI({
+      apiKey: Config.apiKey,
+      baseURL: opts.apiBaseUrl,
+      fetch: newFetch
+    })
+
+    // this.chatGPTApi = new ChatGPTAPI(opts)
     let option = {
       timeoutMs: 600000,
       completionParams,
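For context (not part of this commit): in the v4 SDK the client created above is driven through its chat.completions resource rather than through the old ChatGPTAPI wrapper. A minimal non-streaming sketch, assuming the API key comes from the environment; the model name and message content are illustrative only.

import OpenAI from 'openai'

// Assumption: key and optional reverse-proxy base URL come from the environment here;
// the hunk above takes them from Config.apiKey and opts.apiBaseUrl instead.
const client = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  baseURL: process.env.OPENAI_BASE_URL
})

const completion = await client.chat.completions.create({
  model: 'gpt-4o-mini', // illustrative model name
  messages: [{ role: 'user', content: 'hello' }]
})
console.log(completion.choices[0].message.content)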
@@ -1054,7 +1061,7 @@ async function collectTools (e) {
     tools.push(...[new EliMusicTool(), new EliMovieTool()])
   } catch (err) {
     tools.push(...[new SendMusicTool(), new SearchMusicTool()])
-    logger.debug(logger.green('【ChatGPT-Plugin】插件avocado-plugin未安装') + ',安装后可查看最近热映电影与体验可玩性更高的点歌工具。\n可前往 https://github.com/Qz-Sean/avocado-plugin 获取')
+    // logger.debug(logger.green('【ChatGPT-Plugin】插件avocado-plugin未安装') + ',安装后可查看最近热映电影与体验可玩性更高的点歌工具。\n可前往 https://github.com/Qz-Sean/avocado-plugin 获取')
   }
   let systemAddition = ''
   if (e.isGroup) {
@@ -24,7 +24,7 @@
     "lodash": "^4.17.21",
     "microsoft-cognitiveservices-speech-sdk": "1.32.0",
     "node-fetch": "^3.3.1",
-    "openai": "^3.2.1",
+    "openai": "^4.72.0",
     "p-timeout": "^6.1.2",
     "quick-lru": "6.1.1",
     "random": "^4.1.0",
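The dependency bump crosses the SDK's major rewrite: openai v3 exposed wrapper classes with axios-style responses, while v4 exposes a default-exported client with namespaced resources. Roughly, as a sketch of the public migration path rather than code from this commit:

// openai v3 (the removed "^3.2.1" dependency)
// import { Configuration, OpenAIApi } from 'openai'
// const api = new OpenAIApi(new Configuration({ apiKey }))
// const res = await api.createChatCompletion({ model, messages })
// res.data.choices[0].message

// openai v4 (the added "^4.72.0" dependency)
// import OpenAI from 'openai'
// const client = new OpenAI({ apiKey })
// const res = await client.chat.completions.create({ model, messages })
// res.choices[0].message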
@@ -7,9 +7,9 @@ import * as tokenizer from './tokenizer'
 import * as types from './types'
 import globalFetch from 'node-fetch'
 import { fetchSSE } from './fetch-sse'
-import {ChatCompletionRequestMessage, openai, Role} from "./types";
+import {openai, Role} from "./types";
 
-const CHATGPT_MODEL = 'gpt-3.5-turbo-0613'
+const CHATGPT_MODEL = 'gpt-4o-mini'
 
 const USER_LABEL_DEFAULT = 'User'
 const ASSISTANT_LABEL_DEFAULT = 'ChatGPT'
@@ -183,6 +183,7 @@ export class ChatGPTAPI {
       parentMessageId: messageId,
       text: '',
       functionCall: undefined,
+      toolCalls: undefined,
       conversation: []
     }
 
@@ -247,7 +248,16 @@ export class ChatGPTAPI {
             } else {
               result.functionCall.arguments = (result.functionCall.arguments || '') + delta.function_call.arguments
             }
-
+          } else if (delta.tool_calls) {
+            let fc = delta.tool_calls[0].function
+            if (fc.name) {
+              result.functionCall = {
+                name: fc.name,
+                arguments: fc.arguments
+              }
+            } else {
+              result.functionCall.arguments = (result.functionCall.arguments || '') + fc.arguments
+            }
           } else {
             result.delta = delta.content
             if (delta?.content) result.text += delta.content
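Not part of the commit, but it shows why the arguments are concatenated above: when streaming, the API sends a tool call's function name in an early chunk and its JSON arguments in fragments across later chunks. A minimal sketch of the same accumulation pattern with the v4 SDK; the model, prompt and tool definition are illustrative only.

import OpenAI from 'openai'

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY })
const stream = await client.chat.completions.create({
  model: 'gpt-4o-mini',
  messages: [{ role: 'user', content: 'What is the weather in Paris?' }],
  tools: [{
    type: 'function',
    function: {
      name: 'get_weather',
      parameters: { type: 'object', properties: { city: { type: 'string' } } }
    }
  }],
  stream: true
})

let call = null
for await (const chunk of stream) {
  const delta = chunk.choices[0]?.delta
  if (!delta?.tool_calls) continue
  const fc = delta.tool_calls[0].function
  if (fc?.name) {
    call = { name: fc.name, arguments: fc.arguments || '' } // first fragment carries the name
  } else if (call) {
    call.arguments += fc?.arguments || '' // later fragments append pieces of the JSON arguments
  }
}
// once the stream ends, call.arguments holds the complete JSON argument string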
@@ -255,7 +265,6 @@ export class ChatGPTAPI {
           if (delta.role) {
             result.role = delta.role
           }
-
           result.detail = response
           onProgress?.(result)
         }
@@ -303,6 +312,8 @@ export class ChatGPTAPI {
         result.text = message.content
       } else if (message.function_call) {
         result.functionCall = message.function_call
+      } else if (message.tool_calls) {
+        result.functionCall = message.tool_calls.map(tool => tool.function)[0]
       }
       if (message.role) {
         result.role = message.role
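For reference, in a non-streamed response the tool calls sit on the returned message itself, and message.tool_calls[0].function is what the branch above copies into result.functionCall. An illustrative fragment following the public response shape (id and values made up):

const message = {
  role: 'assistant',
  content: null,
  tool_calls: [
    {
      id: 'call_abc123', // illustrative call id
      type: 'function',
      function: { name: 'get_weather', arguments: '{"city":"Paris"}' }
    }
  ]
}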
@@ -417,46 +428,47 @@ export class ChatGPTAPI {
     let functionToken = 0
 
     let numTokens = functionToken
-    if (completionParams.functions) {
-      for (const func of completionParams.functions) {
-        functionToken += await this._getTokenCount(func?.name)
-        functionToken += await this._getTokenCount(func?.description)
-        if (func?.parameters?.properties) {
-          for (let key of Object.keys(func.parameters.properties)) {
-            functionToken += await this._getTokenCount(key)
-            let property = func.parameters.properties[key]
-            for (let field of Object.keys(property)) {
-              switch (field) {
-                case 'type': {
-                  functionToken += 2
-                  functionToken += await this._getTokenCount(property?.type)
-                  break
-                }
-                case 'description': {
-                  functionToken += 2
-                  functionToken += await this._getTokenCount(property?.description)
-                  break
-                }
-                case 'enum': {
-                  functionToken -= 3
-                  for (let enumElement of property?.enum) {
-                    functionToken += 3
-                    functionToken += await this._getTokenCount(enumElement)
-                  }
-                  break
-                }
-              }
-            }
-          }
-        }
-        if (func?.parameters?.required) {
-          for (let string of func.parameters.required) {
-            functionToken += 2
-            functionToken += await this._getTokenCount(string)
-          }
-        }
-      }
-    }
+    // deprecated function call token calculation due to low efficiency
+    // if (completionParams.functions) {
+    //   for (const func of completionParams.functions) {
+    //     functionToken += await this._getTokenCount(func?.name)
+    //     functionToken += await this._getTokenCount(func?.description)
+    //     if (func?.parameters?.properties) {
+    //       for (let key of Object.keys(func.parameters.properties)) {
+    //         functionToken += await this._getTokenCount(key)
+    //         let property = func.parameters.properties[key]
+    //         for (let field of Object.keys(property)) {
+    //           switch (field) {
+    //             case 'type': {
+    //               functionToken += 2
+    //               functionToken += await this._getTokenCount(property?.type)
+    //               break
+    //             }
+    //             case 'description': {
+    //               functionToken += 2
+    //               functionToken += await this._getTokenCount(property?.description)
+    //               break
+    //             }
+    //             case 'enum': {
+    //               functionToken -= 3
+    //               for (let enumElement of property?.enum) {
+    //                 functionToken += 3
+    //                 functionToken += await this._getTokenCount(enumElement)
+    //               }
+    //               break
+    //             }
+    //           }
+    //         }
+    //       }
+    //     }
+    //     if (func?.parameters?.required) {
+    //       for (let string of func.parameters.required) {
+    //         functionToken += 2
+    //         functionToken += await this._getTokenCount(string)
+    //       }
+    //     }
+    //   }
+    // }
 
     do {
       const prompt = nextMessages
@@ -467,7 +479,9 @@ export class ChatGPTAPI {
         case 'user':
           return prompt.concat([`${userLabel}:\n${message.content}`])
         case 'function':
-          // leave befind
+          // leave behind
           return prompt
+        case 'assistant':
+          return prompt
         default:
           return message.content ? prompt.concat([`${assistantLabel}:\n${message.content}`]) : prompt
@@ -510,7 +524,8 @@ export class ChatGPTAPI {
           role: parentMessageRole,
           content: parentMessage.text,
           name: parentMessage.name,
-          function_call: parentMessage.functionCall ? parentMessage.functionCall : undefined
+          function_call: parentMessage.functionCall ? parentMessage.functionCall : undefined,
+          tools: parentMessage.toolCalls ? parentMessage.toolCalls : undefined
         },
         ...nextMessages.slice(systemMessageOffset)
       ])
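Related context, not part of the commit: when an earlier assistant turn contained tool calls, the chat.completions API expects the tool output to be sent back as a role 'tool' message keyed by the call id. An illustrative message list (ids, names and contents made up):

const nextMessages = [
  { role: 'user', content: 'What is the weather in Paris?' },
  {
    role: 'assistant',
    content: null,
    tool_calls: [{
      id: 'call_abc123',
      type: 'function',
      function: { name: 'get_weather', arguments: '{"city":"Paris"}' }
    }]
  },
  // the tool result refers back to the call it answers
  { role: 'tool', tool_call_id: 'call_abc123', content: '{"temp_c":18}' }
]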
@@ -79,7 +79,8 @@ export interface ChatMessage {
 
   // only relevant for ChatGPTUnofficialProxyAPI (optional for ChatGPTAPI)
   conversationId?: string
-  functionCall?: openai.FunctionCall
+  functionCall?: openai.FunctionCall,
+  toolCalls?: openai.ToolCall[],
 }
 
 export class ChatGPTError extends Error {
@@ -203,7 +204,8 @@ export namespace openai {
     delta: {
       role: Role
       content?: string,
-      function_call?: {name: string, arguments: string}
+      function_call?: FunctionCall,
+      tool_calls: ToolCall[]
     }
     index: number
     finish_reason: string | null
@@ -236,7 +238,9 @@ export namespace openai {
      */
     name?: string
     function_call?: FunctionCall
-
+    tool_calls?: ToolCall,
+    // required todo
+    // tool_choice?: 'none' | 'auto' | 'required'
   }
 
   export interface FunctionCall {
@@ -244,6 +248,17 @@ export namespace openai {
     arguments: string
   }
 
+  export interface ToolCall {
+    id: string
+    type: "function"
+    function: FunctionCall
+  }
+
+  export interface Tools {
+    type: "function" | string,
+    function: Function
+  }
+
   export declare const ChatCompletionRequestMessageRoleEnum: {
     readonly System: 'system'
     readonly User: 'user'
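An example of the request-side tools array that the new ToolCall/Tools interfaces describe, following the public chat.completions schema; the function name, description and parameters are illustrative only.

const tools = [
  {
    type: 'function',
    function: {
      name: 'get_weather', // illustrative tool
      description: 'Look up the current weather for a city',
      parameters: { // JSON Schema describing the arguments
        type: 'object',
        properties: { city: { type: 'string' } },
        required: ['city']
      }
    }
  }
]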
@@ -271,7 +286,8 @@ export namespace openai {
      */
     content: string
 
-    function_call: FunctionCall
+    function_call: FunctionCall,
+    tool_calls: ToolCall[]
   }
   export declare const ChatCompletionResponseMessageRoleEnum: {
     readonly System: 'system'
@@ -360,6 +376,8 @@ export namespace openai {
     user?: string
 
     functions?: Function[]
+
+    tools?: Tools[]
   }
   export interface Function {
     name: string
@@ -470,4 +488,4 @@ export namespace openai {
      */
     total_tokens: number
   }
-}
+}