Merge branch 'ikechan8370:v2' into v2

Author: ifeif
Date: 2024-03-14 22:04:51 +08:00 (committed by GitHub)
Commit: 3d9fdb7ac9
73 changed files with 2991 additions and 4610 deletions

View file

@ -1,5 +1,5 @@
import plugin from '../../../lib/plugins/plugin.js'
import {Config} from "../utils/config.js";
import { Config } from '../utils/config.js'
const PLUGIN_CHAT = 'ChatGpt 对话'
const PLUGIN_MANAGEMENT = 'ChatGPT-Plugin 管理'
@ -13,6 +13,7 @@ const FUNCTION_XH = 'xh'
const FUNCTION_QWEN = 'qwen'
const FUNCTION_GLM4 = 'glm4'
const FUNCTION_CLAUDE2 = 'claude2'
const FUNCTION_CLAUDE = 'claude'
const FUNCTION_END = 'destroyConversations'
const FUNCTION_END_ALL = 'endAllConversations'
@ -66,6 +67,7 @@ export class ChatGPTButtonHandler extends plugin {
case `[${PLUGIN_CHAT}][${FUNCTION_XH}]`:
case `[${PLUGIN_CHAT}][${FUNCTION_QWEN}]`:
case `[${PLUGIN_CHAT}][${FUNCTION_CLAUDE2}]`:
case `[${PLUGIN_CHAT}][${FUNCTION_CLAUDE}]`:
case `[${PLUGIN_CHAT}][${FUNCTION_GLM4}]`:
case `[${PLUGIN_CHAT}][${FUNCTION_CHAT}]`: {
return this.makeButtonChat(options?.btnData)
@ -177,8 +179,11 @@ export class ChatGPTButtonHandler extends plugin {
if (Config.chatglmRefreshToken) {
buttons[buttons[0].length >= 4 ? 1 : 0].push(createButtonBase('ChatGLM4', '#glm4', false))
}
if (Config.claudeAISessionKey) {
buttons[buttons[0].length >= 4 ? 1 : 0].push(createButtonBase('Claude', '#claude.ai', false))
// 两个claude只显示一个 优先API
if (Config.claudeApiKey) {
buttons[buttons[0].length >= 4 ? 1 : 0].push(createButtonBase('Claude', '#claude', false))
} else if (Config.claudeAISessionKey) {
buttons[buttons[0].length >= 4 ? 1 : 0].push(createButtonBase('Claude.ai', '#claude.ai', false))
}
rows.push({
buttons: buttons[0]

File diff suppressed because it is too large.

View file

@ -4,7 +4,6 @@ import { generateHello } from '../utils/randomMessage.js'
import { generateVitsAudio } from '../utils/tts.js'
import fs from 'fs'
import { emojiRegex, googleRequestUrl } from '../utils/emoj/index.js'
import fetch from 'node-fetch'
import { getImageOcrText, getImg, makeForwardMsg, mkdirs, renderUrl } from '../utils/common.js'
import uploadRecord from '../utils/uploadRecord.js'
import { makeWordcloud } from '../utils/wordcloud/wordcloud.js'
@ -343,7 +342,7 @@ ${translateLangLabels}
logger.info('combine ' + e.msg)
let resultFileLoc = `data/chatgpt/emoji/${left}_${right}.jpg`
if (fs.existsSync(resultFileLoc)) {
let image = segment.image(fs.createReadStream(resultFileLoc))
let image = segment.image(resultFileLoc)
image.asface = true
await this.reply(image, true)
return true
@ -370,12 +369,12 @@ ${translateLangLabels}
await this.reply('不支持合成', true)
return false
}
let response = await fetch(url)
const resultBlob = await response.blob()
const resultArrayBuffer = await resultBlob.arrayBuffer()
const resultBuffer = Buffer.from(resultArrayBuffer)
await fs.writeFileSync(resultFileLoc, resultBuffer)
let image = segment.image(fs.createReadStream(resultFileLoc))
// let response = await fetch(url)
// const resultBlob = await response.blob()
// const resultArrayBuffer = await resultBlob.arrayBuffer()
// const resultBuffer = Buffer.from(resultArrayBuffer)
// await fs.writeFileSync(resultFileLoc, resultBuffer)
let image = segment.image(url)
image.asface = true
await this.reply(image, true)
return true

View file

@ -23,6 +23,7 @@ import VoiceVoxTTS, { supportConfigurations as voxRoleList } from '../utils/tts/
import { supportConfigurations as azureRoleList } from '../utils/tts/microsoft-azure.js'
import fetch from 'node-fetch'
import { newFetch } from '../utils/proxy.js'
import { createServer, runServer, stopServer } from '../server/index.js'
export class ChatgptManagement extends plugin {
constructor (e) {
@ -98,13 +99,8 @@ export class ChatgptManagement extends plugin {
permission: 'master'
},
{
reg: '^#chatgpt切换(Poe|poe)$',
fnc: 'useClaudeBasedSolution',
permission: 'master'
},
{
reg: '^#chatgpt切换(Claude|claude|slack)$',
fnc: 'useSlackClaudeBasedSolution',
reg: '^#chatgpt切换(Claude|claude)$',
fnc: 'useClaudeAPIBasedSolution',
permission: 'master'
},
{
@ -181,6 +177,11 @@ export class ChatgptManagement extends plugin {
fnc: 'setAPIKey',
permission: 'master'
},
{
reg: '^#chatgpt设置(claude|Claude)(Key|key)$',
fnc: 'setClaudeKey',
permission: 'master'
},
{
reg: '^#chatgpt设置(Gemini|gemini)(Key|key)$',
fnc: 'setGeminiKey',
@ -322,6 +323,11 @@ export class ChatgptManagement extends plugin {
fnc: 'setXinghuoModel',
permission: 'master'
},
{
reg: '^#chatgpt设置(claude|Claude)模型$',
fnc: 'setClaudeModel',
permission: 'master'
},
{
reg: '^#chatgpt必应(禁用|禁止|关闭|启用|开启)搜索$',
fnc: 'switchBingSearch',
@ -336,6 +342,11 @@ export class ChatgptManagement extends plugin {
reg: '^#chatgpt(开启|关闭)(api|API)流$',
fnc: 'switchStream',
permission: 'master'
},
{
reg: '^#chatgpt(开启|关闭)(工具箱|后台服务)$',
fnc: 'switchToolbox',
permission: 'master'
}
]
})
@ -920,23 +931,13 @@ azure语音Azure 语音是微软 Azure 平台提供的一项语音服务,
}
}
async useClaudeBasedSolution (e) {
let use = await redis.get('CHATGPT:USE')
if (use !== 'poe') {
await redis.set('CHATGPT:USE', 'poe')
await this.reply('已切换到基于Quora\'s POE的解决方案')
} else {
await this.reply('当前已经是POE模式了')
}
}
async useSlackClaudeBasedSolution () {
async useClaudeAPIBasedSolution () {
let use = await redis.get('CHATGPT:USE')
if (use !== 'claude') {
await redis.set('CHATGPT:USE', 'claude')
await this.reply('已切换到基于slack claude机器人的解决方案')
await this.reply('已切换到基于ClaudeAPI的解决方案')
} else {
await this.reply('当前已经是claude模式了')
await this.reply('当前已经是Claude模式了')
}
}
@ -946,7 +947,7 @@ azure语音Azure 语音是微软 Azure 平台提供的一项语音服务,
await redis.set('CHATGPT:USE', 'claude2')
await this.reply('已切换到基于claude.ai的解决方案')
} else {
await this.reply('当前已经是claude2模式了')
await this.reply('当前已经是claude.ai模式了')
}
}
@ -1268,6 +1269,25 @@ azure语音Azure 语音是微软 Azure 平台提供的一项语音服务,
this.finish('saveAPIKey')
}
async setClaudeKey (e) {
this.setContext('saveClaudeKey')
await this.reply('请发送Claude API Key。\n如果要设置多个key请用逗号隔开。\n此操作会覆盖当前配置请谨慎操作', true)
return false
}
async saveClaudeKey () {
if (!this.e.msg) return
let token = this.e.msg
if (!token.startsWith('sk-ant')) {
await this.reply('Claude API Key格式错误。如果是格式特殊的非官方Key请前往锅巴或工具箱手动设置', true)
this.finish('saveClaudeKey')
return
}
Config.claudeApiKey = token
await this.reply('Claude API Key设置成功', true)
this.finish('saveClaudeKey')
}
async setGeminiKey (e) {
this.setContext('saveGeminiKey')
await this.reply('请发送Gemini API Key.获取地址https://makersuite.google.com/app/apikey', true)
@ -1688,6 +1708,20 @@ azure语音Azure 语音是微软 Azure 平台提供的一项语音服务,
this.finish('saveAPIModel')
}
async setClaudeModel (e) {
this.setContext('saveClaudeModel')
await this.reply('请发送Claude模型官方推荐模型\nclaude-3-opus-20240229\nclaude-3-sonnet-20240229\nclaude-3-haiku-20240307', true)
return false
}
async saveClaudeModel () {
if (!this.e.msg) return
let token = this.e.msg
Config.claudeApiModel = token
await this.reply('Claude模型设置成功', true)
this.finish('saveClaudeModel')
}
async setOpenAiBaseUrl (e) {
this.setContext('saveOpenAiBaseUrl')
await this.reply('请发送API反代', true)
@ -1788,4 +1822,25 @@ azure语音Azure 语音是微软 Azure 平台提供的一项语音服务,
await this.reply('好的已经关闭API流式输出')
}
}
async switchToolbox (e) {
if (e.msg.includes('开启')) {
if (Config.enableToolbox) {
await this.reply('已经开启了')
return
}
Config.enableToolbox = true
await this.reply('开启中', true)
await runServer()
await this.reply('好的,已经打开工具箱')
} else {
if (!Config.enableToolbox) {
await this.reply('已经是关闭的了')
return
}
Config.enableToolbox = false
await stopServer()
await this.reply('好的,已经关闭工具箱')
}
}
}

View file

@ -1,5 +1,5 @@
import plugin from '../../../lib/plugins/plugin.js'
import {Config} from '../utils/config.js'
import { Config } from '../utils/config.js'
export class ChatGPTMarkdownHandler extends plugin {
constructor () {
@ -34,7 +34,8 @@ function transUse (use) {
qwen: '通义千问 ' + Config.qwenModel,
claude2: 'Claude 3 Sonnet',
glm4: 'ChatGLM4',
chat3: 'ChatGPT官网'
chat3: 'ChatGPT官网',
claude: Config.claudeApiModel
}
return useMap[use] || use
}

View file

@ -1,10 +1,8 @@
import plugin from '../../../lib/plugins/plugin.js'
import fs from 'fs'
import _ from 'lodash'
import { Config } from '../utils/config.js'
import { getMasterQQ, limitString, makeForwardMsg, maskQQ, getUin } from '../utils/common.js'
import { deleteOnePrompt, getPromptByName, readPrompts, saveOnePrompt } from '../utils/prompts.js'
import AzureTTS from "../utils/tts/microsoft-azure.js";
import AzureTTS from '../utils/tts/microsoft-azure.js'
export class help extends plugin {
constructor (e) {
super({
@ -66,21 +64,6 @@ export class help extends plugin {
fnc: 'helpPrompt',
permission: 'master'
}
// {
// reg: '^#(chatgpt|ChatGPT)(开启|关闭)洗脑$',
// fnc: 'setSydneyBrainWash',
// permission: 'master'
// },
// {
// reg: '^#(chatgpt|ChatGPT)(设置)?洗脑强度',
// fnc: 'setSydneyBrainWashStrength',
// permission: 'master'
// },
// {
// reg: '^#(chatgpt|ChatGPT)(设置)?洗脑名称',
// fnc: 'setSydneyBrainWashName',
// permission: 'master'
// }
]
})
}
@ -152,7 +135,7 @@ export class help extends plugin {
const keyMap = {
api: 'promptPrefixOverride',
bing: 'sydney',
claude: 'slackClaudeGlobalPreset',
claude: 'claudeSystemPrompt',
qwen: 'promptPrefixOverride',
gemini: 'geminiPrompt',
xh: 'xhPrompt'
@ -168,6 +151,20 @@ export class help extends plugin {
if (use === 'xh') {
Config.xhPromptSerialize = false
}
if (use === 'bing') {
/**
* @type {{user: string, bot: string}[]} examples
*/
let examples = prompt.example
for (let i = 1; i <= 3; i++) {
Config[`chatExampleUser${i}`] = ''
Config[`chatExampleBot${i}`] = ''
}
for (let i = 1; i <= examples.length; i++) {
Config[`chatExampleUser${i}`] = examples[i - 1].user
Config[`chatExampleBot${i}`] = examples[i - 1].bot
}
}
await redis.set(`CHATGPT:PROMPT_USE_${use}`, promptName)
await e.reply(`你当前正在使用${use}模式,已将该模式设定应用为"${promptName}"。更该设定后建议结束对话以使设定更好生效`, true)
} else {
@ -347,13 +344,23 @@ export class help extends plugin {
let extraData = JSON.parse(await redis.get('CHATGPT:UPLOAD_PROMPT'))
const { currentUse, description } = extraData
const { content } = getPromptByName(currentUse)
let examples = []
for (let i = 1; i < 4; i++) {
if (Config[`chatExampleUser${i}`]) {
examples.push({
user: Config[`chatExampleUser${i}`],
bot: Config[`chatExampleBot${i}`]
})
}
}
let toUploadBody = {
title: currentUse,
prompt: content,
qq: master || (getUin(this.e) + ''), // 上传者设定为主人qq或机器人qq
use: extraData.use === 'bing' ? 'Bing' : 'ChatGPT',
r18,
description
description,
examples
}
logger.info(toUploadBody)
let response = await fetch('https://chatgpt.roki.best/prompt', {
@ -448,8 +455,8 @@ export class help extends plugin {
await e.reply('没有这个设定', true)
return true
}
const { prompt, title } = r.data
saveOnePrompt(title, prompt)
const { prompt, title, examples } = r.data
saveOnePrompt(title, prompt, examples)
e.reply(`导入成功。您现在可以使用 #chatgpt使用设定${title} 来体验这个设定了。`)
} else {
await e.reply('导入失败:' + r.msg)

View file

@ -3,21 +3,11 @@ import plugin from '../../../lib/plugins/plugin.js'
import { createRequire } from 'module'
import _ from 'lodash'
import { Restart } from '../../other/restart.js'
import fs from 'fs'
import {} from '../utils/common.js'
const _path = process.cwd()
const require = createRequire(import.meta.url)
const { exec, execSync } = require('child_process')
const checkAuth = async function (e) {
if (!e.isMaster) {
e.reply('只有主人才能命令ChatGPT哦~(*/ω\*)')
return false
}
return true
}
// 是否在更新中
let uping = false

View file

@ -1,7 +1,7 @@
import plugin from '../../../lib/plugins/plugin.js'
import { SunoClient } from '../client/SunoClient.js'
import { Config } from '../utils/config.js'
import { downloadFile, maskEmail } from '../utils/common.js'
import { maskEmail } from '../utils/common.js'
import common from '../../../lib/common/common.js'
import lodash from 'lodash'
@ -86,6 +86,10 @@ export class Vocal extends plugin {
}
let songs = await client.createSong(description)
if (!songs || songs.length === 0) {
e.reply('生成失败,可能是提示词太长或者违规,请检查日志')
return
}
let messages = ['提示词:' + description]
for (let song of songs) {
messages.push(`歌名:${song.title}\n风格: ${song.metadata.tags}\n长度: ${lodash.round(song.metadata.duration, 0)}\n歌词:\n${song.metadata.prompt}\n`)

client/ClaudeAPIClient.js (new file, 195 lines added)

@ -0,0 +1,195 @@
import crypto from 'crypto'
import { newFetch } from '../utils/proxy.js'
import _ from 'lodash'
import { getMessageById, upsertMessage } from '../utils/history.js'
import { BaseClient } from './BaseClient.js'
const BASEURL = 'https://api.anthropic.com'
/**
* @typedef {Object} Content
* @property {string} model
* @property {string} system
* @property {number} max_tokens
* @property {boolean} stream
* @property {Array<{
* role: 'user'|'assistant',
* content: string|Array<{
* type: 'text'|'image',
* text?: string,
* source?: {
* type: 'base64',
* media_type: 'image/jpeg'|'image/png'|'image/gif'|'image/webp',
* data: string
* }
* }>
* }>} messages
*
* Claude消息的基本格式
*/
/**
* @typedef {Object} ClaudeResponse
* @property {string} id
* @property {string} type
* @property {number} role
* @property {number} model
* @property {number} stop_reason
* @property {number} stop_sequence
* @property {number} role
* @property {boolean} stream
* @property {Array<{
* type: string,
* text: string
* }>} content
* @property {Array<{
* input_tokens: number,
* output_tokens: number,
* }>} usage
* @property {{
* type: string,
* message: string,
* }} error
* Claude响应的基本格式
*/
export class ClaudeAPIClient extends BaseClient {
constructor (props) {
if (!props.upsertMessage) {
props.upsertMessage = async function umGemini (message) {
return await upsertMessage(message, 'Claude')
}
}
if (!props.getMessageById) {
props.getMessageById = async function umGemini (message) {
return await getMessageById(message, 'Claude')
}
}
super(props)
this.model = props.model
this.key = props.key
if (!this.key) {
throw new Error('no claude API key')
}
this.baseUrl = props.baseUrl || BASEURL
this.supportFunction = false
this.debug = props.debug
}
async getHistory (parentMessageId, userId = this.userId, opt = {}) {
const history = []
let cursor = parentMessageId
if (!cursor) {
return history
}
do {
let parentMessage = await this.getMessageById(cursor)
if (!parentMessage) {
break
} else {
history.push(parentMessage)
cursor = parentMessage.parentMessageId
if (!cursor) {
break
}
}
} while (true)
return history.reverse()
}
/**
*
* @param text
* @param {{conversationId: string?, parentMessageId: string?, stream: boolean?, onProgress: function?, functionResponse: FunctionResponse?, system: string?, image: string?, model: string?}} opt
* @returns {Promise<{conversationId: string?, parentMessageId: string, text: string, id: string}>}
*/
async sendMessage (text, opt = {}) {
let history = await this.getHistory(opt.parentMessageId)
/**
* 发送的body
* @type {Content}
* @see https://docs.anthropic.com/claude/reference/messages_post
*/
let body = {}
if (opt.system) {
body.system = opt.system
}
const idThis = crypto.randomUUID()
const idModel = crypto.randomUUID()
/**
* @type {Array<{
* role: 'user'|'assistant',
* content: string|Array<{
* type: 'text'|'image',
* text?: string,
* source?: {
* type: 'base64',
* media_type: 'image/jpeg'|'image/png'|'image/gif'|'image/webp',
* data: string
* }
* }>
* }>}
*/
let thisContent = [{ type: 'text', text }]
if (opt.image) {
thisContent.push({
type: 'image',
source: {
type: 'base64',
media_type: 'image/jpeg',
data: opt.image
}
})
}
const thisMessage = {
role: 'user',
content: thisContent,
id: idThis,
parentMessageId: opt.parentMessageId || undefined
}
history.push(_.cloneDeep(thisMessage))
let messages = history.map(h => { return { role: h.role, content: h.content } })
body = Object.assign(body, {
model: opt.model || this.model || 'claude-3-opus-20240229',
max_tokens: opt.max_tokens || 1024,
messages,
stream: false
})
let url = `${this.baseUrl}/v1/messages`
let result = await newFetch(url, {
headers: {
'anthropic-version': '2023-06-01',
'x-api-key': this.key,
'content-type': 'application/json'
},
method: 'POST',
body: JSON.stringify(body)
})
if (result.status !== 200) {
throw new Error(await result.text())
}
/**
* @type {ClaudeResponse}
*/
let response = await result.json()
if (this.debug) {
console.log(JSON.stringify(response))
}
if (response.type === 'error') {
logger.error(response.error.message)
throw new Error(response.error.type)
}
await this.upsertMessage(thisMessage)
const respMessage = Object.assign(response, {
id: idModel,
parentMessageId: idThis
})
await this.upsertMessage(respMessage)
return {
text: response.content[0].text,
conversationId: '',
parentMessageId: idThis,
id: idModel
}
}
}
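For reference (not part of the commit), a minimal sketch of the payload sendMessage assembles for POST `${this.baseUrl}/v1/messages`, assuming the defaults above (claude-3-opus-20240229, max_tokens 1024) and no image attachment:

// Body, per the Content typedef above:
const exampleBody = {
  model: 'claude-3-opus-20240229',    // opt.model || this.model fallback
  system: 'optional system prompt',   // only set when opt.system is provided
  max_tokens: 1024,
  stream: false,
  messages: [
    { role: 'user', content: [{ type: 'text', text: '你好' }] }
  ]
}
// Headers sent with the request (see sendMessage above):
const exampleHeaders = {
  'anthropic-version': '2023-06-01',
  'x-api-key': 'sk-ant-...',          // the key passed as props.key
  'content-type': 'application/json'
}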

View file

@ -1,6 +1,6 @@
import { BaseClient } from './BaseClient.js'
import { getMessageById, upsertMessage } from '../utils/common.js'
import { getMessageById, upsertMessage } from '../utils/history.js'
import crypto from 'crypto'
let GoogleGenerativeAI, HarmBlockThreshold, HarmCategory
try {

View file

@ -65,14 +65,18 @@ export class SunoClient {
Authorization: `Bearer ${sess}`
}
})
if (queryRes.status === 401) {
sess = await this.getToken()
continue
}
if (queryRes.status !== 200) {
logger.error(await queryRes.text())
console.error('Failed to query song')
}
let queryData = await queryRes.json()
logger.debug(queryData)
allDone = queryData.every(clip => clip.status === 'complete')
songs = queryData
allDone = queryData.every(clip => clip.status === 'complete' || clip.status === 'error')
songs = queryData.filter(clip => clip.status === 'complete')
} catch (err) {
console.error(err)
}

View file

@ -0,0 +1,27 @@
// import { ClaudeAPIClient } from '../ClaudeAPIClient.js'
//
// async function test () {
// const client = new ClaudeAPIClient({
// key: 'sk-ant-api03-**************************************',
// model: 'claude-3-opus-20240229',
// debug: true,
// // baseUrl: 'http://claude-api.ikechan8370.com'
// })
// let rsp = await client.sendMessage('你好')
// console.log(rsp)
// }
// global.store = {}
// global.redis = {
// set: (key, val) => {
// global.store[key] = val
// },
// get: (key) => {
// return global.store[key]
// }
// }
// global.logger = {
// info: console.log,
// warn: console.warn,
// error: console.error
// }
// test()

View file

@ -1,6 +1,6 @@
import { SlackCozeClient } from '../CozeSlackClient.js'
import fs from 'fs'
global.store = {}
// global.store = {}
// global.redis = {
// set: (key, val) => {

View file

@ -89,13 +89,6 @@
"whitelist": [],
"blacklist": [],
"ttsRegex": "/匹配规则/匹配模式",
"slackUserToken": "",
"slackBotUserToken": "",
"slackSigningSecret": "",
"slackClaudeUserId": "",
"slackClaudeEnableGlobalPreset": true,
"slackClaudeGlobalPreset": "",
"slackClaudeSpecifiedChannel": "",
"cloudTranscode": "https://silk.201666.xyz",
"cloudRender": false,
"cloudMode": "url",

View file

@ -241,6 +241,12 @@ export function supportGuoba () {
bottomHelpMessage: '将输出更多调试信息,如果不希望控制台刷屏的话,可以关闭',
component: 'Switch'
},
{
field: 'enableToolbox',
label: '开启工具箱',
bottomHelpMessage: '独立的后台管理面板默认3321端口与锅巴类似。工具箱会有额外占用启动速度稍慢酌情开启。修改后需重启生效',
component: 'Switch'
},
{
field: 'enableMd',
label: 'QQ开启markdown',
@ -479,6 +485,42 @@ export function supportGuoba () {
bottomHelpMessage: '开启Sydney的图片识别功能建议和OCR只保留一个开启',
component: 'Switch'
},
{
field: 'chatExampleUser1',
label: '前置对话第一轮(用户)',
bottomHelpMessage: '会强行插入该轮对话,能有效抑制抱歉',
component: 'InputTextArea'
},
{
field: 'chatExampleBot1',
label: '前置对话第一轮AI',
bottomHelpMessage: '会强行插入该轮对话,能有效抑制抱歉',
component: 'InputTextArea'
},
{
field: 'chatExampleUser2',
label: '前置对话第二轮(用户)',
bottomHelpMessage: '会强行插入该轮对话,能有效抑制抱歉',
component: 'InputTextArea'
},
{
field: 'chatExampleBot2',
label: '前置对话第二轮AI',
bottomHelpMessage: '会强行插入该轮对话,能有效抑制抱歉',
component: 'InputTextArea'
},
{
field: 'chatExampleUser3',
label: '前置对话第三轮(用户)',
bottomHelpMessage: '会强行插入该轮对话,能有效抑制抱歉',
component: 'InputTextArea'
},
{
field: 'chatExampleBot3',
label: '前置对话第三轮AI',
bottomHelpMessage: '会强行插入该轮对话,能有效抑制抱歉',
component: 'InputTextArea'
},
{
label: '以下为API3方式的配置',
component: 'Divider'
@ -518,50 +560,44 @@ export function supportGuoba () {
component: 'Input'
},
{
label: '以下为Slack Claude方式的配置',
label: '以下为Claude API方式的配置',
component: 'Divider'
},
{
field: 'slackUserToken',
label: 'Slack用户Token',
bottomHelpMessage: 'slackUserToken在OAuth&Permissions页面获取。需要具有channels:history, chat:write, groups:history, im:history, mpim:history 这几个scope',
field: 'claudeApiKey',
label: 'claude API Key',
bottomHelpMessage: '前往 https://console.anthropic.com/settings/keys 注册和生成。可以填写多个,用英文逗号隔开',
component: 'InputPassword'
},
{
field: 'claudeApiModel',
label: 'claude API 模型',
bottomHelpMessage: '如 claude-3-sonnet-20240229 或 claude-3-opus-20240229',
component: 'Input'
},
{
field: 'slackBotUserToken',
label: 'Slack Bot Token',
bottomHelpMessage: 'slackBotUserToken在OAuth&Permissions页面获取。需要channels:historygroups:historyim:history 这几个scope',
field: 'claudeApiBaseUrl',
label: 'claude API 反代',
component: 'Input'
},
{
field: 'slackClaudeUserId',
label: 'Slack成员id',
bottomHelpMessage: '在Slack中点击Claude头像查看详情其中的成员ID复制过来',
component: 'Input'
field: 'claudeApiMaxToken',
label: 'claude 最大回复token数',
component: 'InputNumber'
},
{
field: 'slackSigningSecret',
label: 'Slack签名密钥',
bottomHelpMessage: 'Signing Secret。在Basic Information页面获取',
component: 'Input'
field: 'claudeApiTemperature',
label: 'claude 温度',
component: 'InputNumber',
componentProps: {
min: 0,
max: 1
}
},
{
field: 'slackClaudeSpecifiedChannel',
label: 'Slack指定频道',
bottomHelpMessage: '为空时将为每个qq号建立私有频道。若填写了对话将发生在本频道。和其他人公用workspace时建议用这个',
component: 'Input'
},
{
field: 'slackClaudeEnableGlobalPreset',
label: 'Claude使用全局设定',
bottomHelpMessage: '开启后所有人每次发起新对话时会先发送设定过去再开始对话达到类似Bing自设定的效果。',
component: 'Switch'
},
{
field: 'slackClaudeGlobalPreset',
label: 'Slack全局设定',
bottomHelpMessage: '若启用全局设定,每个人都会默认使用这里的设定。',
component: 'Input'
field: 'claudeSystemPrompt',
label: 'claude 设定',
component: 'InputTextArea'
},
{
label: '以下为Claude2方式的配置',
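These fields replace the Slack Claude section above and presumably feed the new ClaudeAPIClient (model/core.js, whose diff is suppressed in this view). A hypothetical filled-in configuration, using the key format checked in saveClaudeKey and a model suggested by setClaudeModel:

// Illustrative values only; base URL, max token and temperature are optional.
const claudeConfigExample = {
  claudeApiKey: 'sk-ant-api03-xxxx',             // several keys may be comma-separated
  claudeApiModel: 'claude-3-sonnet-20240229',
  claudeApiBaseUrl: 'https://api.anthropic.com', // or a reverse proxy
  claudeApiMaxToken: 1024,
  claudeApiTemperature: 0.7,                     // the form constrains this to 0..1
  claudeSystemPrompt: 'optional persona / system prompt'
}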

View file

@ -1,9 +1,16 @@
import fs from 'node:fs'
import { Config } from './utils/config.js'
import { createServer } from './server/index.js'
import { createServer, runServer } from './server/index.js'
logger.info('**************************************')
logger.info('chatgpt-plugin加载中')
if (!global.segment) {
global.segment = (await import('oicq')).segment
try {
global.segment = (await import('icqq')).segment
} catch (err) {
global.segment = (await import('oicq')).segment
}
}
const files = fs.readdirSync('./plugins/chatgpt-plugin/apps').filter(file => file.endsWith('.js'))
@ -19,7 +26,6 @@ ret = await Promise.allSettled(ret)
let apps = {}
for (let i in files) {
let name = files[i].replace('.js', '')
if (ret[i].status !== 'fulfilled') {
logger.error(`载入插件错误:${logger.red(name)}`)
logger.error(ret[i].reason)
@ -27,13 +33,22 @@ for (let i in files) {
}
apps[name] = ret[i].value[Object.keys(ret[i].value)[0]]
}
global.chatgpt = {
}
// 启动服务器
await createServer()
logger.info('**************************************')
if (Config.enableToolbox) {
logger.info('开启工具箱配置项,工具箱启动中')
await createServer()
await runServer()
logger.info('工具箱启动成功')
} else {
logger.info('提示当前配置未开启chatgpt工具箱可通过锅巴或`#chatgpt开启工具箱`指令开启')
}
logger.info('chatgpt-plugin加载成功')
logger.info(`当前版本${Config.version}`)
logger.info('仓库地址 https://github.com/ikechan8370/chatgpt-plugin')
logger.info('文档地址 https://www.yunzai.chat')
logger.info('插件群号 559567232')
logger.info('**************************************')

model/conversation.js (new file, 362 lines added)

@ -0,0 +1,362 @@
import { getUin, getUserData } from '../utils/common.js'
import { Config } from '../utils/config.js'
import { KeyvFile } from 'keyv-file'
import _ from 'lodash'
export const originalValues = ['星火', '通义千问', '克劳德', '克劳德2', '必应', 'api', 'API', 'api3', 'API3', 'glm', '巴德', '双子星', '双子座', '智谱']
export const correspondingValues = ['xh', 'qwen', 'claude', 'claude2', 'bing', 'api', 'api', 'api3', 'api3', 'chatglm', 'bard', 'gemini', 'gemini', 'chatglm4']
export class ConversationManager {
async endConversation (e) {
const userData = await getUserData(e.user_id)
const match = e.msg.trim().match('^#?(.*)(结束|新开|摧毁|毁灭|完结)对话')
console.log(match[1])
let use
if (match[1] && match[1] != 'chatgpt') {
use = correspondingValues[originalValues.indexOf(match[1])]
} else {
use = (userData.mode === 'default' ? null : userData.mode) || await redis.get('CHATGPT:USE')
}
console.log(use)
await redis.del(`CHATGPT:WRONG_EMOTION:${(e.isGroup && Config.groupMerge) ? e.group_id.toString() : e.sender.user_id}`)
// fast implementation
if (use === 'claude') {
await redis.del(`CHATGPT:CONVERSATIONS_CLAUDE:${(e.isGroup && Config.groupMerge) ? e.group_id.toString() : e.sender.user_id}`)
await this.reply('claude对话已结束')
return
}
if (use === 'claude2') {
await redis.del(`CHATGPT:CLAUDE2_CONVERSATION:${e.sender.user_id}`)
await this.reply('claude.ai对话已结束')
return
}
if (use === 'xh') {
await redis.del(`CHATGPT:CONVERSATIONS_XH:${(e.isGroup && Config.groupMerge) ? e.group_id.toString() : e.sender.user_id}`)
await this.reply('星火对话已结束')
return
}
if (use === 'bard') {
await redis.del(`CHATGPT:CONVERSATIONS_BARD:${(e.isGroup && Config.groupMerge) ? e.group_id.toString() : e.sender.user_id}`)
await this.reply('Bard对话已结束')
return
}
let ats = e.message.filter(m => m.type === 'at')
const isAtMode = Config.toggleMode === 'at'
if (isAtMode) ats = ats.filter(item => item.qq !== getUin(e))
if (ats.length === 0) {
if (use === 'api3') {
await redis.del(`CHATGPT:QQ_CONVERSATION:${(e.isGroup && Config.groupMerge) ? e.group_id.toString() : e.sender.user_id}`)
await this.reply('已退出当前对话,该对话仍然保留。请@我进行聊天以开启新的对话', true)
} else if (use === 'bing') {
let c = await redis.get(`CHATGPT:CONVERSATIONS_BING:${(e.isGroup && Config.groupMerge) ? e.group_id.toString() : e.sender.user_id}`)
if (!c) {
await this.reply('当前没有开启对话', true)
return
} else {
await redis.del(`CHATGPT:CONVERSATIONS_BING:${(e.isGroup && Config.groupMerge) ? e.group_id.toString() : e.sender.user_id}`)
}
const conversation = {
store: new KeyvFile({ filename: 'cache.json' }),
namespace: Config.toneStyle
}
let Keyv
try {
Keyv = (await import('keyv')).default
} catch (err) {
await this.reply('依赖keyv未安装请执行pnpm install keyv', true)
}
const conversationsCache = new Keyv(conversation)
logger.info(`SydneyUser_${e.sender.user_id}`, await conversationsCache.get(`SydneyUser_${e.sender.user_id}`))
await conversationsCache.delete(`SydneyUser_${e.sender.user_id}`)
await this.reply('已退出当前对话,该对话仍然保留。请@我进行聊天以开启新的对话', true)
} else if (use === 'chatglm') {
const conversation = {
store: new KeyvFile({ filename: 'cache.json' }),
namespace: 'chatglm_6b'
}
let Keyv
try {
Keyv = (await import('keyv')).default
} catch (err) {
await this.reply('依赖keyv未安装请执行pnpm install keyv', true)
}
const conversationsCache = new Keyv(conversation)
logger.info(`ChatGLMUser_${e.sender.user_id}`, await conversationsCache.get(`ChatGLMUser_${e.sender.user_id}`))
await conversationsCache.delete(`ChatGLMUser_${e.sender.user_id}`)
await this.reply('已退出当前对话,该对话仍然保留。请@我进行聊天以开启新的对话', true)
} else if (use === 'api') {
let c = await redis.get(`CHATGPT:CONVERSATIONS:${e.sender.user_id}`)
if (!c) {
await this.reply('当前没有开启对话', true)
} else {
await redis.del(`CHATGPT:CONVERSATIONS:${e.sender.user_id}`)
await this.reply('已结束当前对话,请@我进行聊天以开启新的对话', true)
}
} else if (use === 'qwen') {
let c = await redis.get(`CHATGPT:CONVERSATIONS_QWEN:${e.sender.user_id}`)
if (!c) {
await this.reply('当前没有开启对话', true)
} else {
await redis.del(`CHATGPT:CONVERSATIONS_QWEN:${e.sender.user_id}`)
await this.reply('已结束当前对话,请@我进行聊天以开启新的对话', true)
}
} else if (use === 'gemini') {
let c = await redis.get(`CHATGPT:CONVERSATIONS_GEMINI:${e.sender.user_id}`)
if (!c) {
await this.reply('当前没有开启对话', true)
} else {
await redis.del(`CHATGPT:CONVERSATIONS_GEMINI:${e.sender.user_id}`)
await this.reply('已结束当前对话,请@我进行聊天以开启新的对话', true)
}
} else if (use === 'chatglm4') {
let c = await redis.get(`CHATGPT:CONVERSATIONS_CHATGLM4:${e.sender.user_id}`)
if (!c) {
await this.reply('当前没有开启对话', true)
} else {
await redis.del(`CHATGPT:CONVERSATIONS_CHATGLM4:${e.sender.user_id}`)
await this.reply('已结束当前对话,请@我进行聊天以开启新的对话', true)
}
} else if (use === 'bing') {
let c = await redis.get(`CHATGPT:CONVERSATIONS_BING:${e.sender.user_id}`)
if (!c) {
await this.reply('当前没有开启对话', true)
} else {
await redis.del(`CHATGPT:CONVERSATIONS_BING:${e.sender.user_id}`)
await this.reply('已结束当前对话,请@我进行聊天以开启新的对话', true)
}
} else if (use === 'browser') {
let c = await redis.get(`CHATGPT:CONVERSATIONS_BROWSER:${e.sender.user_id}`)
if (!c) {
await this.reply('当前没有开启对话', true)
} else {
await redis.del(`CHATGPT:CONVERSATIONS_BROWSER:${e.sender.user_id}`)
await this.reply('已结束当前对话,请@我进行聊天以开启新的对话', true)
}
}
} else {
let at = ats[0]
let qq = at.qq
let atUser = _.trimStart(at.text, '@')
if (use === 'api3') {
await redis.del(`CHATGPT:QQ_CONVERSATION:${qq}`)
await this.reply(`${atUser}已退出TA当前的对话TA仍可以@我进行聊天以开启新的对话`, true)
} else if (use === 'bing') {
const conversation = {
store: new KeyvFile({ filename: 'cache.json' }),
namespace: Config.toneStyle
}
let Keyv
try {
Keyv = (await import('keyv')).default
} catch (err) {
await this.reply('依赖keyv未安装请执行pnpm install keyv', true)
}
const conversationsCache = new Keyv(conversation)
await conversationsCache.delete(`SydneyUser_${qq}`)
await this.reply('已退出当前对话,该对话仍然保留。请@我进行聊天以开启新的对话', true)
} else if (use === 'chatglm') {
const conversation = {
store: new KeyvFile({ filename: 'cache.json' }),
namespace: 'chatglm_6b'
}
let Keyv
try {
Keyv = (await import('keyv')).default
} catch (err) {
await this.reply('依赖keyv未安装请执行pnpm install keyv', true)
}
const conversationsCache = new Keyv(conversation)
logger.info(`ChatGLMUser_${e.sender.user_id}`, await conversationsCache.get(`ChatGLMUser_${e.sender.user_id}`))
await conversationsCache.delete(`ChatGLMUser_${qq}`)
await this.reply('已退出当前对话,该对话仍然保留。请@我进行聊天以开启新的对话', true)
} else if (use === 'api') {
let c = await redis.get(`CHATGPT:CONVERSATIONS:${qq}`)
if (!c) {
await this.reply(`当前${atUser}没有开启对话`, true)
} else {
await redis.del(`CHATGPT:CONVERSATIONS:${qq}`)
await this.reply(`已结束${atUser}的对话TA仍可以@我进行聊天以开启新的对话`, true)
}
} else if (use === 'qwen') {
let c = await redis.get(`CHATGPT:CONVERSATIONS_QWEN:${qq}`)
if (!c) {
await this.reply(`当前${atUser}没有开启对话`, true)
} else {
await redis.del(`CHATGPT:CONVERSATIONS_QWEN:${qq}`)
await this.reply(`已结束${atUser}的对话TA仍可以@我进行聊天以开启新的对话`, true)
}
} else if (use === 'gemini') {
let c = await redis.get(`CHATGPT:CONVERSATIONS_GEMINI:${qq}`)
if (!c) {
await this.reply(`当前${atUser}没有开启对话`, true)
} else {
await redis.del(`CHATGPT:CONVERSATIONS_GEMINI:${qq}`)
await this.reply(`已结束${atUser}的对话TA仍可以@我进行聊天以开启新的对话`, true)
}
} else if (use === 'chatglm4') {
let c = await redis.get(`CHATGPT:CONVERSATIONS_CHATGLM4:${qq}`)
if (!c) {
await this.reply(`当前${atUser}没有开启对话`, true)
} else {
await redis.del(`CHATGPT:CONVERSATIONS_CHATGLM4:${qq}`)
await this.reply(`已结束${atUser}的对话TA仍可以@我进行聊天以开启新的对话`, true)
}
} else if (use === 'bing') {
let c = await redis.get(`CHATGPT:CONVERSATIONS_BING:${qq}`)
if (!c) {
await this.reply(`当前${atUser}没有开启对话`, true)
} else {
await redis.del(`CHATGPT:CONVERSATIONS_BING:${qq}`)
await this.reply(`已结束${atUser}的对话TA仍可以@我进行聊天以开启新的对话`, true)
}
} else if (use === 'browser') {
let c = await redis.get(`CHATGPT:CONVERSATIONS_BROWSER:${qq}`)
if (!c) {
await this.reply(`当前${atUser}没有开启对话`, true)
} else {
await redis.del(`CHATGPT:CONVERSATIONS_BROWSER:${qq}`)
await this.reply(`已结束${atUser}的对话TA仍可以@我进行聊天以开启新的对话`, true)
}
}
}
}
async endAllConversations (e) {
const match = e.msg.trim().match('^#?(.*)(结束|新开|摧毁|毁灭|完结)全部对话')
console.log(match[1])
let use
if (match[1] && match[1] != 'chatgpt') {
use = correspondingValues[originalValues.indexOf(match[1])]
} else {
use = await redis.get('CHATGPT:USE') || 'api'
}
console.log(use)
let deleted = 0
switch (use) {
case 'claude': {
let cs = await redis.keys('CHATGPT:CONVERSATIONS_CLAUDE:*')
let we = await redis.keys('CHATGPT:WRONG_EMOTION:*')
for (let i = 0; i < cs.length; i++) {
await redis.del(cs[i])
if (Config.debug) {
logger.info('delete claude conversation of qq: ' + cs[i])
}
deleted++
}
for (const element of we) {
await redis.del(element)
}
break
}
case 'xh': {
let cs = await redis.keys('CHATGPT:CONVERSATIONS_XH:*')
for (let i = 0; i < cs.length; i++) {
await redis.del(cs[i])
if (Config.debug) {
logger.info('delete xh conversation of qq: ' + cs[i])
}
deleted++
}
break
}
case 'bard': {
let cs = await redis.keys('CHATGPT:CONVERSATIONS_BARD:*')
for (let i = 0; i < cs.length; i++) {
await redis.del(cs[i])
if (Config.debug) {
logger.info('delete bard conversation of qq: ' + cs[i])
}
deleted++
}
break
}
case 'bing': {
let cs = await redis.keys('CHATGPT:CONVERSATIONS_BING:*')
let we = await redis.keys('CHATGPT:WRONG_EMOTION:*')
for (let i = 0; i < cs.length; i++) {
await redis.del(cs[i])
if (Config.debug) {
logger.info('delete bing conversation of qq: ' + cs[i])
}
deleted++
}
for (const element of we) {
await redis.del(element)
}
break
}
case 'api': {
let cs = await redis.keys('CHATGPT:CONVERSATIONS:*')
for (let i = 0; i < cs.length; i++) {
await redis.del(cs[i])
if (Config.debug) {
logger.info('delete api conversation of qq: ' + cs[i])
}
deleted++
}
break
}
case 'api3': {
let qcs = await redis.keys('CHATGPT:QQ_CONVERSATION:*')
for (let i = 0; i < qcs.length; i++) {
await redis.del(qcs[i])
// todo clean last message id
if (Config.debug) {
logger.info('delete conversation bind: ' + qcs[i])
}
deleted++
}
break
}
case 'chatglm': {
let qcs = await redis.keys('CHATGPT:CONVERSATIONS_CHATGLM:*')
for (let i = 0; i < qcs.length; i++) {
await redis.del(qcs[i])
// todo clean last message id
if (Config.debug) {
logger.info('delete chatglm conversation bind: ' + qcs[i])
}
deleted++
}
break
}
case 'qwen': {
let qcs = await redis.keys('CHATGPT:CONVERSATIONS_QWEN:*')
for (let i = 0; i < qcs.length; i++) {
await redis.del(qcs[i])
// todo clean last message id
if (Config.debug) {
logger.info('delete qwen conversation bind: ' + qcs[i])
}
deleted++
}
break
}
case 'gemini': {
let qcs = await redis.keys('CHATGPT:CONVERSATIONS_GEMINI:*')
for (let i = 0; i < qcs.length; i++) {
await redis.del(qcs[i])
// todo clean last message id
if (Config.debug) {
logger.info('delete gemini conversation bind: ' + qcs[i])
}
deleted++
}
break
}
case 'chatglm4': {
let qcs = await redis.keys('CHATGPT:CONVERSATIONS_CHATGLM4:*')
for (let i = 0; i < qcs.length; i++) {
await redis.del(qcs[i])
// todo clean last message id
if (Config.debug) {
logger.info('delete chatglm4 conversation bind: ' + qcs[i])
}
deleted++
}
break
}
}
await this.reply(`结束了${deleted}个用户的对话。`, true)
}
}
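The name-to-mode mapping at the top of this file is a parallel-index lookup; a quick sketch (not part of the commit) of how a command such as「#克劳德结束对话」resolves, given the arrays defined above:

// '克劳德' sits at index 2 of originalValues, so the mode becomes correspondingValues[2] === 'claude';
// an empty or unrecognised prefix falls back to the user mode or redis CHATGPT:USE.
const match = '#克劳德结束对话'.match('^#?(.*)(结束|新开|摧毁|毁灭|完结)对话')
const use = correspondingValues[originalValues.indexOf(match[1])]  // 'claude'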

model/core.js (new file, 1159 lines added; diff suppressed because it is too large)

View file

@ -1,5 +1,6 @@
{
"name": "chatgpt-plugin",
"version": "2.8.1",
"type": "module",
"author": "ikechan8370",
"dependencies": {
@ -9,7 +10,6 @@
"@fastify/static": "^6.9.0",
"@fastify/websocket": "^8.2.0",
"@google/generative-ai": "^0.1.1",
"@slack/bolt": "^3.13.2",
"asn1.js": "^5.0.0",
"diff": "^5.1.0",
"emoji-strip": "^1.0.1",
@ -40,9 +40,6 @@
"node-silk": "^0.1.0",
"nodejs-pptx": "^1.2.4",
"pdfjs-dist": "^3.11.174",
"puppeteer-extra": "^3.3.6",
"puppeteer-extra-plugin-recaptcha": "^3.6.8",
"puppeteer-extra-plugin-stealth": "^2.11.2",
"sharp": "^0.32.3",
"xlsx": "^0.18.5"
},

View file

@ -20,39 +20,9 @@ import Guoba from './modules/guoba.js'
import SettingView from './modules/setting_view.js'
const __dirname = path.resolve()
const server = fastify({
logger: Config.debug
})
async function setUserData(qq, data) {
const dir = 'resources/ChatGPTCache/user'
const filename = `${qq}.json`
const filepath = path.join(dir, filename)
fs.mkdirSync(dir, { recursive: true })
fs.writeFileSync(filepath, JSON.stringify(data))
}
await server.register(cors, {
origin: '*'
})
await server.register(fstatic, {
root: path.join(__dirname, 'plugins/chatgpt-plugin/server/static/')
})
await server.register(websocket, {
cors: true,
options: {
maxPayload: 1048576
}
})
await server.register(fastifyCookie)
await server.register(webRoute)
await server.register(webUser)
await server.register(SettingView)
await server.register(webPrompt)
await server.register(Guoba)
// 无法访问端口的情况下创建与media的通讯
async function mediaLink() {
async function mediaLink () {
const ip = await getPublicIP()
const testServer = await fetch(`${Config.cloudTranscode}/check`,
{
@ -74,7 +44,7 @@ async function mediaLink() {
ws.send(JSON.stringify({
command: 'register',
region: getUin(),
type: 'server',
type: 'server'
}))
})
ws.on('message', async (message) => {
@ -108,14 +78,13 @@ async function mediaLink() {
if (data.qq && data.passwd) {
const token = randomString(32)
if (data.qq == getUin() && await redis.get('CHATGPT:ADMIN_PASSWD') == data.passwd) {
AddUser({ user: data.qq, token: token, autho: 'admin' })
ws.send(JSON.stringify({ command: data.command, state: true, autho: 'admin', token: token, region: getUin(), type: 'server' }))
AddUser({ user: data.qq, token, autho: 'admin' })
ws.send(JSON.stringify({ command: data.command, state: true, autho: 'admin', token, region: getUin(), type: 'server' }))
} else {
const user = await getUserData(data.qq)
if (user.passwd != '' && user.passwd === data.passwd) {
AddUser({ user: data.qq, token: token, autho: 'user' })
ws.send(JSON.stringify({ command: data.command, state: true, autho: 'user', token: token, region: getUin(), type: 'server' }))
AddUser({ user: data.qq, token, autho: 'user' })
ws.send(JSON.stringify({ command: data.command, state: true, autho: 'user', token, region: getUin(), type: 'server' }))
} else {
ws.send(JSON.stringify({ command: data.command, state: false, error: `用户名密码错误,如果忘记密码请私聊机器人输入 ${data.qq == getUin() ? '#修改管理密码' : '#修改用户密码'} 进行修改`, region: getUin(), type: 'server' }))
}
@ -141,7 +110,6 @@ async function mediaLink() {
console.log(error)
}
})
} else {
console.log('本地服务网络正常,无需开启通讯')
}
@ -152,7 +120,38 @@ async function mediaLink() {
// 未完工,暂不开启这个功能
// mediaLink()
export async function createServer() {
export async function createServer () {
let server = fastify({
logger: Config.debug
})
async function setUserData (qq, data) {
const dir = 'resources/ChatGPTCache/user'
const filename = `${qq}.json`
const filepath = path.join(dir, filename)
fs.mkdirSync(dir, { recursive: true })
fs.writeFileSync(filepath, JSON.stringify(data))
}
await server.register(cors, {
origin: '*'
})
await server.register(fstatic, {
root: path.join(__dirname, 'plugins/chatgpt-plugin/server/static/')
})
await server.register(websocket, {
cors: true,
options: {
maxPayload: 1048576
}
})
await server.register(fastifyCookie)
await server.register(webRoute)
await server.register(webUser)
await server.register(SettingView)
await server.register(webPrompt)
await server.register(Guoba)
// 页面数据获取
server.post('/page', async (request, reply) => {
const body = request.body || {}
@ -316,7 +315,7 @@ export async function createServer() {
Bot.sendPrivateMsg(parseInt(data.id), data.message, data.quotable)
}
}
await connection.socket.send(JSON.stringify({ command: data.command, state: true, }))
await connection.socket.send(JSON.stringify({ command: data.command, state: true }))
} else {
await connection.socket.send(JSON.stringify({ command: data.command, state: false, error: '参数不足' }))
}
@ -370,7 +369,7 @@ export async function createServer() {
seq: e.seq,
rand: e.rand,
message: e.message,
user_name: e.sender.nickname,
user_name: e.sender.nickname
},
read: true
}
@ -380,12 +379,12 @@ export async function createServer() {
break
default:
await connection.socket.send(JSON.stringify({ "data": data }))
await connection.socket.send(JSON.stringify({ data }))
break
}
} catch (error) {
console.error(error)
await connection.socket.send(JSON.stringify({ "error": error.message }))
await connection.socket.send(JSON.stringify({ error: error.message }))
}
})
connection.socket.on('close', () => {
@ -395,7 +394,7 @@ export async function createServer() {
})
return request
}
Bot.on("message", e => {
Bot.on('message', e => {
const messageData = {
notice: 'clientMessage',
message: e.message,
@ -411,7 +410,7 @@ export async function createServer() {
seq: e.seq,
rand: e.rand,
message: e.message,
user_name: e.sender.nickname,
user_name: e.sender.nickname
}
}
if (clients) {
@ -486,10 +485,10 @@ export async function createServer() {
for (let [keyPath, value] of Object.entries(chatdata)) {
if (keyPath === 'blockWords' || keyPath === 'promptBlockWords' || keyPath === 'initiativeChatGroups') { value = value.toString().split(/[,;\|]/) }
if (Config[keyPath] != value) {
//检查云服务api
// 检查云服务api
if (keyPath === 'cloudTranscode') {
const referer = request.headers.referer;
const origin = referer.match(/(https?:\/\/[^/]+)/)[1];
const referer = request.headers.referer
const origin = referer.match(/(https?:\/\/[^/]+)/)[1]
const checkCloud = await fetch(`${value}/check`,
{
method: 'POST',
@ -562,7 +561,7 @@ export async function createServer() {
// 系统服务测试
server.post('/serverTest', async (request, reply) => {
let serverState = {
cache: false, //待移除
cache: false, // 待移除
cloud: false
}
if (Config.cloudTranscode) {
@ -575,6 +574,15 @@ export async function createServer() {
return reply
})
global.chatgpt.server = server
return server
}
export async function runServer () {
let server = global.chatgpt.server
if (!server) {
server = await createServer()
}
server.listen({
port: Config.serverPort || 3321,
host: '::'
@ -586,3 +594,10 @@ export async function createServer() {
}
})
}
export async function stopServer () {
let server = global.chatgpt.server
if (server) {
await server.close()
}
}
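Net effect of the refactor: createServer only assembles the fastify instance and its routes, while listening and shutdown are now separate steps, which is what lets the toolbox be toggled at runtime. A minimal sketch of the lifecycle as used elsewhere in this commit (import path depends on the caller):

import { createServer, runServer, stopServer } from './server/index.js'

// index.js on startup, only when Config.enableToolbox is set:
await createServer()   // build the fastify instance + routes, kept on global.chatgpt.server
await runServer()      // listen on Config.serverPort || 3321 (creates the server first if missing)

// apps/management.js, `#chatgpt关闭工具箱`:
await stopServer()     // closes the instance; `#chatgpt开启工具箱` later calls runServer() again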

View file

@ -1,23 +1,23 @@
import fetch, {
Headers,
Request,
Response,
// Headers,
// Request,
// Response,
FormData
} from 'node-fetch'
import crypto from 'crypto'
import WebSocket from 'ws'
import { Config, pureSydneyInstruction } from './config.js'
import { Config } from './config.js'
import { formatDate, getMasterQQ, isCN, getUserData, limitString } from './common.js'
import moment from 'moment'
import { getProxy } from './proxy.js'
import common from '../../../lib/common/common.js'
if (!globalThis.fetch) {
globalThis.fetch = fetch
globalThis.Headers = Headers
globalThis.Request = Request
globalThis.Response = Response
}
//
// if (!globalThis.fetch) {
// globalThis.fetch = fetch
// globalThis.Headers = Headers
// globalThis.Request = Request
// globalThis.Response = Response
// }
// workaround for ver 7.x and ver 5.x
let proxy = getProxy()
@ -65,34 +65,40 @@ export default class SydneyAIClient {
accept: 'application/json',
'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
'content-type': 'application/json',
// 'sec-ch-ua': '"Microsoft Edge";v="113", "Chromium";v="113", "Not-A.Brand";v="24"',
// 'sec-ch-ua-arch': '"x86"',
// 'sec-ch-ua-bitness': '"64"',
// 'sec-ch-ua-full-version': '"112.0.1722.7"',
// 'sec-ch-ua-full-version-list': '"Chromium";v="112.0.5615.20", "Microsoft Edge";v="112.0.1722.7", "Not:A-Brand";v="99.0.0.0"',
// 'sec-ch-ua-mobile': '?0',
// 'sec-ch-ua-model': '',
// 'sec-ch-ua-platform': '"macOS"',
// 'sec-ch-ua-platform-version': '"15.0.0"',
// 'sec-fetch-dest': 'empty',
// 'sec-fetch-mode': 'cors',
// 'sec-fetch-site': 'same-origin',
// 'x-ms-client-request-id': crypto.randomUUID(),
// 'x-ms-useragent': 'azsdk-js-api-client-factory/1.0.0-beta.1 core-rest-pipeline/1.10.3 OS/macOS',
'sec-ch-ua': '"Microsoft Edge";v="113", "Chromium";v="113", "Not-A.Brand";v="24"',
'sec-ch-ua-arch': '"x86"',
'sec-ch-ua-bitness': '"64"',
'sec-ch-ua-full-version': '"112.0.1722.7"',
'sec-ch-ua-full-version-list': '"Chromium";v="112.0.5615.20", "Microsoft Edge";v="112.0.1722.7", "Not:A-Brand";v="99.0.0.0"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-model': '',
'sec-ch-ua-platform': '"macOS"',
'sec-ch-ua-platform-version': '"15.0.0"',
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'same-origin',
'x-ms-client-request-id': crypto.randomUUID(),
'x-ms-useragent': 'azsdk-js-api-client-factory/1.0.0-beta.1 core-rest-pipeline/1.10.3 OS/macOS',
// cookie: this.opts.cookies || `_U=${this.opts.userToken}`,
Referer: 'https://edgeservices.bing.com/edgesvc/chat?udsframed=1&form=SHORUN&clientscopes=chat,noheader,channelstable,'
// 'Referrer-Policy': 'origin-when-cross-origin',
Referer: 'https://edgeservices.bing.com/edgesvc/chat?udsframed=1&form=SHORUN&clientscopes=chat,noheader,channelstable,',
'Referrer-Policy': 'origin-when-cross-origin'
// Workaround for request being blocked due to geolocation
// 'x-forwarded-for': '1.1.1.1'
}
}
let initCk = 'SRCHD=AF=NOFORM; PPLState=1; SRCHHPGUSR=HV=' + new Date().getTime() + ';'
if (this.opts.userToken) {
if (this.opts.userToken || this.opts.cookies) {
// 疑似无需token了
fetchOptions.headers.cookie = `${initCk} _U=${this.opts.userToken}`
let proTag = await redis.get('CHATGPT:COPILOT_PRO_TAG:' + this.opts.userToken)
if (!this.opts.cookies) {
fetchOptions.headers.cookie = `${initCk} _U=${this.opts.userToken}`
} else {
fetchOptions.headers.cookie = this.opts.cookies
}
// let hash = md5(this.opts.cookies || this.opts.userToken)
let hash = crypto.createHash('md5').update(this.opts.cookies || this.opts.userToken).digest('hex')
let proTag = await redis.get('CHATGPT:COPILOT_PRO_TAG:' + hash)
if (!proTag) {
let indexContentRes = await fetch('https://www.bing.com', {
let indexContentRes = await fetch('https://www.bing.com/chat', {
headers: {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36 Edg/123.0.0.0',
Cookie: `_U=${this.opts.userToken}`
@ -104,7 +110,7 @@ export default class SydneyAIClient {
} else {
proTag = 'false'
}
await redis.set('CHATGPT:COPILOT_PRO_TAG:' + this.opts.userToken, proTag, { EX: 7200 })
await redis.set('CHATGPT:COPILOT_PRO_TAG:' + hash, proTag, { EX: 7200 })
}
if (proTag === 'true') {
logger.info('当前账户为copilot pro用户')
@ -123,12 +129,12 @@ export default class SydneyAIClient {
this.opts.host = 'https://edgeservices.bing.com/edgesvc'
}
logger.mark('使用host' + this.opts.host)
let response = await fetch(`${this.opts.host}/turing/conversation/create?bundleVersion=1.1381.12`, fetchOptions)
let response = await fetch(`${this.opts.host}/turing/conversation/create?bundleVersion=1.1626.12`, fetchOptions)
let text = await response.text()
let retry = 10
while (retry >= 0 && response.status === 200 && !text) {
await common.sleep(400)
response = await fetch(`${this.opts.host}/turing/conversation/create?bundleVersion=1.1381.12`, fetchOptions)
response = await fetch(`${this.opts.host}/turing/conversation/create?bundleVersion=1.1626.12`, fetchOptions)
text = await response.text()
retry--
}
@ -334,6 +340,21 @@ export default class SydneyAIClient {
if (!text) {
previousMessages = pm
} else {
let example = []
for (let i = 1; i < 4; i++) {
if (Config[`chatExampleUser${i}`]) {
example.push(...[
{
text: Config[`chatExampleUser${i}`],
author: 'user'
},
{
text: Config[`chatExampleBot${i}`],
author: 'bot'
}
])
}
}
previousMessages = [
{
text,
@ -343,6 +364,7 @@ export default class SydneyAIClient {
text: '好的。',
author: 'bot'
},
...example,
...pm
]
}
@ -379,6 +401,7 @@ export default class SydneyAIClient {
// 'cricinfo',
// 'cricinfov2',
'dv3sugg',
'autosave',
// 'gencontentv3',
'iycapbing',
'iyxapbing',
@ -387,12 +410,21 @@ export default class SydneyAIClient {
// 'revimgsrc1',
// 'revimgur',
// 'clgalileo',
'eredirecturl',
// 'eredirecturl',
// copilot
'uquopt',
'papynoapi',
'gndlogcf',
'sapsgrd'
// 'botthrottle',
// 'dlimitationnc',
// 'hourthrot',
// 'gndlogcf',
// 'ciorigimage',
// 'codeintfile',
'eredirecturl',
// 'ldsummary',
// 'ldqa',
// 'sdretrieval',
// "gencontentv3",
// 'gpt4tmncnp'
]
if (!isCreative) {
optionsSets.push('clgalileo')
@ -453,28 +485,30 @@ export default class SydneyAIClient {
'GeneratedCode'
],
sliceIds: [
'sappbcbt',
'inlineadsv2ho-prod',
'bgstream',
'dlidlat',
'autotts',
'dlid',
'sydoroff',
'voicemap',
'72enasright',
'semseronomon',
'srchqryfix',
'cmcpupsalltf',
'proupsallcf',
'206mems0',
'0209bicv3',
'205dcl1bt15',
'etlog',
'fpallsticy',
'0208papynoa',
'sapsgrd',
'1pgptwdes',
'newzigpt'
// 'supllmnfe',
// 'nodescf',
// 'stcheckcf',
// 'invldrqcf',
// 'v6voice',
// 'vnextr100',
// 'sydvrate100',
// 'vnextvoice',
// 'scmcbasecf',
// 'cmcpupsalltf',
// 'sydtransjson',
// 'thdnsrchcf',
// '220dcl1bt15',
// '311dlicnc',
// '0215wcrwippsr',
// '0305hrthrot',
// '0130gpt4t',
// 'bingfccf',
// 'dissagrds0',
// '0228scs',
// 'scprompt1',
// '228pyfilenfb',
// 'ecipc',
// '3022tpvs0'
],
requestId: crypto.randomUUID(),
traceId: genRanHex(32),
@ -532,11 +566,17 @@ export default class SydneyAIClient {
spokenTextMode: 'None',
conversationId,
previousMessages,
plugins: [
// {
// id: 'c310c353-b9f0-4d76-ab0d-1dd5e979cf68'
// }
]
// plugins: [
// {
// id: 'c310c353-b9f0-4d76-ab0d-1dd5e979cf68',
// category: 1
// }
// ],
// extraExtensionParameters: {
// 'gpt-creator-persona': {
// personaId: 'copilot'
// }
// }
}
if (encryptedconversationsignature) {
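The new chatExample block above splices up to three configured user/bot turns in right after the persona text and the stock「好的。」acknowledgement. A sketch (not part of the commit) of what gets injected when the first pair is configured:

// With chatExampleUser1 / chatExampleBot1 set, the injected turns are:
const example = [
  { text: Config.chatExampleUser1, author: 'user' },
  { text: Config.chatExampleBot1, author: 'bot' }
]
// and previousMessages becomes roughly:
// [ <persona message>, { text: '好的。', author: 'bot' }, ...example, ...pm ]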

View file

@ -63,7 +63,7 @@ import * as types from './types.js';
import globalFetch from 'node-fetch';
var CHATGPT_MODEL = 'qwen-turbo'; // qwen-plus
var USER_LABEL_DEFAULT = 'User';
var ASSISTANT_LABEL_DEFAULT = '义千问';
var ASSISTANT_LABEL_DEFAULT = '义千问';
var QwenApi = /** @class */ (function () {
/**
* Creates a new client wrapper around Qwen's chat completion API, mimicing the official ChatGPT webapp's functionality as closely as possible.
@ -76,11 +76,11 @@ var QwenApi = /** @class */ (function () {
this._apiBaseUrl = apiBaseUrl;
this._debug = !!debug;
this._fetch = fetch;
this._completionParams = __assign({ model: CHATGPT_MODEL, parameters: __assign({ top_p: 0.5, top_k: 50, temperature: 1.0, seed: 114514, enable_search: true, result_format: "text", incremental_output: false }, parameters) }, completionParams);
this._completionParams = __assign({ model: CHATGPT_MODEL, parameters: __assign({ top_p: 0.5, top_k: 50, temperature: 1.0, seed: 114514, enable_search: true, result_format: "message", incremental_output: false }, parameters) }, completionParams);
this._systemMessage = systemMessage;
if (this._systemMessage === undefined) {
var currentDate = new Date().toISOString().split('T')[0];
this._systemMessage = "You are ChatGPT, a large language model trained by Qwen. Answer as concisely as possible.\nKnowledge cutoff: 2021-09-01\nCurrent date: ".concat(currentDate);
this._systemMessage = "You are Qwen, a large language model trained by Alibaba Cloud. Answer as concisely as possible.\nCurrent date: ".concat(currentDate);
}
this._getMessageById = getMessageById !== null && getMessageById !== void 0 ? getMessageById : this._defaultGetMessageById;
this._upsertMessage = upsertMessage !== null && upsertMessage !== void 0 ? upsertMessage : this._defaultUpsertMessage;
@ -120,7 +120,7 @@ var QwenApi = /** @class */ (function () {
* @param opts.timeoutMs - Optional timeout in milliseconds (defaults to no timeout)
* @param opts.onProgress - Optional callback which will be invoked every time the partial response is updated
* @param opts.abortSignal - Optional callback used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
* @param completionParams - Optional overrides to send to the [Qwen chat completion API](https://platform.openai.com/docs/api-reference/chat/create). Options like `temperature` and `presence_penalty` can be tweaked to change the personality of the assistant.
* @param opts.completionParams - Optional overrides to send to the [Qwen chat completion API](https://platform.openai.com/docs/api-reference/chat/create). Options like `temperature` and `presence_penalty` can be tweaked to change the personality of the assistant.
*
* @returns The response from ChatGPT
*/
@ -128,7 +128,7 @@ var QwenApi = /** @class */ (function () {
if (opts === void 0) { opts = {}; }
if (role === void 0) { role = 'user'; }
return __awaiter(this, void 0, void 0, function () {
var parentMessageId, _a, messageId, timeoutMs, completionParams, conversationId, abortSignal, abortController, message, latestQuestion, _b, messages, maxTokens, numTokens, result, responseP;
var parentMessageId, _a, messageId, timeoutMs, completionParams, conversationId, abortSignal, abortController, message, latestQuestion, parameters, _b, messages, maxTokens, numTokens, result, responseP;
var _this = this;
return __generator(this, function (_c) {
switch (_c.label) {
@ -148,6 +148,9 @@ var QwenApi = /** @class */ (function () {
text: text,
};
latestQuestion = message;
parameters = Object.assign(this._completionParams.parameters, completionParams.parameters);
completionParams = Object.assign(this._completionParams, completionParams);
completionParams.parameters = parameters;
return [4 /*yield*/, this._buildMessages(text, role, opts, completionParams)];
case 1:
_b = _c.sent(), messages = _b.messages, maxTokens = _b.maxTokens, numTokens = _b.numTokens;
@ -158,28 +161,31 @@ var QwenApi = /** @class */ (function () {
conversationId: conversationId,
parentMessageId: messageId,
text: undefined,
functionCall: undefined,
conversation: []
};
this._completionParams.input = { messages: messages };
completionParams.input = { messages: messages };
responseP = new Promise(function (resolve, reject) { return __awaiter(_this, void 0, void 0, function () {
var url, headers, body, res, reason, msg, error, response, err_1;
return __generator(this, function (_a) {
switch (_a.label) {
var _a, _b, _c, _d, _e;
return __generator(this, function (_f) {
switch (_f.label) {
case 0:
url = "".concat(this._apiBaseUrl, "/services/aigc/text-generation/generation");
headers = {
'Content-Type': 'application/json',
Authorization: "Bearer ".concat(this._apiKey)
};
body = __assign(__assign({}, this._completionParams), completionParams);
body = completionParams;
if (this._debug) {
console.log(JSON.stringify(body));
}
if (this._debug) {
console.log("sendMessage (".concat(numTokens, " tokens)"), body);
}
_a.label = 1;
_f.label = 1;
case 1:
_a.trys.push([1, 6, , 7]);
_f.trys.push([1, 6, , 7]);
return [4 /*yield*/, this._fetch(url, {
method: 'POST',
headers: headers,
@ -187,11 +193,11 @@ var QwenApi = /** @class */ (function () {
signal: abortSignal
})];
case 2:
res = _a.sent();
res = _f.sent();
if (!!res.ok) return [3 /*break*/, 4];
return [4 /*yield*/, res.text()];
case 3:
reason = _a.sent();
reason = _f.sent();
msg = "Qwen error ".concat(res.status || res.statusText, ": ").concat(reason);
error = new types.ChatGPTError(msg, { cause: res });
error.statusCode = res.status;
@ -199,18 +205,22 @@ var QwenApi = /** @class */ (function () {
return [2 /*return*/, reject(error)];
case 4: return [4 /*yield*/, res.json()];
case 5:
response = _a.sent();
response = _f.sent();
if (this._debug) {
console.log(response);
}
if (((_e = (_d = (_c = (_b = (_a = response.output) === null || _a === void 0 ? void 0 : _a.choices) === null || _b === void 0 ? void 0 : _b[0]) === null || _c === void 0 ? void 0 : _c.message) === null || _d === void 0 ? void 0 : _d.tool_calls) === null || _e === void 0 ? void 0 : _e.length) > 0) {
// function call result
result.functionCall = response.output.choices[0].message.tool_calls[0].function;
}
if (response === null || response === void 0 ? void 0 : response.request_id) {
result.id = response.request_id;
}
result.detail = response;
result.text = response.output.text;
result.text = response.output.choices[0].message.content;
return [2 /*return*/, resolve(result)];
case 6:
err_1 = _a.sent();
err_1 = _f.sent();
return [2 /*return*/, reject(err_1)];
case 7: return [2 /*return*/];
}
@ -278,7 +288,8 @@ var QwenApi = /** @class */ (function () {
? messages.concat([
{
role: role,
content: text
content: text,
name: role === 'tool' ? opts.name : undefined
}
])
: messages;
@ -337,7 +348,8 @@ var QwenApi = /** @class */ (function () {
nextMessages = nextMessages.slice(0, systemMessageOffset).concat(__spreadArray([
{
role: parentMessageRole,
content: parentMessage.text
content: parentMessage.functionCall ? parentMessage.functionCall.arguments : parentMessage.text,
name: parentMessage.functionCall ? parentMessage.functionCall.name : undefined
}
], nextMessages.slice(systemMessageOffset), true));
parentMessageId = parentMessage.parentMessageId;

View file

@ -7,11 +7,12 @@ import * as tokenizer from './tokenizer'
import * as types from './types'
import globalFetch from 'node-fetch'
import {qwen, Role} from "./types";
import {openai} from "../openai/types";
const CHATGPT_MODEL = 'qwen-turbo' // qwen-plus
const USER_LABEL_DEFAULT = 'User'
const ASSISTANT_LABEL_DEFAULT = '义千问'
const ASSISTANT_LABEL_DEFAULT = '义千问'
export class QwenApi {
protected _apiKey: string
@ -64,7 +65,7 @@ export class QwenApi {
temperature: 1.0,
seed: 114514,
enable_search: true,
result_format: "text",
result_format: "message",
incremental_output: false,
...parameters
},
@ -75,7 +76,7 @@ export class QwenApi {
if (this._systemMessage === undefined) {
const currentDate = new Date().toISOString().split('T')[0]
this._systemMessage = `You are ChatGPT, a large language model trained by Qwen. Answer as concisely as possible.\nKnowledge cutoff: 2021-09-01\nCurrent date: ${currentDate}`
this._systemMessage = `You are Qwen, a large language model trained by Alibaba Cloud. Answer as concisely as possible.\nCurrent date: ${currentDate}`
}
this._getMessageById = getMessageById ?? this._defaultGetMessageById
@ -120,7 +121,7 @@ export class QwenApi {
* @param opts.timeoutMs - Optional timeout in milliseconds (defaults to no timeout)
* @param opts.onProgress - Optional callback which will be invoked every time the partial response is updated
* @param opts.abortSignal - Optional callback used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
* @param completionParams - Optional overrides to send to the [Qwen chat completion API](https://platform.openai.com/docs/api-reference/chat/create). Options like `temperature` and `presence_penalty` can be tweaked to change the personality of the assistant.
* @param opts.completionParams - Optional overrides to send to the [Qwen chat completion API](https://platform.openai.com/docs/api-reference/chat/create). Options like `temperature` and `presence_penalty` can be tweaked to change the personality of the assistant.
*
* @returns The response from ChatGPT
*/
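For context, here is a minimal usage sketch of the options documented above. It is an editor's illustration, not part of this diff: the DASHSCOPE_API_KEY environment variable and the sample prompt are placeholders, and the constructor is assumed to accept QWenAPIOptions as typed in the types file further below.
// Hedged sketch: per-call overrides go through opts.completionParams (inside an async function).
const api = new QwenApi({ apiKey: process.env.DASHSCOPE_API_KEY })
const reply = await api.sendMessage('今天北京天气怎么样?', {
  timeoutMs: 120000,
  completionParams: { parameters: { result_format: 'message' } } // 'message' is needed for tool_calls
})
console.log(reply.text)         // assistant text from output.choices[0].message.content
console.log(reply.functionCall) // populated when the model returns a tool call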
@ -129,7 +130,7 @@ export class QwenApi {
opts: types.SendMessageOptions = {},
role: Role = 'user',
): Promise<types.ChatMessage> {
const {
let {
parentMessageId,
messageId = uuidv4(),
timeoutMs,
@ -155,21 +156,30 @@ export class QwenApi {
const latestQuestion = message
let parameters = Object.assign(
this._completionParams.parameters,
completionParams.parameters
)
completionParams = Object.assign(this._completionParams, completionParams)
completionParams.parameters = parameters
const { messages, maxTokens, numTokens } = await this._buildMessages(
text,
role,
opts,
completionParams
)
console.log(`maxTokens: ${maxTokens}, numTokens: ${numTokens}`)
const result: types.ChatMessage = {
const result: types.ChatMessage & { conversation: qwen.ChatCompletionRequestMessage[] } = {
role: 'assistant',
id: uuidv4(),
conversationId,
parentMessageId: messageId,
text: undefined,
functionCall: undefined,
conversation: []
}
this._completionParams.input = { messages }
completionParams.input = { messages }
const responseP = new Promise<types.ChatMessage>(
async (resolve, reject) => {
const url = `${this._apiBaseUrl}/services/aigc/text-generation/generation`
@ -177,10 +187,7 @@ export class QwenApi {
'Content-Type': 'application/json',
Authorization: `Bearer ${this._apiKey}`
}
const body = {
...this._completionParams,
...completionParams
}
const body = completionParams
if (this._debug) {
console.log(JSON.stringify(body))
}
@ -212,12 +219,15 @@ export class QwenApi {
if (this._debug) {
console.log(response)
}
if (response.output?.choices?.[0]?.message?.tool_calls?.length > 0) {
// function call result
result.functionCall = response.output.choices[0].message.tool_calls[0].function
}
if (response?.request_id) {
result.id = response.request_id
}
result.detail = response
result.text = response.output.text
result.text = response.output.choices[0].message.content
return resolve(result)
} catch (err) {
return reject(err)
@ -283,7 +293,8 @@ export class QwenApi {
? messages.concat([
{
role,
content: text
content: text,
name: role === 'tool' ? opts.name : undefined
}
])
: messages
@ -338,7 +349,8 @@ export class QwenApi {
nextMessages = nextMessages.slice(0, systemMessageOffset).concat([
{
role: parentMessageRole,
content: parentMessage.text
content: parentMessage.functionCall ? parentMessage.functionCall.arguments : parentMessage.text,
name: parentMessage.functionCall ? parentMessage.functionCall.name : undefined
},
...nextMessages.slice(systemMessageOffset)
])


@ -1,80 +1,82 @@
import Keyv from 'keyv'
import {openai} from "../openai/types";
export type Role = 'user' | 'assistant' | 'system'
export type Role = 'user' | 'assistant' | 'system' | 'tool'
export type FetchFn = typeof fetch
export type QWenAPIOptions = {
apiKey: string
apiKey: string
/** @defaultValue `'https://dashscope.aliyuncs.com/api/v1'` **/
apiBaseUrl?: string
/** @defaultValue `'https://dashscope.aliyuncs.com/api/v1'` **/
apiBaseUrl?: string
apiOrg?: string
apiOrg?: string
/** @defaultValue `false` **/
debug?: boolean
/** @defaultValue `false` **/
debug?: boolean
completionParams?: Partial<
Omit<qwen.CreateChatCompletionRequest, 'messages' | 'n' | 'stream'>
>
parameters?: qwen.QWenParameters,
completionParams?: Partial<
Omit<qwen.CreateChatCompletionRequest, 'messages' | 'n' | 'stream'>
>
parameters?: qwen.QWenParameters,
systemMessage?: string
systemMessage?: string
messageStore?: Keyv
getMessageById?: GetMessageByIdFunction
upsertMessage?: UpsertMessageFunction
messageStore?: Keyv
getMessageById?: GetMessageByIdFunction
upsertMessage?: UpsertMessageFunction
fetch?: FetchFn
fetch?: FetchFn
}
export type SendMessageOptions = {
/**
* function role name
*/
name?: string
messageId?: string
stream?: boolean
systemMessage?: string
parentMessageId?: string
conversationId?: string
timeoutMs?: number
onProgress?: (partialResponse: ChatMessage) => void
abortSignal?: AbortSignal
completionParams?: Partial<
Omit<qwen.CreateChatCompletionRequest, 'messages' | 'n' | 'stream'>
>
/**
* function role name
*/
name?: string
messageId?: string
stream?: boolean
systemMessage?: string
parentMessageId?: string
conversationId?: string
timeoutMs?: number
onProgress?: (partialResponse: ChatMessage) => void
abortSignal?: AbortSignal
completionParams?: Partial<
Omit<qwen.CreateChatCompletionRequest, 'messages' | 'n' | 'stream'>
>
}
export type MessageActionType = 'next' | 'variant'
export type SendMessageBrowserOptions = {
conversationId?: string
parentMessageId?: string
messageId?: string
action?: MessageActionType
timeoutMs?: number
onProgress?: (partialResponse: ChatMessage) => void
abortSignal?: AbortSignal
conversationId?: string
parentMessageId?: string
messageId?: string
action?: MessageActionType
timeoutMs?: number
onProgress?: (partialResponse: ChatMessage) => void
abortSignal?: AbortSignal
}
export interface ChatMessage {
id: string
text: string
role: Role
parentMessageId?: string
conversationId?: string
detail?:
| qwen.CreateChatCompletionResponse
| CreateChatCompletionStreamResponse
id: string
text: string
role: Role
parentMessageId?: string
conversationId?: string
detail?:
| qwen.CreateChatCompletionResponse
| CreateChatCompletionStreamResponse
functionCall?: qwen.FunctionCall
}
export class ChatGPTError extends Error {
statusCode?: number
statusText?: string
isFinal?: boolean
accountId?: string
statusCode?: number
statusText?: string
isFinal?: boolean
accountId?: string
}
/** Returns a chat message from a store by its ID (or null if not found). */
@ -84,230 +86,289 @@ export type GetMessageByIdFunction = (id: string) => Promise<ChatMessage>
export type UpsertMessageFunction = (message: ChatMessage) => Promise<void>
export interface CreateChatCompletionStreamResponse
extends openai.CreateChatCompletionDeltaResponse {
usage: CreateCompletionStreamResponseUsage
extends openai.CreateChatCompletionDeltaResponse {
usage: CreateCompletionStreamResponseUsage
}
export interface CreateCompletionStreamResponseUsage
extends openai.CreateCompletionResponseUsage {
estimated: true
extends openai.CreateCompletionResponseUsage {
estimated: true
}
/**
* https://chat.openapi.com/backend-api/conversation
*/
export type ConversationJSONBody = {
/**
* The action to take
*/
action: string
/**
* The action to take
*/
action: string
/**
* The ID of the conversation
*/
conversation_id?: string
/**
* The ID of the conversation
*/
conversation_id?: string
/**
* Prompts to provide
*/
messages: Prompt[]
/**
* Prompts to provide
*/
messages: Prompt[]
/**
* The model to use
*/
model: string
/**
* The model to use
*/
model: string
/**
* The parent message ID
*/
parent_message_id: string
/**
* The parent message ID
*/
parent_message_id: string
}
export type Prompt = {
/**
* The content of the prompt
*/
content: PromptContent
/**
* The content of the prompt
*/
content: PromptContent
/**
* The ID of the prompt
*/
id: string
/**
* The ID of the prompt
*/
id: string
/**
* The role played in the prompt
*/
role: Role
/**
* The role played in the prompt
*/
role: Role
}
export type ContentType = 'text'
export type PromptContent = {
/**
* The content type of the prompt
*/
content_type: ContentType
/**
* The content type of the prompt
*/
content_type: ContentType
/**
* The parts to the prompt
*/
parts: string[]
/**
* The parts to the prompt
*/
parts: string[]
}
export type ConversationResponseEvent = {
message?: Message
conversation_id?: string
error?: string | null
message?: Message
conversation_id?: string
error?: string | null
}
export type Message = {
id: string
content: MessageContent
role: Role
user: string | null
create_time: string | null
update_time: string | null
end_turn: null
weight: number
recipient: string
metadata: MessageMetadata
id: string
content: MessageContent
role: Role
user: string | null
create_time: string | null
update_time: string | null
end_turn: null
weight: number
recipient: string
metadata: MessageMetadata
}
export type MessageContent = {
content_type: string
parts: string[]
content_type: string
parts: string[]
}
export type MessageMetadata = any
export namespace qwen {
export interface CreateChatCompletionDeltaResponse {
id: string
object: 'chat.completion.chunk'
created: number
model: string
choices: [
{
delta: {
role: Role
content?: string,
function_call?: {name: string, arguments: string}
}
index: number
finish_reason: string | null
}
]
}
export interface CreateChatCompletionDeltaResponse {
id: string
object: 'chat.completion.chunk'
created: number
model: string
choices: [
{
delta: {
role: Role
content?: string,
function_call?: { name: string, arguments: string }
}
index: number
finish_reason: string | null
}
]
}
/**
*
* @export
* @interface ChatCompletionRequestMessage
*/
export interface ChatCompletionRequestMessage {
/**
* The role of the author of this message.
* @type {string}
* @memberof ChatCompletionRequestMessage
*/
role: ChatCompletionRequestMessageRoleEnum
/**
* The contents of the message
* @type {string}
* @memberof ChatCompletionRequestMessage
*/
content: string
/**
*
* @export
* @interface ChatCompletionRequestMessage
* When role is 'tool', the message carries the result of a function_call: name is the function name and must match tool_calls[i].function.name in the response, and content is the function's output
*/
export interface ChatCompletionRequestMessage {
/**
* The role of the author of this message.
* @type {string}
* @memberof ChatCompletionRequestMessage
*/
role: ChatCompletionRequestMessageRoleEnum
/**
* The contents of the message
* @type {string}
* @memberof ChatCompletionRequestMessage
*/
content: string
}
name?: string
}
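To make the tool-role convention above concrete, a hedged example of a tool-result message; the get_weather name is made up for illustration and is not part of this commit.
// `name` must match tool_calls[i].function.name from the model's previous response.
const toolResult = {
  role: 'tool',
  name: 'get_weather',
  content: JSON.stringify({ city: 'Beijing', condition: 'sunny', high: 12, low: 2 })
}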
export declare const ChatCompletionRequestMessageRoleEnum: {
readonly System: 'system'
readonly User: 'user'
readonly Assistant: 'assistant'
}
export declare type ChatCompletionRequestMessageRoleEnum =
(typeof ChatCompletionRequestMessageRoleEnum)[keyof typeof ChatCompletionRequestMessageRoleEnum]
export interface FunctionCall {
name: string
arguments: string
}
export declare const ChatCompletionRequestMessageRoleEnum: {
readonly System: 'system'
readonly User: 'user'
readonly Assistant: 'assistant'
readonly Tool: 'tool'
}
export declare type ChatCompletionRequestMessageRoleEnum =
(typeof ChatCompletionRequestMessageRoleEnum)[keyof typeof ChatCompletionRequestMessageRoleEnum]
export interface QWenInput {
messages: Array<ChatCompletionRequestMessage>
}
export interface QWenInput {
messages: Array<ChatCompletionRequestMessage>
}
export interface QWenParameters {
result_format: string
top_p: number
top_k: number
seed: number
temperature: number
enable_search: boolean
incremental_output: boolean
}
export interface QWenParameters {
result_format: "text" | "message"
top_p: number
top_k: number
seed: number
temperature: number
enable_search: boolean
incremental_output: boolean
tools: Tools[]
}
export interface Tools {
type: "function"
function: QwenFunction
}
export interface QwenFunction {
name: string
description: string
parameters: QwenFunctionParameters
}
export interface QwenFunctionParameters {
type: "object"
properties: Properties;
required?: string[]
}
interface Properties {
[key: string]: Property;
}
interface Property {
type: string;
description?: string;
enum?: string[];
}
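A sketch of a tools entry that satisfies these interfaces; the get_weather function and its schema are illustrative placeholders, not part of this commit.
// One function tool, declared in the shape of Tools / QwenFunction / QwenFunctionParameters.
const tools = [{
  type: 'function',
  function: {
    name: 'get_weather',
    description: 'Query the current weather for a given city',
    parameters: {
      type: 'object',
      properties: {
        city: { type: 'string', description: 'City name, e.g. Beijing' }
      },
      required: ['city']
    }
  }
}]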
/**
*
* @export
* @interface CreateChatCompletionRequest
*/
export interface CreateChatCompletionRequest {
/**
* ID of the model to use. Currently, only `gpt-3.5-turbo` and `gpt-3.5-turbo-0301` are supported.
* @type {string}
* @memberof CreateChatCompletionRequest
*/
model: string
/**
* The messages to generate chat completions for, in the [chat format](/docs/guides/chat/introduction).
* @type {Array<ChatCompletionRequestMessage>}
* @memberof CreateChatCompletionRequest
*/
input?: QWenInput
parameters: QWenParameters
}
/**
*
* @export
* @interface CreateChatCompletionResponse
*/
export interface CreateChatCompletionResponse {
/**
*
* @export
* @interface CreateChatCompletionRequest
* @type {string}
* @memberof CreateChatCompletionResponse
*/
export interface CreateChatCompletionRequest {
/**
* ID of the model to use. Currently, only `gpt-3.5-turbo` and `gpt-3.5-turbo-0301` are supported.
* @type {string}
* @memberof CreateChatCompletionRequest
*/
model: string
/**
* The messages to generate chat completions for, in the [chat format](/docs/guides/chat/introduction).
* @type {Array<ChatCompletionRequestMessage>}
* @memberof CreateChatCompletionRequest
*/
input?: QWenInput
request_id: string
/**
*
* @type {QWenOutput}
* @memberof CreateChatCompletionResponse
*/
output: QWenOutput
/**
*
* @type {CreateCompletionResponseUsage}
* @memberof CreateChatCompletionResponse
*/
usage?: CreateCompletionResponseUsage
}
parameters: QWenParameters
}
export interface QWenOutput {
finish_reason: string
text?: string
choices?: Choice[]
}
export interface Choice {
finish_reason: string
message: ResponseMessage
}
export interface ResponseMessage {
role: Role
content: string
tool_calls: ToolCall[]
}
export interface ToolCall {
function: FunctionCall
type: "function"
}
/**
*
* @export
* @interface CreateCompletionResponseUsage
*/
export interface CreateCompletionResponseUsage {
/**
*
* @export
* @interface CreateChatCompletionResponse
* @type {number}
* @memberof CreateCompletionResponseUsage
*/
export interface CreateChatCompletionResponse {
/**
*
* @type {string}
* @memberof CreateChatCompletionResponse
*/
request_id: string
/**
*
* @type {QWenOutput}
* @memberof CreateChatCompletionResponse
*/
output: QWenOutput
/**
*
* @type {CreateCompletionResponseUsage}
* @memberof CreateChatCompletionResponse
*/
usage?: CreateCompletionResponseUsage
}
export interface QWenOutput {
finish_reason: string
text: string
}
input_tokens: number
/**
*
* @export
* @interface CreateCompletionResponseUsage
* @type {number}
* @memberof CreateCompletionResponseUsage
*/
export interface CreateCompletionResponseUsage {
/**
*
* @type {number}
* @memberof CreateCompletionResponseUsage
*/
input_tokens: number
/**
*
* @type {number}
* @memberof CreateCompletionResponseUsage
*/
output_tokens: number
}
output_tokens: number
}
}


@ -1,373 +1,373 @@
// https://github.com/EvanZhouDev/bard-ai
class Bard {
static JSON = "json";
static MD = "markdown";
static JSON = 'json'
static MD = 'markdown'
// ID derived from Cookie
SNlM0e;
SNlM0e
// HTTPS Headers
#headers;
#headers
// Resolution status of initialization call
#initPromise;
#initPromise
#bardURL = "https://bard.google.com";
#bardURL = 'https://bard.google.com'
// Whether or not to log events to console
#verbose = false;
#verbose = false
// Fetch function
#fetch = fetch;
#fetch = fetch
constructor(cookie, config) {
// Register some settings
if (config?.verbose == true) this.#verbose = true;
if (config?.fetch) this.#fetch = config.fetch;
// The Bard URL can be overridden, e.g. to go through a reverse proxy and bypass regional restrictions
if (config?.bardURL) this.#bardURL = config.bardURL;
constructor (cookie, config) {
// Register some settings
if (config?.verbose == true) this.#verbose = true
if (config?.fetch) this.#fetch = config.fetch
// 可变更访问地址,利用反向代理绕过区域限制
if (config?.bardURL) this.#bardURL = config.bardURL
// If a Cookie is provided, initialize
if (cookie) {
this.#initPromise = this.#init(cookie);
} else {
throw new Error("Please provide a Cookie when initializing Bard.");
}
this.cookie = cookie;
// If a Cookie is provided, initialize
if (cookie) {
this.#initPromise = this.#init(cookie)
} else {
throw new Error('Please provide a Cookie when initializing Bard.')
}
this.cookie = cookie
}
// You can also choose to initialize manually
async #init(cookie) {
this.#verbose && console.log("🚀 Starting initialization");
// Assign headers
this.#headers = {
Host: this.#bardURL.match(/^https?:\/\/([^\/]+)\/?$/)[1],
"X-Same-Domain": "1",
"User-Agent":
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36",
"Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
Origin: this.#bardURL,
Referer: this.#bardURL,
Cookie: (typeof cookie === "object") ? (Object.entries(cookie).map(([key, val]) => `${key}=${val};`).join("")) : ("__Secure-1PSID=" + cookie),
};
async #init (cookie) {
this.#verbose && console.log('🚀 Starting initialization')
// Assign headers
this.#headers = {
Host: this.#bardURL.match(/^https?:\/\/([^\/]+)\/?$/)[1],
'X-Same-Domain': '1',
'User-Agent':
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36',
'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
Origin: this.#bardURL,
Referer: this.#bardURL,
Cookie: (typeof cookie === 'object') ? (Object.entries(cookie).map(([key, val]) => `${key}=${val};`).join('')) : ('__Secure-1PSID=' + cookie)
}
let responseText;
// Attempt to retrieve SNlM0e
try {
this.#verbose &&
console.log("🔒 Authenticating your Google account");
responseText = await this.#fetch(this.#bardURL, {
method: "GET",
headers: this.#headers,
credentials: "include",
})
.then((response) => response.text())
} catch (e) {
// Failure to get server
throw new Error(
"Could not fetch Google Bard. You may be disconnected from internet: " +
let responseText
// Attempt to retrieve SNlM0e
try {
this.#verbose &&
console.log('🔒 Authenticating your Google account')
responseText = await this.#fetch(this.#bardURL, {
method: 'GET',
headers: this.#headers,
credentials: 'include'
})
.then((response) => response.text())
} catch (e) {
// Failure to get server
throw new Error(
'Could not fetch Google Bard. You may be disconnected from internet: ' +
e
);
}
)
}
try {
const SNlM0e = responseText.match(/SNlM0e":"(.*?)"/)[1];
// Assign SNlM0e and return it
this.SNlM0e = SNlM0e;
this.#verbose && console.log("✅ Initialization finished\n");
return SNlM0e;
} catch {
throw new Error(
"Could not use your Cookie. Make sure that you copied correctly the Cookie with name __Secure-1PSID exactly. If you are sure your cookie is correct, you may also have reached your rate limit."
);
}
try {
const SNlM0e = responseText.match(/SNlM0e":"(.*?)"/)[1]
// Assign SNlM0e and return it
this.SNlM0e = SNlM0e
this.#verbose && console.log('✅ Initialization finished\n')
return SNlM0e
} catch {
throw new Error(
'Could not use your Cookie. Make sure that you copied correctly the Cookie with name __Secure-1PSID exactly. If you are sure your cookie is correct, you may also have reached your rate limit.'
)
}
}
async #uploadImage(name, buffer) {
this.#verbose && console.log("🖼️ Starting image processing");
let size = buffer.byteLength;
let formBody = [
`${encodeURIComponent("File name")}=${encodeURIComponent([name])}`,
];
async #uploadImage (name, buffer) {
this.#verbose && console.log('🖼️ Starting image processing')
let size = buffer.byteLength
let formBody = [
`${encodeURIComponent('File name')}=${encodeURIComponent([name])}`
]
try {
this.#verbose &&
console.log("💻 Finding Google server destination");
let response = await this.#fetch(
"https://content-push.googleapis.com/upload/",
{
method: "POST",
headers: {
"X-Goog-Upload-Command": "start",
"X-Goog-Upload-Protocol": "resumable",
"X-Goog-Upload-Header-Content-Length": size,
"X-Tenant-Id": "bard-storage",
"Push-Id": "feeds/mcudyrk2a4khkz",
},
body: formBody,
credentials: "include",
}
);
try {
this.#verbose &&
console.log('💻 Finding Google server destination')
let response = await this.#fetch(
'https://content-push.googleapis.com/upload/',
{
method: 'POST',
headers: {
'X-Goog-Upload-Command': 'start',
'X-Goog-Upload-Protocol': 'resumable',
'X-Goog-Upload-Header-Content-Length': size,
'X-Tenant-Id': 'bard-storage',
'Push-Id': 'feeds/mcudyrk2a4khkz'
},
body: formBody,
credentials: 'include'
}
)
const uploadUrl = response.headers.get("X-Goog-Upload-URL");
this.#verbose && console.log("📤 Sending your image");
response = await this.#fetch(uploadUrl, {
method: "POST",
headers: {
"X-Goog-Upload-Command": "upload, finalize",
"X-Goog-Upload-Offset": 0,
"X-Tenant-Id": "bard-storage",
},
body: buffer,
credentials: "include",
});
const uploadUrl = response.headers.get('X-Goog-Upload-URL')
this.#verbose && console.log('📤 Sending your image')
response = await this.#fetch(uploadUrl, {
method: 'POST',
headers: {
'X-Goog-Upload-Command': 'upload, finalize',
'X-Goog-Upload-Offset': 0,
'X-Tenant-Id': 'bard-storage'
},
body: buffer,
credentials: 'include'
})
const imageFileLocation = await response.text();
const imageFileLocation = await response.text()
this.#verbose && console.log("✅ Image finished working\n");
return imageFileLocation;
} catch (e) {
throw new Error(
"Could not fetch Google Bard. You may be disconnected from internet: " +
this.#verbose && console.log('✅ Image finished working\n')
return imageFileLocation
} catch (e) {
throw new Error(
'Could not fetch Google Bard. You may be disconnected from internet: ' +
e
);
}
)
}
}
// Query Bard
async #query(message, config) {
let formatMarkdown = (text, images) => {
if (!images) return text;
async #query (message, config) {
let formatMarkdown = (text, images) => {
if (!images) return text
for (let imageData of images) {
const formattedTag = `!${imageData.tag}(${imageData.url})`;
text = text.replace(
new RegExp(`(?!\\!)\\[${imageData.tag.slice(1, -1)}\\]`),
formattedTag
);
}
return text;
for (let imageData of images) {
const formattedTag = `!${imageData.tag}(${imageData.url})`
text = text.replace(
new RegExp(`(?!\\!)\\[${imageData.tag.slice(1, -1)}\\]`),
formattedTag
)
}
let { ids, imageBuffer } = config;
return text
}
// Wait until after init
await this.#initPromise;
let { ids, imageBuffer } = config
this.#verbose && console.log("🔎 Starting Bard Query");
// Wait until after init
await this.#initPromise
// If user has not run init
if (!this.SNlM0e) {
throw new Error(
"Please initialize Bard first. If you haven't passed in your Cookie into the class, run Bard.init(cookie)."
);
}
this.#verbose && console.log('🔎 Starting Bard Query')
this.#verbose && console.log("🏗️ Building Request");
// HTTPS parameters
const params = {
bl: "boq_assistant-bard-web-server_20230711.08_p0",
_reqID: ids?._reqID ?? "0",
rt: "c",
};
// If user has not run init
if (!this.SNlM0e) {
throw new Error(
"Please initialize Bard first. If you haven't passed in your Cookie into the class, run Bard.init(cookie)."
)
}
// If IDs are provided but do not include every expected ID, raise an error
const messageStruct = [
[message],
null,
[null, null, null],
];
this.#verbose && console.log('🏗️ Building Request')
// HTTPS parameters
const params = {
bl: 'boq_assistant-bard-web-server_20230711.08_p0',
_reqID: ids?._reqID ?? '0',
rt: 'c'
}
if (imageBuffer) {
let imageLocation = await this.#uploadImage(
`bard-ai_upload`,
imageBuffer
);
messageStruct[0].push(0, null, [
[[imageLocation, 1], "bard-ai_upload"],
]);
}
// If IDs are provided but do not include every expected ID, raise an error
const messageStruct = [
[message],
null,
[null, null, null]
]
if (ids) {
const { conversationID, responseID, choiceID } = ids;
messageStruct[2] = [conversationID, responseID, choiceID];
}
if (imageBuffer) {
let imageLocation = await this.#uploadImage(
'bard-ai_upload',
imageBuffer
)
messageStruct[0].push(0, null, [
[[imageLocation, 1], 'bard-ai_upload']
])
}
// HTTPs data
const data = {
"f.req": JSON.stringify([null, JSON.stringify(messageStruct)]),
at: this.SNlM0e,
};
if (ids) {
const { conversationID, responseID, choiceID } = ids
messageStruct[2] = [conversationID, responseID, choiceID]
}
// URL that we are submitting to
const url = new URL(
"/_/BardChatUi/data/assistant.lamda.BardFrontendService/StreamGenerate",
this.#bardURL
);
// HTTPs data
const data = {
'f.req': JSON.stringify([null, JSON.stringify(messageStruct)]),
at: this.SNlM0e
}
// Append parameters to the URL
for (const key in params) {
url.searchParams.append(key, params[key]);
}
// URL that we are submitting to
const url = new URL(
'/_/BardChatUi/data/assistant.lamda.BardFrontendService/StreamGenerate',
this.#bardURL
)
// Encode the data
const formBody = Object.entries(data)
.map(
([property, value]) =>
// Append parameters to the URL
for (const key in params) {
url.searchParams.append(key, params[key])
}
// Encode the data
const formBody = Object.entries(data)
.map(
([property, value]) =>
`${encodeURIComponent(property)}=${encodeURIComponent(
value
)}`
)
.join("&");
)
.join('&')
this.#verbose && console.log("💭 Sending message to Bard");
// Send the fetch request
const chatData = await this.#fetch(url.toString(), {
method: "POST",
headers: this.#headers,
body: formBody,
credentials: "include",
this.#verbose && console.log('💭 Sending message to Bard')
// Send the fetch request
const chatData = await this.#fetch(url.toString(), {
method: 'POST',
headers: this.#headers,
body: formBody,
credentials: 'include'
})
.then((response) => {
return response.text()
})
.then((response) => {
return response.text();
})
.then((text) => {
return JSON.parse(text.split("\n")[3])[0][2];
})
.then((rawData) => JSON.parse(rawData));
.then((text) => {
return JSON.parse(text.split('\n')[3])[0][2]
})
.then((rawData) => JSON.parse(rawData))
this.#verbose && console.log("🧩 Parsing output");
// Get first Bard-recommended answer
const answer = chatData[4][0];
this.#verbose && console.log('🧩 Parsing output')
// Get first Bard-recommended answer
const answer = chatData[4][0]
// Text of that answer
const text = answer[1][0];
// Text of that answer
const text = answer[1][0]
// Get data about images in that answer
const images =
// Get data about images in that answer
const images =
answer[4]?.map((x) => ({
tag: x[2],
url: x[3][0][0],
info: {
raw: x[0][0][0],
source: x[1][0][0],
alt: x[0][4],
website: x[1][1],
favicon: x[1][3],
},
})) ?? [];
tag: x[2],
url: x[3][0][0],
info: {
raw: x[0][0][0],
source: x[1][0][0],
alt: x[0][4],
website: x[1][1],
favicon: x[1][3]
}
})) ?? []
this.#verbose && console.log("✅ All done!\n");
// Put everything together and return
return {
content: formatMarkdown(text, images),
images: images,
ids: {
conversationID: chatData[1][0],
responseID: chatData[1][1],
choiceID: answer[0],
_reqID: String(parseInt(ids?._reqID ?? 0) + 100000),
},
};
this.#verbose && console.log('✅ All done!\n')
// Put everything together and return
return {
content: formatMarkdown(text, images),
images,
ids: {
conversationID: chatData[1][0],
responseID: chatData[1][1],
choiceID: answer[0],
_reqID: String(parseInt(ids?._reqID ?? 0) + 100000)
}
}
}
async #parseConfig(config) {
let result = {
useJSON: false,
imageBuffer: undefined, // Returns as {extension, filename}
ids: undefined,
};
async #parseConfig (config) {
let result = {
useJSON: false,
imageBuffer: undefined, // Returns as {extension, filename}
ids: undefined
}
// Verify that format is one of the two types
if (config?.format) {
switch (config.format) {
case Bard.JSON:
result.useJSON = true;
break;
case Bard.MD:
result.useJSON = false;
break;
default:
throw new Error(
"Format can only be Bard.JSON for JSON output or Bard.MD for Markdown output."
);
}
// Verify that format is one of the two types
if (config?.format) {
switch (config.format) {
case Bard.JSON:
result.useJSON = true
break
case Bard.MD:
result.useJSON = false
break
default:
throw new Error(
'Format can only be Bard.JSON for JSON output or Bard.MD for Markdown output.'
)
}
}
// Verify that the image passed in is either a path to a jpeg, jpg, png, or webp, or that it is a Buffer
if (config?.image) {
if (
config.image instanceof ArrayBuffer
) {
result.imageBuffer = config.image;
} else if (
typeof config.image === "string" &&
// Verify that the image passed in is either a path to a jpeg, jpg, png, or webp, or that it is a Buffer
if (config?.image) {
if (
config.image instanceof ArrayBuffer
) {
result.imageBuffer = config.image
} else if (
typeof config.image === 'string' &&
/\.(jpeg|jpg|png|webp)$/.test(config.image)
) {
let fs;
) {
let fs
try {
fs = await import("fs")
} catch {
throw new Error(
"Loading from an image file path is not supported in a browser environment.",
);
}
try {
fs = await import('fs')
} catch {
throw new Error(
'Loading from an image file path is not supported in a browser environment.'
)
}
result.imageBuffer = fs.readFileSync(
config.image,
).buffer;
} else {
throw new Error(
"Provide your image as a file path to a .jpeg, .jpg, .png, or .webp, or a Buffer."
);
}
result.imageBuffer = fs.readFileSync(
config.image
).buffer
} else {
throw new Error(
'Provide your image as a file path to a .jpeg, .jpg, .png, or .webp, or a Buffer.'
)
}
}
// Verify that all values in IDs exist
if (config?.ids) {
if (config.ids.conversationID && config.ids.responseID && config.ids.choiceID && config.ids._reqID) {
result.ids = config.ids;
} else {
throw new Error(
"Please provide the IDs exported exactly as given."
);
}
// Verify that all values in IDs exist
if (config?.ids) {
if (config.ids.conversationID && config.ids.responseID && config.ids.choiceID && config.ids._reqID) {
result.ids = config.ids
} else {
throw new Error(
'Please provide the IDs exported exactly as given.'
)
}
return result;
}
return result
}
// Ask Bard a question!
async ask(message, config) {
let { useJSON, imageBuffer, ids } = await this.#parseConfig(config);
let response = await this.#query(message, { imageBuffer, ids });
return useJSON ? response : response.content;
async ask (message, config) {
let { useJSON, imageBuffer, ids } = await this.#parseConfig(config)
let response = await this.#query(message, { imageBuffer, ids })
return useJSON ? response : response.content
}
createChat(ids) {
let bard = this;
class Chat {
ids = ids;
createChat (ids) {
let bard = this
class Chat {
ids = ids
async ask(message, config) {
let { useJSON, imageBuffer } = await bard.#parseConfig(config);
let response = await bard.#query(message, {
imageBuffer,
ids: this.ids,
});
this.ids = response.ids;
return useJSON ? response : response.content;
}
export() {
return this.ids;
}
async ask (message, config) {
let { useJSON, imageBuffer } = await bard.#parseConfig(config)
let response = await bard.#query(message, {
imageBuffer,
ids: this.ids
})
this.ids = response.ids
return useJSON ? response : response.content
}
return new Chat();
export () {
return this.ids
}
}
return new Chat()
}
}
export default Bard;
export default Bard
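For reference, a minimal usage sketch of the reformatted Bard class above. This is illustrative only; the __Secure-1PSID cookie value must be supplied by the user and the messages are placeholders.
// Single question, then a multi-turn chat that carries its own conversation IDs.
const bard = new Bard(process.env.BARD_PSID_COOKIE, { verbose: true })
const answer = await bard.ask('Hello!')                               // markdown string by default
const chat = bard.createChat()
const detail = await chat.ask('Tell me more', { format: Bard.JSON })  // full { content, images, ids } object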


@ -1,7 +1,7 @@
import fetch from 'node-fetch'
// this file is deprecated
import {Config} from './config.js'
import { Config } from './config.js'
import HttpsProxyAgent from 'https-proxy-agent'
const newFetch = (url, options = {}) => {


@ -1,10 +1,5 @@
import lodash from 'lodash'
import { Config } from '../utils/config.js'
import StealthPlugin from 'puppeteer-extra-plugin-stealth'
import { getOpenAIAuth } from './openai-auth.js'
import { v4 as uuidv4 } from 'uuid'
import common from '../../../lib/common/common.js'
const chatUrl = 'https://chat.openai.com/chat'
import { Config } from './config.js'
let puppeteer = {}
class Puppeteer {
@ -48,19 +43,9 @@ class Puppeteer {
async initPupp () {
if (!lodash.isEmpty(puppeteer)) return puppeteer
puppeteer = (await import('puppeteer-extra')).default
const pluginStealth = StealthPlugin()
puppeteer.use(pluginStealth)
if (Config['2captchaToken']) {
const pluginCaptcha = (await import('puppeteer-extra-plugin-recaptcha')).default
puppeteer.use(pluginCaptcha({
provider: {
id: '2captcha',
token: Config['2captchaToken'] // REPLACE THIS WITH YOUR OWN 2CAPTCHA API KEY ⚡
},
visualFeedback: true
}))
}
puppeteer = (await import('puppeteer')).default
// const pluginStealth = StealthPlugin()
// puppeteer.use(pluginStealth)
return puppeteer
}
@ -109,25 +94,10 @@ export class ChatGPTPuppeteer extends Puppeteer {
constructor (opts = {}) {
super()
const {
email,
password,
markdown = true,
debug = false,
isGoogleLogin = false,
minimize = true,
captchaToken,
executablePath
debug = false
} = opts
this._email = email
this._password = password
this._markdown = !!markdown
this._debug = !!debug
this._isGoogleLogin = !!isGoogleLogin
this._minimize = !!minimize
this._captchaToken = captchaToken
this._executablePath = executablePath
}
async getBrowser () {
@ -138,394 +108,6 @@ export class ChatGPTPuppeteer extends Puppeteer {
}
}
async init () {
// if (this.inited) {
// return true
// }
logger.info('init chatgpt browser')
try {
// this.browser = await getBrowser({
// captchaToken: this._captchaToken,
// executablePath: this._executablePath
// })
this.browser = await this.getBrowser()
this._page =
(await this.browser.pages())[0] || (await this.browser.newPage())
await maximizePage(this._page)
this._page.on('request', this._onRequest.bind(this))
this._page.on('response', this._onResponse.bind(this))
// bypass cloudflare and login
let preCookies = await redis.get('CHATGPT:RAW_COOKIES')
if (preCookies) {
await this._page.setCookie(...JSON.parse(preCookies))
}
// const url = this._page.url().replace(/\/$/, '')
// bypass annoying popup modals
await this._page.evaluateOnNewDocument(() => {
window.localStorage.setItem('oai/apps/hasSeenOnboarding/chat', 'true')
const chatGPTUpdateDates = ['2022-12-15', '2022-12-19', '2023-01-09', '2023-01-30', '2023-02-10']
chatGPTUpdateDates.forEach(date => {
window.localStorage.setItem(
`oai/apps/hasSeenReleaseAnnouncement/${date}`,
'true'
)
})
})
await this._page.goto(chatUrl, {
waitUntil: 'networkidle2'
})
let timeout = 30000
try {
while (timeout > 0 && (await this._page.title()).toLowerCase().indexOf('moment') > -1) {
// if meet captcha
if (Config['2captchaToken']) {
await this._page.solveRecaptchas()
}
await common.sleep(300)
timeout = timeout - 300
}
} catch (e) {
// Reading the title during navigation throws; an error here means navigation has already started, which is exactly what we want.
}
if (timeout < 0) {
logger.error('wait for cloudflare navigation timeout. 可能遇见验证码')
throw new Error('wait for cloudflare navigation timeout. 可能遇见验证码')
}
try {
await this._page.waitForNavigation({ timeout: 3000 })
} catch (e) {}
if (!await this.getIsAuthenticated()) {
await redis.del('CHATGPT:RAW_COOKIES')
logger.info('需要登录,准备进行自动化登录')
await getOpenAIAuth({
email: this._email,
password: this._password,
browser: this.browser,
page: this._page,
isGoogleLogin: this._isGoogleLogin
})
logger.info('登录完成')
} else {
logger.info('无需登录')
}
} catch (err) {
if (this.browser) {
await this.browser.close()
}
this.browser = null
this._page = null
throw err
}
const url = this._page.url().replace(/\/$/, '')
if (url !== chatUrl) {
await this._page.goto(chatUrl, {
waitUntil: 'networkidle2'
})
}
// dismiss welcome modal (and other modals)
do {
const modalSelector = '[data-headlessui-state="open"]'
if (!(await this._page.$(modalSelector))) {
break
}
try {
await this._page.click(`${modalSelector} button:last-child`)
} catch (err) {
// "next" button not found in welcome modal
break
}
await common.sleep(300)
} while (true)
if (!await this.getIsAuthenticated()) {
return false
}
if (this._minimize) {
await minimizePage(this._page)
}
return true
}
_onRequest = (request) => {
const url = request.url()
if (!isRelevantRequest(url)) {
return
}
const method = request.method()
let body
if (method === 'POST') {
body = request.postData()
try {
body = JSON.parse(body)
} catch (_) {
}
// if (url.endsWith('/conversation') && typeof body === 'object') {
// const conversationBody: types.ConversationJSONBody = body
// const conversationId = conversationBody.conversation_id
// const parentMessageId = conversationBody.parent_message_id
// const messageId = conversationBody.messages?.[0]?.id
// const prompt = conversationBody.messages?.[0]?.content?.parts?.[0]
// // TODO: store this info for the current sendMessage request
// }
}
if (this._debug) {
console.log('\nrequest', {
url,
method,
headers: request.headers(),
body
})
}
}
_onResponse = async (response) => {
const request = response.request()
const url = response.url()
if (!isRelevantRequest(url)) {
return
}
const status = response.status()
let body
try {
body = await response.json()
} catch (_) {
}
if (this._debug) {
console.log('\nresponse', {
url,
ok: response.ok(),
status,
statusText: response.statusText(),
headers: response.headers(),
body,
request: {
method: request.method(),
headers: request.headers(),
body: request.postData()
}
})
}
if (url.endsWith('/conversation')) {
if (status === 403) {
await this.handle403Error()
}
} else if (url.endsWith('api/auth/session')) {
if (status === 403) {
await this.handle403Error()
} else {
const session = body
if (session?.accessToken) {
this._accessToken = session.accessToken
}
}
}
}
async handle403Error () {
console.log(`ChatGPT "${this._email}" session expired; refreshing...`)
try {
await maximizePage(this._page)
await this._page.reload({
waitUntil: 'networkidle2',
timeout: Config.chromeTimeoutMS // 2 minutes
})
if (this._minimize) {
await minimizePage(this._page)
}
} catch (err) {
console.error(
`ChatGPT "${this._email}" error refreshing session`,
err.toString()
)
}
}
async getIsAuthenticated () {
try {
const inputBox = await this._getInputBox()
return !!inputBox
} catch (err) {
// can happen when navigating during login
return false
}
}
async sendMessage (
message,
opts = {}
) {
const {
conversationId,
parentMessageId = uuidv4(),
messageId = uuidv4(),
action = 'next',
// TODO
timeoutMs,
// onProgress,
onConversationResponse
} = opts
const inputBox = await this._getInputBox()
if (!inputBox || !this._accessToken) {
console.log(`chatgpt re-authenticating ${this._email}`)
let isAuthenticated = false
try {
isAuthenticated = await this.init()
} catch (err) {
console.warn(
`chatgpt error re-authenticating ${this._email}`,
err.toString()
)
throw err
}
let timeout = 100000
if (isAuthenticated) {
while (!this._accessToken) {
// wait for async response hook result
await common.sleep(300)
timeout = timeout - 300
if (timeout < 0) {
const error = new Error('Not signed in')
error.statusCode = 401
throw error
}
}
} else if (!this._accessToken) {
const error = new Error('Not signed in')
error.statusCode = 401
throw error
}
}
const url = 'https://chat.openai.com/backend-api/conversation'
const body = {
action,
messages: [
{
id: messageId,
role: 'user',
content: {
content_type: 'text',
parts: [message]
}
}
],
model: Config.plus ? Config.useGPT4 ? 'gpt-4' : 'text-davinci-002-render-sha' : 'text-davinci-002-render-sha',
parent_message_id: parentMessageId
}
if (conversationId) {
body.conversation_id = conversationId
}
// console.log('>>> EVALUATE', url, this._accessToken, body)
const result = await this._page.evaluate(
browserPostEventStream,
url,
this._accessToken,
body,
timeoutMs
)
// console.log('<<< EVALUATE', result)
if (result.error) {
const error = new Error(result.error.message)
error.statusCode = result.error.statusCode
error.statusText = result.error.statusText
if (error.statusCode === 403) {
await this.handle403Error()
}
throw error
}
// TODO: support sending partial response events
if (onConversationResponse) {
onConversationResponse(result.conversationResponse)
}
return {
text: result.response,
conversationId: result.conversationResponse.conversation_id,
id: messageId,
parentMessageId
}
// const lastMessage = await this.getLastMessage()
// await inputBox.focus()
// const paragraphs = message.split('\n')
// for (let i = 0; i < paragraphs.length; i++) {
// await inputBox.type(paragraphs[i], { delay: 0 })
// if (i < paragraphs.length - 1) {
// await this._page.keyboard.down('Shift')
// await inputBox.press('Enter')
// await this._page.keyboard.up('Shift')
// } else {
// await inputBox.press('Enter')
// }
// }
// const responseP = new Promise<string>(async (resolve, reject) => {
// try {
// do {
// await common.sleep(1000)
// // TODO: this logic needs some work because we can have repeat messages...
// const newLastMessage = await this.getLastMessage()
// if (
// newLastMessage &&
// lastMessage?.toLowerCase() !== newLastMessage?.toLowerCase()
// ) {
// return resolve(newLastMessage)
// }
// } while (true)
// } catch (err) {
// return reject(err)
// }
// })
// if (timeoutMs) {
// return pTimeout(responseP, {
// milliseconds: timeoutMs
// })
// } else {
// return responseP
// }
}
async resetThread () {
try {
await this._page.click('nav > a:nth-child(1)')
} catch (err) {
// ignore for now
}
}
async close () {
if (this.browser) {
await this.browser.close()
@ -533,510 +115,6 @@ export class ChatGPTPuppeteer extends Puppeteer {
this._page = null
this.browser = null
}
protected
async _getInputBox () {
// [data-id="root"]
return this._page?.$('textarea')
}
}
export default new ChatGPTPuppeteer()
export async function minimizePage (page) {
const session = await page.target().createCDPSession()
const goods = await session.send('Browser.getWindowForTarget')
const { windowId } = goods
await session.send('Browser.setWindowBounds', {
windowId,
bounds: { windowState: 'minimized' }
})
}
export async function maximizePage (page) {
const session = await page.target().createCDPSession()
const goods = await session.send('Browser.getWindowForTarget')
const { windowId } = goods
await session.send('Browser.setWindowBounds', {
windowId,
bounds: { windowState: 'normal' }
})
}
export function isRelevantRequest (url) {
let pathname
try {
const parsedUrl = new URL(url)
pathname = parsedUrl.pathname
url = parsedUrl.toString()
} catch (_) {
return false
}
if (!url.startsWith('https://chat.openai.com')) {
return false
}
if (
!pathname.startsWith('/backend-api/') &&
!pathname.startsWith('/api/auth/session')
) {
return false
}
if (pathname.endsWith('backend-api/moderations')) {
return false
}
return true
}
/**
* This function is injected into the ChatGPT webapp page using puppeteer. It
* has to be fully self-contained, so we copied a few third-party sources and
* included them in here.
*/
export async function browserPostEventStream (
url,
accessToken,
body,
timeoutMs
) {
// Workaround for https://github.com/esbuild-kit/tsx/issues/113
globalThis.__name = () => undefined
const BOM = [239, 187, 191]
let conversationResponse
let conversationId = body?.conversation_id
let messageId = body?.messages?.[0]?.id
let response = ''
try {
console.log('browserPostEventStream', url, accessToken, body)
let abortController = null
if (timeoutMs) {
abortController = new AbortController()
}
const res = await fetch(url, {
method: 'POST',
body: JSON.stringify(body),
signal: abortController?.signal,
headers: {
accept: 'text/event-stream',
'x-openai-assistant-app-id': '',
authorization: `Bearer ${accessToken}`,
'content-type': 'application/json'
}
})
console.log('browserPostEventStream response', res)
if (!res.ok) {
return {
error: {
message: `ChatGPTAPI error ${res.status || res.statusText}`,
statusCode: res.status,
statusText: res.statusText
},
response: null,
conversationId,
messageId
}
}
const responseP = new Promise(
async (resolve, reject) => {
function onMessage (data) {
if (data === '[DONE]') {
return resolve({
error: null,
response,
conversationId,
messageId,
conversationResponse
})
}
try {
const _checkJson = JSON.parse(data)
} catch (error) {
console.log('warning: parse error.')
return
}
try {
const convoResponseEvent =
JSON.parse(data)
conversationResponse = convoResponseEvent
if (convoResponseEvent.conversation_id) {
conversationId = convoResponseEvent.conversation_id
}
if (convoResponseEvent.message?.id) {
messageId = convoResponseEvent.message.id
}
const partialResponse =
convoResponseEvent.message?.content?.parts?.[0]
if (partialResponse) {
response = partialResponse
}
} catch (err) {
console.warn('fetchSSE onMessage unexpected error', err)
reject(err)
}
}
const parser = createParser((event) => {
if (event.type === 'event') {
onMessage(event.data)
}
})
for await (const chunk of streamAsyncIterable(res.body)) {
const str = new TextDecoder().decode(chunk)
parser.feed(str)
}
}
)
if (timeoutMs) {
if (abortController) {
// This will be called when a timeout occurs in order for us to forcibly
// ensure that the underlying HTTP request is aborted.
responseP.cancel = () => {
abortController.abort()
}
}
console.log({ pTimeout })
return await pTimeout(responseP, {
milliseconds: timeoutMs,
message: 'ChatGPT timed out waiting for response'
})
} else {
return await responseP
}
} catch (err) {
const errMessageL = err.toString().toLowerCase()
if (
response &&
(errMessageL === 'error: typeerror: terminated' ||
errMessageL === 'typeerror: terminated')
) {
// OpenAI sometimes forcefully terminates the socket from their end before
// the HTTP request has resolved cleanly. In my testing, these cases tend to
// happen when OpenAI has already sent the last `response`, so we can ignore
// the `fetch` error in this case.
return {
error: null,
response,
conversationId,
messageId,
conversationResponse
}
}
return {
error: {
message: err.toString(),
statusCode: err.statusCode || err.status || err.response?.statusCode,
statusText: err.statusText || err.response?.statusText
},
response: null,
conversationId,
messageId,
conversationResponse
}
}
// async function pTimeout (promise, option) {
// return await pTimeout(promise, option)
// }
async function * streamAsyncIterable (stream) {
const reader = stream.getReader()
try {
while (true) {
const { done, value } = await reader.read()
if (done) {
return
}
yield value
}
} finally {
reader.releaseLock()
}
}
// @see https://github.com/rexxars/eventsource-parser
function createParser (onParse) {
// Processing state
let isFirstChunk
let buffer
let startingPosition
let startingFieldLength
// Event state
let eventId
let eventName
let data
reset()
return { feed, reset }
function reset () {
isFirstChunk = true
buffer = ''
startingPosition = 0
startingFieldLength = -1
eventId = undefined
eventName = undefined
data = ''
}
function feed (chunk) {
buffer = buffer ? buffer + chunk : chunk
// Strip any UTF8 byte order mark (BOM) at the start of the stream.
// Note that we do not strip any non-UTF8 BOM, as eventsource streams are
// always decoded as UTF8 as per the specification.
if (isFirstChunk && hasBom(buffer)) {
buffer = buffer.slice(BOM.length)
}
isFirstChunk = false
// Set up chunk-specific processing state
const length = buffer.length
let position = 0
let discardTrailingNewline = false
// Read the current buffer byte by byte
while (position < length) {
// EventSource allows for carriage return + line feed, which means we
// need to ignore a linefeed character if the previous character was a
// carriage return
// @todo refactor to reduce nesting, consider checking previous byte?
// @todo but consider multiple chunks etc
if (discardTrailingNewline) {
if (buffer[position] === '\n') {
++position
}
discardTrailingNewline = false
}
let lineLength = -1
let fieldLength = startingFieldLength
let character
for (
let index = startingPosition;
lineLength < 0 && index < length;
++index
) {
character = buffer[index]
if (character === ':' && fieldLength < 0) {
fieldLength = index - position
} else if (character === '\r') {
discardTrailingNewline = true
lineLength = index - position
} else if (character === '\n') {
lineLength = index - position
}
}
if (lineLength < 0) {
startingPosition = length - position
startingFieldLength = fieldLength
break
} else {
startingPosition = 0
startingFieldLength = -1
}
parseEventStreamLine(buffer, position, fieldLength, lineLength)
position += lineLength + 1
}
if (position === length) {
// If we consumed the entire buffer to read the event, reset the buffer
buffer = ''
} else if (position > 0) {
// If there are bytes left to process, set the buffer to the unprocessed
// portion of the buffer only
buffer = buffer.slice(position)
}
}
function parseEventStreamLine (
lineBuffer,
index,
fieldLength,
lineLength
) {
if (lineLength === 0) {
// We reached the last line of this event
if (data.length > 0) {
onParse({
type: 'event',
id: eventId,
event: eventName || undefined,
data: data.slice(0, -1) // remove trailing newline
})
data = ''
eventId = undefined
}
eventName = undefined
return
}
const noValue = fieldLength < 0
const field = lineBuffer.slice(
index,
index + (noValue ? lineLength : fieldLength)
)
let step = 0
if (noValue) {
step = lineLength
} else if (lineBuffer[index + fieldLength + 1] === ' ') {
step = fieldLength + 2
} else {
step = fieldLength + 1
}
const position = index + step
const valueLength = lineLength - step
const value = lineBuffer
.slice(position, position + valueLength)
.toString()
if (field === 'data') {
data += value ? `${value}\n` : '\n'
} else if (field === 'event') {
eventName = value
} else if (field === 'id' && !value.includes('\u0000')) {
eventId = value
} else if (field === 'retry') {
const retry = parseInt(value, 10)
if (!Number.isNaN(retry)) {
onParse({ type: 'reconnect-interval', value: retry })
}
}
}
}
function hasBom (buffer) {
return BOM.every(
(charCode, index) => buffer.charCodeAt(index) === charCode
)
}
// @see https://github.com/sindresorhus/p-timeout
function pTimeout (
promise,
options
) {
const {
milliseconds,
fallback,
message,
customTimers = { setTimeout, clearTimeout }
} = options
let timer
const cancelablePromise = new Promise((resolve, reject) => {
if (typeof milliseconds !== 'number' || Math.sign(milliseconds) !== 1) {
throw new TypeError(
`Expected \`milliseconds\` to be a positive number, got \`${milliseconds}\``
)
}
if (milliseconds === Number.POSITIVE_INFINITY) {
resolve(promise)
return
}
if (options.signal) {
const { signal } = options
if (signal.aborted) {
reject(getAbortedReason(signal))
}
signal.addEventListener('abort', () => {
reject(getAbortedReason(signal))
})
}
timer = customTimers.setTimeout.call(
undefined,
() => {
if (fallback) {
try {
resolve(fallback())
} catch (error) {
reject(error)
}
return
}
const errorMessage =
typeof message === 'string'
? message
: `Promise timed out after ${milliseconds} milliseconds`
const timeoutError =
message instanceof Error ? message : new Error(errorMessage)
if (typeof promise.cancel === 'function') {
promise.cancel()
}
reject(timeoutError)
},
milliseconds
)
;(async () => {
try {
resolve(await promise)
} catch (error) {
reject(error)
} finally {
customTimers.clearTimeout.call(undefined, timer)
}
})()
})
cancelablePromise.clear = () => {
customTimers.clearTimeout.call(undefined, timer)
timer = undefined
}
return cancelablePromise
}
/**
TODO: Remove below function and just 'reject(signal.reason)' when targeting Node 18.
*/
function getAbortedReason (signal) {
const reason =
signal.reason === undefined
? getDOMException('This operation was aborted.')
: signal.reason
return reason instanceof Error ? reason : getDOMException(reason)
}
/**
TODO: Remove AbortError and just throw DOMException when targeting Node 18.
*/
function getDOMException (errorMessage) {
return globalThis.DOMException === undefined
? new Error(errorMessage)
: new DOMException(errorMessage)
}
}


@ -1,3 +1,5 @@
import { Config } from './config.js'
import { newFetch } from './proxy.js'
export async function getChatHistoryGroup (e, num) {
// if (e.adapter === 'shamrock') {
@ -58,3 +60,43 @@ async function pickMemberAsync (e, userId) {
})
})
}
export async function generateSuggestedResponse (conversations) {
let prompt = 'Attention! You do not need to answer any question according to the provided conversation! \nYou are a suggested questions generator. You should generate three suggested questions for the user\'s next turn according to the provided conversation; the three questions should not be too long and must be separated by newlines. The suggested questions should fit the context of the provided conversation. \nNow give your 3 suggested questions, using the same language as the user.'
const res = await newFetch(`${Config.openAiBaseUrl}/chat/completions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${Config.apiKey}`
},
body: JSON.stringify({
model: 'gpt-3.5-turbo-16k',
temperature: 0.7,
messages: [
{
role: 'system',
content: 'you are a suggested questions generator, you should generate three suggested questions according to the provided conversation for the user in the next turn, the three questions should not be too long, and must be separated by newlines. Always use the same language as the user\'s content in the last turn. You should respond like: \nWhat is ChatGPT?\nCan you write a poem about spring?\nWhat can you do?'
},
{
role: 'user',
content: 'User:\n\n我想知道今天的天气\n\nAI:\n\n今天北京的天气是晴转多云最高气温12度最低气温2度空气质量优。\n\n' + prompt
},
{
role: 'assistant',
content: '这个天气适合穿什么衣物?\n今天北京的湿度怎么样\n这个季节北京有什么适合游玩的地方'
},
{
role: 'user',
content: JSON.stringify(conversations) + prompt
}
]
})
})
if (res.status === 200) {
const resJson = await res.json()
if (resJson) { return resJson.choices[0].message.content }
} else {
logger.error('generateSuggestedResponse error: ' + res.status)
return null
}
}
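A usage sketch for the helper above; the conversation array mirrors the few-shot example embedded in the prompt, and its exact shape is left to the caller.
// Returns three newline-separated follow-up questions, or null/undefined on failure.
const conversations = [
  { role: 'user', content: '我想知道今天的天气' },
  { role: 'assistant', content: '今天北京晴转多云,最高气温12度,最低气温2度。' }
]
const suggestions = await generateSuggestedResponse(conversations)
if (suggestions) {
  console.log(suggestions.split('\n'))
}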


@ -74,21 +74,6 @@ export function randomString (length = 5) {
return str.substr(0, length)
}
export async function upsertMessage (message, suffix = '') {
if (suffix) {
suffix = '_' + suffix
}
await redis.set(`CHATGPT:MESSAGE${suffix}:${message.id}`, JSON.stringify(message))
}
export async function getMessageById (id, suffix = '') {
if (suffix) {
suffix = '_' + suffix
}
let messageStr = await redis.get(`CHATGPT:MESSAGE${suffix}:${id}`)
return JSON.parse(messageStr)
}
export async function tryTimes (promiseFn, maxTries = 10) {
try {
return await promiseFn()
@ -102,63 +87,7 @@ export async function tryTimes (promiseFn, maxTries = 10) {
}
export async function makeForwardMsg (e, msg = [], dec = '') {
if (Version.isTrss) {
return common.makeForwardMsg(e, msg, dec)
}
let nickname = e.bot.nickname
if (e.isGroup) {
try {
let info = await e.bot.getGroupMemberInfo(e.group_id, getUin(e))
nickname = info.card || info.nickname
} catch (err) {
console.error(`Failed to get group member info: ${err}`)
}
}
let userInfo = {
user_id: getUin(e),
nickname
}
let forwardMsg = []
msg.forEach((v) => {
forwardMsg.push({
...userInfo,
message: v
})
})
let is_sign = true
/** 制作转发内容 */
if (e.isGroup) {
forwardMsg = await e.group.makeForwardMsg(forwardMsg)
} else if (e.friend) {
forwardMsg = await e.friend.makeForwardMsg(forwardMsg)
} else {
return msg.join('\n')
}
let forwardMsg_json = forwardMsg.data
if (typeof (forwardMsg_json) === 'object') {
if (forwardMsg_json.app === 'com.tencent.multimsg' && forwardMsg_json.meta?.detail) {
let detail = forwardMsg_json.meta.detail
let resid = detail.resid
let fileName = detail.uniseq
let preview = ''
for (let val of detail.news) {
preview += `<title color="#777777" size="26">${val.text}</title>`
}
forwardMsg.data = `<?xml version="1.0" encoding="utf-8"?><msg brief="[聊天记录]" m_fileName="${fileName}" action="viewMultiMsg" tSum="1" flag="3" m_resid="${resid}" serviceID="35" m_fileSize="0"><item layout="1"><title color="#000000" size="34">转发的聊天记录</title>${preview}<hr></hr><summary color="#808080" size="26">${detail.summary}</summary></item><source name="聊天记录"></source></msg>`
forwardMsg.type = 'xml'
forwardMsg.id = 35
}
}
forwardMsg.data = forwardMsg.data
.replace(/\n/g, '')
.replace(/<title color="#777777" size="26">(.+?)<\/title>/g, '___')
.replace(/___+/, `<title color="#777777" size="26">${dec}</title>`)
if (!is_sign) {
forwardMsg.data = forwardMsg.data
.replace('转发的', '不可转发的')
}
return forwardMsg
return common.makeForwardMsg(e, msg, dec)
}
// @see https://github.com/sindresorhus/p-timeout


@ -44,6 +44,12 @@ const defaultConfig = {
sydneyGPTs: 'Copilot',
sydneyImageRecognition: false,
sydneyMoodTip: 'Your response should be divided into two parts, namely, the text and your mood. The mood available to you can only include: blandness, happy, shy, frustrated, disgusted, and frightened.All content should be replied in this format {"text": "", "mood": ""}.All content except mood should be placed in text, It is important to ensure that the content you reply to can be parsed by json.',
chatExampleUser1: '',
chatExampleUser2: '',
chatExampleUser3: '',
chatExampleBot1: '',
chatExampleBot2: '',
chatExampleBot3: '',
enableSuggestedResponses: false,
sydneyEnableSearch: false,
api: defaultChatGPTAPI,
@ -66,13 +72,8 @@ const defaultConfig = {
xhRetReplace: '',
promptPrefixOverride: 'Your answer shouldn\'t be too verbose. Prefer to answer in Chinese.',
assistantLabel: 'ChatGPT',
// thinkingTips: true,
username: '',
password: '',
UA: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36',
headless: false,
chromePath: '',
'2captchaToken': '',
proxy: '',
debug: true,
defaultTimeoutMs: 120000,
@ -179,9 +180,17 @@ const defaultConfig = {
chatglmRefreshToken: '',
sunoSessToken: '',
sunoClientToken: '',
claudeApiKey: '',
claudeApiBaseUrl: 'http://claude-api.ikechan8370.com',
claudeApiMaxToken: 1024,
claudeApiTemperature: 0.8,
claudeApiModel: '', // e.g. claude-3-opus-20240229, claude-3-sonnet-20240229
claudeSystemPrompt: '', // system prompt (persona) for the Claude API
translateSource: 'openai',
enableMd: false, // third-party markdown (not the official QQBot). The adapter must implement segment.markdown and segment.button for this to work; otherwise keep it off, or all kinds of errors will occur
version: 'v2.7.10'
enableToolbox: true, // toolbox switch; turning it off saves memory and speeds up startup
version: 'v2.8.1'
}
const _path = process.cwd()
let config = {}
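The claudeApi* defaults above describe an Anthropic-compatible endpoint. A hedged sketch of a request built from these options (callClaude is illustrative, not the plugin's actual client; fetch here is Node 18+'s global fetch or the plugin's newFetch proxy wrapper):
import { Config } from './config.js'
async function callClaude (userText) {
  // assumes claudeApiBaseUrl exposes the standard Anthropic /v1/messages route
  const res = await fetch(`${Config.claudeApiBaseUrl}/v1/messages`, {
    method: 'POST',
    headers: {
      'x-api-key': Config.claudeApiKey,
      'anthropic-version': '2023-06-01',
      'content-type': 'application/json'
    },
    body: JSON.stringify({
      model: Config.claudeApiModel || 'claude-3-sonnet-20240229',
      max_tokens: Config.claudeApiMaxToken,
      temperature: Config.claudeApiTemperature,
      system: Config.claudeSystemPrompt || undefined,
      messages: [{ role: 'user', content: userText }]
    })
  })
  const data = await res.json()
  return data.content?.[0]?.text
}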

utils/history.js Normal file
View file

@ -0,0 +1,14 @@
export async function upsertMessage (message, suffix = '') {
if (suffix) {
suffix = '_' + suffix
}
await redis.set(`CHATGPT:MESSAGE${suffix}:${message.id}`, JSON.stringify(message))
}
export async function getMessageById (id, suffix = '') {
if (suffix) {
suffix = '_' + suffix
}
let messageStr = await redis.get(`CHATGPT:MESSAGE${suffix}:${id}`)
return JSON.parse(messageStr)
}
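utils/history.js relies on the redis client the plugin already exposes globally. A quick usage sketch with a hypothetical message object and suffix:
import { upsertMessage, getMessageById } from './history.js'
const message = { id: 'msg-123', role: 'assistant', text: 'hello', parentMessageId: 'msg-122' }
await upsertMessage(message, 'claude') // stored as CHATGPT:MESSAGE_claude:msg-123
const restored = await getMessageById('msg-123', 'claude')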

View file

@ -1,7 +1,7 @@
export function decrypt (jwtToken) {
const [encodedHeader, encodedPayload, signature] = jwtToken.split('.')
const decodedHeader = Buffer.from(encodedHeader, 'base64').toString('utf-8')
// const decodedHeader = Buffer.from(encodedHeader, 'base64').toString('utf-8')
const decodedPayload = Buffer.from(encodedPayload, 'base64').toString('utf-8')
return decodedPayload
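decrypt only base64-decodes the payload segment of a JWT and never checks the signature. A small sketch of reading the expiry claim, assuming a token whose payload is standard JSON (jwtToken is hypothetical):
const payload = JSON.parse(decrypt(jwtToken))
const expiresAt = new Date(payload.exp * 1000) // exp is the standard JWT expiry claim, in seconds
console.log(`token expires at ${expiresAt.toISOString()}`)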

View file

@ -1,281 +0,0 @@
import { Config } from '../utils/config.js'
import random from 'random'
import common from '../../../lib/common/common.js'
let hasRecaptchaPlugin = !!Config['2captchaToken']
export async function getOpenAIAuth (opt) {
let {
email,
password,
browser,
page,
timeoutMs = Config.chromeTimeoutMS,
isGoogleLogin = false,
captchaToken = Config['2captchaToken'],
executablePath = Config.chromePath
} = opt
const origBrowser = browser
const origPage = page
try {
const userAgent = await browser.userAgent()
if (!page) {
page = (await browser.pages())[0] || (await browser.newPage())
page.setDefaultTimeout(timeoutMs)
}
await page.goto('https://chat.openai.com/auth/login', {
waitUntil: 'networkidle2'
})
logger.mark('chatgpt checkForChatGPTAtCapacity')
await checkForChatGPTAtCapacity(page)
// NOTE: this is where you may encounter a CAPTCHA
if (hasRecaptchaPlugin) {
logger.mark('RecaptchaPlugin key exists, try to solve recaptchas')
await page.solveRecaptchas()
}
logger.mark('chatgpt checkForChatGPTAtCapacity again')
await checkForChatGPTAtCapacity(page)
// once we get to this point, the Cloudflare cookies should be available
// login as well (optional)
if (email && password) {
let retry = 3
while (retry > 0) {
try {
await waitForConditionOrAtCapacity(page, () =>
page.waitForSelector('#__next .btn-primary', { timeout: timeoutMs / 3 })
)
} catch (e) {
await checkForChatGPTAtCapacity(page)
}
retry--
}
await waitForConditionOrAtCapacity(page, () =>
page.waitForSelector('#__next .btn-primary', { timeout: timeoutMs / 3 })
)
await common.sleep(500)
// click login button and wait for navigation to finish
do {
await Promise.all([
page.waitForNavigation({
waitUntil: 'networkidle2',
timeout: timeoutMs
}),
page.click('#__next .btn-primary')
])
await common.sleep(1000)
} while (page.url().endsWith('/auth/login'))
logger.mark('进入登录页面')
await checkForChatGPTAtCapacity(page)
let submitP
if (isGoogleLogin) {
await page.click('button[data-provider="google"]')
await page.waitForSelector('input[type="email"]')
await page.type('input[type="email"]', email, { delay: 10 })
await Promise.all([
page.waitForNavigation(),
await page.keyboard.press('Enter')
])
await page.waitForSelector('input[type="password"]', { visible: true })
await page.type('input[type="password"]', password, { delay: 10 })
submitP = () => page.keyboard.press('Enter')
} else {
await page.waitForSelector('#username')
await page.type('#username', email, { delay: 20 })
await common.sleep(100)
if (hasRecaptchaPlugin) {
// console.log('solveRecaptchas()')
const res = await page.solveRecaptchas()
// console.log('solveRecaptchas result', res)
}
await page.click('button[type="submit"]')
await page.waitForSelector('#password', { timeout: timeoutMs })
await page.type('#password', password, { delay: 10 })
submitP = () => page.click('button[type="submit"]')
}
await Promise.all([
waitForConditionOrAtCapacity(page, () =>
page.waitForNavigation({
waitUntil: 'networkidle2',
timeout: timeoutMs
})
),
submitP()
])
} else {
await common.sleep(2000)
await checkForChatGPTAtCapacity(page)
}
const pageCookies = await page.cookies()
await redis.set('CHATGPT:RAW_COOKIES', JSON.stringify(pageCookies))
const cookies = pageCookies.reduce(
(map, cookie) => ({ ...map, [cookie.name]: cookie }),
{}
)
const authInfo = {
userAgent,
clearanceToken: cookies.cf_clearance?.value,
sessionToken: cookies['__Secure-next-auth.session-token']?.value,
cookies
}
logger.info('chatgpt登录成功')
return authInfo
} catch (err) {
throw err
} finally {
await page.screenshot({
type: 'png',
path: './error.png'
})
if (origBrowser) {
if (page && page !== origPage) {
await page.close()
}
} else if (browser) {
await browser.close()
}
page = null
browser = null
}
}
async function checkForChatGPTAtCapacity (page, opts = {}) {
const {
timeoutMs = Config.chromeTimeoutMS, // 2 minutes
pollingIntervalMs = 3000,
retries = 10
} = opts
// console.log('checkForChatGPTAtCapacity', page.url())
let isAtCapacity = false
let numTries = 0
do {
try {
await solveSimpleCaptchas(page)
const res = await page.$x("//div[contains(., 'ChatGPT is at capacity')]")
isAtCapacity = !!res?.length
if (isAtCapacity) {
if (++numTries >= retries) {
break
}
// try refreshing the page if chatgpt is at capacity
await page.reload({
waitUntil: 'networkidle2',
timeout: timeoutMs
})
await common.sleep(pollingIntervalMs)
}
} catch (err) {
// ignore errors likely due to navigation
++numTries
break
}
} while (isAtCapacity)
if (isAtCapacity) {
const error = new Error('ChatGPT is at capacity')
error.statusCode = 503
throw error
}
}
async function waitForConditionOrAtCapacity (
page,
condition,
opts = {}
) {
const { pollingIntervalMs = 500 } = opts
return new Promise((resolve, reject) => {
let resolved = false
async function waitForCapacityText () {
if (resolved) {
return
}
try {
await checkForChatGPTAtCapacity(page)
if (!resolved) {
setTimeout(waitForCapacityText, pollingIntervalMs)
}
} catch (err) {
if (!resolved) {
resolved = true
return reject(err)
}
}
}
condition()
.then(() => {
if (!resolved) {
resolved = true
resolve()
}
})
.catch((err) => {
if (!resolved) {
resolved = true
reject(err)
}
})
setTimeout(waitForCapacityText, pollingIntervalMs)
})
}
async function solveSimpleCaptchas (page) {
try {
const verifyYouAreHuman = await page.$('text=Verify you are human')
if (verifyYouAreHuman) {
logger.mark('encounter cloudflare simple captcha "Verify you are human"')
await common.sleep(2000)
await verifyYouAreHuman.click({
delay: random.int(5, 25)
})
await common.sleep(1000)
}
const verifyYouAreHumanCN = await page.$('text=确认您是真人')
if (verifyYouAreHumanCN) {
logger.mark('encounter cloudflare simple captcha "确认您是真人"')
await common.sleep(2000)
await verifyYouAreHumanCN.click({
delay: random.int(5, 25)
})
await common.sleep(1000)
}
const cloudflareButton = await page.$('.hcaptcha-box')
if (cloudflareButton) {
await common.sleep(2000)
await cloudflareButton.click({
delay: random.int(5, 25)
})
await common.sleep(1000)
}
} catch (err) {
// ignore errors
}
}

View file

@ -173,7 +173,8 @@ var ChatGPTAPI = /** @class */ (function () {
conversationId: conversationId,
parentMessageId: messageId,
text: '',
functionCall: null
functionCall: undefined,
conversation: []
};
responseP = new Promise(function (resolve, reject) { return __awaiter(_this, void 0, void 0, function () {
var url, headers, body, res, reason, msg, error, response, message_1, res_1, err_1;
@ -208,6 +209,7 @@ var ChatGPTAPI = /** @class */ (function () {
var _a;
if (data === '[DONE]') {
result.text = result.text.trim();
result.conversation = messages;
return resolve(result);
}
try {
@ -293,6 +295,7 @@ var ChatGPTAPI = /** @class */ (function () {
return [2 /*return*/, reject(new Error("OpenAI error: ".concat(((_b = res_1 === null || res_1 === void 0 ? void 0 : res_1.detail) === null || _b === void 0 ? void 0 : _b.message) || (res_1 === null || res_1 === void 0 ? void 0 : res_1.detail) || 'unknown')))];
}
result.detail = response;
result.conversation = messages;
return [2 /*return*/, resolve(result)];
case 6:
err_1 = _c.sent();

View file

@ -7,7 +7,7 @@ import * as tokenizer from './tokenizer'
import * as types from './types'
import globalFetch from 'node-fetch'
import { fetchSSE } from './fetch-sse'
import {openai, Role} from "./types";
import {ChatCompletionRequestMessage, openai, Role} from "./types";
const CHATGPT_MODEL = 'gpt-3.5-turbo-0613'
@ -176,16 +176,17 @@ export class ChatGPTAPI {
completionParams
)
console.log(`maxTokens: ${maxTokens}, numTokens: ${numTokens}`)
const result: types.ChatMessage = {
const result: types.ChatMessage & { conversation: openai.ChatCompletionRequestMessage[] } = {
role: 'assistant',
id: uuidv4(),
conversationId,
parentMessageId: messageId,
text: undefined,
functionCall: undefined
text: '',
functionCall: undefined,
conversation: []
}
const responseP = new Promise<types.ChatMessage>(
const responseP = new Promise<types.ChatMessage & { conversation: openai.ChatCompletionRequestMessage[] }>(
async (resolve, reject) => {
const url = `${this._apiBaseUrl}/chat/completions`
const headers = {
@ -223,6 +224,7 @@ export class ChatGPTAPI {
onMessage: (data: string) => {
if (data === '[DONE]') {
result.text = result.text.trim()
result.conversation = messages
return resolve(result)
}
@ -318,7 +320,7 @@ export class ChatGPTAPI {
}
result.detail = response
result.conversation = messages
return resolve(result)
} catch (err) {
return reject(err)
@ -548,4 +550,4 @@ export class ChatGPTAPI {
): Promise<void> {
await this._messageStore.set(message.id, message)
}
}
}
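With result.conversation populated on both the '[DONE]' and non-streaming paths, callers can now inspect exactly which messages were sent upstream. A hedged consumer-side sketch (api construction and the ids from an earlier turn are omitted):
const result = await api.sendMessage('hello', { conversationId, parentMessageId })
console.log(result.text)                // assistant reply
console.log(result.conversation.length) // system prompt + history + current user message actually sent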

View file

@ -1,48 +0,0 @@
import fetch from 'node-fetch'
import { readFileSync, writeFile } from 'fs'
const scrape = async (pbCookie, proxy) => {
let option = { headers: { cookie: `${pbCookie}` } }
if (proxy) {
option.agent = proxy
}
const _setting = await fetch(
'https://poe.com/api/settings',
option
)
if (_setting.status !== 200) throw new Error('Failed to fetch token')
const appSettings = await _setting.json()
console.log(appSettings)
const { tchannelData: { channel: channelName } } = appSettings
return {
channelName,
appSettings,
formKey: appSettings.formKey
}
}
const getUpdatedSettings = async (channelName, pbCookie, proxy) => {
let option = { headers: { cookie: `${pbCookie}` } }
if (proxy) {
option.agent = proxy
}
const _setting = await fetch(
`https://poe.com/api/settings?channel=${channelName}`,
option
)
if (_setting.status !== 200) throw new Error('Failed to fetch token')
const appSettings = await _setting.json()
const { tchannelData: { minSeq } } = appSettings
const credentials = JSON.parse(readFileSync('config.json', 'utf8'))
credentials.app_settings.tchannelData.minSeq = minSeq
writeFile('config.json', JSON.stringify(credentials, null, 4), function (err) {
if (err) {
console.log(err)
}
})
return {
minSeq
}
}
export { scrape, getUpdatedSettings }

View file

@ -1,52 +0,0 @@
mutation AddHumanMessageMutation(
$chatId: BigInt!
$bot: String!
$query: String!
$source: MessageSource
$withChatBreak: Boolean! = false
) {
messageCreateWithStatus(
chatId: $chatId
bot: $bot
query: $query
source: $source
withChatBreak: $withChatBreak
) {
message {
id
__typename
messageId
text
linkifiedText
authorNickname
state
vote
voteReason
creationTime
suggestedReplies
chat {
id
shouldShowDisclaimer
}
}
messageLimit{
canSend
numMessagesRemaining
resetTime
shouldShowReminder
}
chatBreak {
id
__typename
messageId
text
linkifiedText
authorNickname
state
vote
voteReason
creationTime
suggestedReplies
}
}
}

View file

@ -1,17 +0,0 @@
mutation AddMessageBreakMutation($chatId: BigInt!) {
messageBreakCreate(chatId: $chatId) {
message {
id
__typename
messageId
text
linkifiedText
authorNickname
state
vote
voteReason
creationTime
suggestedReplies
}
}
}

View file

@ -1,7 +0,0 @@
mutation AutoSubscriptionMutation($subscriptions: [AutoSubscriptionQuery!]!) {
autoSubscribe(subscriptions: $subscriptions) {
viewer {
id
}
}
}

View file

@ -1,8 +0,0 @@
fragment BioFragment on Viewer {
id
poeUser {
id
uid
bio
}
}

View file

@ -1,5 +0,0 @@
subscription ChatAddedSubscription {
chatAdded {
...ChatFragment
}
}

View file

@ -1,6 +0,0 @@
fragment ChatFragment on Chat {
id
chatId
defaultBotNickname
shouldShowDisclaimer
}

View file

@ -1,26 +0,0 @@
query ChatPaginationQuery($bot: String!, $before: String, $last: Int! = 10) {
chatOfBot(bot: $bot) {
id
__typename
messagesConnection(before: $before, last: $last) {
pageInfo {
hasPreviousPage
}
edges {
node {
id
__typename
messageId
text
linkifiedText
authorNickname
state
vote
voteReason
creationTime
suggestedReplies
}
}
}
}
}

View file

@ -1,8 +0,0 @@
query ChatViewQuery($bot: String!) {
chatOfBot(bot: $bot) {
id
chatId
defaultBotNickname
shouldShowDisclaimer
}
}

View file

@ -1,7 +0,0 @@
mutation DeleteHumanMessagesMutation($messageIds: [BigInt!]!) {
messagesDelete(messageIds: $messageIds) {
viewer {
id
}
}
}

View file

@ -1,8 +0,0 @@
fragment HandleFragment on Viewer {
id
poeUser {
id
uid
handle
}
}

View file

@ -1,13 +0,0 @@
mutation LoginWithVerificationCodeMutation(
$verificationCode: String!
$emailAddress: String
$phoneNumber: String
) {
loginWithVerificationCode(
verificationCode: $verificationCode
emailAddress: $emailAddress
phoneNumber: $phoneNumber
) {
status
}
}

View file

@ -1,5 +0,0 @@
subscription MessageAddedSubscription($chatId: BigInt!) {
messageAdded(chatId: $chatId) {
...MessageFragment
}
}

View file

@ -1,6 +0,0 @@
subscription MessageDeletedSubscription($chatId: BigInt!) {
messageDeleted(chatId: $chatId) {
id
messageId
}
}

View file

@ -1,13 +0,0 @@
fragment MessageFragment on Message {
id
__typename
messageId
text
linkifiedText
authorNickname
state
vote
voteReason
creationTime
suggestedReplies
}

View file

@ -1,7 +0,0 @@
mutation MessageRemoveVoteMutation($messageId: BigInt!) {
messageRemoveVote(messageId: $messageId) {
message {
...MessageFragment
}
}
}

View file

@ -1,7 +0,0 @@
mutation MessageSetVoteMutation($messageId: BigInt!, $voteType: VoteType!, $reason: String) {
messageSetVote(messageId: $messageId, voteType: $voteType, reason: $reason) {
message {
...MessageFragment
}
}
}

View file

@ -1,12 +0,0 @@
mutation SendVerificationCodeForLoginMutation(
$emailAddress: String
$phoneNumber: String
) {
sendVerificationCode(
verificationReason: login
emailAddress: $emailAddress
phoneNumber: $phoneNumber
) {
status
}
}

View file

@ -1,9 +0,0 @@
mutation ShareMessagesMutation(
$chatId: BigInt!
$messageIds: [BigInt!]!
$comment: String
) {
messagesShare(chatId: $chatId, messageIds: $messageIds, comment: $comment) {
shareCode
}
}

View file

@ -1,13 +0,0 @@
mutation SignupWithVerificationCodeMutation(
$verificationCode: String!
$emailAddress: String
$phoneNumber: String
) {
signupWithVerificationCode(
verificationCode: $verificationCode
emailAddress: $emailAddress
phoneNumber: $phoneNumber
) {
status
}
}

View file

@ -1,7 +0,0 @@
mutation StaleChatUpdateMutation($chatId: BigInt!) {
staleChatUpdate(chatId: $chatId) {
message {
...MessageFragment
}
}
}

View file

@ -1,3 +0,0 @@
query SummarizePlainPostQuery($comment: String!) {
summarizePlainPost(comment: $comment)
}

View file

@ -1,3 +0,0 @@
query SummarizeQuotePostQuery($comment: String, $quotedPostId: BigInt!) {
summarizeQuotePost(comment: $comment, quotedPostId: $quotedPostId)
}

View file

@ -1,3 +0,0 @@
query SummarizeSharePostQuery($comment: String!, $chatId: BigInt!, $messageIds: [BigInt!]!) {
summarizeSharePost(comment: $comment, chatId: $chatId, messageIds: $messageIds)
}

View file

@ -1,14 +0,0 @@
fragment UserSnippetFragment on PoeUser {
id
uid
bio
handle
fullName
viewerIsFollowing
isPoeOnlyUser
profilePhotoURLTiny: profilePhotoUrl(size: tiny)
profilePhotoURLSmall: profilePhotoUrl(size: small)
profilePhotoURLMedium: profilePhotoUrl(size: medium)
profilePhotoURLLarge: profilePhotoUrl(size: large)
isFollowable
}

View file

@ -1,21 +0,0 @@
query ViewerInfoQuery {
viewer {
id
uid
...ViewerStateFragment
...BioFragment
...HandleFragment
hasCompletedMultiplayerNux
poeUser {
id
...UserSnippetFragment
}
messageLimit{
canSend
numMessagesRemaining
resetTime
shouldShowReminder
}
}
}

View file

@ -1,30 +0,0 @@
fragment ViewerStateFragment on Viewer {
id
__typename
iosMinSupportedVersion: integerGate(gateName: "poe_ios_min_supported_version")
iosMinEncouragedVersion: integerGate(
gateName: "poe_ios_min_encouraged_version"
)
macosMinSupportedVersion: integerGate(
gateName: "poe_macos_min_supported_version"
)
macosMinEncouragedVersion: integerGate(
gateName: "poe_macos_min_encouraged_version"
)
showPoeDebugPanel: booleanGate(gateName: "poe_show_debug_panel")
enableCommunityFeed: booleanGate(gateName: "enable_poe_shares_feed")
linkifyText: booleanGate(gateName: "poe_linkify_response")
enableSuggestedReplies: booleanGate(gateName: "poe_suggested_replies")
removeInviteLimit: booleanGate(gateName: "poe_remove_invite_limit")
enableInAppPurchases: booleanGate(gateName: "poe_enable_in_app_purchases")
availableBots {
nickname
displayName
profilePicture
isDown
disclaimer
subtitle
poweredBy
}
}

View file

@ -1,5 +0,0 @@
subscription ViewerStateUpdatedSubscription {
viewerStateUpdated {
...ViewerStateFragment
}
}

View file

@ -1,299 +0,0 @@
import { readFileSync } from 'fs'
import { scrape } from './credential.js'
import fetch from 'node-fetch'
import crypto from 'crypto'
import { Config } from '../config.js'
let proxy
if (Config.proxy) {
try {
proxy = (await import('https-proxy-agent')).default
} catch (e) {
console.warn('未安装https-proxy-agent请在插件目录下执行pnpm add https-proxy-agent')
}
}
// used when test as a single file
// const _path = process.cwd()
const _path = process.cwd() + '/plugins/chatgpt-plugin/utils/poe'
const gqlDir = `${_path}/graphql`
const queries = {
// chatViewQuery: readFileSync(gqlDir + '/ChatViewQuery.graphql', 'utf8'),
addMessageBreakMutation: readFileSync(gqlDir + '/AddMessageBreakMutation.graphql', 'utf8'),
chatPaginationQuery: readFileSync(gqlDir + '/ChatPaginationQuery.graphql', 'utf8'),
addHumanMessageMutation: readFileSync(gqlDir + '/AddHumanMessageMutation.graphql', 'utf8'),
loginMutation: readFileSync(gqlDir + '/LoginWithVerificationCodeMutation.graphql', 'utf8'),
signUpWithVerificationCodeMutation: readFileSync(gqlDir + '/SignupWithVerificationCodeMutation.graphql', 'utf8'),
sendVerificationCodeMutation: readFileSync(gqlDir + '/SendVerificationCodeForLoginMutation.graphql', 'utf8')
}
const optionMap = [
{ title: 'Claude (Powered by Anthropic)', value: 'a2' },
{ title: 'Sage (Powered by OpenAI - logical)', value: 'capybara' },
{ title: 'Dragonfly (Powered by OpenAI - simpler)', value: 'nutria' },
{ title: 'ChatGPT (Powered by OpenAI - current)', value: 'chinchilla' },
{ title: 'Claude+', value: 'a2_2' },
{ title: 'GPT-4', value: 'beaver' }
]
export class PoeClient {
constructor (props) {
this.config = props
}
headers = {
'Content-Type': 'application/json',
Referrer: 'https://poe.com/',
Origin: 'https://poe.com',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
}
chatId = 0
bot = ''
reConnectWs = false
async setCredentials () {
let result = await scrape(this.config.quora_cookie, this.config.proxy ? proxy(Config.proxy) : null)
console.log(result)
this.config.quora_formkey = result.appSettings.formkey
this.config.channel_name = result.channelName
this.config.app_settings = result.appSettings
// set value
this.headers['poe-formkey'] = this.config.quora_formkey // unused
this.headers['poe-tchannel'] = this.config.channel_name
this.headers.Cookie = this.config.quora_cookie
console.log(this.headers)
}
async subscribe () {
const query = {
queryName: 'subscriptionsMutation',
variables: {
subscriptions: [
{
subscriptionName: 'messageAdded',
query: 'subscription subscriptions_messageAdded_Subscription(\n $chatId: BigInt!\n) {\n messageAdded(chatId: $chatId) {\n id\n messageId\n creationTime\n state\n ...ChatMessage_message\n ...chatHelpers_isBotMessage\n }\n}\n\nfragment ChatMessageDownvotedButton_message on Message {\n ...MessageFeedbackReasonModal_message\n ...MessageFeedbackOtherModal_message\n}\n\nfragment ChatMessageDropdownMenu_message on Message {\n id\n messageId\n vote\n text\n ...chatHelpers_isBotMessage\n}\n\nfragment ChatMessageFeedbackButtons_message on Message {\n id\n messageId\n vote\n voteReason\n ...ChatMessageDownvotedButton_message\n}\n\nfragment ChatMessageOverflowButton_message on Message {\n text\n ...ChatMessageDropdownMenu_message\n ...chatHelpers_isBotMessage\n}\n\nfragment ChatMessageSuggestedReplies_SuggestedReplyButton_message on Message {\n messageId\n}\n\nfragment ChatMessageSuggestedReplies_message on Message {\n suggestedReplies\n ...ChatMessageSuggestedReplies_SuggestedReplyButton_message\n}\n\nfragment ChatMessage_message on Message {\n id\n messageId\n text\n author\n linkifiedText\n state\n ...ChatMessageSuggestedReplies_message\n ...ChatMessageFeedbackButtons_message\n ...ChatMessageOverflowButton_message\n ...chatHelpers_isHumanMessage\n ...chatHelpers_isBotMessage\n ...chatHelpers_isChatBreak\n ...chatHelpers_useTimeoutLevel\n ...MarkdownLinkInner_message\n}\n\nfragment MarkdownLinkInner_message on Message {\n messageId\n}\n\nfragment MessageFeedbackOtherModal_message on Message {\n id\n messageId\n}\n\nfragment MessageFeedbackReasonModal_message on Message {\n id\n messageId\n}\n\nfragment chatHelpers_isBotMessage on Message {\n ...chatHelpers_isHumanMessage\n ...chatHelpers_isChatBreak\n}\n\nfragment chatHelpers_isChatBreak on Message {\n author\n}\n\nfragment chatHelpers_isHumanMessage on Message {\n author\n}\n\nfragment chatHelpers_useTimeoutLevel on Message {\n id\n state\n text\n messageId\n}\n'
},
{
subscriptionName: 'viewerStateUpdated',
query: 'subscription subscriptions_viewerStateUpdated_Subscription {\n viewerStateUpdated {\n id\n ...ChatPageBotSwitcher_viewer\n }\n}\n\nfragment BotHeader_bot on Bot {\n displayName\n ...BotImage_bot\n}\n\nfragment BotImage_bot on Bot {\n profilePicture\n displayName\n}\n\nfragment BotLink_bot on Bot {\n displayName\n}\n\nfragment ChatPageBotSwitcher_viewer on Viewer {\n availableBots {\n id\n ...BotLink_bot\n ...BotHeader_bot\n }\n}\n'
}
]
},
query: 'mutation subscriptionsMutation(\n $subscriptions: [AutoSubscriptionQuery!]!\n) {\n autoSubscribe(subscriptions: $subscriptions) {\n viewer {\n id\n }\n }\n}\n'
}
await this.makeRequest(query)
}
async makeRequest (request) {
let payload = JSON.stringify(request)
let baseString = payload + this.headers['poe-formkey'] + 'WpuLMiXEKKE98j56k'
const md5 = crypto.createHash('md5').update(baseString).digest('hex')
let option = {
method: 'POST',
headers: Object.assign(this.headers, {
'poe-tag-id': md5,
'content-type': 'application/json'
}),
body: payload
}
if (this.config.proxy) {
option.agent = proxy(Config.proxy)
}
const response = await fetch('https://poe.com/api/gql_POST', option)
let text = await response.text()
try {
let result = JSON.parse(text)
console.log({ result })
return result
} catch (e) {
console.error(text)
throw e
}
}
async getBot (displayName) {
let r
let retry = 10
while (retry >= 0) {
let url = `https://poe.com/_next/data/${this.nextData.buildId}/${displayName}.json`
let option = {
headers: this.headers
}
if (this.config.proxy) {
option.agent = proxy(Config.proxy)
}
let r = await fetch(url, option)
let res = await r.text()
try {
let chatData = (JSON.parse(res)).pageProps.payload.chatOfBotDisplayName
return chatData
} catch (e) {
r = res
retry--
}
}
throw new Error(r)
}
async getChatId () {
let option = {
headers: this.headers
}
if (this.config.proxy) {
option.agent = proxy(Config.proxy)
}
let r = await fetch('https://poe.com', option)
let text = await r.text()
const jsonRegex = /<script id="__NEXT_DATA__" type="application\/json">(.+?)<\/script>/
const jsonText = text.match(jsonRegex)[1]
const nextData = JSON.parse(jsonText)
this.nextData = nextData
this.viewer = nextData.props.pageProps.payload.viewer
this.formkey = this.extract_formkey(text)
this.headers['poe-formkey'] = this.formkey
let bots = this.viewer.availableBots
this.bots = {}
for (let i = 0; i < bots.length; i++) {
let bot = bots[i]
let chatData = await this.getBot(bot.displayName)
this.bots[chatData.defaultBotObject.nickname] = chatData
}
console.log(this.bots)
}
extract_formkey (html) {
const scriptRegex = /<script>if\(.+\)throw new Error;(.+)<\/script>/
const scriptText = html.match(scriptRegex)[1]
const keyRegex = /var .="([0-9a-f]+)",/
const keyText = scriptText.match(keyRegex)[1]
const cipherRegex = /.\[(\d+)]=.\[(\d+)]/g
const cipherPairs = scriptText.match(cipherRegex)
const formkeyList = Array(cipherPairs.length).fill('')
for (const pair of cipherPairs) {
const [formkeyIndex, keyIndex] = pair.match(/\d+/g).map(Number)
formkeyList[formkeyIndex] = keyText[keyIndex]
}
const formkey = formkeyList.join('')
return formkey
}
async clearContext (bot) {
try {
const data = await this.makeRequest({
query: `${queries.addMessageBreakMutation}`,
variables: { chatId: this.config.chat_ids[bot] }
})
if (!data.data) {
this.reConnectWs = true // for websocket purpose
console.log('ON TRY! Could not clear context! Trying to reLogin..')
}
return data
} catch (e) {
this.reConnectWs = true // for websocket purpose
console.log('ON CATCH! Could not clear context! Trying to reLogin..')
return e
}
}
async sendMsg (bot, query) {
try {
const data = await this.makeRequest({
query: `${queries.addHumanMessageMutation}`,
variables: {
bot,
chatId: this.bots[bot].chatId,
query,
source: null,
withChatBreak: false
}
})
console.log(data)
if (!data.data) {
this.reConnectWs = true // for cli websocket purpose
console.log('Could not send message! Trying to reLogin..')
}
return data
} catch (e) {
this.reConnectWs = true // for cli websocket purpose
console.error(e)
return e
}
}
async getHistory (bot) {
try {
let response = await this.makeRequest({
query: `${queries.chatPaginationQuery}`,
variables: {
before: null,
bot,
last: 25
}
})
return response.data.chatOfBot.messagesConnection.edges
.map(({ node: { messageId, text, authorNickname } }) => ({
messageId,
text,
authorNickname
}))
} catch (e) {
console.log('There has been an error while fetching your history!')
}
}
async deleteMessages (msgIds) {
await this.makeRequest({
queryName: 'MessageDeleteConfirmationModal_deleteMessageMutation_Mutation',
variables: {
messageIds: msgIds
},
query: 'mutation MessageDeleteConfirmationModal_deleteMessageMutation_Mutation(\n $messageIds: [BigInt!]!\n){\n messagesDelete(messageIds: $messageIds) {\n edgeIds\n }\n}\n'
})
}
async getResponse (bot) {
let text
let state
let authorNickname
try {
while (true) {
await new Promise((resolve) => setTimeout(resolve, 2000))
let response = await this.makeRequest({
query: `${queries.chatPaginationQuery}`,
variables: {
before: null,
bot,
last: 1
}
})
let base = response.data.chatOfBot.messagesConnection.edges
let lastEdgeIndex = base.length - 1
text = base[lastEdgeIndex].node.text
authorNickname = base[lastEdgeIndex].node.authorNickname
state = base[lastEdgeIndex].node.state
if (state === 'complete' && authorNickname === bot) {
break
}
}
} catch (e) {
console.log('Could not get response!')
return {
status: false,
message: 'failed',
data: null
}
}
return {
status: true,
message: 'success',
data: text
}
}
}

View file

@ -1,65 +0,0 @@
import WebSocket from 'ws'
import * as diff from 'diff'
import { readFileSync } from 'fs'
const getSocketUrl = async () => {
const tchRand = Math.floor(100000 + Math.random() * 900000) // They're surely using 6 digit random number for ws url.
const socketUrl = `wss://tch${tchRand}.tch.quora.com`
const credentials = JSON.parse(readFileSync('config.json', 'utf8'))
const appSettings = credentials.app_settings.tchannelData
const boxName = appSettings.boxName
const minSeq = appSettings.minSeq
const channel = appSettings.channel
const hash = appSettings.channelHash
return `${socketUrl}/up/${boxName}/updates?min_seq=${minSeq}&channel=${channel}&hash=${hash}`
}
export const connectWs = async () => {
const url = await getSocketUrl()
const ws = new WebSocket(url)
return new Promise((resolve, reject) => {
ws.on('open', function open () {
console.log('Connected to websocket')
return resolve(ws)
})
})
}
export const disconnectWs = async (ws) => {
return new Promise((resolve, reject) => {
ws.on('close', function close () {
return resolve(true)
})
ws.close()
})
}
export const listenWs = async (ws) => {
let previousText = ''
return new Promise((resolve, reject) => {
const onMessage = function incoming (data) {
let jsonData = JSON.parse(data)
if (jsonData.messages && jsonData.messages.length > 0) {
const messages = JSON.parse(jsonData.messages[0])
const dataPayload = messages.payload.data
const text = dataPayload.messageAdded.text
const state = dataPayload.messageAdded.state
if (state !== 'complete') {
const differences = diff.diffChars(previousText, text)
let result = ''
differences.forEach((part) => {
if (part.added) {
result += part.value
}
})
previousText = text
process.stdout.write(result)
} else {
ws.removeListener('message', onMessage)
return resolve(true)
}
}
}
ws.on('message', onMessage)
})
}

View file

@ -1,65 +0,0 @@
import WebSocket from 'ws'
import * as diff from 'diff'
import { readFileSync } from 'fs'
const getSocketUrl = async () => {
const tchRand = Math.floor(100000 + Math.random() * 900000) // They're surely using 6 digit random number for ws url.
const socketUrl = `wss://tch${tchRand}.tch.quora.com`
const credentials = JSON.parse(readFileSync('config.json', 'utf8'))
const appSettings = credentials.app_settings.tchannelData
const boxName = appSettings.boxName
const minSeq = appSettings.minSeq
const channel = appSettings.channel
const hash = appSettings.channelHash
return `${socketUrl}/up/${boxName}/updates?min_seq=${minSeq}&channel=${channel}&hash=${hash}`
}
export const connectWs = async () => {
const url = await getSocketUrl()
const ws = new WebSocket(url)
return new Promise((resolve, reject) => {
ws.on('open', function open () {
console.log('Connected to websocket')
return resolve(ws)
})
})
}
export const disconnectWs = async (ws) => {
return new Promise((resolve, reject) => {
ws.on('close', function close () {
return resolve(true)
})
ws.close()
})
}
export const listenWs = async (ws) => {
let previousText = ''
return new Promise((resolve, reject) => {
const onMessage = function incoming (data) {
let jsonData = JSON.parse(data)
if (jsonData.messages && jsonData.messages.length > 0) {
const messages = JSON.parse(jsonData.messages[0])
const dataPayload = messages.payload.data
const text = dataPayload.messageAdded.text
const state = dataPayload.messageAdded.state
if (state !== 'complete') {
const differences = diff.diffChars(previousText, text)
let result = ''
differences.forEach((part) => {
if (part.added) {
result += part.value
}
})
previousText = text
process.stdout.write(result)
} else {
ws.removeListener('message', onMessage)
return resolve(true)
}
}
}
ws.on('message', onMessage)
})
}

View file

@ -11,9 +11,19 @@ export function readPrompts () {
txtFiles.forEach(txtFile => {
let name = _.trimEnd(txtFile, '.txt')
const content = fs.readFileSync(`${_path}/plugins/chatgpt-plugin/prompts/${txtFile}`, 'utf8')
let example = []
try {
if (fs.existsSync(`${_path}/plugins/chatgpt-plugin/prompts/${name}_example.json`)) {
example = fs.readFileSync(`${_path}/plugins/chatgpt-plugin/prompts/${name}_example.json`, 'utf8')
example = JSON.parse(example)
}
} catch (err) {
logger.debug(err)
}
prompts.push({
name,
content
content,
example
})
})
}
@ -34,11 +44,15 @@ export function getPromptByName (name) {
}
}
export function saveOnePrompt (name, content) {
export function saveOnePrompt (name, content, examples) {
const _path = process.cwd()
mkdirs(`${_path}/plugins/chatgpt-plugin/prompts`)
let filePath = `${_path}/plugins/chatgpt-plugin/prompts/${name}.txt`
fs.writeFileSync(filePath, content)
if (examples) {
let examplePath = `${_path}/plugins/chatgpt-plugin/prompts/${name}_example.json`
fs.writeFileSync(examplePath, JSON.stringify(examples))
}
}
export function deleteOnePrompt (name) {
@ -46,4 +60,8 @@ export function deleteOnePrompt (name) {
mkdirs(`${_path}/plugins/chatgpt-plugin/prompts`)
let filePath = `${_path}/plugins/chatgpt-plugin/prompts/${name}.txt`
fs.unlinkSync(filePath)
try {
let examplePath = `${_path}/plugins/chatgpt-plugin/prompts/${name}_example.json`
fs.unlinkSync(examplePath)
} catch (err) {}
}
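Each prompt can now carry a <name>_example.json file: saveOnePrompt serializes whatever examples array the caller passes, and readPrompts parses it back into the example field. The schema is left to the caller; one plausible shape, mirroring the chatExampleUser*/chatExampleBot* pairs in the config, is sketched below (the prompt name and turns are made up):
saveOnePrompt('catgirl', 'You are a cheerful catgirl who ends every sentence with "nya".', [
  { user: 'Who are you?', bot: 'I am your catgirl assistant, nya.' },
  { user: 'What can you do?', bot: 'I can chat with you and answer questions, nya.' }
])
// readPrompts() will later return { name: 'catgirl', content: '...', example: [ ... ] }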

View file

@ -1,170 +0,0 @@
import { Config } from '../config.js'
import slack from '@slack/bolt'
import { limitString } from '../common.js'
import common from '../../../../lib/common/common.js'
let proxy
if (Config.proxy) {
try {
proxy = (await import('https-proxy-agent')).default
} catch (e) {
console.warn('未安装https-proxy-agent请在插件目录下执行pnpm add https-proxy-agent')
}
}
export class SlackClaudeClient {
constructor (props) {
this.config = props
if (Config.slackSigningSecret && Config.slackBotUserToken && Config.slackUserToken) {
let option = {
signingSecret: Config.slackSigningSecret,
token: Config.slackBotUserToken,
// socketMode: true,
appToken: Config.slackUserToken
// port: 45912
}
if (Config.proxy) {
option.agent = proxy(Config.proxy)
}
option.logLevel = Config.debug ? 'debug' : 'info'
this.app = new slack.App(option)
} else {
throw new Error('未配置Slack信息')
}
}
async sendMessage (prompt, e, t = 0) {
if (t > 10) {
return 'claude 未响应'
}
if (prompt.length > 3990) {
logger.warn('消息长度大于slack限制长度剪切至3990')
prompt = limitString(prompt, 3990, false)
}
let channel
let qq = e.sender.user_id
if (Config.slackClaudeSpecifiedChannel) {
channel = { id: Config.slackClaudeSpecifiedChannel }
} else {
let channels = await this.app.client.conversations.list({
token: this.config.slackUserToken,
types: 'public_channel,private_channel'
})
channel = channels.channels.filter(c => c.name === '' + qq)
if (!channel || channel.length === 0) {
let createChannelResponse = await this.app.client.conversations.create({
token: this.config.slackUserToken,
name: qq + '',
is_private: true
})
channel = createChannelResponse.channel
await this.app.client.conversations.invite({
token: this.config.slackUserToken,
channel: channel.id,
users: Config.slackClaudeUserId
})
await common.sleep(1000)
} else {
channel = channel[0]
}
}
let conversationId = await redis.get(`CHATGPT:SLACK_CONVERSATION:${qq}`)
if (!conversationId) {
let sendResponse = await this.app.client.chat.postMessage({
as_user: true,
text: `<@${Config.slackClaudeUserId}> ${prompt}`,
token: this.config.slackUserToken,
channel: channel.id
})
let ts = sendResponse.ts
let response = '_Typing…_'
let tryTimes = 0
// 发完先等3喵
await common.sleep(3000)
while (response.trim().endsWith('_Typing…_')) {
let replies = await this.app.client.conversations.replies({
token: this.config.slackUserToken,
channel: channel.id,
limit: 1000,
ts
})
await await redis.set(`CHATGPT:SLACK_CONVERSATION:${qq}`, `${ts}`)
if (replies.messages.length > 0) {
let formalMessages = replies.messages
.filter(m => m.metadata?.event_type !== 'claude_moderation')
.filter(m => !m.text.startsWith('_'))
if (!formalMessages[formalMessages.length - 1].bot_profile) {
// 问题的下一句不是bot回复的这属于意料之外的问题可能是多人同时问问题导致 再问一次吧
return await this.sendMessage(prompt, e, t + 1)
}
let reply = formalMessages[formalMessages.length - 1]
if (!reply.text.startsWith(`<@${Config.slackClaudeUserId}>`)) {
response = reply.text
if (Config.debug) {
let text = response.replace('_Typing…_', '')
if (text) {
logger.info(response.replace('_Typing…_', ''))
}
}
}
}
await common.sleep(2000)
tryTimes++
if (tryTimes > 3 && response === '_Typing…_') {
// 过了6秒还没任何回复就重新发一下试试
logger.warn('claude没有响应重试中')
return await this.sendMessage(prompt, e, t + 1)
}
}
return response
} else {
let postResponse = await this.app.client.chat.postMessage({
as_user: true,
text: `<@${Config.slackClaudeUserId}> ${prompt}`,
token: this.config.slackUserToken,
channel: channel.id,
thread_ts: conversationId
})
let postTs = postResponse.ts
let response = '_Typing…_'
let tryTimes = 0
// 发完先等3喵
await common.sleep(3000)
while (response.trim().endsWith('_Typing…_')) {
let replies = await this.app.client.conversations.replies({
token: this.config.slackUserToken,
channel: channel.id,
limit: 1000,
ts: conversationId,
oldest: postTs
})
if (replies.messages.length > 0) {
let formalMessages = replies.messages
.filter(m => m.metadata?.event_type !== 'claude_moderation')
.filter(m => !m.text.startsWith('_'))
if (!formalMessages[formalMessages.length - 1].bot_profile) {
// 问题的下一句不是bot回复的这属于意料之外的问题可能是多人同时问问题导致 再问一次吧
return await this.sendMessage(prompt, e, t + 1)
}
let reply = formalMessages[formalMessages.length - 1]
if (!reply.text.startsWith(`<@${Config.slackClaudeUserId}>`)) {
response = reply.text
if (Config.debug) {
let text = response.replace('_Typing…_', '')
if (text) {
logger.info(response.replace('_Typing…_', ''))
}
}
}
}
await common.sleep(2000)
tryTimes++
if (tryTimes > 3 && response === '_Typing…_') {
// 过了6秒还没任何回复就重新发一下试试
logger.warn('claude没有响应重试中')
return await this.sendMessage(prompt, e, t + 1)
}
}
return response
}
}
}

View file

@ -21,6 +21,7 @@ export class WebsiteTool extends AbstractTool {
func = async function (opts) {
let { url, mode, e } = opts
let browser
try {
// let res = await fetch(url, {
// headers: {
@ -34,7 +35,7 @@ export class WebsiteTool extends AbstractTool {
origin = true
}
let ppt = new ChatGPTPuppeteer()
let browser = await ppt.getBrowser()
browser = await ppt.getBrowser()
let page = await browser.newPage()
await page.goto(url, {
waitUntil: 'networkidle2'
@ -104,6 +105,12 @@ export class WebsiteTool extends AbstractTool {
}
} catch (err) {
return `failed to visit the website, error: ${err.toString()}`
} finally {
if (browser) {
try {
await browser.close()
} catch (err) {}
}
}
}

View file

@ -5,9 +5,7 @@ import { ChatGPTAPI } from './openai/chatgpt-api.js'
import { newFetch } from './proxy.js'
import { CustomGoogleGeminiClient } from '../client/CustomGoogleGeminiClient.js'
import XinghuoClient from './xinghuo/xinghuo.js'
import {getImg, getMessageById, upsertMessage} from './common.js'
import {QwenApi} from "./alibaba/qwen-api.js";
import {v4 as uuid} from "uuid";
import { QwenApi } from './alibaba/qwen-api.js'
// 代码参考https://github.com/yeyang52/yenai-plugin/blob/b50b11338adfa5a4ef93912eefd2f1f704e8b990/model/api/funApi.js#L25
export const translateLangSupports = [

View file

@ -1,28 +1,5 @@
import { Config } from '../config.js'
let proxy
if (Config.proxy) {
try {
proxy = (await import('https-proxy-agent')).default
} catch (e) {
console.warn('未安装https-proxy-agent请在插件目录下执行pnpm add https-proxy-agent')
}
}
const newFetch = (url, options = {}) => {
const defaultOptions = Config.proxy
? {
agent: proxy(Config.proxy)
}
: {}
const mergedOptions = {
...defaultOptions,
...options
}
return fetch(url, mergedOptions)
}
import { newFetch } from '../proxy.js'
/**
* 生成voxTTSMode下的wav音频

View file

@ -1,15 +1,8 @@
import { Config } from '../config.js'
import fs from 'fs'
import nodejieba from '@node-rs/jieba'
let nodejieba
try {
nodejieba = (await import('@node-rs/jieba')).default
nodejieba.load()
} catch (err) {
logger.info('未安装@node-rs/jieba娱乐功能-词云统计不可用')
}
export class Tokenizer {
class Tokenizer {
async getHistory (e, groupId, date = new Date(), duration = 0, userId) {
if (!groupId) {
throw new Error('no valid group id')
@ -78,6 +71,10 @@ export class Tokenizer {
if (!nodejieba) {
throw new Error('未安装node-rs/jieba娱乐功能-词云统计不可用')
}
if (!this.loaded) {
nodejieba.load()
this.loaded = true
}
// duration represents the number of hours to go back, should in range [0, 24]
let chats = await this.getHistory(e, groupId, new Date(), duration, userId)
let durationStr = duration > 0 ? `${duration}小时` : '今日'
@ -139,7 +136,7 @@ export class Tokenizer {
}
}
export class ShamrockTokenizer extends Tokenizer {
class ShamrockTokenizer extends Tokenizer {
async getHistory (e, groupId, date = new Date(), duration = 0, userId) {
logger.mark('当前使用Shamrock适配器')
if (!groupId) {
@ -227,3 +224,8 @@ function isTimestampInDateRange (timestamp, startOfSpecifiedDate, endOfSpecified
// Step 5: Compare the given timestamp with the start and end of the specified date
return timestamp >= startOfSpecifiedDate && timestamp < endOfSpecifiedDate
}
export default {
default: new Tokenizer(),
shamrock: new ShamrockTokenizer()
}

View file

@ -1,4 +1,4 @@
import { ShamrockTokenizer, Tokenizer } from './tokenizer.js'
import Tokenizer from './tokenizer.js'
import { render } from '../common.js'
export async function makeWordcloud (e, groupId, duration = 0, userId) {
@ -12,8 +12,8 @@ export async function makeWordcloud (e, groupId, duration = 0, userId) {
function getTokenizer (e) {
if (e.adapter === 'shamrock') {
return new ShamrockTokenizer()
return Tokenizer.shamrock
} else {
return new Tokenizer()
return Tokenizer.default
}
}
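getTokenizer now hands back the shared Tokenizer.default / Tokenizer.shamrock instances instead of constructing a new tokenizer per call. Typical invocations of makeWordcloud look like this (per the tokenizer comment, duration is the number of hours to look back and 0 means today):
// sketch: inside a plugin command handler
await makeWordcloud(e, e.group_id)               // today's word cloud for the whole group
await makeWordcloud(e, e.group_id, 3, e.user_id) // last 3 hours, a single member only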