cherry-pick: 1

This commit is contained in:
ikechan8370 2024-03-10 17:27:41 +08:00
parent 8e73a28dae
commit fd2d976686
5 changed files with 69 additions and 15 deletions

View file

@ -72,7 +72,7 @@ import { solveCaptchaOneShot } from '../utils/bingCaptcha.js'
import { ClaudeAIClient } from '../utils/claude.ai/index.js' import { ClaudeAIClient } from '../utils/claude.ai/index.js'
import { getProxy } from '../utils/proxy.js' import { getProxy } from '../utils/proxy.js'
import { QwenApi } from '../utils/alibaba/qwen-api.js' import { QwenApi } from '../utils/alibaba/qwen-api.js'
import { getChatHistoryGroup } from '../utils/chat.js' import { generateSuggestedResponse, getChatHistoryGroup } from '../utils/chat.js'
import { CustomGoogleGeminiClient } from '../client/CustomGoogleGeminiClient.js' import { CustomGoogleGeminiClient } from '../client/CustomGoogleGeminiClient.js'
import { resizeAndCropImage } from '../utils/dalle.js' import { resizeAndCropImage } from '../utils/dalle.js'
import fs from 'fs' import fs from 'fs'
@ -862,8 +862,8 @@ export class chatgpt extends plugin {
} }
// 处理星火和bard图片 // 处理星火和bard图片
if ((use === 'bard' || use === 'xh') && chatMessage?.images) { if ((use === 'bard' || use === 'xh') && chatMessage?.images) {
chatMessage.images.forEach(async element => { chatMessage.images.forEach(element => {
await this.reply([element.tag, segment.image(element.url)]) this.reply([element.tag, segment.image(element.url)])
}) })
} }
// chatglm4图片调整至sendMessage中处理 // chatglm4图片调整至sendMessage中处理
@ -1102,7 +1102,13 @@ export class chatgpt extends plugin {
if (quotemessage.length > 0) { if (quotemessage.length > 0) {
this.reply(await makeForwardMsg(this.e, quotemessage.map(msg => `${msg.text} - ${msg.url}`))) this.reply(await makeForwardMsg(this.e, quotemessage.map(msg => `${msg.text} - ${msg.url}`)))
} }
if (chatMessage?.conversation && Config.enableSuggestedResponses && !chatMessage.suggestedResponses && Config.apiKey) {
try {
chatMessage.suggestedResponses = await generateSuggestedResponse(chatMessage.conversation)
} catch (err) {
logger.debug('生成建议回复失败', err)
}
}
this.reply(responseText, e.isGroup, { this.reply(responseText, e.isGroup, {
btnData: { btnData: {
use, use,
@ -1166,7 +1172,7 @@ export class chatgpt extends plugin {
} }
async glm4 (e) { async glm4 (e) {
return await this.otherMode(e, 'chatglm4') return await this.otherMode(e, 'chatglm4', '#glm4')
} }
async gemini (e) { async gemini (e) {
@ -2027,7 +2033,7 @@ export class chatgpt extends plugin {
tools.push(...[new EliMusicTool(), new EliMovieTool()]) tools.push(...[new EliMusicTool(), new EliMovieTool()])
} catch (err) { } catch (err) {
tools.push(...[new SendMusicTool(), new SearchMusicTool()]) tools.push(...[new SendMusicTool(), new SearchMusicTool()])
logger.mark(logger.green('【ChatGPT-Plugin】插件avocado-plugin未安装') + ',安装后可查看最近热映电影与体验可玩性更高的点歌工具。\n可前往 https://github.com/Qz-Sean/avocado-plugin 获取') logger.debug(logger.green('【ChatGPT-Plugin】插件avocado-plugin未安装') + ',安装后可查看最近热映电影与体验可玩性更高的点歌工具。\n可前往 https://github.com/Qz-Sean/avocado-plugin 获取')
} }
if (e.isGroup) { if (e.isGroup) {
let botInfo = await e.bot.getGroupMemberInfo(e.group_id, getUin(e), true) let botInfo = await e.bot.getGroupMemberInfo(e.group_id, getUin(e), true)

View file

@ -64,7 +64,7 @@ export default class SydneyAIClient {
headers: { headers: {
accept: 'application/json', accept: 'application/json',
'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6', 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
'content-type': 'application/json', // 'content-type': 'application/json',
// 'sec-ch-ua': '"Microsoft Edge";v="113", "Chromium";v="113", "Not-A.Brand";v="24"', // 'sec-ch-ua': '"Microsoft Edge";v="113", "Chromium";v="113", "Not-A.Brand";v="24"',
// 'sec-ch-ua-arch': '"x86"', // 'sec-ch-ua-arch': '"x86"',
// 'sec-ch-ua-bitness': '"64"', // 'sec-ch-ua-bitness': '"64"',

View file

@ -1,3 +1,6 @@
import { Config } from './config.js'
import { newFetch } from './proxy.js'
export async function getChatHistoryGroup (e, num) { export async function getChatHistoryGroup (e, num) {
// if (e.adapter === 'shamrock') { // if (e.adapter === 'shamrock') {
// return await e.group.getChatHistory(0, num, false) // return await e.group.getChatHistory(0, num, false)
@ -57,3 +60,43 @@ async function pickMemberAsync (e, userId) {
}) })
}) })
} }
/**
 * Ask the OpenAI chat-completions API to produce three follow-up questions the
 * user could ask next, based on the finished conversation.
 *
 * The request uses a fixed instruction plus a one-shot example (user turn +
 * assistant turn) demonstrating the expected newline-separated output format,
 * then appends the serialized conversation with the same instruction.
 *
 * @param {Array<object>} conversations chat messages of the completed turn;
 *   serialized with JSON.stringify into the final user message.
 * @returns {Promise<string|null>} newline-separated suggested questions, or
 *   null when the HTTP call fails or the response payload is unusable.
 */
export async function generateSuggestedResponse (conversations) {
  let prompt = 'Attention! you do not need to answer any question according to the provided conversation! \nYou are a suggested questions generator, you should generate three suggested questions according to the provided conversation for the user in the next turn, the three questions should not be too long, and must be superated with newline. The suggested questions should be suitable in the context of the provided conversation, and should not be too long. \nNow give your 3 suggested questions, use the same language with the user.'
  const res = await newFetch(`${Config.openAiBaseUrl}/chat/completions`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${Config.apiKey}`
    },
    body: JSON.stringify({
      model: 'gpt-3.5-turbo-16k',
      temperature: 0.7,
      messages: [
        {
          role: 'system',
          content: 'you are a suggested questions generator, you should generate three suggested questions according to the provided conversation for the user in the next turn, the three questions should not be too long, and must be superated with newline. Always use the same language with the user\'s content in the last turn. you should response like: \nWhat is ChatGPT?\nCan you write a poem aboud spring?\nWhat can you do?'
        },
        {
          role: 'user',
          content: 'User:\n\n我想知道今天的天气\n\nAI:\n\n今天北京的天气是晴转多云最高气温12度最低气温2度空气质量优。\n\n' + prompt
        },
        {
          role: 'assistant',
          content: '这个天气适合穿什么衣物?\n今天北京的湿度怎么样\n这个季节北京有什么适合游玩的地方'
        },
        {
          role: 'user',
          content: JSON.stringify(conversations) + prompt
        }
      ]
    })
  })
  if (res.status !== 200) {
    logger.error('generateSuggestedResponse error: ' + res.status)
    return null
  }
  const resJson = await res.json()
  // Guard the whole path: previously a 200 with a falsy body fell off the end
  // (returning undefined), and a 200 with an empty `choices` array threw a
  // TypeError. Normalize every unusable payload to null like the error path.
  return resJson?.choices?.[0]?.message?.content ?? null
}

View file

@ -172,8 +172,9 @@ var ChatGPTAPI = /** @class */ (function () {
id: uuidv4(), id: uuidv4(),
conversationId: conversationId, conversationId: conversationId,
parentMessageId: messageId, parentMessageId: messageId,
text: '', text: undefined,
functionCall: null functionCall: undefined,
conversation: []
}; };
responseP = new Promise(function (resolve, reject) { return __awaiter(_this, void 0, void 0, function () { responseP = new Promise(function (resolve, reject) { return __awaiter(_this, void 0, void 0, function () {
var url, headers, body, res, reason, msg, error, response, message_1, res_1, err_1; var url, headers, body, res, reason, msg, error, response, message_1, res_1, err_1;
@ -208,6 +209,7 @@ var ChatGPTAPI = /** @class */ (function () {
var _a; var _a;
if (data === '[DONE]') { if (data === '[DONE]') {
result.text = result.text.trim(); result.text = result.text.trim();
result.conversation = messages;
return resolve(result); return resolve(result);
} }
try { try {
@ -293,6 +295,7 @@ var ChatGPTAPI = /** @class */ (function () {
return [2 /*return*/, reject(new Error("OpenAI error: ".concat(((_b = res_1 === null || res_1 === void 0 ? void 0 : res_1.detail) === null || _b === void 0 ? void 0 : _b.message) || (res_1 === null || res_1 === void 0 ? void 0 : res_1.detail) || 'unknown')))]; return [2 /*return*/, reject(new Error("OpenAI error: ".concat(((_b = res_1 === null || res_1 === void 0 ? void 0 : res_1.detail) === null || _b === void 0 ? void 0 : _b.message) || (res_1 === null || res_1 === void 0 ? void 0 : res_1.detail) || 'unknown')))];
} }
result.detail = response; result.detail = response;
result.conversation = messages;
return [2 /*return*/, resolve(result)]; return [2 /*return*/, resolve(result)];
case 6: case 6:
err_1 = _c.sent(); err_1 = _c.sent();

View file

@ -7,7 +7,7 @@ import * as tokenizer from './tokenizer'
import * as types from './types' import * as types from './types'
import globalFetch from 'node-fetch' import globalFetch from 'node-fetch'
import { fetchSSE } from './fetch-sse' import { fetchSSE } from './fetch-sse'
import {openai, Role} from "./types"; import {ChatCompletionRequestMessage, openai, Role} from "./types";
const CHATGPT_MODEL = 'gpt-3.5-turbo-0613' const CHATGPT_MODEL = 'gpt-3.5-turbo-0613'
@ -176,16 +176,17 @@ export class ChatGPTAPI {
completionParams completionParams
) )
console.log(`maxTokens: ${maxTokens}, numTokens: ${numTokens}`) console.log(`maxTokens: ${maxTokens}, numTokens: ${numTokens}`)
const result: types.ChatMessage = { const result: types.ChatMessage & { conversation: openai.ChatCompletionRequestMessage[] }= {
role: 'assistant', role: 'assistant',
id: uuidv4(), id: uuidv4(),
conversationId, conversationId,
parentMessageId: messageId, parentMessageId: messageId,
text: undefined, text: undefined,
functionCall: undefined functionCall: undefined,
conversation: []
} }
const responseP = new Promise<types.ChatMessage>( const responseP = new Promise<types.ChatMessage & { conversation: openai.ChatCompletionRequestMessage[] }>(
async (resolve, reject) => { async (resolve, reject) => {
const url = `${this._apiBaseUrl}/chat/completions` const url = `${this._apiBaseUrl}/chat/completions`
const headers = { const headers = {
@ -223,6 +224,7 @@ export class ChatGPTAPI {
onMessage: (data: string) => { onMessage: (data: string) => {
if (data === '[DONE]') { if (data === '[DONE]') {
result.text = result.text.trim() result.text = result.text.trim()
result.conversation = messages
return resolve(result) return resolve(result)
} }
@ -318,7 +320,7 @@ export class ChatGPTAPI {
} }
result.detail = response result.detail = response
result.conversation = messages
return resolve(result) return resolve(result)
} catch (err) { } catch (err) {
return reject(err) return reject(err)
@ -548,4 +550,4 @@ export class ChatGPTAPI {
): Promise<void> { ): Promise<void> {
await this._messageStore.set(message.id, message) await this._messageStore.set(message.id, message)
} }
} }