update: sync fork

127Wzc 2025-05-02 18:14:04 +08:00
parent 2645292dd1
commit 1a8ad165a8
3 changed files with 15 additions and 17 deletions

View file

@@ -17,6 +17,10 @@
> This field evolves quickly and the project iterates often, so parts of this document are outdated. If anything is unclear, open a discussion or ask the experienced members in the group chat.
>
> The plugin is undergoing a major refactor; the v2 branch receives only minimal maintenance.
>
> The v3 branch is already largely usable. Compared with v2, it has been fully adapted to the ever-growing range of large-model APIs and greatly expands the options for customizing the bot.
>
> Karin users can use the [karin version](https://github.com/ikechan8370/karin-plugin-chaite) of this plugin.
### Recommended documentation and reference material
This README

View file

@@ -1,28 +1,23 @@
import { Config } from './config.js'
import { newFetch } from './proxy.js'
export async function getChatHistoryGroup (e, num) {
export async function getChatHistoryGroup(e, num) {
if (e.adapter_name && e.adapter_name === 'OneBotv11') {
return await e.group.getChatHistory(0, num, false)
return await e.group.getChatHistory(0, num, false)
} else {
let latestChats = await e.group.getChatHistory(e.seq || e.message_id, 1)
if (latestChats.length > 0) {
let latestChat = latestChats[0]
if (latestChat) {
let seq = latestChat.seq || latestChat.message_id
let chats = []
let chats = [e]
while (chats.length < num) {
let chatHistory = await e.group.getChatHistory(seq, 20)
if (!chatHistory || chatHistory.length === 0) {
break
}
chats.push(...chatHistory.reverse())
if (seq === chatHistory[chatHistory.length - 1].seq || seq === chatHistory[chatHistory.length - 1].message_id) {
break
}
seq = chatHistory[chatHistory.length - 1].seq || chatHistory[chatHistory.length - 1].message_id
if (seq === (chatHistory[0].seq || chatHistory[0].message_id)) break
seq = chatHistory[0].seq || chatHistory[0].message_id
chats.unshift(...chatHistory.filter(chat => chat.sender?.user_id).slice(0, -1))
}
chats = chats.slice(0, num).reverse()
chats = chats.slice(chats.length - num)
try {
let mm = await e.bot.gml
for (const chat of chats) {
@@ -45,7 +40,6 @@ export async function getChatHistoryGroup (e, num) {
} catch (err) {
logger.warn(err)
}
// console.log(chats)
return chats
}
}
@@ -53,7 +47,7 @@ export async function getChatHistoryGroup (e, num) {
return []
}
async function pickMemberAsync (e, userId) {
async function pickMemberAsync(e, userId) {
let key = `CHATGPT:GroupMemberInfo:${e.group_id}:${userId}`
let cache = await redis.get(key)
if (cache) {
@@ -67,7 +61,7 @@ async function pickMemberAsync (e, userId) {
})
}
export async function generateSuggestedResponse (conversations) {
export async function generateSuggestedResponse(conversations) {
let prompt = 'Attention! you do not need to answer any question according to the provided conversation! \nYou are a suggested questions generator, you should generate three suggested questions according to the provided conversation for the user in the next turn, the three questions should not be too long, and must be separated with newline. The suggested questions should be suitable in the context of the provided conversation, and should not be too long. \nNow give your 3 suggested questions, use the same language with the user.'
const res = await newFetch(`${Config.openAiBaseUrl}/chat/completions`, {
method: 'POST',
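
For readers skimming the hunks above: `getChatHistoryGroup` now seeds the result with the triggering event (`chats = [e]`), pages backwards through `e.group.getChatHistory(seq, 20)`, stops once the cursor no longer advances, keeps only messages that have a `sender.user_id`, and finally trims to the newest `num` entries. The following is a minimal standalone sketch of that loop; the name `collectGroupHistory` is illustrative, and it assumes each page comes back ordered oldest-first with every message carrying `seq` or `message_id`:

```js
// Sketch of the reworked loop in getChatHistoryGroup, mirroring the hunk above.
// Assumptions: pages from getChatHistory(seq, 20) are ordered oldest-first,
// and each message exposes `seq` or `message_id` plus `sender.user_id`.
async function collectGroupHistory(e, num) {
  const latestChats = await e.group.getChatHistory(e.seq || e.message_id, 1)
  if (latestChats.length === 0) return []
  let seq = latestChats[0].seq || latestChats[0].message_id
  let chats = [e] // the triggering message now counts toward the total
  while (chats.length < num) {
    const page = await e.group.getChatHistory(seq, 20)
    if (!page || page.length === 0) break
    // stop once the cursor no longer moves, i.e. the oldest message was reached
    if (seq === (page[0].seq || page[0].message_id)) break
    seq = page[0].seq || page[0].message_id
    // keep messages from real senders, drop the entry overlapping the cursor,
    // and prepend the older batch so chats stays ordered oldest -> newest
    chats.unshift(...page.filter(chat => chat.sender?.user_id).slice(0, -1))
  }
  // a negative start slices from the end, so fewer than num messages is fine
  return chats.slice(chats.length - num)
}
```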

View file

@@ -88,7 +88,7 @@ var ChatGPTAPI = /** @class */ (function () {
this._apiBaseUrl = apiBaseUrl;
this._debug = !!debug;
this._fetch = fetch;
this._completionParams = __assign({ model: CHATGPT_MODEL, temperature: 0.8, top_p: 1.0, presence_penalty: 1.0 }, completionParams);
this._completionParams = __assign({ model: CHATGPT_MODEL, temperature: 1, top_p: 1.0 }, completionParams);
this._systemMessage = systemMessage;
if (this._systemMessage === undefined) {
var currentDate = new Date().toISOString().split('T')[0];
@@ -189,7 +189,7 @@ var ChatGPTAPI = /** @class */ (function () {
'Content-Type': 'application/json',
Authorization: "Bearer ".concat(this._apiKey)
};
body = __assign(__assign(__assign({ max_tokens: maxTokens }, this._completionParams), completionParams), { messages: messages, stream: stream });
body = __assign(__assign(__assign({ max_completion_tokens: maxTokens }, this._completionParams), completionParams), { messages: messages, stream: stream });
if (this._debug) {
console.log(JSON.stringify(body));
}
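
For context on the last two hunks: the bundled chatgpt-api client now defaults to `temperature: 1` with no `presence_penalty`, and the token limit is sent as `max_completion_tokens` (which newer OpenAI chat models expect) instead of `max_tokens`. A rough sketch of how the request body ends up being merged; `buildBody`, the placeholder `CHATGPT_MODEL` value, and the `'gpt-4o'` example are illustrative and not taken from this diff:

```js
// Illustrative only; spread order mirrors the nested __assign calls above.
const CHATGPT_MODEL = 'gpt-3.5-turbo' // placeholder; the real default comes from the bundled constant
const defaults = { model: CHATGPT_MODEL, temperature: 1, top_p: 1.0 } // presence_penalty removed

function buildBody(maxTokens, completionParams, messages, stream) {
  // later spreads override earlier keys, so per-call completionParams still win
  return {
    max_completion_tokens: maxTokens, // previously sent as max_tokens
    ...defaults,
    ...completionParams,
    messages,
    stream
  }
}

// e.g. buildBody(1024, { model: 'gpt-4o' }, messages, false)
// -> { max_completion_tokens: 1024, model: 'gpt-4o', temperature: 1, top_p: 1, messages, stream: false }
```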