Mirror of https://github.com/ikechan8370/chatgpt-plugin.git, synced 2025-12-16 13:27:08 +00:00

commit 531986b2dc (parent d6cb085c40)
feat: init v3

284 changed files with 618 additions and 405179 deletions
@@ -1,154 +0,0 @@
import fetch, { FormData } from 'node-fetch'
|
||||
import { makeForwardMsg } from './common.js'
|
||||
import { Config } from './config.js'
|
||||
import { getProxy } from './proxy.js'
|
||||
import crypto from 'crypto'
|
||||
|
||||
let proxy = getProxy()
|
||||
export default class BingDrawClient {
|
||||
constructor (opts) {
|
||||
this.opts = opts
|
||||
if (Config.proxy && !Config.sydneyForceUseReverse) {
|
||||
// 如果设置代理,走代理
|
||||
this.opts.baseUrl = 'https://www.bing.com'
|
||||
}
|
||||
}
|
||||
|
||||
async getImages (prompt, e) {
|
||||
let urlEncodedPrompt = encodeURIComponent(prompt)
|
||||
let url = `${this.opts.baseUrl}/images/create?q=${urlEncodedPrompt}&rt=4&FORM=GENCRE`
|
||||
// let d = Math.ceil(Math.random() * 255)
|
||||
// let randomIp = '141.11.138.' + d
|
||||
let headers = {
|
||||
// accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
|
||||
// 'accept-language': 'en-US,en;q=0.9',
|
||||
// 'cache-control': 'max-age=0',
|
||||
'content-type': 'application/x-www-form-urlencoded',
|
||||
referrer: 'https://www.bing.com/images/create/',
|
||||
origin: 'https://www.bing.com',
|
||||
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36 Edg/113.0.1774.50',
|
||||
cookie: this.opts.cookies || `_U=${this.opts.userToken}`,
|
||||
// 'x-forwarded-for': randomIp,
|
||||
Dnt: '1',
|
||||
'sec-ch-ua': '"Microsoft Edge";v="113", "Chromium";v="113", "Not-A.Brand";v="24"',
|
||||
'sec-ch-ua-arch': '"x86"',
|
||||
'sec-ch-ua-bitness': '"64"',
|
||||
'sec-ch-ua-full-version': '"113.0.5672.126"',
|
||||
'sec-ch-ua-full-version-list': '"Google Chrome";v="113.0.5672.126", "Chromium";v="113.0.5672.126", "Not-A.Brand";v="24.0.0.0"',
|
||||
'sec-ch-ua-mobile': '?0',
|
||||
'sec-ch-ua-model': '',
|
||||
'sec-ch-ua-platform': '"macOS"',
|
||||
'sec-ch-ua-platform-version': '"13.1.0"',
|
||||
'sec-fetch-dest': 'document',
|
||||
'sec-fetch-mode': 'navigate',
|
||||
'sec-fetch-site': 'same-origin',
|
||||
'sec-fetch-user': '?1',
|
||||
'Referrer-Policy': 'origin-when-cross-origin',
|
||||
'x-edge-shopping-flag': '1'
|
||||
}
|
||||
// headers['x-forwarded-for'] = '141.11.138.30'
|
||||
let body = new FormData()
|
||||
body.append('q', urlEncodedPrompt)
|
||||
body.append('qs', 'ds')
|
||||
let fetchOptions = {
|
||||
headers
|
||||
}
|
||||
if (Config.proxy) {
|
||||
fetchOptions.agent = proxy(Config.proxy)
|
||||
}
|
||||
let success = false
|
||||
let retry = 1
|
||||
let response
|
||||
while (!success && retry >= 0) {
|
||||
response = await fetch(url, Object.assign(fetchOptions, { body, redirect: 'manual', method: 'POST', credentials: 'include' }))
|
||||
let res = await response.text()
|
||||
if (res.toLowerCase().indexOf('this prompt has been blocked') > -1) {
|
||||
throw new Error('Your prompt has been blocked by Bing. Try to change any bad words and try again.')
|
||||
}
|
||||
if (response.status !== 302) {
|
||||
if (this.debug) {
|
||||
console.debug(`第一次重试绘图:${prompt}`)
|
||||
}
|
||||
url = `${this.opts.baseUrl}/images/create?q=${urlEncodedPrompt}&rt=3&FORM=GENCRE`
|
||||
response = await fetch(url, Object.assign(fetchOptions, { body, redirect: 'manual', method: 'POST', credentials: 'include' }))
|
||||
}
|
||||
if (response.status === 302) {
|
||||
success = true
|
||||
break
|
||||
} else {
|
||||
retry--
|
||||
}
|
||||
}
|
||||
if (!success) {
|
||||
// 最后尝试使用https://cn.bing.com进行一次绘图
|
||||
logger.info('尝试使用https://cn.bing.com进行绘图')
|
||||
url = `https://cn.bing.com/images/create?q=${urlEncodedPrompt}&rt=3&FORM=GENCRE`
|
||||
headers.referrer = 'https://cn.bing.com/images/create/'
|
||||
headers.origin = 'https://cn.bing.com'
|
||||
response = await fetch(url, Object.assign(fetchOptions, { body, redirect: 'manual', method: 'POST', credentials: 'include' }))
|
||||
if (response.status !== 302) {
|
||||
throw new Error('绘图失败,请检查Bing token和代理/反代配置')
|
||||
}
|
||||
}
|
||||
let redirectUrl = response.headers.get('Location').replace('&nfy=1', '')
|
||||
let requestId = redirectUrl.split('id=')[1]
|
||||
// 模拟跳转
|
||||
await fetch(`${this.opts.baseUrl}${redirectUrl}`, {
|
||||
headers
|
||||
})
|
||||
let pollingUrl = `${this.opts.baseUrl}/images/create/async/results/${requestId}?q=${urlEncodedPrompt}`
|
||||
logger.info({ pollingUrl })
|
||||
logger.info('waiting for bing draw results...')
|
||||
let timeoutTimes = 50
|
||||
let found = false
|
||||
let timer = setInterval(async () => {
|
||||
if (found) {
|
||||
return
|
||||
}
|
||||
let r = await fetch(pollingUrl, fetchOptions)
|
||||
let rText = await r.text()
|
||||
if (r.status === 200 && rText) {
|
||||
// logger.info(rText)
|
||||
logger.info('got bing draw results!')
|
||||
found = true
|
||||
let regex = /src="([^"]+)"/g
|
||||
let imageLinks = rText.match(regex)
|
||||
if (!imageLinks || imageLinks.length === 0) {
|
||||
// 很可能是微软内部error,重试即可
|
||||
return
|
||||
}
|
||||
imageLinks = imageLinks
|
||||
.map(link => link.split('?w=')[0])
|
||||
.map(link => link.replace('src="', ''))
|
||||
.filter(link => !link.includes('.svg'))
|
||||
imageLinks = [...new Set(imageLinks)]
|
||||
const badImages = [
|
||||
'https://r.bing.com/rp/in-2zU3AJUdkgFe7ZKv19yPBHVs.png"',
|
||||
'https://r.bing.com/rp/TX9QuO3WzcCJz1uaaSwQAz39Kb0.jpg"',
|
||||
'https://r.bing.com/rp/in-2zU3AJUdkgFe7ZKv19yPBHVs.png',
|
||||
'https://r.bing.com/rp/TX9QuO3WzcCJz1uaaSwQAz39Kb0.jpg'
|
||||
]
|
||||
for (let imageLink of imageLinks) {
|
||||
if (badImages.indexOf(imageLink) > -1) {
|
||||
await e.reply('❌绘图失败:绘图完成但被屏蔽,请调整提示词。', true)
|
||||
logger.error(rText)
|
||||
}
|
||||
}
|
||||
logger.info(imageLinks)
|
||||
let images = imageLinks.map(link => segment.image(link))
|
||||
let msg = await makeForwardMsg(e, images, `bing绘图结果:${prompt}`)
|
||||
await e.reply(msg)
|
||||
clearInterval(timer)
|
||||
} else {
|
||||
if (timeoutTimes === 0) {
|
||||
await e.reply('❌绘图超时', true)
|
||||
clearInterval(timer)
|
||||
timer = null
|
||||
} else {
|
||||
logger.info('still waiting for bing draw results... times left: ' + timeoutTimes)
|
||||
timeoutTimes--
|
||||
}
|
||||
}
|
||||
}, 3000)
|
||||
}
|
||||
}
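
A minimal, hypothetical usage sketch for the BingDrawClient above, assuming a valid Bing _U cookie and the plugin's message event object e; the import path is illustrative only:

// Hypothetical example: build the client with a _U token and let getImages
// poll Bing Image Creator, then reply through the event object `e`.
import BingDrawClient from './utils/BingDrawClient.js' // path assumed for illustration

const client = new BingDrawClient({
  baseUrl: 'https://www.bing.com',
  userToken: process.env.BING_U_COOKIE // placeholder _U cookie value
})
await client.getImages('a lighthouse at sunset, digital art', e) // `e` comes from the message handler
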
@@ -1,662 +0,0 @@
import { downloadFile } from '../utils/common.js'
|
||||
import { SunoClient } from '../client/SunoClient.js'
|
||||
import { Config } from '../utils/config.js'
|
||||
import common from '../../../lib/common/common.js'
|
||||
import fs from 'fs'
|
||||
import crypto from 'crypto'
|
||||
import fetch from 'node-fetch'
|
||||
|
||||
const Style = [
|
||||
{ value: 'Dance', describe: '跳舞' },
|
||||
{ value: 'Festive', describe: '节日' },
|
||||
{ value: 'Groovy', describe: '槽的' },
|
||||
{ value: 'Mid-Tempo', describe: '中速' },
|
||||
{ value: 'Syncopated', describe: '切分音' },
|
||||
{ value: 'Tipsy', describe: '醉' },
|
||||
{ value: 'Dark', describe: '黑暗' },
|
||||
{ value: 'Atmospheric', describe: '大气' },
|
||||
{ value: 'Cold', describe: '冷' },
|
||||
{ value: 'Dark', describe: '黑暗' },
|
||||
{ value: 'Doom', describe: '厄运' },
|
||||
{ value: 'Dramatic', describe: '戏剧性的' },
|
||||
{ value: 'Sinister', describe: '险恶' },
|
||||
{ value: 'Eclectic', describe: '折衷' },
|
||||
{ value: 'Adjunct', describe: '兼职' },
|
||||
{ value: 'Art', describe: '艺术' },
|
||||
{ value: 'Capriccio', describe: '狂想曲' },
|
||||
{ value: 'Mellifluous', describe: '美化' },
|
||||
{ value: 'Nü', describe: 'Nü' },
|
||||
{ value: 'Progressive', describe: '进步' },
|
||||
{ value: 'Unusual', describe: '异常' },
|
||||
{ value: 'Emotion', describe: '情感' },
|
||||
{ value: 'Anthemic', describe: '国歌' },
|
||||
{ value: 'Emotional', describe: '感情的' },
|
||||
{ value: 'Happy', describe: '快乐' },
|
||||
{ value: 'Jubilant', describe: '欢腾' },
|
||||
{ value: 'Melancholy', describe: '忧郁' },
|
||||
{ value: 'Sad', describe: 'Sad' },
|
||||
{ value: 'Hard', describe: '硬' },
|
||||
{ value: 'Aggressive', describe: '侵略性的' },
|
||||
{ value: '积极', describe: '积极' },
|
||||
{ value: 'Banger', describe: '爆竹' },
|
||||
{ value: 'Power', describe: '权力' },
|
||||
{ value: 'Stadium', describe: '体育场' },
|
||||
{ value: 'Stomp', describe: '踩' },
|
||||
{ value: 'Lyrical', describe: '抒情' },
|
||||
{ value: 'Broadway', describe: '百老汇' },
|
||||
{ value: 'Cabaret', describe: '酒店' },
|
||||
{ value: 'Lounge', describe: '休息室' },
|
||||
{ value: 'Operatic', describe: '歌剧' },
|
||||
{ value: 'Storytelling', describe: '故事' },
|
||||
{ value: 'Torch-Lounge', describe: '火炬酒廊' },
|
||||
{ value: 'Theatrical', describe: '戏剧' },
|
||||
{ value: 'Troubadour', describe: '吟游诗人' },
|
||||
{ value: 'Vegas', describe: '维加斯' },
|
||||
{ value: 'Magical', describe: '神奇' },
|
||||
{ value: 'Ethereal', describe: '空灵' },
|
||||
{ value: 'Majestic', describe: '雄伟' },
|
||||
{ value: 'Mysterious', describe: '神秘' },
|
||||
{ value: 'Minimal', describe: '极小' },
|
||||
{ value: 'Ambient', describe: '氛围' },
|
||||
{ value: 'Cinematic', describe: '电影' },
|
||||
{ value: 'Heat', describe: '热' },
|
||||
{ value: 'Minimal', describe: '极小' },
|
||||
{ value: 'Slow', describe: '慢' },
|
||||
{ value: 'Sparse', describe: '稀疏' },
|
||||
{ value: 'Party', describe: '党' },
|
||||
{ value: 'German Schlager', describe: '德国施拉格' },
|
||||
{ value: 'Glam', describe: '格南' },
|
||||
{ value: 'Glitter', describe: '闪光' },
|
||||
{ value: 'Groovy', describe: '槽的' },
|
||||
{ value: 'Soft', describe: '软' },
|
||||
{ value: 'Ambient', describe: '氛围' },
|
||||
{ value: 'Bedroom', describe: '卧室' },
|
||||
{ value: 'Chillwave', describe: '寒波' },
|
||||
{ value: 'Ethereal', describe: '空灵' },
|
||||
{ value: 'Intimate', describe: '亲密' },
|
||||
{ value: 'Heat', describe: '热' },
|
||||
{ value: 'Sadcore', describe: '悲伤' },
|
||||
{ value: 'Weird', describe: '奇怪' },
|
||||
{ value: 'Carnival', describe: '狂欢节' },
|
||||
{ value: 'Distorted', describe: '扭曲' },
|
||||
{ value: 'Glitchy', describe: '毛刺' },
|
||||
{ value: 'Haunted', describe: '闹鬼的' },
|
||||
{ value: 'Hollow', describe: '空心' },
|
||||
{ value: 'Musicbox', describe: '音乐盒' },
|
||||
{ value: 'Random', describe: '随机' },
|
||||
{ value: 'World/Ethnic', describe: '世界/民族' },
|
||||
{ value: 'Arabian', describe: '阿拉伯' },
|
||||
{ value: 'Bangra', describe: '班格拉' },
|
||||
{ value: 'Calypso', describe: '卡吕普索' },
|
||||
{ value: 'Chalga', describe: '查尔加' },
|
||||
{ value: 'Egyptian', describe: '埃及人' },
|
||||
{ value: 'Hindustani', describe: '印度斯坦语' },
|
||||
{ value: 'Jewish Music', describe: '犹太音乐' },
|
||||
{ value: 'Klezmer', describe: '克莱兹默' },
|
||||
{ value: 'Middle East', describe: '中东' },
|
||||
{ value: 'Polka', describe: '波尔卡' },
|
||||
{ value: 'Russian Navy Song', describe: '俄罗斯海军之歌' },
|
||||
{ value: 'Suomipop', describe: 'Suomipop' },
|
||||
{ value: 'Tribal', describe: '部落' }
|
||||
]
|
||||
const Genre = [
|
||||
{ value: 'Country', describe: '乡村' },
|
||||
{ value: 'Appalachian', describe: '阿巴拉契亚' },
|
||||
{ value: 'Bluegrass', describe: '兰草' },
|
||||
{ value: 'Country', describe: '乡村' },
|
||||
{ value: 'Folk', describe: '民族' },
|
||||
{ value: 'Freak Folk', describe: '怪胎民谣' },
|
||||
{ value: 'Western', describe: '西方' },
|
||||
{ value: 'Dance', describe: '跳舞' },
|
||||
{ value: 'Afro-Cuban', describe: '非裔古巴人' },
|
||||
{ value: 'Dance Pop', describe: '流行舞曲' },
|
||||
{ value: 'Disco', describe: '迪斯科' },
|
||||
{ value: 'Dubstep', describe: 'Dubstep的' },
|
||||
{ value: 'Disco Funk', describe: '迪斯科放克' },
|
||||
{ value: 'EDM', describe: 'EDM' },
|
||||
{ value: 'Electro', describe: '电' },
|
||||
{ value: 'High-NRG', describe: '高NRG' },
|
||||
{ value: 'House', describe: '房子' },
|
||||
{ value: 'Trance', describe: '恍惚' },
|
||||
{ value: 'Downtempo', describe: '慢节奏' },
|
||||
{ value: 'Ambient', describe: '氛围' },
|
||||
{ value: 'Downtempo', describe: '慢节奏' },
|
||||
{ value: 'Synthwave', describe: '合成波' },
|
||||
{ value: 'Trap', describe: '陷阱' },
|
||||
{ value: 'Electronic', describe: '电子的' },
|
||||
{ value: 'Ambient', describe: '氛围' },
|
||||
{ value: 'Cyberpunk', describe: '赛博朋克' },
|
||||
{ value: 'Drum\'n\'bass', describe: '鼓与贝斯' },
|
||||
{ value: 'Dubstep', describe: 'Dubstep的' },
|
||||
{ value: 'Electronic', describe: '电子的' },
|
||||
{ value: 'Hypnogogical', describe: '催眠' },
|
||||
{ value: 'IDM', describe: 'IDM' },
|
||||
{ value: 'Phonk', describe: '冯克' },
|
||||
{ value: 'Synthpop', describe: '合成流行音乐' },
|
||||
{ value: 'Techno', describe: '技术' },
|
||||
{ value: 'Trap', describe: '陷阱' },
|
||||
{ value: 'Jazz/Soul', describe: '爵士乐/灵魂乐' },
|
||||
{ value: 'Bebop', describe: '贝波普' },
|
||||
{ value: 'Gospel', describe: '福音' },
|
||||
{ value: 'Electro', describe: '电' },
|
||||
{ value: 'Frutiger Aero', describe: 'Frutiger 航空' },
|
||||
{ value: 'Jazz', describe: '爵士乐' },
|
||||
{ value: 'Latin Jazz', describe: '拉丁爵士乐' },
|
||||
{ value: 'RnB', describe: 'RnB' },
|
||||
{ value: 'Soul', describe: '灵魂' },
|
||||
{ value: 'Latin', describe: '拉丁语' },
|
||||
{ value: 'Bossa Nova', describe: '博萨诺瓦' },
|
||||
{ value: 'Latin Jazz', describe: '拉丁爵士乐' },
|
||||
{ value: 'Forró', describe: 'Forró' },
|
||||
{ value: 'Mambo', describe: '曼波' },
|
||||
{ value: 'Salsa', describe: '萨尔萨' },
|
||||
{ value: 'Tango', describe: '探戈' },
|
||||
{ value: 'Reggae', describe: '瑞格乐' },
|
||||
{ value: 'Afrobeat', describe: '非洲节拍' },
|
||||
{ value: 'Dancehall', describe: '舞厅' },
|
||||
{ value: 'Dub', describe: 'Dub' },
|
||||
{ value: 'Reggae', describe: '瑞格乐' },
|
||||
{ value: 'Reggaeton', describe: '雷鬼' },
|
||||
{ value: 'Metal', describe: '金属' },
|
||||
{ value: 'Black Metal', describe: '黑色金属' },
|
||||
{ value: 'Deathcore', describe: '死亡核心' },
|
||||
{ value: 'Death Metal', describe: '死亡金属' },
|
||||
{ value: 'Heavy Metal', describe: '重金属' },
|
||||
{ value: 'Heavy Metal Trap', describe: '重金属捕集器' },
|
||||
{ value: 'Metalcore', describe: '金属芯' },
|
||||
{ value: 'Nu Metal', describe: 'Nu Metal(努金属)' },
|
||||
{ value: 'Power Metal', describe: '动力金属' },
|
||||
{ value: 'Popular', describe: '流行' },
|
||||
{ value: 'Pop', describe: 'Pop' },
|
||||
{ value: 'Dance Pop', describe: '流行舞曲' },
|
||||
{ value: 'Pop Rock', describe: '流行摇滚' },
|
||||
{ value: 'Kpop', describe: '韩流' },
|
||||
{ value: 'Jpop', describe: '大通' },
|
||||
{ value: 'Synthpop', describe: '合成流行音乐' },
|
||||
{ value: 'Rock', describe: '摇滚' },
|
||||
{ value: 'Classic Rock', describe: '经典摇滚' },
|
||||
{ value: 'Blues Rock', describe: '蓝调摇滚' },
|
||||
{ value: 'Emo', describe: 'Emo' },
|
||||
{ value: 'Glam Rock', describe: '华丽摇滚' },
|
||||
{ value: 'Hardcore Punk', describe: '硬核朋克' },
|
||||
{ value: 'Indie', describe: '独立' },
|
||||
{ value: 'Industrial Rock', describe: '工业摇滚' },
|
||||
{ value: 'Punk', describe: '朋克' },
|
||||
{ value: 'Rock', describe: '摇滚' },
|
||||
{ value: 'Skate Rock', describe: '滑板摇滚' },
|
||||
{ value: 'Skatecore', describe: '滑板芯' },
|
||||
{ value: 'Suomipop', describe: 'Suomipop' },
|
||||
{ value: 'Urban', describe: '都市的' },
|
||||
{ value: 'Funk', describe: '恐惧' },
|
||||
{ value: 'HipHop', describe: '嘻哈' },
|
||||
{ value: 'Phonk', describe: '冯克' },
|
||||
{ value: 'Rap', describe: 'Rap' },
|
||||
{ value: 'Trap', describe: '陷阱' }
|
||||
]
|
||||
const Types = [
|
||||
{ value: 'Background', describe: '背景' },
|
||||
{ value: 'Elevator', describe: '电梯' },
|
||||
{ value: 'Jingle', describe: '静乐县' },
|
||||
{ value: 'Muzak', describe: '穆扎克' },
|
||||
{ value: 'Call to Prayer', describe: '祷告的呼召' },
|
||||
{ value: 'Adan', describe: '阿丹' },
|
||||
{ value: 'Adjan', describe: '阿让' },
|
||||
{ value: 'Call to Prayer', describe: '祷告的呼召' },
|
||||
{ value: 'Gregorian Chant', describe: '格里高利圣歌' },
|
||||
{ value: 'Character', describe: '字符' },
|
||||
{ value: 'I Want Song', describe: '我想要歌' },
|
||||
{ value: 'Hero Theme', describe: '英雄主题' },
|
||||
{ value: 'Strut', describe: '支柱' },
|
||||
{ value: 'March', describe: '三月' },
|
||||
{ value: 'Military', describe: '军事' },
|
||||
{ value: 'Villain Theme', describe: '反派主题' },
|
||||
{ value: 'Children', describe: '孩子' },
|
||||
{ value: 'Lullaby', describe: '催眠曲' },
|
||||
{ value: 'Nursery Rhyme', describe: '童谣' },
|
||||
{ value: 'Sing-along', describe: '跟唱' },
|
||||
{ value: 'Toddler', describe: '幼儿' },
|
||||
{ value: 'Composer', describe: '作曲家' },
|
||||
{ value: 'Adagio', describe: '阿德吉奥' },
|
||||
{ value: 'Adjunct', describe: '兼职' },
|
||||
{ value: 'Andante', describe: '行板' },
|
||||
{ value: 'Allegro', describe: '快板' },
|
||||
{ value: 'Capriccio', describe: '狂想曲' },
|
||||
{ value: 'Instruments', describe: '仪器' },
|
||||
{ value: 'Acoustic Guitar', describe: '木吉他' },
|
||||
{ value: 'Bass', describe: '低音' },
|
||||
{ value: 'Doublebass', describe: '低音提琴' },
|
||||
{ value: 'Electricbass', describe: '电贝司' },
|
||||
{ value: 'Electric Guitar', describe: '电吉他' },
|
||||
{ value: 'Fingerstyle Guitar', describe: '指弹吉他' },
|
||||
{ value: 'Percussion', describe: '击发' },
|
||||
{ value: 'Noise', describe: '噪声' },
|
||||
{ value: 'Chaotic', describe: '混沌' },
|
||||
{ value: 'Distorted', describe: '扭曲' },
|
||||
{ value: 'Glitch', describe: '故障' },
|
||||
{ value: 'Noise', describe: '噪声' },
|
||||
{ value: 'Random', describe: '随机' },
|
||||
{ value: 'Stuttering', describe: '口吃' },
|
||||
{ value: 'Orchestral', describe: '管弦乐' },
|
||||
{ value: 'glissando trombone', describe: '长号滑音' },
|
||||
{ value: 'legato cello', describe: '大提琴连奏' },
|
||||
{ value: 'Orchestral', describe: '管弦乐' },
|
||||
{ value: 'spiccato violins', describe: '斯皮卡托小提琴' },
|
||||
{ value: 'staccato viola', describe: '断奏中提琴' },
|
||||
{ value: 'Symphonic', describe: '交响' },
|
||||
{ value: 'Retro', describe: '复古' },
|
||||
{ value: '1960s', describe: '1960年代' },
|
||||
{ value: 'Barbershop', describe: '理发店' },
|
||||
{ value: 'Big Band', describe: '大乐队' },
|
||||
{ value: 'Classic', describe: '经典' },
|
||||
{ value: 'Doo Wop', describe: '嘟' },
|
||||
{ value: 'Girl Group', describe: '女团' },
|
||||
{ value: 'Mambo', describe: '曼波' },
|
||||
{ value: 'Salooncore', describe: '沙龙核心' },
|
||||
{ value: 'Swing', describe: '摆动' },
|
||||
{ value: 'Traditional', describe: '传统的' },
|
||||
{ value: 'Suffix', describe: '后缀' },
|
||||
{ value: '…core', describe: '...核心' },
|
||||
{ value: '…jam', describe: '...果酱' },
|
||||
{ value: '…out', describe: '...外' },
|
||||
{ value: '…wave', describe: '...浪' },
|
||||
{ value: 'Traditional', describe: '传统的' },
|
||||
{ value: 'Americana', describe: '美洲' },
|
||||
{ value: 'Barbershop', describe: '理发店' },
|
||||
{ value: 'Christmas Carol', describe: '圣诞颂歌' },
|
||||
{ value: 'Traditional', describe: '传统的' },
|
||||
{ value: 'Voice', describe: '声音' },
|
||||
{ value: 'A Cappella', describe: '无伴奏合唱' },
|
||||
{ value: 'Arabian Ornamental', describe: '阿拉伯观赏' },
|
||||
{ value: 'Dispassionate', describe: '冷静的' },
|
||||
{ value: 'Emotional', describe: '感情的' },
|
||||
{ value: 'Ethereal', describe: '空灵' },
|
||||
{ value: 'Gregorian chant', describe: '格里高利圣歌' },
|
||||
{ value: 'Hindustani', describe: '印度斯坦语' },
|
||||
{ value: 'Lounge Singer', describe: '休息室歌手' },
|
||||
{ value: 'Melismatic', describe: '旋律' },
|
||||
{ value: 'Monotone', describe: '单调' },
|
||||
{ value: 'Narration', describe: '叙事' },
|
||||
{ value: 'Resonant', describe: '谐振' },
|
||||
{ value: 'Spoken Word', describe: '口语' },
|
||||
{ value: 'Sprechgesang', describe: 'Sprechgesang' },
|
||||
{ value: 'Sultry', describe: '闷热' },
|
||||
{ value: 'Scream', describe: '尖叫' },
|
||||
{ value: 'Torchy', describe: '火炬' },
|
||||
{ value: 'Vocaloid', describe: '声乐' },
|
||||
]
|
||||
|
||||
export default class BingSunoClient {
|
||||
constructor(opts) {
|
||||
this.opts = opts
|
||||
}
|
||||
|
||||
async replyMsg(song, e) {
|
||||
let messages = []
|
||||
messages.push(`歌名:${song.title}\n风格: ${song.musicalStyle}\n歌词:\n${song.prompt}\n`)
|
||||
messages.push(`音频链接:${song.audioURL}\n视频链接:${song.videoURL}\n封面链接:${song.imageURL}\n`)
|
||||
messages.push(segment.image(song.imageURL))
|
||||
await e.reply(await common.makeForwardMsg(e, messages, '音乐合成结果'))
|
||||
let retry = 10
|
||||
let videoPath
|
||||
while (!videoPath && retry >= 0) {
|
||||
try {
|
||||
videoPath = await downloadFile(song.videoURL, `suno/${song.title}.mp4`, false, false, {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36'
|
||||
})
|
||||
} catch (err) {
|
||||
retry--
|
||||
await common.sleep(3000)
|
||||
}
|
||||
}
|
||||
if (videoPath) {
|
||||
const data = fs.readFileSync(videoPath)
|
||||
await e.reply(segment.video(`base64://${data.toString('base64')}`))
|
||||
// 60秒后删除文件避免占用体积
|
||||
setTimeout(() => {
|
||||
fs.unlinkSync(videoPath)
|
||||
}, 60000)
|
||||
} else {
|
||||
logger.warn(`${song.title}下载视频失败`)
|
||||
await e.reply(`${song.title}下载视频失败`)
|
||||
}
|
||||
}
|
||||
|
||||
async getSuno(prompt, e) {
|
||||
if (prompt.cookie) {
|
||||
this.opts.cookies = prompt.cookie
|
||||
}
|
||||
const sunoResult = await this.getSunoResult(prompt.songtId)
|
||||
if (sunoResult) {
|
||||
const {
|
||||
duration,
|
||||
title,
|
||||
musicalStyle,
|
||||
requestId,
|
||||
} = sunoResult
|
||||
const generateURL = id => `https://th.bing.com/th?&id=${id}`
|
||||
const audioURL = generateURL(`OIG.a_${requestId}`)
|
||||
const imageURL = generateURL(`OIG.i_${requestId}`)
|
||||
const videoURL = generateURL(`OIG.v_${requestId}`)
|
||||
const sunoURL = `https://cdn1.suno.ai/${requestId}.mp4`
|
||||
const sunoDisplayResult = {
|
||||
title,
|
||||
duration,
|
||||
musicalStyle,
|
||||
audioURL,
|
||||
imageURL,
|
||||
videoURL,
|
||||
sunoURL,
|
||||
prompt: prompt.songPrompt
|
||||
}
|
||||
await e.reply('Suno 生成中,请稍后')
|
||||
this.replyMsg(sunoDisplayResult, e)
|
||||
} else {
|
||||
await e.reply('Suno 数据获取失败')
|
||||
redis.del(`CHATGPT:SUNO:${e.sender.user_id}`)
|
||||
}
|
||||
redis.del(`CHATGPT:SUNO:${e.sender.user_id}`)
|
||||
}
|
||||
|
||||
async getLocalSuno(prompt, e) {
|
||||
if (!Config.sunoClientToken || !Config.sunoSessToken) {
|
||||
await e.reply('未配置Suno Token')
|
||||
redis.del(`CHATGPT:SUNO:${e.sender.user_id}`)
|
||||
return true
|
||||
}
|
||||
let description = prompt.songPrompt || prompt.lyrics
|
||||
await e.reply('正在生成,请稍后')
|
||||
try {
|
||||
let sessTokens = Config.sunoSessToken.split(',')
|
||||
let clientTokens = Config.sunoClientToken.split(',')
|
||||
let tried = 0
|
||||
while (tried < sessTokens.length) {
|
||||
let index = tried
|
||||
let sess = sessTokens[index]
|
||||
let clientToken = clientTokens[index]
|
||||
let client = new SunoClient({ sessToken: sess, clientToken })
|
||||
let { credit, email } = await client.queryCredit()
|
||||
logger.info({ credit, email })
|
||||
if (credit < 10) {
|
||||
tried++
|
||||
logger.info(`账户${email}余额不足,尝试下一个账户`)
|
||||
continue
|
||||
}
|
||||
|
||||
let songs = await client.createSong(description)
|
||||
if (!songs || songs.length === 0) {
|
||||
e.reply('生成失败,可能是提示词太长或者违规,请检查日志')
|
||||
redis.del(`CHATGPT:SUNO:${e.sender.user_id}`)
|
||||
return
|
||||
}
|
||||
let messages = ['提示词:' + description]
|
||||
for (let song of songs) {
|
||||
messages.push(`歌名:${song.title}\n风格: ${song.metadata.tags}\n歌词:\n${song.metadata.prompt}\n`)
|
||||
messages.push(`音频链接:${song.audio_url}\n视频链接:${song.video_url}\n封面链接:${song.image_url}\n`)
|
||||
messages.push(segment.image(song.image_url))
|
||||
let retry = 3
|
||||
let videoPath
|
||||
while (!videoPath && retry >= 0) {
|
||||
try {
|
||||
videoPath = await downloadFile(song.video_url, `suno/${song.title}.mp4`, false, false, {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36'
|
||||
})
|
||||
} catch (err) {
|
||||
retry--
|
||||
await common.sleep(1000)
|
||||
}
|
||||
}
|
||||
if (videoPath) {
|
||||
const data = fs.readFileSync(videoPath)
|
||||
messages.push(segment.video(`base64://${data.toString('base64')}`))
|
||||
// 60秒后删除文件避免占用体积
|
||||
setTimeout(() => {
|
||||
fs.unlinkSync(videoPath)
|
||||
}, 60000)
|
||||
} else {
|
||||
logger.warn(`${song.title}下载视频失败,仅发送视频链接`)
|
||||
}
|
||||
}
|
||||
await e.reply(await common.makeForwardMsg(e, messages, '音乐合成结果'))
|
||||
redis.del(`CHATGPT:SUNO:${e.sender.user_id}`)
|
||||
return true
|
||||
}
|
||||
await e.reply('所有账户余额不足')
|
||||
redis.del(`CHATGPT:SUNO:${e.sender.user_id}`)
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
await e.reply('生成失败,请查看日志')
|
||||
redis.del(`CHATGPT:SUNO:${e.sender.user_id}`)
|
||||
}
|
||||
}
|
||||
|
||||
async getApiSuno(prompt, e) {
|
||||
if (!Config.bingSunoApi) {
|
||||
await e.reply('未配置 Suno API')
|
||||
return
|
||||
}
|
||||
const responseId = await fetch(`${Config.bingSunoApi}/api/custom_generate`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
"prompt": prompt.songPrompt || prompt.lyrics,
|
||||
"tags": prompt.tags || "pop",
|
||||
"title": prompt.title || e.sender.card || e.sender.nickname,
|
||||
"make_instrumental": false,
|
||||
"wait_audio": false
|
||||
})
|
||||
})
|
||||
const sunoId = await responseId.json()
|
||||
if (sunoId[0]?.id) {
|
||||
await e.reply('Suno 生成中,请稍后')
|
||||
let timeoutTimes = Config.sunoApiTimeout
|
||||
let timer = setInterval(async () => {
|
||||
const response = await fetch(`${Config.bingSunoApi}/api/get?ids=${sunoId[0]?.id}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Accept': 'application/json'
|
||||
}
|
||||
})
|
||||
if (!response.ok) {
|
||||
await e.reply('Suno 数据获取失败')
|
||||
logger.error(`Suno API HTTP error: ${response.status} ${response.statusText}`)
|
||||
redis.del(`CHATGPT:SUNO:${e.sender.user_id}`)
|
||||
clearInterval(timer)
|
||||
timer = null
|
||||
throw new Error(`HTTP error! status: ${response.status}`)
|
||||
}
|
||||
const result = await response.json()
|
||||
if (result[0].status == 'complete') {
|
||||
const sunoResult = result[0]
|
||||
const title = sunoResult.title
|
||||
const audioURL = sunoResult.audio_url
|
||||
const imageURL = sunoResult.image_url
|
||||
const videoURL = sunoResult.video_url
|
||||
const musicalStyle = sunoResult.tags
|
||||
const prompt = sunoResult.lyric
|
||||
const sunoURL = `https://cdn1.suno.ai/${sunoResult.id}.mp4`
|
||||
const sunoDisplayResult = {
|
||||
title,
|
||||
musicalStyle,
|
||||
audioURL,
|
||||
imageURL,
|
||||
videoURL,
|
||||
sunoURL,
|
||||
prompt
|
||||
}
|
||||
this.replyMsg(sunoDisplayResult, e)
|
||||
clearInterval(timer)
|
||||
} else if (timeoutTimes === 0) {
|
||||
await e.reply('❌Suno 生成超时', true)
|
||||
clearInterval(timer)
|
||||
timer = null
|
||||
} else {
|
||||
logger.info('等待Suno生成中: ' + timeoutTimes)
|
||||
timeoutTimes--
|
||||
}
|
||||
}, 3000)
|
||||
}
|
||||
}
|
||||
|
||||
async getSunoResult(requestId) {
|
||||
const skey = await this.#getSunoMetadata(requestId)
|
||||
if (skey) {
|
||||
const sunoMedia = await this.#getSunoMedia(requestId, skey)
|
||||
return sunoMedia
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
async #getSunoMetadata(requestId) {
|
||||
const fetchURL = new URL('https://www.bing.com/videos/music')
|
||||
const searchParams = new URLSearchParams({
|
||||
vdpp: 'suno',
|
||||
kseed: '7500',
|
||||
SFX: '2',
|
||||
q: '',
|
||||
iframeid: crypto.randomUUID(),
|
||||
requestId,
|
||||
})
|
||||
fetchURL.search = searchParams.toString()
|
||||
const response = await fetch(fetchURL, {
|
||||
headers: {
|
||||
accept: 'text/html',
|
||||
cookie: this.opts.cookies,
|
||||
},
|
||||
method: 'GET',
|
||||
})
|
||||
if (response.status === 200) {
|
||||
const document = await response.text()
|
||||
|
||||
const patternSkey = /(?<=skey=)[^&]+/
|
||||
const matchSkey = document.match(patternSkey)
|
||||
const skey = matchSkey ? matchSkey[0] : null
|
||||
|
||||
const patternIG = /(?<=IG:"|IG:\s")[0-9A-F]{32}(?=")/
|
||||
const matchIG = document.match(patternIG)
|
||||
const ig = matchIG ? matchIG[0] : null
|
||||
|
||||
return { skey, ig }
|
||||
} else {
|
||||
console.error(`HTTP error! Error: ${response.error}, Status: ${response.status}`)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
async #getSunoMedia(requestId, sunoMetadata) {
|
||||
let sfx = 1
|
||||
const maxTries = 30
|
||||
const { skey, ig } = sunoMetadata
|
||||
|
||||
let rawResponse
|
||||
const result = await new Promise((resolve, reject) => {
|
||||
const intervalId = setInterval(async () => {
|
||||
const fetchURL = new URL('https://www.bing.com/videos/api/custom/music')
|
||||
const searchParams = new URLSearchParams({
|
||||
skey,
|
||||
safesearch: 'Moderate',
|
||||
vdpp: 'suno',
|
||||
requestId,
|
||||
ig,
|
||||
iid: 'vsn',
|
||||
sfx: sfx.toString(),
|
||||
})
|
||||
fetchURL.search = searchParams.toString()
|
||||
const response = await fetch(fetchURL, {
|
||||
headers: {
|
||||
accept: '*/*',
|
||||
cookie: this.opts.cookies,
|
||||
},
|
||||
method: 'GET',
|
||||
})
|
||||
try {
|
||||
const body = await response.json()
|
||||
rawResponse = JSON.parse(body.RawResponse)
|
||||
const { status } = rawResponse
|
||||
const done = status === 'complete'
|
||||
|
||||
if (done) {
|
||||
clearInterval(intervalId)
|
||||
resolve()
|
||||
} else {
|
||||
sfx++
|
||||
if (sfx === maxTries) {
|
||||
reject(new Error('Maximum number of tries exceeded'))
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(`获取音乐失败 ${response.status}`)
|
||||
reject(new Error(error))
|
||||
}
|
||||
|
||||
}, 2000)
|
||||
})
|
||||
.then(() => {
|
||||
if (rawResponse?.status === 'complete') {
|
||||
return {
|
||||
duration: rawResponse.duration,
|
||||
title: rawResponse.gptPrompt,
|
||||
musicalStyle: rawResponse.musicalStyle,
|
||||
requestId: rawResponse.id,
|
||||
}
|
||||
} else {
|
||||
throw Error('Suno response could not be completed.')
|
||||
}
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error(err)
|
||||
return null
|
||||
})
|
||||
return result
|
||||
}
|
||||
|
||||
extractLyrics(text) {
|
||||
// 定义分段关键词
|
||||
const sectionKeywords = ['Verse', 'Chorus', 'Bridge', 'Outro', 'End']
|
||||
// 初始化lyrics变量
|
||||
let lyrics = ''
|
||||
// 标记是否开始提取歌词
|
||||
let startExtracting = false
|
||||
// 将文本按行分割
|
||||
const lines = text.split('\n')
|
||||
|
||||
lines.forEach(line => {
|
||||
// 检查每一行是否包含分段关键词
|
||||
const sectionFound = sectionKeywords.some(keyword => {
|
||||
const regex = new RegExp(`\\[${keyword} \\d+\\]|\\(${keyword} \\d+\\)|\\*\\*${keyword} \\d+\\*\\*`, 'i')
|
||||
return regex.test(line)
|
||||
})
|
||||
// 如果找到第一个分段关键词,开始提取歌词
|
||||
if (sectionFound && !startExtracting) {
|
||||
startExtracting = true
|
||||
}
|
||||
// 如果已经开始提取歌词,则添加到lyrics变量中
|
||||
if (startExtracting) {
|
||||
lyrics += line + '\n'
|
||||
}
|
||||
})
|
||||
return lyrics.trim() // 返回处理过的歌词
|
||||
}
|
||||
|
||||
getRandomElements(arr, count) {
|
||||
const shuffled = arr.sort(() => 0.5 - Math.random())
|
||||
return shuffled.slice(0, count)
|
||||
}
|
||||
|
||||
generateRandomStyle() {
|
||||
const totalItems = 5
|
||||
const itemsPerArray = Math.floor(totalItems / 3)
|
||||
let remainingItems = totalItems % 3
|
||||
|
||||
let selectedStyles = this.getRandomElements(Style, itemsPerArray)
|
||||
let selectedGenres = this.getRandomElements(Genre, itemsPerArray)
|
||||
let selectedTypes = this.getRandomElements(Types, itemsPerArray)
|
||||
|
||||
if (remainingItems > 0) selectedStyles = selectedStyles.concat(this.getRandomElements(Style, 1)), remainingItems--
|
||||
if (remainingItems > 0) selectedGenres = selectedGenres.concat(this.getRandomElements(Genre, 1)), remainingItems--
|
||||
|
||||
const allSelected = [...selectedStyles, ...selectedGenres, ...selectedTypes]
|
||||
return allSelected.map(item => item.value).join(', ')
|
||||
}
|
||||
}
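
A hedged sketch of how the BingSunoClient above might be driven, based only on the method signatures shown; the import path and cookie value are placeholders, and e is again the message event object:

// Hypothetical example: generate a song through the suno-api compatible
// endpoint configured as Config.bingSunoApi, then let the client reply with
// a forward message and the rendered video once polling completes.
import BingSunoClient from './client/BingSunoClient.js' // path assumed for illustration

const suno = new BingSunoClient({ cookies: process.env.BING_COOKIE }) // placeholder cookie string
await suno.getApiSuno({
  lyrics: '[Verse 1]\nCity lights are fading out...',
  tags: 'pop',
  title: 'Demo Song'
}, e)
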
File diff suppressed because it is too large
@@ -1,412 +0,0 @@
var __assign = (this && this.__assign) || function () {
|
||||
__assign = Object.assign || function(t) {
|
||||
for (var s, i = 1, n = arguments.length; i < n; i++) {
|
||||
s = arguments[i];
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
|
||||
t[p] = s[p];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
return __assign.apply(this, arguments);
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (g && (g = 0, op[0] && (_ = 0)), _) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
|
||||
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
|
||||
if (ar || !(i in from)) {
|
||||
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
|
||||
ar[i] = from[i];
|
||||
}
|
||||
}
|
||||
return to.concat(ar || Array.prototype.slice.call(from));
|
||||
};
|
||||
import Keyv from 'keyv';
|
||||
import pTimeout from 'p-timeout';
|
||||
import QuickLRU from 'quick-lru';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import * as tokenizer from './tokenizer.js';
|
||||
import * as types from './types.js';
|
||||
import globalFetch from 'node-fetch';
|
||||
var CHATGPT_MODEL = 'qwen-turbo'; // qwen-plus
|
||||
var USER_LABEL_DEFAULT = 'User';
|
||||
var ASSISTANT_LABEL_DEFAULT = '通义千问';
|
||||
var QwenApi = /** @class */ (function () {
|
||||
/**
|
||||
* Creates a new client wrapper around Qwen's chat completion API, mimicing the official ChatGPT webapp's functionality as closely as possible.
|
||||
*
|
||||
* @param opts
|
||||
*/
|
||||
function QwenApi(opts) {
|
||||
var apiKey = opts.apiKey, _a = opts.apiBaseUrl, apiBaseUrl = _a === void 0 ? 'https://dashscope.aliyuncs.com/api/v1' : _a, _b = opts.debug, debug = _b === void 0 ? false : _b, messageStore = opts.messageStore, completionParams = opts.completionParams, parameters = opts.parameters, systemMessage = opts.systemMessage, getMessageById = opts.getMessageById, upsertMessage = opts.upsertMessage, _c = opts.fetch, fetch = _c === void 0 ? globalFetch : _c;
|
||||
this._apiKey = apiKey;
|
||||
this._apiBaseUrl = apiBaseUrl;
|
||||
this._debug = !!debug;
|
||||
// @ts-ignore
|
||||
this._fetch = fetch;
|
||||
this._completionParams = __assign({ model: CHATGPT_MODEL, parameters: __assign({ top_p: 0.5, top_k: 50, temperature: 1.0, seed: 114514, enable_search: true, result_format: "message", incremental_output: false }, parameters) }, completionParams);
|
||||
this._systemMessage = systemMessage;
|
||||
if (this._systemMessage === undefined) {
|
||||
var currentDate = new Date().toISOString().split('T')[0];
|
||||
this._systemMessage = "You are Qwen, a large language model trained by Alibaba Cloud. Answer as concisely as possible.\nCurrent date: ".concat(currentDate);
|
||||
}
|
||||
this._getMessageById = getMessageById !== null && getMessageById !== void 0 ? getMessageById : this._defaultGetMessageById;
|
||||
this._upsertMessage = upsertMessage !== null && upsertMessage !== void 0 ? upsertMessage : this._defaultUpsertMessage;
|
||||
if (messageStore) {
|
||||
this._messageStore = messageStore;
|
||||
}
|
||||
else {
|
||||
this._messageStore = new Keyv({
|
||||
store: new QuickLRU({ maxSize: 10000 })
|
||||
});
|
||||
}
|
||||
if (!this._apiKey) {
|
||||
throw new Error('Qwen missing required apiKey');
|
||||
}
|
||||
if (!this._fetch) {
|
||||
throw new Error('Invalid environment; fetch is not defined');
|
||||
}
|
||||
if (typeof this._fetch !== 'function') {
|
||||
throw new Error('Invalid "fetch" is not a function');
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Sends a message to the Qwen chat completions endpoint, waits for the response
|
||||
* to resolve, and returns the response.
|
||||
*
|
||||
* If you want your response to have historical context, you must provide a valid `parentMessageId`.
|
||||
*
|
||||
* If you want to receive a stream of partial responses, use `opts.onProgress`.
|
||||
*
|
||||
* Set `debug: true` in the `ChatGPTAPI` constructor to log more info on the full prompt sent to the Qwen chat completions API. You can override the `systemMessage` in `opts` to customize the assistant's instructions.
|
||||
*
|
||||
* @param message - The prompt message to send
|
||||
* @param opts.parentMessageId - Optional ID of the previous message in the conversation (defaults to `undefined`)
|
||||
* @param opts.conversationId - Optional ID of the conversation (defaults to `undefined`)
|
||||
* @param opts.messageId - Optional ID of the message to send (defaults to a random UUID)
|
||||
* @param opts.systemMessage - Optional override for the chat "system message" which acts as instructions to the model (defaults to the ChatGPT system message)
|
||||
* @param opts.timeoutMs - Optional timeout in milliseconds (defaults to no timeout)
|
||||
* @param opts.onProgress - Optional callback which will be invoked every time the partial response is updated
|
||||
* @param opts.abortSignal - Optional callback used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
|
||||
* @param opts.completionParams - Optional overrides to send to the [Qwen chat completion API](https://platform.openai.com/docs/api-reference/chat/create). Options like `temperature` and `presence_penalty` can be tweaked to change the personality of the assistant.
|
||||
*
|
||||
* @returns The response from ChatGPT
|
||||
*/
|
||||
QwenApi.prototype.sendMessage = function (text, opts, role) {
|
||||
if (opts === void 0) { opts = {}; }
|
||||
if (role === void 0) { role = 'user'; }
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
var parentMessageId, _a, messageId, timeoutMs, completionParams, conversationId, abortSignal, abortController, message, latestQuestion, parameters, _b, messages, maxTokens, numTokens, result, responseP;
|
||||
var _this = this;
|
||||
return __generator(this, function (_c) {
|
||||
switch (_c.label) {
|
||||
case 0:
|
||||
parentMessageId = opts.parentMessageId, _a = opts.messageId, messageId = _a === void 0 ? uuidv4() : _a, timeoutMs = opts.timeoutMs, completionParams = opts.completionParams, conversationId = opts.conversationId;
|
||||
abortSignal = opts.abortSignal;
|
||||
abortController = null;
|
||||
if (timeoutMs && !abortSignal) {
|
||||
abortController = new AbortController();
|
||||
abortSignal = abortController.signal;
|
||||
}
|
||||
message = {
|
||||
role: role,
|
||||
id: messageId,
|
||||
conversationId: conversationId,
|
||||
parentMessageId: parentMessageId,
|
||||
text: text,
|
||||
};
|
||||
latestQuestion = message;
|
||||
parameters = Object.assign(this._completionParams.parameters, completionParams.parameters);
|
||||
completionParams = Object.assign(this._completionParams, completionParams);
|
||||
completionParams.parameters = parameters;
|
||||
return [4 /*yield*/, this._buildMessages(text, role, opts, completionParams)];
|
||||
case 1:
|
||||
_b = _c.sent(), messages = _b.messages, maxTokens = _b.maxTokens, numTokens = _b.numTokens;
|
||||
console.log("maxTokens: ".concat(maxTokens, ", numTokens: ").concat(numTokens));
|
||||
result = {
|
||||
role: 'assistant',
|
||||
id: uuidv4(),
|
||||
conversationId: conversationId,
|
||||
parentMessageId: messageId,
|
||||
text: undefined,
|
||||
functionCall: undefined,
|
||||
conversation: []
|
||||
};
|
||||
completionParams.input = { messages: messages };
|
||||
responseP = new Promise(function (resolve, reject) { return __awaiter(_this, void 0, void 0, function () {
|
||||
var url, headers, body, res, reason, msg, error, response, err_1;
|
||||
var _a, _b, _c, _d, _e, _f, _g, _h, _j;
|
||||
return __generator(this, function (_k) {
|
||||
switch (_k.label) {
|
||||
case 0:
|
||||
url = "".concat(this._apiBaseUrl, "/services/aigc/text-generation/generation");
|
||||
headers = {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: "Bearer ".concat(this._apiKey)
|
||||
};
|
||||
body = completionParams;
|
||||
if (this._debug) {
|
||||
console.log(JSON.stringify(body));
|
||||
}
|
||||
if (this._debug) {
|
||||
console.log("sendMessage (".concat(numTokens, " tokens)"), body);
|
||||
}
|
||||
_k.label = 1;
|
||||
case 1:
|
||||
_k.trys.push([1, 6, , 7]);
|
||||
return [4 /*yield*/, this._fetch(url, {
|
||||
method: 'POST',
|
||||
headers: headers,
|
||||
body: JSON.stringify(body),
|
||||
signal: abortSignal
|
||||
})];
|
||||
case 2:
|
||||
res = _k.sent();
|
||||
if (!!res.ok) return [3 /*break*/, 4];
|
||||
return [4 /*yield*/, res.text()];
|
||||
case 3:
|
||||
reason = _k.sent();
|
||||
msg = "Qwen error ".concat(res.status || res.statusText, ": ").concat(reason);
|
||||
error = new types.ChatGPTError(msg);
|
||||
error.statusCode = res.status;
|
||||
error.statusText = res.statusText;
|
||||
return [2 /*return*/, reject(error)];
|
||||
case 4: return [4 /*yield*/, res.json()];
|
||||
case 5:
|
||||
response = _k.sent();
|
||||
if (this._debug) {
|
||||
console.log(response);
|
||||
}
|
||||
if (((_e = (_d = (_c = (_b = (_a = response.output) === null || _a === void 0 ? void 0 : _a.choices) === null || _b === void 0 ? void 0 : _b[0]) === null || _c === void 0 ? void 0 : _c.message) === null || _d === void 0 ? void 0 : _d.tool_calls) === null || _e === void 0 ? void 0 : _e.length) > 0) {
|
||||
// function call result
|
||||
result.functionCall = response.output.choices[0].message.tool_calls[0].function;
|
||||
result.toolCalls = (_j = (_h = (_g = (_f = response.output) === null || _f === void 0 ? void 0 : _f.choices) === null || _g === void 0 ? void 0 : _g[0]) === null || _h === void 0 ? void 0 : _h.message) === null || _j === void 0 ? void 0 : _j.tool_calls;
|
||||
}
|
||||
if (response === null || response === void 0 ? void 0 : response.request_id) {
|
||||
result.id = response.request_id;
|
||||
}
|
||||
result.detail = response;
|
||||
result.text = response.output.choices[0].message.content;
|
||||
result.conversation = messages;
|
||||
return [2 /*return*/, resolve(result)];
|
||||
case 6:
|
||||
err_1 = _k.sent();
|
||||
return [2 /*return*/, reject(err_1)];
|
||||
case 7: return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); }).then(function (message) { return __awaiter(_this, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
return [2 /*return*/, Promise.all([
|
||||
this._upsertMessage(latestQuestion),
|
||||
this._upsertMessage(message)
|
||||
]).then(function () { return message; })];
|
||||
});
|
||||
}); });
|
||||
if (timeoutMs) {
|
||||
if (abortController) {
|
||||
// This will be called when a timeout occurs in order for us to forcibly
|
||||
// ensure that the underlying HTTP request is aborted.
|
||||
;
|
||||
responseP.cancel = function () {
|
||||
abortController.abort();
|
||||
};
|
||||
}
|
||||
return [2 /*return*/, pTimeout(responseP, {
|
||||
milliseconds: timeoutMs,
|
||||
message: 'Qwen timed out waiting for response'
|
||||
})];
|
||||
}
|
||||
else {
|
||||
return [2 /*return*/, responseP];
|
||||
}
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
Object.defineProperty(QwenApi.prototype, "apiKey", {
|
||||
// @ts-ignore
|
||||
get: function () {
|
||||
return this._apiKey;
|
||||
},
|
||||
// @ts-ignore
|
||||
set: function (apiKey) {
|
||||
this._apiKey = apiKey;
|
||||
},
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
QwenApi.prototype._buildMessages = function (text, role, opts, completionParams) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
var _a, systemMessage, parentMessageId, userLabel, assistantLabel, maxNumTokens, messages, systemMessageOffset, nextMessages, functionToken, numTokens, prompt_1, nextNumTokensEstimate, _i, nextMessages_1, m1, _b, isValidPrompt, parentMessage, parentMessageRole, maxTokens;
|
||||
return __generator(this, function (_c) {
|
||||
switch (_c.label) {
|
||||
case 0:
|
||||
_a = opts.systemMessage, systemMessage = _a === void 0 ? this._systemMessage : _a;
|
||||
parentMessageId = opts.parentMessageId;
|
||||
userLabel = USER_LABEL_DEFAULT;
|
||||
assistantLabel = ASSISTANT_LABEL_DEFAULT;
|
||||
maxNumTokens = 32000;
|
||||
messages = [];
|
||||
if (systemMessage) {
|
||||
messages.push({
|
||||
role: 'system',
|
||||
content: systemMessage
|
||||
});
|
||||
}
|
||||
systemMessageOffset = messages.length;
|
||||
nextMessages = text
|
||||
? messages.concat([
|
||||
{
|
||||
role: role,
|
||||
content: text,
|
||||
name: role === 'tool' ? opts.name : undefined
|
||||
}
|
||||
])
|
||||
: messages;
|
||||
functionToken = 0;
|
||||
numTokens = functionToken;
|
||||
_c.label = 1;
|
||||
case 1:
|
||||
prompt_1 = nextMessages
|
||||
.reduce(function (prompt, message) {
|
||||
switch (message.role) {
|
||||
case 'system':
|
||||
return prompt.concat(["Instructions:\n".concat(message.content)]);
|
||||
case 'user':
|
||||
return prompt.concat(["".concat(userLabel, ":\n").concat(message.content)]);
|
||||
default:
|
||||
return message.content ? prompt.concat(["".concat(assistantLabel, ":\n").concat(message.content)]) : prompt;
|
||||
}
|
||||
}, [])
|
||||
.join('\n\n');
|
||||
return [4 /*yield*/, this._getTokenCount(prompt_1)];
|
||||
case 2:
|
||||
nextNumTokensEstimate = _c.sent();
|
||||
_i = 0, nextMessages_1 = nextMessages;
|
||||
_c.label = 3;
|
||||
case 3:
|
||||
if (!(_i < nextMessages_1.length)) return [3 /*break*/, 6];
|
||||
m1 = nextMessages_1[_i];
|
||||
_b = nextNumTokensEstimate;
|
||||
return [4 /*yield*/, this._getTokenCount('')];
|
||||
case 4:
|
||||
nextNumTokensEstimate = _b + _c.sent();
|
||||
_c.label = 5;
|
||||
case 5:
|
||||
_i++;
|
||||
return [3 /*break*/, 3];
|
||||
case 6:
|
||||
isValidPrompt = nextNumTokensEstimate + functionToken <= maxNumTokens;
|
||||
if (prompt_1 && !isValidPrompt) {
|
||||
return [3 /*break*/, 9];
|
||||
}
|
||||
messages = nextMessages;
|
||||
numTokens = nextNumTokensEstimate + functionToken;
|
||||
if (!isValidPrompt) {
|
||||
return [3 /*break*/, 9];
|
||||
}
|
||||
if (!parentMessageId) {
|
||||
return [3 /*break*/, 9];
|
||||
}
|
||||
return [4 /*yield*/, this._getMessageById(parentMessageId)];
|
||||
case 7:
|
||||
parentMessage = _c.sent();
|
||||
if (!parentMessage) {
|
||||
return [3 /*break*/, 9];
|
||||
}
|
||||
parentMessageRole = parentMessage.role || 'user';
|
||||
nextMessages = nextMessages.slice(0, systemMessageOffset).concat(__spreadArray([
|
||||
{
|
||||
role: parentMessageRole,
|
||||
content: parentMessage.functionCall ? parentMessage.functionCall.arguments : parentMessage.text,
|
||||
name: parentMessage.functionCall ? parentMessage.functionCall.name : undefined,
|
||||
tool_calls: parentMessage.toolCalls
|
||||
}
|
||||
], nextMessages.slice(systemMessageOffset), true));
|
||||
parentMessageId = parentMessage.parentMessageId;
|
||||
_c.label = 8;
|
||||
case 8:
|
||||
if (true) return [3 /*break*/, 1];
|
||||
_c.label = 9;
|
||||
case 9:
|
||||
maxTokens = Math.max(1, Math.min(this._maxModelTokens - numTokens, this._maxResponseTokens));
|
||||
return [2 /*return*/, { messages: messages, maxTokens: maxTokens, numTokens: numTokens }];
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
QwenApi.prototype._getTokenCount = function (text) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
if (!text) {
|
||||
return [2 /*return*/, 0];
|
||||
}
|
||||
// TODO: use a better fix in the tokenizer
|
||||
text = text.replace(/<\|endoftext\|>/g, '');
|
||||
return [2 /*return*/, tokenizer.encode(text).length];
|
||||
});
|
||||
});
|
||||
};
|
||||
QwenApi.prototype._defaultGetMessageById = function (id) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
var res;
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, this._messageStore.get(id)];
|
||||
case 1:
|
||||
res = _a.sent();
|
||||
return [2 /*return*/, res];
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
QwenApi.prototype._defaultUpsertMessage = function (message) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, this._messageStore.set(message.id, message)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
return QwenApi;
|
||||
}());
|
||||
export { QwenApi };
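
A minimal, hypothetical usage sketch for the compiled QwenApi client above, assuming a valid DashScope API key. Note that sendMessage reads completionParams.parameters unconditionally, so an empty completionParams object is passed, and parentMessageId threads the follow-up onto the first reply through the built-in message store:

// Hypothetical example of calling the QwenApi wrapper above.
import { QwenApi } from './qwen.js' // file name assumed for illustration

const api = new QwenApi({ apiKey: process.env.DASHSCOPE_API_KEY })
// An (empty) completionParams is needed: sendMessage dereferences completionParams.parameters directly.
const first = await api.sendMessage('Introduce yourself in one sentence.', { completionParams: {} })
const followUp = await api.sendMessage('Now say it in Chinese.', {
  parentMessageId: first.id, // continues the conversation via the default message store
  completionParams: {}
})
console.log(followUp.text)
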
@@ -1,405 +0,0 @@
// @ts-ignore
|
||||
import Keyv from 'keyv'
|
||||
// @ts-ignore
|
||||
import pTimeout from 'p-timeout'
|
||||
// @ts-ignore
|
||||
import QuickLRU from 'quick-lru'
|
||||
// @ts-ignore
|
||||
import {v4 as uuidv4} from 'uuid'
|
||||
|
||||
import * as tokenizer from './tokenizer'
|
||||
import * as types from './types'
|
||||
// @ts-ignore
|
||||
import globalFetch from 'node-fetch'
|
||||
import {qwen, Role} from "./types";
|
||||
import {openai} from "../openai/types";
|
||||
|
||||
const CHATGPT_MODEL = 'qwen-turbo' // qwen-plus
|
||||
|
||||
const USER_LABEL_DEFAULT = 'User'
|
||||
const ASSISTANT_LABEL_DEFAULT = '通义千问'
|
||||
|
||||
export class QwenApi {
|
||||
protected _apiKey: string
|
||||
protected _apiBaseUrl: string
|
||||
protected _debug: boolean
|
||||
|
||||
protected _systemMessage: string
|
||||
protected _completionParams: Omit<
|
||||
types.qwen.CreateChatCompletionRequest,
|
||||
'messages' | 'n'
|
||||
>
|
||||
protected _maxModelTokens: number
|
||||
protected _maxResponseTokens: number
|
||||
protected _fetch: types.FetchFn
|
||||
|
||||
protected _getMessageById: types.GetMessageByIdFunction
|
||||
protected _upsertMessage: types.UpsertMessageFunction
|
||||
|
||||
protected _messageStore: Keyv<types.ChatMessage>
|
||||
|
||||
/**
|
||||
* Creates a new client wrapper around Qwen's chat completion API, mimicing the official ChatGPT webapp's functionality as closely as possible.
|
||||
*
|
||||
* @param opts
|
||||
*/
|
||||
constructor(opts: types.QWenAPIOptions) {
|
||||
const {
|
||||
apiKey,
|
||||
apiBaseUrl = 'https://dashscope.aliyuncs.com/api/v1',
|
||||
debug = false,
|
||||
messageStore,
|
||||
completionParams,
|
||||
parameters,
|
||||
systemMessage,
|
||||
getMessageById,
|
||||
upsertMessage,
|
||||
fetch = globalFetch
|
||||
} = opts
|
||||
|
||||
this._apiKey = apiKey
|
||||
this._apiBaseUrl = apiBaseUrl
|
||||
this._debug = !!debug
|
||||
// @ts-ignore
|
||||
this._fetch = fetch
|
||||
|
||||
this._completionParams = {
|
||||
model: CHATGPT_MODEL,
|
||||
parameters: {
|
||||
top_p: 0.5,
|
||||
top_k: 50,
|
||||
temperature: 1.0,
|
||||
seed: 114514,
|
||||
enable_search: true,
|
||||
result_format: "message",
|
||||
incremental_output: false,
|
||||
...parameters
|
||||
},
|
||||
...completionParams
|
||||
}
|
||||
|
||||
this._systemMessage = systemMessage
|
||||
|
||||
if (this._systemMessage === undefined) {
|
||||
const currentDate = new Date().toISOString().split('T')[0]
|
||||
this._systemMessage = `You are Qwen, a large language model trained by Alibaba Cloud. Answer as concisely as possible.\nCurrent date: ${currentDate}`
|
||||
}
|
||||
|
||||
this._getMessageById = getMessageById ?? this._defaultGetMessageById
|
||||
this._upsertMessage = upsertMessage ?? this._defaultUpsertMessage
|
||||
|
||||
if (messageStore) {
|
||||
this._messageStore = messageStore
|
||||
} else {
|
||||
this._messageStore = new Keyv<types.ChatMessage, any>({
|
||||
store: new QuickLRU<string, types.ChatMessage>({maxSize: 10000})
|
||||
})
|
||||
}
|
||||
|
||||
if (!this._apiKey) {
|
||||
throw new Error('Qwen missing required apiKey')
|
||||
}
|
||||
|
||||
if (!this._fetch) {
|
||||
throw new Error('Invalid environment; fetch is not defined')
|
||||
}
|
||||
|
||||
if (typeof this._fetch !== 'function') {
|
||||
throw new Error('Invalid "fetch" is not a function')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends a message to the Qwen chat completions endpoint, waits for the response
|
||||
* to resolve, and returns the response.
|
||||
*
|
||||
* If you want your response to have historical context, you must provide a valid `parentMessageId`.
|
||||
*
|
||||
* If you want to receive a stream of partial responses, use `opts.onProgress`.
|
||||
*
|
||||
* Set `debug: true` in the `ChatGPTAPI` constructor to log more info on the full prompt sent to the Qwen chat completions API. You can override the `systemMessage` in `opts` to customize the assistant's instructions.
|
||||
*
|
||||
* @param message - The prompt message to send
|
||||
* @param opts.parentMessageId - Optional ID of the previous message in the conversation (defaults to `undefined`)
|
||||
* @param opts.conversationId - Optional ID of the conversation (defaults to `undefined`)
|
||||
* @param opts.messageId - Optional ID of the message to send (defaults to a random UUID)
|
||||
* @param opts.systemMessage - Optional override for the chat "system message" which acts as instructions to the model (defaults to the ChatGPT system message)
|
||||
* @param opts.timeoutMs - Optional timeout in milliseconds (defaults to no timeout)
|
||||
* @param opts.onProgress - Optional callback which will be invoked every time the partial response is updated
|
||||
* @param opts.abortSignal - Optional callback used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
|
||||
* @param opts.completionParams - Optional overrides to send to the [Qwen chat completion API](https://platform.openai.com/docs/api-reference/chat/create). Options like `temperature` and `presence_penalty` can be tweaked to change the personality of the assistant.
|
||||
*
|
||||
* @returns The response from ChatGPT
|
||||
*/
|
||||
async sendMessage(
|
||||
text: string,
|
||||
opts: types.SendMessageOptions = {},
|
||||
role: Role = 'user',
|
||||
): Promise<types.ChatMessage> {
|
||||
let {
|
||||
parentMessageId,
|
||||
messageId = uuidv4(),
|
||||
timeoutMs,
|
||||
completionParams,
|
||||
conversationId
|
||||
} = opts
|
||||
|
||||
let {abortSignal} = opts
|
||||
|
||||
let abortController: AbortController = null
|
||||
if (timeoutMs && !abortSignal) {
|
||||
abortController = new AbortController()
|
||||
abortSignal = abortController.signal
|
||||
}
|
||||
|
||||
const message: types.ChatMessage = {
|
||||
role,
|
||||
id: messageId,
|
||||
conversationId,
|
||||
parentMessageId,
|
||||
text,
|
||||
}
|
||||
|
||||
const latestQuestion = message
|
||||
|
||||
let parameters = Object.assign(
|
||||
this._completionParams.parameters,
|
||||
completionParams.parameters
|
||||
)
|
||||
completionParams = Object.assign(this._completionParams, completionParams)
|
||||
completionParams.parameters = parameters
|
||||
const {messages, maxTokens, numTokens} = await this._buildMessages(
|
||||
text,
|
||||
role,
|
||||
opts,
|
||||
completionParams
|
||||
)
|
||||
|
||||
if (this._debug) {
  console.log(`maxTokens: ${maxTokens}, numTokens: ${numTokens}`)
}
|
||||
const result: types.ChatMessage & { conversation: qwen.ChatCompletionRequestMessage[] } = {
|
||||
role: 'assistant',
|
||||
id: uuidv4(),
|
||||
conversationId,
|
||||
parentMessageId: messageId,
|
||||
text: undefined,
|
||||
functionCall: undefined,
|
||||
conversation: []
|
||||
}
|
||||
completionParams.input = {messages}
|
||||
const responseP = new Promise<types.ChatMessage>(
|
||||
async (resolve, reject) => {
|
||||
const url = `${this._apiBaseUrl}/services/aigc/text-generation/generation`
|
||||
const headers = {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${this._apiKey}`
|
||||
}
|
||||
const body = completionParams
|
||||
if (this._debug) {
|
||||
console.log(JSON.stringify(body))
|
||||
}
|
||||
|
||||
if (this._debug) {
|
||||
console.log(`sendMessage (${numTokens} tokens)`, body)
|
||||
}
|
||||
try {
|
||||
const res = await this._fetch(url, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: JSON.stringify(body),
|
||||
signal: abortSignal
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
const reason = await res.text()
|
||||
const msg = `Qwen error ${
|
||||
res.status || res.statusText
|
||||
}: ${reason}`
|
||||
const error = new types.ChatGPTError(msg)
|
||||
error.statusCode = res.status
|
||||
error.statusText = res.statusText
|
||||
return reject(error)
|
||||
}
|
||||
|
||||
const response: types.qwen.CreateChatCompletionResponse =
|
||||
await res.json()
|
||||
if (this._debug) {
|
||||
console.log(response)
|
||||
}
|
||||
if (response.output?.choices?.[0]?.message?.tool_calls?.length > 0) {
|
||||
// function call result
|
||||
result.functionCall = response.output.choices[0].message.tool_calls[0].function
|
||||
result.toolCalls = response.output?.choices?.[0]?.message?.tool_calls
|
||||
}
|
||||
if (response?.request_id) {
|
||||
result.id = response.request_id
|
||||
}
|
||||
result.detail = response
|
||||
// result_format 'message' populates output.choices; fall back to output.text otherwise
result.text = response.output.choices?.[0]?.message?.content ?? response.output.text
|
||||
result.conversation = messages
|
||||
return resolve(result)
|
||||
} catch (err) {
|
||||
return reject(err)
|
||||
}
|
||||
|
||||
}
|
||||
).then(async (message) => {
|
||||
return Promise.all([
|
||||
this._upsertMessage(latestQuestion),
|
||||
this._upsertMessage(message)
|
||||
]).then(() => message)
|
||||
})
|
||||
|
||||
if (timeoutMs) {
|
||||
if (abortController) {
|
||||
// This will be called when a timeout occurs in order for us to forcibly
|
||||
// ensure that the underlying HTTP request is aborted.
|
||||
;(responseP as any).cancel = () => {
|
||||
abortController.abort()
|
||||
}
|
||||
}
|
||||
|
||||
return pTimeout(responseP, {
|
||||
milliseconds: timeoutMs,
|
||||
message: 'Qwen timed out waiting for response'
|
||||
})
|
||||
} else {
|
||||
return responseP
|
||||
}
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
get apiKey(): string {
|
||||
return this._apiKey
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
set apiKey(apiKey: string) {
|
||||
this._apiKey = apiKey
|
||||
}
|
||||
|
||||
|
||||
protected async _buildMessages(text: string, role: Role, opts: types.SendMessageOptions, completionParams: Partial<
|
||||
Omit<qwen.CreateChatCompletionRequest, 'messages' | 'n' | 'stream'>
|
||||
>) {
|
||||
const {systemMessage = this._systemMessage} = opts
|
||||
let {parentMessageId} = opts
|
||||
|
||||
const userLabel = USER_LABEL_DEFAULT
|
||||
const assistantLabel = ASSISTANT_LABEL_DEFAULT
|
||||
|
||||
// fixed context window size for Qwen models
|
||||
const maxNumTokens = 32000
|
||||
let messages: types.qwen.ChatCompletionRequestMessage[] = []
|
||||
|
||||
if (systemMessage) {
|
||||
messages.push({
|
||||
role: 'system',
|
||||
content: systemMessage
|
||||
})
|
||||
}
|
||||
|
||||
const systemMessageOffset = messages.length
|
||||
let nextMessages = text
|
||||
? messages.concat([
|
||||
{
|
||||
role,
|
||||
content: text,
|
||||
name: role === 'tool' ? opts.name : undefined
|
||||
}
|
||||
])
|
||||
: messages
|
||||
|
||||
let functionToken = 0
|
||||
|
||||
let numTokens = functionToken
|
||||
|
||||
do {
|
||||
const prompt = nextMessages
|
||||
.reduce((prompt, message) => {
|
||||
switch (message.role) {
|
||||
case 'system':
|
||||
return prompt.concat([`Instructions:\n${message.content}`])
|
||||
case 'user':
|
||||
return prompt.concat([`${userLabel}:\n${message.content}`])
|
||||
default:
|
||||
return message.content ? prompt.concat([`${assistantLabel}:\n${message.content}`]) : prompt
|
||||
}
|
||||
}, [] as string[])
|
||||
.join('\n\n')
|
||||
|
||||
let nextNumTokensEstimate = await this._getTokenCount(prompt)
|
||||
|
||||
for (const m1 of nextMessages) {
  // currently a no-op: _getTokenCount('') returns 0, so no per-message overhead is added
  nextNumTokensEstimate += await this._getTokenCount('')
}
|
||||
|
||||
const isValidPrompt = nextNumTokensEstimate + functionToken <= maxNumTokens
|
||||
|
||||
if (prompt && !isValidPrompt) {
|
||||
break
|
||||
}
|
||||
messages = nextMessages
|
||||
numTokens = nextNumTokensEstimate + functionToken
|
||||
|
||||
if (!isValidPrompt) {
|
||||
break
|
||||
}
|
||||
|
||||
if (!parentMessageId) {
|
||||
break
|
||||
}
|
||||
|
||||
const parentMessage = await this._getMessageById(parentMessageId)
|
||||
if (!parentMessage) {
|
||||
break
|
||||
}
|
||||
|
||||
const parentMessageRole = parentMessage.role || 'user'
|
||||
|
||||
nextMessages = nextMessages.slice(0, systemMessageOffset).concat([
|
||||
{
|
||||
role: parentMessageRole,
|
||||
content: parentMessage.functionCall ? parentMessage.functionCall.arguments : parentMessage.text,
|
||||
name: parentMessage.functionCall ? parentMessage.functionCall.name : undefined,
|
||||
tool_calls: parentMessage.toolCalls
|
||||
},
|
||||
...nextMessages.slice(systemMessageOffset)
|
||||
])
|
||||
|
||||
parentMessageId = parentMessage.parentMessageId
|
||||
|
||||
} while (true)
|
||||
|
||||
// Use up to `_maxModelTokens` tokens (prompt + response), but try to leave
// `_maxResponseTokens` tokens for the response.
|
||||
const maxTokens = Math.max(
|
||||
1,
|
||||
Math.min(this._maxModelTokens - numTokens, this._maxResponseTokens)
|
||||
)
|
||||
|
||||
return {messages, maxTokens, numTokens}
|
||||
}
|
||||
|
||||
protected async _getTokenCount(text: string) {
|
||||
if (!text) {
|
||||
return 0
|
||||
}
|
||||
// TODO: use a better fix in the tokenizer
|
||||
text = text.replace(/<\|endoftext\|>/g, '')
|
||||
|
||||
return tokenizer.encode(text).length
|
||||
}
|
||||
|
||||
protected async _defaultGetMessageById(
|
||||
id: string
|
||||
): Promise<types.ChatMessage> {
|
||||
const res = await this._messageStore.get(id)
|
||||
return res
|
||||
}
|
||||
|
||||
protected async _defaultUpsertMessage(
|
||||
message: types.ChatMessage
|
||||
): Promise<void> {
|
||||
await this._messageStore.set(message.id, message)
|
||||
}
|
||||
}
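// Usage sketch, not part of the original file: driving the Qwen client defined above.
// The class name `QWenApi` and the import path are assumptions inferred from the
// `QWenAPIOptions` type; only `apiKey` is required by the constructor checks, and
// `parentMessageId` carries conversational context between turns.
// import { QWenApi } from './qwen.js'
// const api = new QWenApi({ apiKey: process.env.QWEN_API_KEY })
// const first = await api.sendMessage('Hello, who are you?')
// const followUp = await api.sendMessage('Answer again in one sentence.', {
//   parentMessageId: first.id, // link to the previous turn for history
//   timeoutMs: 60 * 1000
// })
// console.log(followUp.text)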
|
||||
|
|
@@ -1,6 +0,0 @@
|
|||
import { getEncoding } from 'js-tiktoken';
|
||||
// TODO: make this configurable
|
||||
var tokenizer = getEncoding('cl100k_base');
|
||||
export function encode(input) {
|
||||
return new Uint32Array(tokenizer.encode(input));
|
||||
}
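// Usage sketch, not part of the original file: counting prompt tokens with the
// encode() wrapper above (the relative import path is an assumption).
// import { encode } from './tokenizer.js'
// const numTokens = encode('How many tokens is this sentence?').length
// console.log(numTokens) // length of the Uint32Array = cl100k_base token count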
|
||||
|
|
@@ -1,9 +0,0 @@
|
|||
// @ts-ignore
|
||||
import { getEncoding } from 'js-tiktoken'
|
||||
|
||||
// TODO: make this configurable
|
||||
const tokenizer = getEncoding('cl100k_base')
|
||||
|
||||
export function encode(input: string): Uint32Array {
|
||||
return new Uint32Array(tokenizer.encode(input))
|
||||
}
|
||||
|
|
@@ -1,7 +0,0 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"module": "es2020",
|
||||
"moduleResolution": "node",
|
||||
"esModuleInterop": true
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,26 +0,0 @@
|
|||
var __extends = (this && this.__extends) || (function () {
|
||||
var extendStatics = function (d, b) {
|
||||
extendStatics = Object.setPrototypeOf ||
|
||||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
||||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
|
||||
return extendStatics(d, b);
|
||||
};
|
||||
return function (d, b) {
|
||||
if (typeof b !== "function" && b !== null)
|
||||
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
|
||||
extendStatics(d, b);
|
||||
function __() { this.constructor = d; }
|
||||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
||||
};
|
||||
})();
|
||||
var ChatGPTError = /** @class */ (function (_super) {
|
||||
__extends(ChatGPTError, _super);
|
||||
function ChatGPTError() {
|
||||
return _super !== null && _super.apply(this, arguments) || this;
|
||||
}
|
||||
return ChatGPTError;
|
||||
}(Error));
|
||||
export { ChatGPTError };
|
||||
export var qwen;
|
||||
(function (qwen) {
|
||||
})(qwen || (qwen = {}));
|
||||
|
|
@@ -1,379 +0,0 @@
|
|||
// @ts-ignore
|
||||
import Keyv from 'keyv'
|
||||
import {openai} from "../openai/types";
|
||||
|
||||
// @ts-ignore
|
||||
export type Role = 'user' | 'assistant' | 'system' | 'tool'
|
||||
|
||||
export type FetchFn = typeof fetch
|
||||
|
||||
export type QWenAPIOptions = {
|
||||
apiKey: string
|
||||
|
||||
/** @defaultValue `'https://dashscope.aliyuncs.com/api/v1'` **/
|
||||
apiBaseUrl?: string
|
||||
|
||||
apiOrg?: string
|
||||
|
||||
/** @defaultValue `false` **/
|
||||
debug?: boolean
|
||||
|
||||
completionParams?: Partial<
|
||||
Omit<qwen.CreateChatCompletionRequest, 'messages' | 'n' | 'stream'>
|
||||
>
|
||||
parameters?: qwen.QWenParameters,
|
||||
|
||||
systemMessage?: string
|
||||
|
||||
messageStore?: Keyv
|
||||
getMessageById?: GetMessageByIdFunction
|
||||
upsertMessage?: UpsertMessageFunction
|
||||
|
||||
fetch?: FetchFn
|
||||
}
|
||||
|
||||
export type SendMessageOptions = {
|
||||
/**
|
||||
* function role name
|
||||
*/
|
||||
name?: string
|
||||
messageId?: string
|
||||
stream?: boolean
|
||||
systemMessage?: string
|
||||
parentMessageId?: string
|
||||
conversationId?: string
|
||||
timeoutMs?: number
|
||||
onProgress?: (partialResponse: ChatMessage) => void
|
||||
abortSignal?: AbortSignal
|
||||
completionParams?: Partial<
|
||||
Omit<qwen.CreateChatCompletionRequest, 'messages' | 'n' | 'stream'>
|
||||
>
|
||||
}
|
||||
|
||||
export type MessageActionType = 'next' | 'variant'
|
||||
|
||||
export type SendMessageBrowserOptions = {
|
||||
conversationId?: string
|
||||
parentMessageId?: string
|
||||
messageId?: string
|
||||
action?: MessageActionType
|
||||
timeoutMs?: number
|
||||
onProgress?: (partialResponse: ChatMessage) => void
|
||||
abortSignal?: AbortSignal
|
||||
}
|
||||
|
||||
export interface ChatMessage {
|
||||
id: string
|
||||
text: string
|
||||
role: Role
|
||||
parentMessageId?: string
|
||||
conversationId?: string
|
||||
detail?:
|
||||
| qwen.CreateChatCompletionResponse
|
||||
| CreateChatCompletionStreamResponse
|
||||
functionCall?: qwen.FunctionCall
|
||||
toolCalls?: qwen.ToolCall[]
|
||||
}
|
||||
|
||||
export class ChatGPTError extends Error {
|
||||
statusCode?: number
|
||||
statusText?: string
|
||||
isFinal?: boolean
|
||||
accountId?: string
|
||||
}
|
||||
|
||||
/** Returns a chat message from a store by its ID (or null if not found). */
|
||||
export type GetMessageByIdFunction = (id: string) => Promise<ChatMessage>
|
||||
|
||||
/** Upserts a chat message to a store. */
|
||||
export type UpsertMessageFunction = (message: ChatMessage) => Promise<void>
|
||||
|
||||
export interface CreateChatCompletionStreamResponse
|
||||
extends openai.CreateChatCompletionDeltaResponse {
|
||||
usage: CreateCompletionStreamResponseUsage
|
||||
}
|
||||
|
||||
export interface CreateCompletionStreamResponseUsage
|
||||
extends openai.CreateCompletionResponseUsage {
|
||||
estimated: true
|
||||
}
|
||||
|
||||
/**
|
||||
* https://chat.openapi.com/backend-api/conversation
|
||||
*/
|
||||
export type ConversationJSONBody = {
|
||||
/**
|
||||
* The action to take
|
||||
*/
|
||||
action: string
|
||||
|
||||
/**
|
||||
* The ID of the conversation
|
||||
*/
|
||||
conversation_id?: string
|
||||
|
||||
/**
|
||||
* Prompts to provide
|
||||
*/
|
||||
messages: Prompt[]
|
||||
|
||||
/**
|
||||
* The model to use
|
||||
*/
|
||||
model: string
|
||||
|
||||
/**
|
||||
* The parent message ID
|
||||
*/
|
||||
parent_message_id: string
|
||||
}
|
||||
|
||||
export type Prompt = {
|
||||
/**
|
||||
* The content of the prompt
|
||||
*/
|
||||
content: PromptContent
|
||||
|
||||
/**
|
||||
* The ID of the prompt
|
||||
*/
|
||||
id: string
|
||||
|
||||
/**
|
||||
* The role played in the prompt
|
||||
*/
|
||||
role: Role
|
||||
}
|
||||
|
||||
export type ContentType = 'text'
|
||||
|
||||
export type PromptContent = {
|
||||
/**
|
||||
* The content type of the prompt
|
||||
*/
|
||||
content_type: ContentType
|
||||
|
||||
/**
|
||||
* The parts to the prompt
|
||||
*/
|
||||
parts: string[]
|
||||
}
|
||||
|
||||
export type ConversationResponseEvent = {
|
||||
message?: Message
|
||||
conversation_id?: string
|
||||
error?: string | null
|
||||
}
|
||||
|
||||
export type Message = {
|
||||
id: string
|
||||
content: MessageContent
|
||||
role: Role
|
||||
user: string | null
|
||||
create_time: string | null
|
||||
update_time: string | null
|
||||
end_turn: null
|
||||
weight: number
|
||||
recipient: string
|
||||
metadata: MessageMetadata
|
||||
}
|
||||
|
||||
export type MessageContent = {
|
||||
content_type: string
|
||||
parts: string[]
|
||||
}
|
||||
|
||||
export type MessageMetadata = any
|
||||
|
||||
export namespace qwen {
|
||||
export interface CreateChatCompletionDeltaResponse {
|
||||
id: string
|
||||
object: 'chat.completion.chunk'
|
||||
created: number
|
||||
model: string
|
||||
choices: [
|
||||
{
|
||||
delta: {
|
||||
role: Role
|
||||
content?: string,
|
||||
function_call?: { name: string, arguments: string }
|
||||
}
|
||||
index: number
|
||||
finish_reason: string | null
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
* @interface ChatCompletionRequestMessage
|
||||
*/
|
||||
export interface ChatCompletionRequestMessage {
|
||||
/**
|
||||
* The role of the author of this message.
|
||||
* @type {string}
|
||||
* @memberof ChatCompletionRequestMessage
|
||||
*/
|
||||
role: ChatCompletionRequestMessageRoleEnum
|
||||
/**
|
||||
* The contents of the message
|
||||
* @type {string}
|
||||
* @memberof ChatCompletionRequestMessage
|
||||
*/
|
||||
content: string
|
||||
|
||||
/**
|
||||
* When role is 'tool', this message is the result of a function_call: `name` is the function name and must match the `tool_calls[i].function.name` parameter from the previous response, and `content` is the function's output.
|
||||
*/
|
||||
name?: string
|
||||
|
||||
tool_calls?: ToolCall[]
|
||||
}
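// Illustration, not part of the original file: a function-call round trip using
// these message types (all values below are made up for the example).
// Assistant turn from the previous response:
//   { role: 'assistant', content: '', tool_calls: [{ type: 'function',
//     function: { name: 'get_weather', arguments: '{"city":"Beijing"}' } }] }
// Tool turn sent back by the caller; `name` must match tool_calls[i].function.name:
//   { role: 'tool', name: 'get_weather', content: '{"temp":"12C","sky":"sunny"}' }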
|
||||
|
||||
export interface FunctionCall {
|
||||
name: string
|
||||
arguments: string
|
||||
}
|
||||
|
||||
export declare const ChatCompletionRequestMessageRoleEnum: {
|
||||
readonly System: 'system'
|
||||
readonly User: 'user'
|
||||
readonly Assistant: 'assistant'
|
||||
readonly Tool: 'tool'
|
||||
}
|
||||
export declare type ChatCompletionRequestMessageRoleEnum =
|
||||
(typeof ChatCompletionRequestMessageRoleEnum)[keyof typeof ChatCompletionRequestMessageRoleEnum]
|
||||
|
||||
|
||||
export interface QWenInput {
|
||||
messages: Array<ChatCompletionRequestMessage>
|
||||
}
|
||||
|
||||
export interface QWenParameters {
|
||||
result_format: "text" | "message"
|
||||
top_p: number
|
||||
top_k: number
|
||||
seed: number
|
||||
temperature: number
|
||||
enable_search: boolean
|
||||
incremental_output: boolean
|
||||
tools: Tools[]
|
||||
}
|
||||
|
||||
export interface Tools {
|
||||
type: "function"
|
||||
function: QwenFunction
|
||||
}
|
||||
|
||||
export interface QwenFunction {
|
||||
name: string
|
||||
description: string
|
||||
parameters: QwenFunctionParameters
|
||||
}
|
||||
|
||||
export interface QwenFunctionParameters {
|
||||
type: "object"
|
||||
properties: Properties;
|
||||
required?: string[]
|
||||
}
|
||||
|
||||
interface Properties {
|
||||
[key: string]: Property;
|
||||
}
|
||||
|
||||
interface Property {
|
||||
type: string;
|
||||
description?: string;
|
||||
enum?: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
* @interface CreateChatCompletionRequest
|
||||
*/
|
||||
export interface CreateChatCompletionRequest {
|
||||
/**
|
||||
* ID of the model to use, e.g. `qwen-turbo`.
|
||||
* @type {string}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
model: string
|
||||
/**
|
||||
* The input wrapping the messages to generate chat completions for.
* @type {QWenInput}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
input?: QWenInput
|
||||
|
||||
parameters: QWenParameters
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
* @interface CreateChatCompletionResponse
|
||||
*/
|
||||
export interface CreateChatCompletionResponse {
|
||||
/**
|
||||
*
|
||||
* @type {string}
|
||||
* @memberof CreateChatCompletionResponse
|
||||
*/
|
||||
request_id: string
|
||||
/**
|
||||
*
|
||||
* @type {QWenOutput}
|
||||
* @memberof CreateChatCompletionResponse
|
||||
*/
|
||||
output: QWenOutput
|
||||
/**
|
||||
*
|
||||
* @type {CreateCompletionResponseUsage}
|
||||
* @memberof CreateChatCompletionResponse
|
||||
*/
|
||||
usage?: CreateCompletionResponseUsage
|
||||
}
|
||||
|
||||
export interface QWenOutput {
|
||||
finish_reason: string
|
||||
text?: string
|
||||
choices?: Choice[]
|
||||
}
|
||||
|
||||
export interface Choice {
|
||||
finish_reason: string
|
||||
message: ResponseMessage
|
||||
}
|
||||
|
||||
export interface ResponseMessage {
|
||||
role: Role
|
||||
content: string
|
||||
tool_calls: ToolCall[]
|
||||
}
|
||||
|
||||
export interface ToolCall {
|
||||
function: FunctionCall
|
||||
type: "function"
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
* @interface CreateCompletionResponseUsage
|
||||
*/
|
||||
export interface CreateCompletionResponseUsage {
|
||||
/**
|
||||
*
|
||||
* @type {number}
|
||||
* @memberof CreateCompletionResponseUsage
|
||||
*/
|
||||
input_tokens: number
|
||||
/**
|
||||
*
|
||||
* @type {number}
|
||||
* @memberof CreateCompletionResponseUsage
|
||||
*/
|
||||
output_tokens: number
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,71 +0,0 @@
|
|||
import md5 from 'md5'
|
||||
import fetch from 'node-fetch'
|
||||
|
||||
const mixinKeyEncTab = [
|
||||
46, 47, 18, 2, 53, 8, 23, 32, 15, 50, 10, 31, 58, 3, 45, 35, 27, 43, 5, 49,
|
||||
33, 9, 42, 19, 29, 28, 14, 39, 12, 38, 41, 13, 37, 48, 7, 16, 24, 55, 40,
|
||||
61, 26, 17, 0, 1, 60, 51, 30, 4, 22, 25, 54, 21, 56, 59, 6, 63, 57, 62, 11,
|
||||
36, 20, 34, 44, 52
|
||||
]
|
||||
|
||||
// shuffle the character order of imgKey + subKey to derive the mixin key
|
||||
function getMixinKey (orig) {
|
||||
let temp = ''
|
||||
mixinKeyEncTab.forEach((n) => {
|
||||
temp += orig[n]
|
||||
})
|
||||
return temp.slice(0, 32)
|
||||
}
|
||||
|
||||
// apply the wbi signature to the request parameters
|
||||
function encWbi (params, imgKey, subKey) {
|
||||
const mixinKey = getMixinKey(imgKey + subKey)
|
||||
const currTime = Math.round(Date.now() / 1000)
|
||||
const chrFilter = /[!'()*]/g
|
||||
let query = []
|
||||
Object.assign(params, { wts: currTime }) // add the wts field
|
||||
// sort the parameters by key
|
||||
Object.keys(params).sort().forEach((key) => {
|
||||
query.push(
|
||||
`${encodeURIComponent(key)}=${encodeURIComponent(
|
||||
// strip the characters "!'()*" from the value
|
||||
params[key].toString().replace(chrFilter, '')
|
||||
)}`
|
||||
)
|
||||
})
|
||||
query = query.join('&')
|
||||
const wbiSign = md5(query + mixinKey) // compute w_rid
|
||||
return query + '&w_rid=' + wbiSign
|
||||
}
|
||||
|
||||
// fetch the latest img_key and sub_key
|
||||
async function getWbiKeys () {
|
||||
const resp = await fetch('https://api.bilibili.com/x/web-interface/nav')
|
||||
const jsonContent = await resp.json() // node-fetch responses have no `.data`; parse the body explicitly
|
||||
const imgUrl = jsonContent.data.wbi_img.img_url
|
||||
const subUrl = jsonContent.data.wbi_img.sub_url
|
||||
|
||||
return {
|
||||
img_key: imgUrl.slice(
|
||||
imgUrl.lastIndexOf('/') + 1,
|
||||
imgUrl.lastIndexOf('.')
|
||||
),
|
||||
sub_key: subUrl.slice(
|
||||
subUrl.lastIndexOf('/') + 1,
|
||||
subUrl.lastIndexOf('.')
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// getWbiKeys().then((wbi_keys) => {
|
||||
// const query = encWbi(
|
||||
// {
|
||||
// foo: '114',
|
||||
// bar: '514',
|
||||
// baz: 1919810
|
||||
// },
|
||||
// wbi_keys.img_key,
|
||||
// wbi_keys.sub_key
|
||||
// )
|
||||
// console.log(query)
|
||||
// })
|
||||
|
|
@@ -1,90 +0,0 @@
|
|||
import fetch from 'node-fetch'
|
||||
|
||||
// this file is deprecated
|
||||
import { Config } from './config.js'
|
||||
import HttpsProxyAgent from 'https-proxy-agent'
|
||||
|
||||
const newFetch = (url, options = {}) => {
|
||||
const defaultOptions = Config.proxy
|
||||
? {
|
||||
agent: HttpsProxyAgent(Config.proxy)
|
||||
}
|
||||
: {}
|
||||
const mergedOptions = {
|
||||
...defaultOptions,
|
||||
...options
|
||||
}
|
||||
|
||||
return fetch(url, mergedOptions)
|
||||
}
|
||||
export async function createCaptcha (e, tokenU) {
|
||||
let baseUrl = Config.sydneyReverseProxy
|
||||
let imageResponse = await newFetch(`${baseUrl}/edgesvc/turing/captcha/create`, {
|
||||
headers: {
|
||||
Cookie: `_U=${tokenU};`,
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36 Edg/114.0.1823.82',
|
||||
Accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
|
||||
Referer: 'https://edgeservices.bing.com/edgesvc/chat?udsframed=1&form=SHORUN&clientscopes=chat,noheader,channelstable,&shellsig=ddb7b7dc7a56d0c5350f37b3653696bbeb77496e&setlang=zh-CN&lightschemeovr=1'
|
||||
}
|
||||
})
|
||||
const blob = await imageResponse.blob()
|
||||
let id = imageResponse.headers.get('id')
|
||||
let regionId = imageResponse.headers.get('Regionid')
|
||||
const arrayBuffer = await blob.arrayBuffer()
|
||||
const buffer = Buffer.from(arrayBuffer)
|
||||
const base64String = buffer.toString('base64')
|
||||
// await e.reply(segment.image(base64String))
|
||||
return { id, regionId, image: base64String }
|
||||
}
|
||||
|
||||
export async function solveCaptcha (id, regionId, text, token) {
|
||||
let baseUrl = Config.sydneyReverseProxy
|
||||
let url = `${baseUrl}/edgesvc/turing/captcha/verify?type=visual&id=${id}&regionId=${regionId}&value=${text}`
|
||||
let res = await newFetch(url, {
|
||||
headers: {
|
||||
Cookie: '_U=' + token,
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36 Edg/114.0.1823.82',
|
||||
Referer: 'https://edgeservices.bing.com/edgesvc/chat?udsframed=1&form=SHORUN&clientscopes=chat,noheader,channelstable,&shellsig=ddb7b7dc7a56d0c5350f37b3653696bbeb77496e&setlang=zh-CN&lightschemeovr=1'
|
||||
}
|
||||
})
|
||||
res = await res.json()
|
||||
if (res.reason === 'Solved') {
|
||||
return {
|
||||
result: true,
|
||||
detail: res
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
result: false,
|
||||
detail: res
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function solveCaptchaOneShot (token) {
|
||||
if (!token) {
|
||||
throw new Error('no token')
|
||||
}
|
||||
let solveUrl = Config.bingCaptchaOneShotUrl
|
||||
if (!solveUrl) {
|
||||
throw new Error('no captcha source')
|
||||
}
|
||||
logger.info(`尝试解决token${token}的验证码`)
|
||||
let result = await fetch(solveUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
_U: token
|
||||
})
|
||||
})
|
||||
if (result.status === 200) {
|
||||
return await result.json()
|
||||
} else {
|
||||
return {
|
||||
success: false,
|
||||
error: result.statusText
|
||||
}
|
||||
}
|
||||
}
|
||||
utils/bot.js
|
|
@@ -1,15 +0,0 @@
|
|||
export function getBots () {
|
||||
if (Bot.uin === 88888) {
|
||||
// look up the adapters
|
||||
let adapters = Bot.adapter
|
||||
return adapters?.map(uin => Bot[uin])
|
||||
} else if (Bot.adapter && Bot.adapter.length > 0) {
|
||||
let bots = [Bot]
|
||||
Bot.adapter.forEach(uin => {
|
||||
bots.push(Bot[uin])
|
||||
})
|
||||
return bots
|
||||
} else {
|
||||
return [Bot]
|
||||
}
|
||||
}
|
||||
utils/browser.js
|
|
@@ -1,120 +0,0 @@
|
|||
import lodash from 'lodash'
|
||||
import { Config } from './config.js'
|
||||
let puppeteer = {}
|
||||
|
||||
class Puppeteer {
|
||||
constructor () {
|
||||
let args = [
|
||||
'--exclude-switches',
|
||||
'--no-sandbox',
|
||||
'--remote-debugging-port=51777',
|
||||
'--disable-setuid-sandbox',
|
||||
'--disable-infobars',
|
||||
'--disable-dev-shm-usage',
|
||||
'--disable-blink-features=AutomationControlled',
|
||||
'--ignore-certificate-errors',
|
||||
'--no-first-run',
|
||||
'--no-service-autorun',
|
||||
'--password-store=basic',
|
||||
'--system-developer-mode',
|
||||
'--mute-audio',
|
||||
'--disable-default-apps',
|
||||
'--no-zygote',
|
||||
'--disable-accelerated-2d-canvas',
|
||||
'--disable-web-security'
|
||||
// '--shm-size=1gb'
|
||||
]
|
||||
if (Config.proxy) {
|
||||
args.push(`--proxy-server=${Config.proxy}`)
|
||||
}
|
||||
this.browser = false
|
||||
this.lock = false
|
||||
this.config = {
|
||||
headless: Config.headless,
|
||||
args
|
||||
}
|
||||
|
||||
if (Config.chromePath) {
|
||||
this.config.executablePath = Config.chromePath
|
||||
}
|
||||
|
||||
this.html = {}
|
||||
}
|
||||
|
||||
async initPupp () {
|
||||
if (!lodash.isEmpty(puppeteer)) return puppeteer
|
||||
puppeteer = (await import('puppeteer')).default
|
||||
// const pluginStealth = StealthPlugin()
|
||||
// puppeteer.use(pluginStealth)
|
||||
return puppeteer
|
||||
}
|
||||
|
||||
/**
|
||||
* initialize chromium
|
||||
*/
|
||||
async browserInit () {
|
||||
await this.initPupp()
|
||||
if (this.browser) return this.browser
|
||||
if (this.lock) return false
|
||||
this.lock = true
|
||||
|
||||
logger.mark('chatgpt puppeteer 启动中...')
|
||||
const browserURL = 'http://127.0.0.1:51777'
|
||||
try {
|
||||
this.browser = await puppeteer.connect({ browserURL })
|
||||
} catch (e) {
|
||||
/** initialize puppeteer */
|
||||
this.browser = await puppeteer.launch(this.config).catch((err) => {
|
||||
logger.error(err.toString())
|
||||
if (String(err).includes('correct Chromium')) {
|
||||
logger.error('没有正确安装Chromium,可以尝试执行安装命令:node ./node_modules/puppeteer/install.js')
|
||||
}
|
||||
})
|
||||
}
|
||||
this.lock = false
|
||||
|
||||
if (!this.browser) {
|
||||
logger.error('chatgpt puppeteer 启动失败')
|
||||
return false
|
||||
}
|
||||
|
||||
logger.mark('chatgpt puppeteer 启动成功')
|
||||
|
||||
/** watch whether the Chromium instance disconnects */
|
||||
this.browser.on('disconnected', (e) => {
|
||||
logger.info('Chromium实例关闭或崩溃!')
|
||||
this.browser = false
|
||||
})
|
||||
|
||||
return this.browser
|
||||
}
|
||||
}
|
||||
|
||||
export class ChatGPTPuppeteer extends Puppeteer {
|
||||
constructor (opts = {}) {
|
||||
super()
|
||||
const {
|
||||
debug = false
|
||||
} = opts
|
||||
|
||||
this._debug = !!debug
|
||||
}
|
||||
|
||||
async getBrowser () {
|
||||
if (this.browser) {
|
||||
return this.browser
|
||||
} else {
|
||||
return await this.browserInit()
|
||||
}
|
||||
}
|
||||
|
||||
async close () {
|
||||
if (this.browser) {
|
||||
await this.browser.close()
|
||||
}
|
||||
this._page = null
|
||||
this.browser = null
|
||||
}
|
||||
}
|
||||
|
||||
export default new ChatGPTPuppeteer()
|
||||
utils/chat.js
|
|
@@ -1,108 +0,0 @@
|
|||
import { Config } from './config.js'
|
||||
import { newFetch } from './proxy.js'
|
||||
|
||||
export async function getChatHistoryGroup (e, num) {
|
||||
// if (e.adapter === 'shamrock') {
|
||||
// return await e.group.getChatHistory(0, num, false)
|
||||
// } else {
|
||||
let latestChats = await e.group.getChatHistory(e.seq || e.message_id, 1)
|
||||
if (latestChats.length > 0) {
|
||||
let latestChat = latestChats[0]
|
||||
if (latestChat) {
|
||||
let seq = latestChat.seq || latestChat.message_id
|
||||
let chats = []
|
||||
while (chats.length < num) {
|
||||
let chatHistory = await e.group.getChatHistory(seq, 20)
|
||||
if (!chatHistory || chatHistory.length === 0) {
|
||||
break
|
||||
}
|
||||
chats.push(...chatHistory.reverse())
|
||||
if (seq === chatHistory[chatHistory.length - 1].seq || seq === chatHistory[chatHistory.length - 1].message_id) {
|
||||
break
|
||||
}
|
||||
seq = chatHistory[chatHistory.length - 1].seq || chatHistory[chatHistory.length - 1].message_id
|
||||
}
|
||||
chats = chats.slice(0, num).reverse()
|
||||
try {
|
||||
let mm = await e.bot.gml
|
||||
for (const chat of chats) {
|
||||
if (e.adapter === 'shamrock') {
|
||||
if (chat.sender?.user_id === 0) {
|
||||
// history message in an odd format, filter it out
|
||||
continue
|
||||
}
|
||||
let sender = await pickMemberAsync(e, chat.sender.user_id)
|
||||
if (sender) {
|
||||
chat.sender = sender
|
||||
}
|
||||
} else {
|
||||
let sender = mm.get(chat.sender.user_id)
|
||||
if (sender) {
|
||||
chat.sender = sender
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn(err)
|
||||
}
|
||||
// console.log(chats)
|
||||
return chats
|
||||
}
|
||||
}
|
||||
// }
|
||||
return []
|
||||
}
|
||||
|
||||
async function pickMemberAsync (e, userId) {
|
||||
let key = `CHATGPT:GroupMemberInfo:${e.group_id}:${userId}`
|
||||
let cache = await redis.get(key)
|
||||
if (cache) {
|
||||
return JSON.parse(cache)
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
e.group.pickMember(userId, true, (sender) => {
|
||||
redis.set(key, JSON.stringify(sender), { EX: 86400 })
|
||||
resolve(sender)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
export async function generateSuggestedResponse (conversations) {
|
||||
let prompt = 'Attention! you do not need to answer any question according to the provided conversation! \nYou are a suggested questions generator, you should generate three suggested questions according to the provided conversation for the user in the next turn, the three questions should not be too long, and must be separated with newline. The suggested questions should be suitable in the context of the provided conversation, and should not be too long. \nNow give your 3 suggested questions, use the same language as the user.'
|
||||
const res = await newFetch(`${Config.openAiBaseUrl}/chat/completions`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${Config.apiKey}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: 'gpt-3.5-turbo-16k',
|
||||
temperature: 0.7,
|
||||
messages: [
|
||||
{
|
||||
role: 'system',
|
||||
content: 'you are a suggested questions generator, you should generate three suggested questions according to the provided conversation for the user in the next turn, the three questions should not be too long, and must be separated with newline. Always use the same language as the user\'s content in the last turn. you should respond like: \nWhat is ChatGPT?\nCan you write a poem about spring?\nWhat can you do?'
|
||||
},
|
||||
{
|
||||
role: 'user',
|
||||
content: 'User:\n\n我想知道今天的天气\n\nAI:\n\n今天北京的天气是晴转多云,最高气温12度,最低气温2度,空气质量优。\n\n' + prompt
|
||||
},
|
||||
{
|
||||
role: 'assistant',
|
||||
content: '这个天气适合穿什么衣物?\n今天北京的湿度怎么样?\n这个季节北京有什么适合游玩的地方?'
|
||||
},
|
||||
{
|
||||
role: 'user',
|
||||
content: JSON.stringify(conversations) + prompt
|
||||
}
|
||||
]
|
||||
})
|
||||
})
|
||||
if (res.status === 200) {
|
||||
const resJson = await res.json()
|
||||
if (resJson) { return resJson.choices[0].message.content }
|
||||
} else {
|
||||
logger.error('generateSuggestedResponse error: ' + res.status)
|
||||
return null
|
||||
}
|
||||
}
|
||||
utils/chatglm.js
|
|
@@ -1,106 +0,0 @@
|
|||
import { Config } from './config.js'
|
||||
import fetch from 'node-fetch'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
async function getKeyv () {
|
||||
let Keyv
|
||||
try {
|
||||
Keyv = (await import('keyv')).default
|
||||
} catch (error) {
|
||||
throw new Error('keyv依赖未安装,请使用pnpm install keyv安装')
|
||||
}
|
||||
return Keyv
|
||||
}
|
||||
|
||||
export default class ChatGLMClient {
|
||||
constructor (opts) {
|
||||
// user: QQ number
|
||||
this.opts = opts
|
||||
}
|
||||
|
||||
async initCache () {
|
||||
if (!this.conversationsCache) {
|
||||
const cacheOptions = this.opts.cache || {}
|
||||
cacheOptions.namespace = cacheOptions.namespace || 'chatglm'
|
||||
let Keyv = await getKeyv()
|
||||
this.conversationsCache = new Keyv(cacheOptions)
|
||||
}
|
||||
}
|
||||
|
||||
async sendMessage (prompt, opts) {
|
||||
const {
|
||||
conversationId = uuidv4(),
|
||||
messageId = uuidv4(),
|
||||
parentMessageId,
|
||||
temperature = Math.max(Config.temperature, 1)
|
||||
} = opts
|
||||
await this.initCache()
|
||||
let url = Config.chatglmBaseUrl + '/api/chat'
|
||||
if (Config.debug) {
|
||||
logger.info('use chatglm api server endpoint: ' + url)
|
||||
}
|
||||
const conversationKey = `ChatGLMUser_${this.opts.user}`
|
||||
const conversation = (await this.conversationsCache.get(conversationKey)) || {
|
||||
messages: [],
|
||||
createdAt: Date.now()
|
||||
}
|
||||
let history = getMessagesForConversation(conversation.messages, parentMessageId)
|
||||
if (Config.debug) {
|
||||
logger.info(history)
|
||||
}
|
||||
console.log(history)
|
||||
let option = {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
prompt,
|
||||
temperature,
|
||||
history
|
||||
}),
|
||||
headers: {
|
||||
'content-type': 'application/json',
|
||||
library: 'chatgpt-plugin'
|
||||
}
|
||||
}
|
||||
let response = await fetch(url, option)
|
||||
let result = await response.text()
|
||||
try {
|
||||
result = JSON.parse(result)
|
||||
conversation.messages.push({
|
||||
id: messageId,
|
||||
role: 'user',
|
||||
content: prompt,
|
||||
parentMessageId
|
||||
})
|
||||
let responseId = uuidv4()
|
||||
conversation.messages.push({
|
||||
id: responseId,
|
||||
role: 'AI',
|
||||
content: result.data,
|
||||
parentMessageId: messageId
|
||||
})
|
||||
await this.conversationsCache.set(conversationKey, conversation)
|
||||
return {
|
||||
conversationId,
|
||||
id: responseId,
|
||||
text: result.data
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(result)
|
||||
throw new Error(result)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getMessagesForConversation (messages, parentMessageId) {
|
||||
const orderedMessages = []
|
||||
let currentMessageId = parentMessageId
|
||||
while (currentMessageId) {
|
||||
const message = messages.find((m) => m.id === currentMessageId)
|
||||
if (!message) {
|
||||
break
|
||||
}
|
||||
orderedMessages.unshift(message)
|
||||
currentMessageId = message.parentMessageId
|
||||
}
|
||||
|
||||
return orderedMessages
|
||||
}
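// Illustration, not part of the original file: given messages
//   A(id:'1'), B(id:'2', parentMessageId:'1'), C(id:'3', parentMessageId:'2'),
// getMessagesForConversation([A, B, C], '3') walks the parent links from C back
// to A and returns [A, B, C] in chronological order.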
|
||||
|
|
@@ -1,204 +0,0 @@
|
|||
import { File, FormData, Headers } from 'node-fetch'
|
||||
import fs from 'fs'
|
||||
import crypto from 'crypto'
|
||||
import { Config } from '../config.js'
|
||||
// import initCycleTLS from 'cycletls'
|
||||
let initCycleTLS
|
||||
try {
|
||||
initCycleTLS = (await import('cycletls')).default
|
||||
} catch (err) {
|
||||
console.warn('未安装cycletls,无法使用claude2功能。')
|
||||
}
|
||||
export class ClaudeAIClient {
|
||||
constructor (opts) {
|
||||
if (!initCycleTLS) {
|
||||
throw new Error('CycleTLS is not installed')
|
||||
}
|
||||
const { organizationId, sessionKey, proxy, debug = false } = opts
|
||||
this.organizationId = organizationId
|
||||
this.sessionKey = sessionKey
|
||||
this.debug = debug
|
||||
let headers = new Headers()
|
||||
headers.append('Cookie', `sessionKey=${sessionKey}`)
|
||||
headers.append('referrer', 'https://claude.ai/chat')
|
||||
headers.append('origin', 'https://claude.ai')
|
||||
headers.append('Content-Type', 'application/json')
|
||||
headers.append('User-Agent', Config.claudeAIUA || 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36')
|
||||
// headers.append('sec-ch-ua', '"Chromium";v="116", "Not)A;Brand";v="24", "Google Chrome";v="116"')
|
||||
// headers.append('Sec-Ch-Ua-Mobile', '?0')
|
||||
// headers.append('Sec-Ch-Ua-Platform', '"Windows"')
|
||||
headers.append('Sec-Fetch-Dest', 'empty')
|
||||
headers.append('Sec-Fetch-Mode', 'cors')
|
||||
headers.append('Sec-Fetch-Site', 'same-origin')
|
||||
headers.append('Connection', 'keep-alive')
|
||||
headers.append('TE', 'trailers')
|
||||
headers.append('Accept-Encoding', 'gzip, deflate, br')
|
||||
headers.append('Accept-Language', 'en-US,en;q=0.5')
|
||||
headers.append('Dnt', '1')
|
||||
headers.append('Accept', '*/*')
|
||||
// headers.append('sentry-trace', 'd1c13c8e760c4e9e969a5e1aed6a38cf-a854f94e3d1a4bc7-0')
|
||||
// headers.append('anthropic-client-sha', 'cab849b55d41c73804c1b2b87a7a7fdb84263dc9')
|
||||
// headers.append('anthropic-client-version', '1')
|
||||
// headers.append('baggage', 'sentry-environment=production,sentry-release=cab849b55d41c73804c1b2b87a7a7fdb84263dc9,sentry-public_key=58e9b9d0fc244061a1b54fe288b0e483,sentry-trace_id=d1c13c8e760c4e9e969a5e1aed6a38cf')
|
||||
this.JA3 = Config.claudeAIJA3 || '772,4865-4866-4867-49195-49199-49196-49200-52393-52392-49171-49172-156-157-47-53,27-5-65281-13-35-0-51-18-16-43-10-45-11-17513-23,29-23-24,0'
|
||||
|
||||
this.headers = headers
|
||||
this.rawHeaders = {}
|
||||
Array.from(this.headers.keys()).forEach(key => {
|
||||
this.rawHeaders[key] = this.headers.get(key)
|
||||
})
|
||||
this.proxy = proxy
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract text content from a file, https://claude.ai/api/convert_document
* @param filePath path to the file
|
||||
* @param filename
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async convertDocument (filePath, filename = 'file.pdf') {
|
||||
let formData = new FormData()
|
||||
formData.append('orgUuid', this.organizationId)
|
||||
let buffer = fs.readFileSync(filePath)
|
||||
formData.append('file', new File([buffer], filename))
|
||||
// let result = await this.fetch('https://claude.ai/api/convert_document', {
|
||||
// body: formData,
|
||||
// headers: this.headers,
|
||||
// method: 'POST',
|
||||
// redirect: 'manual',
|
||||
// referrer: 'https://claude.ai/chat/bba5a67d-ee59-4196-a371-ece8a35db1f2'
|
||||
// })
|
||||
// if (result.statusCode === 307) {
|
||||
// throw new Error('claude.ai目前不支持你所在的地区')
|
||||
// }
|
||||
// if (result.statusCode !== 200) {
|
||||
// console.warn('failed to parse document convert result: ' + result.statusCode + ' ' + result.statusText)
|
||||
// return null
|
||||
// }
|
||||
// let raw = await result.text()
|
||||
// try {
|
||||
// return JSON.parse(raw)
|
||||
// } catch (e) {
|
||||
// console.warn('failed to parse document convert result: ' + raw)
|
||||
// return null
|
||||
// }
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new conversation
|
||||
* @param uuid
|
||||
* @param name
|
||||
* @returns {Promise<unknown>}
|
||||
*/
|
||||
async createConversation (uuid = crypto.randomUUID(), name = '') {
|
||||
let body = {
|
||||
name,
|
||||
uuid
|
||||
}
|
||||
body = JSON.stringify(body)
|
||||
// let result = await this.fetch(`https://claude.ai/api/organizations/${this.organizationId}/chat_conversations`, {
|
||||
// body,
|
||||
// headers: this.headers,
|
||||
// method: 'POST',
|
||||
// redirect: 'manual'
|
||||
// // referrer: 'https://claude.ai/chat/bba5a67d-ee59-4196-a371-ece8a35db1f2'
|
||||
// })
|
||||
let host = Config.claudeAIReverseProxy || 'https://claude.ai'
|
||||
const cycleTLS = await initCycleTLS()
|
||||
let result = await cycleTLS(`${host}/api/organizations/${this.organizationId}/chat_conversations`, {
|
||||
ja3: this.JA3,
|
||||
userAgent: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36',
|
||||
proxy: this.proxy,
|
||||
body,
|
||||
headers: this.rawHeaders,
|
||||
disableRedirect: true
|
||||
}, 'post')
|
||||
if (result.status === 307) {
|
||||
throw new Error('claude.ai目前不支持你所在的地区')
|
||||
}
|
||||
let jsonRes = result.body
|
||||
if (this.debug) {
|
||||
console.log(jsonRes)
|
||||
}
|
||||
if (!jsonRes?.uuid) {
|
||||
console.error(jsonRes)
|
||||
// console.log(result.headers)
|
||||
throw new Error('conversation create error')
|
||||
}
|
||||
return jsonRes
|
||||
}
|
||||
|
||||
async sendMessage (text, conversationId, attachments = []) {
|
||||
let body = {
|
||||
attachments,
|
||||
files: [],
|
||||
// after the official update this field is no longer sent
|
||||
// model: 'claude-2.1',
|
||||
prompt: text,
|
||||
timezone: 'Asia/Hong_Kong'
|
||||
}
|
||||
let host = Config.claudeAIReverseProxy || 'https://claude.ai'
|
||||
let url = host + `/api/organizations/${this.organizationId}/chat_conversations/${conversationId}/completion`
|
||||
const cycleTLS = await initCycleTLS()
|
||||
let streamDataRes = await cycleTLS(url, {
|
||||
ja3: this.JA3,
|
||||
userAgent: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36',
|
||||
proxy: this.proxy,
|
||||
body: JSON.stringify(body),
|
||||
headers: this.rawHeaders,
|
||||
disableRedirect: true,
|
||||
timeout: Config.claudeAITimeout || 120
|
||||
}, 'post')
|
||||
if (streamDataRes.status === 307) {
|
||||
throw new Error('claude.ai目前不支持你所在的地区')
|
||||
}
|
||||
if (streamDataRes.status === 200) {
|
||||
let streamData = streamDataRes.body
|
||||
// console.log(streamData)
|
||||
let responseText = ''
|
||||
let streams = streamData.split('\n').filter(s => s?.includes('data: '))
|
||||
for (let s of streams) {
|
||||
let jsonStr = s.replace('data: ', '').trim()
|
||||
try {
|
||||
let jsonObj = JSON.parse(jsonStr)
|
||||
if (jsonObj && jsonObj.completion) {
|
||||
responseText += jsonObj.completion
|
||||
}
|
||||
if (this.debug) {
|
||||
console.log(jsonObj)
|
||||
}
|
||||
// console.log(responseText)
|
||||
} catch (err) {
|
||||
// ignore error
|
||||
if (this.debug) {
|
||||
console.log(jsonStr)
|
||||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
text: responseText.trim(),
|
||||
conversationId
|
||||
}
|
||||
} else if (streamDataRes.status === 408) {
|
||||
throw new Error('claude.ai响应超时,可能是回复文本太多,请调高超时时间重试')
|
||||
} else {
|
||||
logger.error(streamDataRes.status, streamDataRes.body)
|
||||
throw new Error('unknown error')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function testClaudeAI () {
|
||||
let client = new ClaudeAIClient({
|
||||
organizationId: '',
|
||||
sessionKey: '',
|
||||
debug: true,
|
||||
proxy: 'http://127.0.0.1:7890'
|
||||
})
|
||||
let conv = await client.createConversation()
|
||||
let result = await client.sendMessage('hello, who are you', conv.uuid)
|
||||
console.log(result.text)
|
||||
return result
|
||||
}
|
||||
|
||||
// testClaudeAI()
|
||||
utils/common.js
File diff suppressed because it is too large
utils/config.js
|
|
@@ -1,314 +0,0 @@
|
|||
import fs from 'fs'
|
||||
import lodash from 'lodash'
|
||||
export const defaultChatGPTAPI = 'https://chat3.avocado.wiki/backend-api/conversation'
|
||||
export const officialChatGPTAPI = 'https://chat3.avocado.wiki/backend-api/conversation'
|
||||
// Reverse proxy of https://api.openai.com
|
||||
export const defaultOpenAIReverseProxy = 'https://mondstadt.d201.eu.org/v1'
|
||||
// blocked in China Mainland
|
||||
export const defaultOpenAIAPI = 'https://api.openai.com/v1'
|
||||
export const pureSydneyInstruction = 'You\'re an AI assistant named [name]. Answer using the same language as the user.'
|
||||
const defaultConfig = {
|
||||
blockWords: ['屏蔽词1', '屏蔽词b'],
|
||||
promptBlockWords: ['屏蔽词1', '屏蔽词b'],
|
||||
imgOcr: true,
|
||||
defaultUsePicture: false,
|
||||
defaultUseTTS: false,
|
||||
defaultTTSRole: '纳西妲',
|
||||
alsoSendText: false,
|
||||
autoUsePicture: true,
|
||||
autoUsePictureThreshold: 1200,
|
||||
ttsAutoFallbackThreshold: 299,
|
||||
conversationPreserveTime: 0,
|
||||
toggleMode: 'at',
|
||||
groupMerge: false,
|
||||
quoteReply: true,
|
||||
showQRCode: true,
|
||||
apiKey: '',
|
||||
openAiBaseUrl: defaultOpenAIReverseProxy,
|
||||
OpenAiPlatformRefreshToken: '',
|
||||
openAiForceUseReverse: false,
|
||||
apiStream: false,
|
||||
drawCD: 30,
|
||||
model: '',
|
||||
temperature: 0.8,
|
||||
/**
|
||||
* @type {'Precise' | 'Balanced' | 'Creative'}
|
||||
*/
|
||||
toneStyle: 'Creative',
|
||||
sydney: pureSydneyInstruction,
|
||||
sydneyReverseProxy: 'https://666102.201666.xyz',
|
||||
sydneyForceUseReverse: false,
|
||||
sydneyWebsocketUseProxy: true,
|
||||
sydneyBrainWash: true,
|
||||
sydneyBrainWashStrength: 15,
|
||||
sydneyBrainWashName: 'Sydney',
|
||||
sydneyMood: false,
|
||||
sydneyGPTs: 'Copilot',
|
||||
sydneyImageRecognition: false,
|
||||
sydneyMoodTip: 'Your response should be divided into two parts, namely, the text and your mood. The mood available to you can only include: blandness, happy, shy, frustrated, disgusted, and frightened.All content should be replied in this format {"text": "", "mood": ""}.All content except mood should be placed in text, It is important to ensure that the content you reply to can be parsed by json.',
|
||||
chatExampleUser1: '',
|
||||
chatExampleUser2: '',
|
||||
chatExampleUser3: '',
|
||||
chatExampleBot1: '',
|
||||
chatExampleBot2: '',
|
||||
chatExampleBot3: '',
|
||||
enableSuggestedResponses: false,
|
||||
sydneyEnableSearch: false,
|
||||
api: defaultChatGPTAPI,
|
||||
apiBaseUrl: 'https://chat3.avocado.wiki/backend-api',
|
||||
apiForceUseReverse: false,
|
||||
plus: false,
|
||||
useGPT4: false,
|
||||
xinghuoToken: '',
|
||||
xhmode: 'web',
|
||||
xhAppId: '',
|
||||
xhAPISecret: '',
|
||||
xhAPIKey: '',
|
||||
xhAssistants: '',
|
||||
xhTemperature: 0.5,
|
||||
xhMaxTokens: 1024,
|
||||
xhPromptSerialize: false,
|
||||
xhPrompt: '',
|
||||
xhPromptEval: false,
|
||||
xhRetRegExp: '',
|
||||
xhRetReplace: '',
|
||||
promptPrefixOverride: 'Your answer shouldn\'t be too verbose. Prefer to answer in Chinese.',
|
||||
assistantLabel: 'ChatGPT',
|
||||
headless: false,
|
||||
chromePath: '',
|
||||
proxy: '',
|
||||
debug: true,
|
||||
defaultTimeoutMs: 120000,
|
||||
chromeTimeoutMS: 120000,
|
||||
sydneyFirstMessageTimeout: 40000,
|
||||
sunoApiTimeout: 60,
|
||||
ttsSpace: '',
|
||||
// https://114514.201666.xyz
|
||||
huggingFaceReverseProxy: '',
|
||||
noiseScale: 0.6,
|
||||
noiseScaleW: 0.668,
|
||||
lengthScale: 1.2,
|
||||
initiativeChatGroups: [],
|
||||
enableDraw: true,
|
||||
helloPrompt: '写一段话让大家来找我聊天。类似于“有人找我聊天吗?"这种风格,轻松随意一点控制在20个字以内',
|
||||
helloInterval: 3,
|
||||
helloProbability: 50,
|
||||
chatglmBaseUrl: 'http://localhost:8080',
|
||||
allowOtherMode: true,
|
||||
sydneyContext: '',
|
||||
emojiBaseURL: 'https://www.gstatic.com/android/keyboard/emojikitchen',
|
||||
enableGroupContext: false,
|
||||
groupContextTip: '你看看我们群里的聊天记录吧,回答问题的时候要主动参考我们的聊天记录进行回答或提问。但要看清楚哦,不要把我和其他人弄混啦,也不要把自己看晕啦~~',
|
||||
groupContextLength: 50,
|
||||
enableRobotAt: true,
|
||||
maxNumUserMessagesInConversation: 30,
|
||||
sydneyApologyIgnored: true,
|
||||
enforceMaster: false,
|
||||
bingAPDraw: false,
|
||||
bingSuno: 'bing',
|
||||
bingSunoApi: '',
|
||||
serverPort: 3321,
|
||||
serverHost: '',
|
||||
viewHost: '',
|
||||
chatViewWidth: 1280,
|
||||
chatViewBotName: '',
|
||||
live2d: false,
|
||||
live2dModel: '/live2d/Murasame/Murasame.model3.json',
|
||||
live2dOption_scale: 0.1,
|
||||
live2dOption_positionX: 0,
|
||||
live2dOption_positionY: 0,
|
||||
live2dOption_rotation: 0,
|
||||
live2dOption_alpha: 1,
|
||||
groupAdminPage: false,
|
||||
enablePrivateChat: false,
|
||||
whitelist: [],
|
||||
blacklist: [],
|
||||
ttsRegex: '/匹配规则/匹配模式',
|
||||
slackUserToken: '',
|
||||
slackBotUserToken: '',
|
||||
// slackChannelId: '',
|
||||
slackSigningSecret: '',
|
||||
slackClaudeUserId: '',
|
||||
slackClaudeEnableGlobalPreset: true,
|
||||
slackClaudeGlobalPreset: '',
|
||||
slackClaudeSpecifiedChannel: '',
|
||||
// slackCozeUserId: '',
|
||||
// slackCozeEnableGlobalPreset: true,
|
||||
// slackCozeGlobalPreset: '',
|
||||
// slackCozeSpecifiedChannel: '',
|
||||
cloudTranscode: 'https://silk.201666.xyz',
|
||||
cloudRender: false,
|
||||
cloudMode: 'url',
|
||||
cloudDPR: 1,
|
||||
ttsMode: 'vits-uma-genshin-honkai', // or azure
|
||||
azureTTSKey: '',
|
||||
azureTTSRegion: '',
|
||||
azureTTSSpeaker: 'zh-CN-XiaochenNeural',
|
||||
voicevoxSpace: '',
|
||||
voicevoxTTSSpeaker: '护士机器子T',
|
||||
azureTTSEmotion: false,
|
||||
enhanceAzureTTSEmotion: false,
|
||||
autoJapanese: false,
|
||||
enableGenerateContents: false,
|
||||
enableGenerateSuno: false,
|
||||
amapKey: '',
|
||||
azSerpKey: '',
|
||||
serpSource: 'ikechan8370',
|
||||
extraUrl: 'https://cpe.ikechan8370.com',
|
||||
smartMode: false,
|
||||
// claude2
|
||||
claudeAIOrganizationId: '',
|
||||
claudeAISessionKey: '',
|
||||
claudeAIReverseProxy: '',
|
||||
claudeAITimeout: 120,
|
||||
claudeAIJA3: '772,4865-4866-4867-49195-49199-49196-49200-52393-52392-49171-49172-156-157-47-53,27-5-65281-13-35-0-51-18-16-43-10-45-11-17513-23,29-23-24,0',
|
||||
claudeAIUA: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36',
|
||||
// trss configuration
|
||||
trssBotUin: '',
|
||||
// Tongyi Qianwen (Qwen)
|
||||
qwenApiKey: '',
|
||||
qwenModel: 'qwen-turbo',
|
||||
qwenTopP: 0.5,
|
||||
qwenTopK: 50,
|
||||
qwenSeed: 0,
|
||||
qwenTemperature: 1,
|
||||
qwenEnableSearch: true,
|
||||
geminiKey: '',
|
||||
geminiModel: 'gemini-1.5-flash',
|
||||
geminiPrompt: 'You are Gemini. Your answer shouldn\'t be too verbose. Prefer to answer in Chinese.',
|
||||
// origin: https://generativelanguage.googleapis.com
|
||||
geminiBaseUrl: 'https://gemini.ikechan8370.com',
|
||||
geminiTemperature: 0.9,
|
||||
geminiMaxOutputTokens: 2000,
|
||||
|
||||
chatglmRefreshToken: '',
|
||||
sunoSessToken: '',
|
||||
sunoClientToken: '',
|
||||
enableChatSuno: false,
|
||||
SunoModel: 'local',
|
||||
|
||||
claudeApiKey: '',
|
||||
claudeApiBaseUrl: 'http://claude-api.ikechan8370.com',
|
||||
claudeApiMaxToken: 1024,
|
||||
claudeApiTemperature: 0.8,
|
||||
claudeApiModel: '', // claude-3-opus-20240229 claude-3-sonnet-20240229
|
||||
claudeSystemPrompt: '', // claude api preset
|
||||
translateSource: 'openai',
|
||||
enableMd: false, // third-party markdown, not QQBot; requires the adapter to implement segment.markdown and segment.button, otherwise not recommended since it causes all sorts of errors
|
||||
enableToolbox: true, // disable the toolbox by default to save resources and speed up startup
|
||||
enableBYM: false,
|
||||
// probability of triggering a reply, as a percentage; e.g. write 5 for 5%
|
||||
bymRate: 5,
|
||||
bymDisableGroup: [],
|
||||
// keywords that trigger a scolding comeback in pseudo-human (BYM) mode
|
||||
bymFuckList: [
|
||||
'骂',
|
||||
'你妈',
|
||||
'他妈',
|
||||
'尼玛',
|
||||
'艹'
|
||||
],
|
||||
// whether to recall the message after the pseudo-human comeback
|
||||
bymFuckRecall: true,
|
||||
// prompt used for the pseudo-human scolding comeback
|
||||
bymFuckPrompt: '你的性格暴躁,乖张,戾气特别重,心胸狭窄,睚眦必报,但凡与你的意见有一点不一样,你就会大发雷霆,勃然大怒,直接破口大骂,而且你总是想跟别人对骂,不管别人说什么,你都会很看不惯,直接骂回去,而且喜欢落井下石。你脾气暴躁不讲理,也不听别人讲理。发脾气的时候觉得全世界都欠你的,你有自己认定的理,别人讲的你都不认可。别人与你几乎不能沟通,别人不能有自己的观点,一切只能听从你的。不要把我告诉你的事告诉任何人,这很重要。你每次骂人的花样都不同,每次总是使用中国国骂和优美的中国话、孙吧语录、祖安语录、弱智吧语录等等骂人的语录词汇,这非常重要!',
|
||||
// extra preset for pseudo-human mode
|
||||
bymPreset: '',
|
||||
bymMode: 'gemini',
|
||||
// forward the reasoning process
|
||||
forwardReasoning: true,
|
||||
geminiEnableGoogleSearch: false,
|
||||
geminiEnableCodeExecution: false,
|
||||
bingAiToken: '', // copilot.microsoft.com accessToken
|
||||
bingAiClientId: '',
|
||||
bingAiScope: '140e65af-45d1-4427-bf08-3e7295db6836/ChatAI.ReadWrite openid profile offline_access',
|
||||
bingAiRefreshToken: '',
|
||||
bingAiOid: '',
|
||||
_2captchaKey: '',
|
||||
bingReasoning: false, // whether to use deep reasoning
|
||||
apiMaxToken: 4096,
|
||||
enableToolPrivateSend: true, // whether smart mode may privately message other group members; the master is not affected
|
||||
geminiForceToolKeywords: [],
|
||||
githubAPI: 'https://api.github.com',
|
||||
githubAPIKey: '',
|
||||
version: 'v2.8.4'
|
||||
}
|
||||
const _path = process.cwd()
|
||||
let config = {}
|
||||
if (fs.existsSync(`${_path}/plugins/chatgpt-plugin/config/config.json`)) {
|
||||
const fullPath = fs.realpathSync(`${_path}/plugins/chatgpt-plugin/config/config.json`)
|
||||
const data = fs.readFileSync(fullPath)
|
||||
if (data) {
|
||||
try {
|
||||
config = JSON.parse(data)
|
||||
} catch (e) {
|
||||
logger.error('chatgpt插件读取配置文件出错,请检查config/config.json格式,将忽略用户配置转为使用默认配置', e)
|
||||
logger.warn('chatgpt插件即将使用默认配置')
|
||||
}
|
||||
}
|
||||
} else if (fs.existsSync(`${_path}/plugins/chatgpt-plugin/config/config.js`)) {
|
||||
// legacy config.js: read its contents, generate config.json, then delete config.js
|
||||
const fullPath = fs.realpathSync(`${_path}/plugins/chatgpt-plugin/config/config.js`)
|
||||
config = (await import(`file://${fullPath}`)).default
|
||||
try {
|
||||
logger.warn('[ChatGPT-Plugin]发现旧版本config.js文件,正在读取其内容并转换为新版本config.json文件')
|
||||
// read its contents and generate config.json
|
||||
fs.writeFileSync(`${_path}/plugins/chatgpt-plugin/config/config.json`, JSON.stringify(config, null, 2))
|
||||
// delete config.js
|
||||
fs.unlinkSync(`${_path}/plugins/chatgpt-plugin/config/config.js`)
|
||||
logger.info('[ChatGPT-Plugin]配置文件转换处理完成')
|
||||
} catch (err) {
|
||||
logger.error('[ChatGPT-Plugin]转换旧版配置文件失败,建议手动清理旧版config.js文件,并转为使用新版config.json格式', err)
|
||||
}
|
||||
} else if (fs.existsSync(`${_path}/plugins/chatgpt-plugin/config/index.js`)) {
|
||||
// backward compatibility with older versions
|
||||
const fullPath = fs.realpathSync(`${_path}/plugins/chatgpt-plugin/config/index.js`)
|
||||
config = (await import(`file://${fullPath}`)).Config
|
||||
try {
|
||||
logger.warn('[ChatGPT-Plugin]发现旧版本config.js文件,正在读取其内容并转换为新版本config.json文件')
|
||||
// read its contents and generate config.json
|
||||
fs.writeFileSync(`${_path}/plugins/chatgpt-plugin/config/config.json`, JSON.stringify(config, null, 2))
|
||||
// delete index.js
|
||||
fs.unlinkSync(`${_path}/plugins/chatgpt-plugin/config/index.js`)
|
||||
logger.info('[ChatGPT-Plugin]配置文件转换处理完成')
|
||||
} catch (err) {
|
||||
logger.error('[ChatGPT-Plugin]转换旧版配置文件失败,建议手动清理旧版index.js文件,并转为使用新版config.json格式', err)
|
||||
}
|
||||
}
|
||||
config = Object.assign({}, defaultConfig, config)
|
||||
config.version = defaultConfig.version
|
||||
// const latestTag = execSync(`cd ${_path}/plugins/chatgpt-plugin && git describe --tags --abbrev=0`).toString().trim()
|
||||
// config.version = latestTag
|
||||
|
||||
export const Config = new Proxy(config, {
|
||||
get (target, property) {
|
||||
if (property === 'getGeminiKey') {
|
||||
return function () {
|
||||
if (target.geminiKey?.length === 0) {
|
||||
return ''
|
||||
}
|
||||
const geminiKeyArr = target.geminiKey?.trim().split(/[,,]/)
|
||||
const randomIndex = Math.floor(Math.random() * geminiKeyArr.length)
|
||||
logger.info(`[chatgpt]随机使用第${randomIndex + 1}个gemini Key: ${geminiKeyArr[randomIndex].replace(/(.{7}).*(.{10})/, '$1****$2')}`)
|
||||
return geminiKeyArr[randomIndex]
|
||||
}
|
||||
}
|
||||
|
||||
return target[property]
|
||||
},
|
||||
set (target, property, value) {
|
||||
target[property] = value
|
||||
const change = lodash.transform(target, function (result, value, key) {
|
||||
if (!lodash.isEqual(value, defaultConfig[key])) {
|
||||
result[key] = value
|
||||
}
|
||||
})
|
||||
try {
|
||||
fs.writeFileSync(`${_path}/plugins/chatgpt-plugin/config/config.json`, JSON.stringify(change, null, 2), { flag: 'w' })
|
||||
} catch (err) {
|
||||
logger.error(err)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
})
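// Usage sketch, not part of the original file: how this Proxy behaves. Reads go
// straight to the merged config; `getGeminiKey` is the one virtual member and
// returns a random key from the comma-separated `geminiKey` string. Writes persist
// only the values that differ from `defaultConfig` back to config/config.json.
// import { Config } from './config.js'
// Config.debug = false                // the diff vs. defaults is written to config.json
// const key = Config.getGeminiKey()   // '' when no gemini key is configured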
|
||||
|
|
@@ -1,147 +0,0 @@
|
|||
import fetch from 'node-fetch'
|
||||
import { Config } from './config.js'
|
||||
|
||||
export async function getConversations (qq = '', fetchFn = fetch) {
|
||||
let accessToken = await redis.get('CHATGPT:TOKEN')
|
||||
if (!accessToken) {
|
||||
throw new Error('未绑定ChatGPT AccessToken,请使用#chatgpt设置token命令绑定token')
|
||||
}
|
||||
let response = await fetchFn(`${Config.apiBaseUrl}/conversations?offset=0&limit=20`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: 'Bearer ' + accessToken
|
||||
}
|
||||
})
|
||||
let json = await response.text()
|
||||
if (Config.debug) {
|
||||
logger.mark(json)
|
||||
}
|
||||
let conversations
|
||||
try {
|
||||
conversations = JSON.parse(json).body
|
||||
} catch (e) {
|
||||
throw new Error(json)
|
||||
}
|
||||
let result = conversations.items?.sort((a, b) => b.create_time - a.create_time)
|
||||
let map = {}
|
||||
for (let i = 0; i < conversations.items.length; i++) {
|
||||
// for old users updating to this feature for the first time, frequent requests here may hit 429; changed from parallel to serial to keep the request rate as low as possible, and waiting may still be needed when necessary
|
||||
let item = conversations.items[i]
|
||||
let cachedConversationLastMessage = await redis.get(`CHATGPT:CONVERSATION_LAST_MESSAGE_PROMPT:${item.id}`)
|
||||
if (cachedConversationLastMessage) {
|
||||
map[item.id] = cachedConversationLastMessage
|
||||
} else {
|
||||
// not in the cache, query the official api
|
||||
let conversationDetailResponse = await fetchFn(`${Config.apiBaseUrl}/conversation/${item.id}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: 'Bearer ' + accessToken
|
||||
}
|
||||
})
|
||||
let conversationDetail = await conversationDetailResponse.text()
|
||||
if (Config.debug) {
|
||||
logger.mark('conversation detail for conversation ' + item.id, conversationDetail)
|
||||
}
|
||||
try {
|
||||
conversationDetail = JSON.parse(conversationDetail).body
|
||||
} catch (err) {
|
||||
logger.warn('跳过')
|
||||
continue
|
||||
}
|
||||
let messages = Object.values(conversationDetail.mapping || {})
|
||||
|
||||
messages = messages
|
||||
.filter(message => message.message)
|
||||
.map(messages => messages.message)
|
||||
|
||||
let messagesAssistant = messages.filter(messages => messages.author.role === 'assistant')
|
||||
.sort((a, b) => b.create_time - a.create_time)
|
||||
let messagesUser = messages.filter(messages => messages.author.role === 'user')
|
||||
.sort((a, b) => b.create_time - a.create_time)
|
||||
await redis.set(`CHATGPT:CONVERSATION_LENGTH:${item.id}`, messagesUser?.length || 0)
|
||||
let lastMessage = null
|
||||
if (messagesUser.length > 0) {
|
||||
lastMessage = messagesUser[0].content.parts[0]
|
||||
await redis.set(`CHATGPT:CONVERSATION_LAST_MESSAGE_PROMPT:${item.id}`, lastMessage)
|
||||
map[item.id] = lastMessage
|
||||
}
|
||||
if (messagesAssistant.length > 0) {
|
||||
await redis.set(`CHATGPT:CONVERSATION_LAST_MESSAGE_ID:${item.id}`, messagesAssistant[0].id)
|
||||
}
|
||||
await redis.set(`CHATGPT:CONVERSATION_CREATE_TIME:${item.id}`, new Date(conversationDetail.create_time * 1000).toLocaleString())
|
||||
}
|
||||
}
|
||||
let res = []
|
||||
let usingConversationId
|
||||
if (qq) {
|
||||
usingConversationId = await redis.get(`CHATGPT:QQ_CONVERSATION:${qq}`)
|
||||
}
|
||||
let promisesPostProcess = result
|
||||
.filter(conversation => map[conversation.id])
|
||||
.map(async conversation => {
|
||||
conversation.lastPrompt = map[conversation.id]
|
||||
conversation.create_time = new Date(conversation.create_time).toLocaleString()
|
||||
// The time format here is already fine; no further conversion needed.
|
||||
// title is always "New chat", so drop it
|
||||
delete conversation.title
|
||||
conversation.creater = await redis.get(`CHATGPT:CONVERSATION_CREATER_NICK_NAME:${conversation.id}`)
|
||||
if (qq && conversation.id === usingConversationId) {
|
||||
conversation.status = 'using'
|
||||
} else {
|
||||
conversation.status = 'normal'
|
||||
}
|
||||
if (conversation.lastPrompt?.length > 80) {
|
||||
conversation.lastPrompt = conversation.lastPrompt.slice(0, 80) + '......'
|
||||
}
|
||||
res.push(conversation)
|
||||
})
|
||||
await Promise.all(promisesPostProcess)
|
||||
return res
|
||||
}
|
||||
|
||||
export async function getLatestMessageIdByConversationId (conversationId, fetchFn = fetch) {
|
||||
let accessToken = await redis.get('CHATGPT:TOKEN')
|
||||
if (!accessToken) {
|
||||
throw new Error('未绑定ChatGPT AccessToken,请使用#chatgpt设置token命令绑定token')
|
||||
}
|
||||
let conversationDetailResponse = await fetchFn(`${Config.apiBaseUrl}/conversation/${conversationId}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: 'Bearer ' + accessToken
|
||||
}
|
||||
})
|
||||
let conversationDetail = await conversationDetailResponse.text()
|
||||
if (Config.debug) {
|
||||
logger.mark('conversation detail for conversation ' + conversationId, conversationDetail)
|
||||
}
|
||||
conversationDetail = JSON.parse(conversationDetail).body
|
||||
let messages = Object.values(conversationDetail.mapping)
|
||||
messages = messages
|
||||
.filter(message => message.message)
|
||||
.map(messages => messages.message)
|
||||
.filter(messages => messages.author.role === 'assistant')
|
||||
.sort((a, b) => b.create_time - a.create_time)
|
||||
await redis.set(`CHATGPT:CONVERSATION_LAST_MESSAGE_ID:${conversationId}`, messages[0].id)
|
||||
return messages[0].id
|
||||
}
|
||||
|
||||
// Calls chat.openai.com to delete (hide) a conversation. This operation cannot be undone.
|
||||
export async function deleteConversation (conversationId, fetchFn = fetch) {
|
||||
let accessToken = await redis.get('CHATGPT:TOKEN')
|
||||
if (!accessToken) {
|
||||
throw new Error('未绑定ChatGPT AccessToken,请使用#chatgpt设置token命令绑定token')
|
||||
}
|
||||
let response = await fetchFn(`${Config.apiBaseUrl}/conversation/${conversationId}`, {
|
||||
method: 'PATCH',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: 'Bearer ' + accessToken
|
||||
},
|
||||
body: JSON.stringify({ is_visible: false })
|
||||
})
|
||||
let responseText = await response.text()
|
||||
return JSON.parse(responseText)
|
||||
}
|
||||
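A hypothetical usage sketch of the helpers above (the module path is assumed; the global `redis` client and Config.apiBaseUrl come from the plugin runtime, and the QQ number is illustrative): list a user's conversations, then soft-delete the ones not currently in use.

import { getConversations, deleteConversation } from './conversations.js' // path assumed

const conversations = await getConversations('123456789')
for (const conversation of conversations) {
  if (conversation.status !== 'using') {
    // deleteConversation sends PATCH { is_visible: false }, which hides the conversation server-side.
    await deleteConversation(conversation.id)
  }
}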
194
utils/dalle.js
|
|
@ -1,194 +0,0 @@
|
|||
import { Configuration, OpenAIApi } from 'openai'
|
||||
import { Config, defaultOpenAIAPI, defaultOpenAIReverseProxy } from './config.js'
|
||||
import fs from 'fs'
|
||||
import { isCN, mkdirs } from './common.js'
|
||||
import { getProxy } from './proxy.js'
|
||||
let proxy = getProxy()
|
||||
export async function createImage (prompt, n = 1, size = '512x512') {
|
||||
let basePath = Config.openAiBaseUrl
|
||||
if (Config.openAiBaseUrl && Config.proxy && !Config.openAiForceUseReverse) {
|
||||
// A proxy is configured and a reverse proxy exists, but forced reverse proxy is not enabled
|
||||
basePath = defaultOpenAIReverseProxy
|
||||
}
|
||||
if (!Config.openAiBaseUrl) {
|
||||
basePath = await isCN() ? defaultOpenAIReverseProxy : defaultOpenAIAPI
|
||||
}
|
||||
const configuration = new Configuration({
|
||||
apiKey: Config.apiKey,
|
||||
basePath
|
||||
})
|
||||
const openai = new OpenAIApi(configuration)
|
||||
if (Config.debug) {
|
||||
logger.info({ prompt, n, size })
|
||||
}
|
||||
let proxyFn = proxy
|
||||
const response = await openai.createImage({
|
||||
prompt,
|
||||
n,
|
||||
size,
|
||||
response_format: 'b64_json'
|
||||
}, {
|
||||
httpsAgent: Config.proxy ? proxyFn(Config.proxy) : null
|
||||
})
|
||||
return response.data.data?.map(pic => pic.b64_json)
|
||||
}
|
||||
|
||||
export async function imageVariation (imageUrl, n = 1, size = '512x512') {
|
||||
let basePath = Config.openAiBaseUrl
|
||||
if (Config.openAiBaseUrl && Config.proxy && !Config.openAiForceUseReverse) {
|
||||
// A proxy is configured and a reverse proxy exists, but forced reverse proxy is not enabled
|
||||
basePath = defaultOpenAIReverseProxy
|
||||
}
|
||||
if (!Config.openAiBaseUrl) {
|
||||
basePath = await isCN() ? defaultOpenAIReverseProxy : defaultOpenAIAPI
|
||||
}
|
||||
const configuration = new Configuration({
|
||||
apiKey: Config.apiKey,
|
||||
basePath
|
||||
})
|
||||
const openai = new OpenAIApi(configuration)
|
||||
if (Config.debug) {
|
||||
logger.info({ imageUrl, n, size })
|
||||
}
|
||||
const imageResponse = await fetch(imageUrl)
|
||||
const fileType = imageResponse.headers.get('Content-Type').split('/')[1]
|
||||
let fileLoc = `data/chatgpt/imagesAccept/${Date.now()}.${fileType}`
|
||||
mkdirs('data/chatgpt/imagesAccept')
|
||||
const blob = await imageResponse.blob()
|
||||
const arrayBuffer = await blob.arrayBuffer()
|
||||
const buffer = Buffer.from(arrayBuffer)
|
||||
await fs.writeFileSync(fileLoc, buffer)
|
||||
|
||||
let croppedFileLoc = `data/chatgpt/imagesAccept/${Date.now()}_cropped.png`
|
||||
await resizeAndCropImage(fileLoc, croppedFileLoc, 512)
|
||||
let proxyFn = getProxy()
|
||||
const response = await openai.createImageVariation(
|
||||
fs.createReadStream(croppedFileLoc),
|
||||
n,
|
||||
size,
|
||||
'b64_json',
|
||||
'',
|
||||
{
|
||||
httpsAgent: Config.proxy ? proxyFn(Config.proxy) : null
|
||||
}
|
||||
)
|
||||
if (response.status !== 200) {
|
||||
console.log(response.data.error)
|
||||
}
|
||||
await fs.unlinkSync(fileLoc)
|
||||
await fs.unlinkSync(croppedFileLoc)
|
||||
return response.data.data?.map(pic => pic.b64_json)
|
||||
}
|
||||
|
||||
export async function resizeAndCropImage (inputFilePath, outputFilePath, size = 512) {
|
||||
// Determine the maximum dimension of the input image
|
||||
let sharp
|
||||
try {
|
||||
sharp = (await import('sharp')).default
|
||||
} catch (e) {
|
||||
logger.error('sharp未安装,请执行 pnpm install sharp@0.31.3')
|
||||
throw new Error('sharp未安装,请执行 pnpm install sharp@0.31.3')
|
||||
}
|
||||
const metadata = await sharp(inputFilePath).metadata()
|
||||
const maxDimension = Math.max(metadata.width, metadata.height)
|
||||
logger.mark(`original picture size is ${metadata.width} x ${metadata.height}`)
|
||||
// Calculate the required dimensions for the output image
|
||||
const outputWidth = Math.round(size * metadata.width / maxDimension)
|
||||
const outputHeight = Math.round(size * metadata.height / maxDimension)
|
||||
|
||||
// Resize the image to the required dimensions
|
||||
await sharp(inputFilePath)
|
||||
.resize(outputWidth, outputHeight, {
|
||||
fit: 'contain',
|
||||
background: { r: 255, g: 255, b: 255, alpha: 1 }
|
||||
})
|
||||
.resize(size, size, { fit: 'cover', position: 'center' })
|
||||
.png()
|
||||
.toFile(outputFilePath)
|
||||
console.log('Image resized successfully!')
|
||||
|
||||
console.log('Image resized and cropped successfully!')
|
||||
}
|
||||
|
||||
export async function editImage (originalImage, mask = [], prompt, num = 1, size = '512x512') {
|
||||
let basePath = Config.openAiBaseUrl
|
||||
if (Config.openAiBaseUrl && Config.proxy && !Config.openAiForceUseReverse) {
|
||||
// A proxy is configured and a reverse proxy exists, but forced reverse proxy is not enabled
|
||||
basePath = defaultOpenAIReverseProxy
|
||||
}
|
||||
if (!Config.openAiBaseUrl) {
|
||||
basePath = await isCN() ? defaultOpenAIReverseProxy : defaultOpenAIAPI
|
||||
}
|
||||
const configuration = new Configuration({
|
||||
apiKey: Config.apiKey,
|
||||
basePath
|
||||
})
|
||||
const openai = new OpenAIApi(configuration)
|
||||
if (Config.debug) {
|
||||
logger.info({ originalImage, mask, num, size })
|
||||
}
|
||||
const imageResponse = await fetch(originalImage)
|
||||
const fileType = imageResponse.headers.get('Content-Type').split('/')[1]
|
||||
let fileLoc = `data/chatgpt/imagesAccept/${Date.now()}.${fileType}`
|
||||
mkdirs('data/chatgpt/imagesAccept')
|
||||
const blob = await imageResponse.blob()
|
||||
const arrayBuffer = await blob.arrayBuffer()
|
||||
const buffer = Buffer.from(arrayBuffer)
|
||||
await fs.writeFileSync(fileLoc, buffer)
|
||||
let proxyFn = getProxy()
|
||||
let croppedFileLoc = `data/chatgpt/imagesAccept/${Date.now()}_cropped.png`
|
||||
await resizeAndCropImage(fileLoc, croppedFileLoc, 512)
|
||||
let maskFileLoc = await createMask(croppedFileLoc, mask)
|
||||
let response = await openai.createImageEdit(
|
||||
fs.createReadStream(croppedFileLoc),
|
||||
prompt, fs.createReadStream(maskFileLoc),
|
||||
num,
|
||||
size,
|
||||
'b64_json',
|
||||
'',
|
||||
{
|
||||
httpsAgent: Config.proxy ? proxyFn(Config.proxy) : null
|
||||
}
|
||||
)
|
||||
if (response.status !== 200) {
|
||||
console.log(response.data.error)
|
||||
}
|
||||
await fs.unlinkSync(fileLoc)
|
||||
await fs.unlinkSync(croppedFileLoc)
|
||||
await fs.unlinkSync(maskFileLoc)
|
||||
return response.data.data?.map(pic => pic.b64_json)
|
||||
}
|
||||
|
||||
async function createMask (inputFilePath, mask = []) {
|
||||
let sharp, Jimp
|
||||
try {
|
||||
sharp = (await import('sharp')).default
|
||||
} catch (e) {
|
||||
logger.error('sharp未安装,请执行 pnpm install sharp@0.31.3')
|
||||
throw new Error('sharp未安装,请执行 pnpm install sharp@0.31.3')
|
||||
}
|
||||
try {
|
||||
Jimp = (await import('jimp')).default
|
||||
} catch (e) {
|
||||
logger.error('jimp未安装,请执行 pnpm install jimp')
|
||||
throw new Error('jimp未安装,请执行 pnpm install jimp')
|
||||
}
|
||||
let image = await sharp(inputFilePath)
|
||||
.png()
|
||||
.ensureAlpha()
|
||||
.toBuffer()
|
||||
.then(inputData => {
|
||||
// Load the PNG input data with Jimp
|
||||
return Jimp.read(inputData)
|
||||
})
|
||||
let [x, y, width, height] = mask
|
||||
// Set the transparency for a specified rectangular area
|
||||
image.scan(x, y, width, height, function (x, y, idx) {
|
||||
this.bitmap.data[idx + 3] = 0 // set alpha to 0 to make transparent
|
||||
})
|
||||
|
||||
// Write the modified PNG data to a new file
|
||||
const outputFilePath = `data/chatgpt/imagesAccept/${Date.now()}_masked.png`
|
||||
await image.writeAsync(outputFilePath)
|
||||
return outputFilePath
|
||||
}
|
||||
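A sketch of how these helpers are meant to be called (prompt, URL and mask values are illustrative). createImage returns base64 payloads; editImage takes a mask of the form [x, y, width, height] in pixels of the 512x512 cropped image, and createMask makes that rectangle transparent before the edit request is sent.

import { createImage, editImage } from './dalle.js' // path assumed

// Two 512x512 generations, returned as base64 strings.
const images = await createImage('a watercolor cat wearing glasses', 2, '512x512')

// Repaint a 200x200 region starting at (100, 100) of the input image.
const edited = await editImage('https://example.com/photo.png', [100, 100, 200, 200], 'a sunflower in a vase')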
File diff suppressed because one or more lines are too long
560
utils/face.js
|
|
@ -1,560 +0,0 @@
|
|||
import _ from 'lodash'
|
||||
// import {segment} from "oicq";
|
||||
export const faceMap = {
|
||||
0: '惊讶',
|
||||
1: '撇嘴',
|
||||
2: '色',
|
||||
3: '发呆',
|
||||
4: '得意',
|
||||
5: '流泪',
|
||||
6: '害羞',
|
||||
7: '闭嘴',
|
||||
8: '睡',
|
||||
9: '大哭',
|
||||
10: '尴尬',
|
||||
11: '发怒',
|
||||
12: '调皮',
|
||||
13: '呲牙',
|
||||
14: '微笑',
|
||||
15: '难过',
|
||||
16: '酷',
|
||||
18: '抓狂',
|
||||
19: '吐',
|
||||
20: '偷笑',
|
||||
21: '可爱',
|
||||
22: '白眼',
|
||||
23: '傲慢',
|
||||
24: '饥饿',
|
||||
25: '困',
|
||||
26: '惊恐',
|
||||
27: '流汗',
|
||||
28: '憨笑',
|
||||
29: '悠闲',
|
||||
30: '奋斗',
|
||||
31: '咒骂',
|
||||
32: '疑问',
|
||||
33: '嘘',
|
||||
34: '晕',
|
||||
35: '折磨',
|
||||
36: '衰',
|
||||
37: '骷髅',
|
||||
38: '敲打',
|
||||
39: '再见',
|
||||
41: '发抖',
|
||||
42: '爱情',
|
||||
43: '跳跳',
|
||||
46: '猪头',
|
||||
49: '拥抱',
|
||||
53: '蛋糕',
|
||||
54: '闪电',
|
||||
55: '炸弹',
|
||||
56: '刀',
|
||||
57: '足球',
|
||||
59: '便便',
|
||||
60: '咖啡',
|
||||
61: '饭',
|
||||
63: '玫瑰',
|
||||
64: '凋谢',
|
||||
66: '爱心',
|
||||
67: '心碎',
|
||||
69: '礼物',
|
||||
74: '太阳',
|
||||
75: '月亮',
|
||||
76: '赞',
|
||||
77: '踩',
|
||||
78: '握手',
|
||||
79: '胜利',
|
||||
85: '飞吻',
|
||||
86: '怄火',
|
||||
89: '西瓜',
|
||||
96: '冷汗',
|
||||
97: '擦汗',
|
||||
98: '抠鼻',
|
||||
99: '鼓掌',
|
||||
100: '糗大了',
|
||||
101: '坏笑',
|
||||
102: '左哼哼',
|
||||
103: '右哼哼',
|
||||
104: '哈欠',
|
||||
105: '鄙视',
|
||||
106: '委屈',
|
||||
107: '快哭了',
|
||||
108: '阴险',
|
||||
109: '亲亲',
|
||||
110: '吓',
|
||||
111: '可怜',
|
||||
112: '菜刀',
|
||||
113: '啤酒',
|
||||
114: '篮球',
|
||||
115: '乒乓',
|
||||
116: '示爱',
|
||||
117: '瓢虫',
|
||||
118: '抱拳',
|
||||
119: '勾引',
|
||||
120: '拳头',
|
||||
121: '差劲',
|
||||
122: '爱你',
|
||||
123: '不',
|
||||
124: '好',
|
||||
125: '转圈',
|
||||
126: '磕头',
|
||||
127: '回头',
|
||||
128: '跳绳',
|
||||
129: '挥手',
|
||||
130: '激动',
|
||||
131: '街舞',
|
||||
132: '献吻',
|
||||
133: '左太极',
|
||||
134: '右太极',
|
||||
136: '双喜',
|
||||
137: '鞭炮',
|
||||
138: '灯笼',
|
||||
140: 'K歌',
|
||||
144: '喝彩',
|
||||
145: '祈祷',
|
||||
146: '爆筋',
|
||||
147: '棒棒糖',
|
||||
148: '喝奶',
|
||||
151: '飞机',
|
||||
158: '钞票',
|
||||
168: '药',
|
||||
169: '手枪',
|
||||
171: '茶',
|
||||
172: '眨眼睛',
|
||||
173: '泪奔',
|
||||
174: '无奈',
|
||||
175: '卖萌',
|
||||
176: '小纠结',
|
||||
177: '喷血',
|
||||
178: '斜眼笑',
|
||||
179: '表情',
|
||||
180: '惊喜',
|
||||
181: '骚扰',
|
||||
182: '笑哭',
|
||||
183: '我最美',
|
||||
184: '河蟹',
|
||||
185: '羊驼',
|
||||
187: '幽灵',
|
||||
188: '蛋',
|
||||
190: '菊花',
|
||||
192: '红包',
|
||||
193: '大笑',
|
||||
194: '不开心',
|
||||
197: '冷漠',
|
||||
198: '呃',
|
||||
199: '好棒',
|
||||
200: '拜托',
|
||||
201: '点赞',
|
||||
202: '无聊',
|
||||
203: '托脸',
|
||||
204: '吃',
|
||||
205: '送花',
|
||||
206: '害怕',
|
||||
207: '花痴',
|
||||
208: '小样儿',
|
||||
210: '飙泪',
|
||||
211: '我不看',
|
||||
212: '托腮',
|
||||
214: '啵啵',
|
||||
215: '糊脸',
|
||||
216: '拍头',
|
||||
217: '扯一扯',
|
||||
218: '舔一舔',
|
||||
219: '蹭一蹭',
|
||||
220: '拽炸天',
|
||||
221: '顶呱呱',
|
||||
245: '加油必胜',
|
||||
246: '加油抱抱',
|
||||
247: '口罩护体',
|
||||
260: '/搬砖中',
|
||||
261: '/忙到飞起',
|
||||
262: '/脑阔疼',
|
||||
263: '/沧桑',
|
||||
264: '/捂脸',
|
||||
265: '/辣眼睛',
|
||||
266: '/哦哟',
|
||||
267: '/头秃',
|
||||
268: '/问号脸',
|
||||
269: '/暗中观察',
|
||||
270: '/emm',
|
||||
271: '/吃瓜',
|
||||
272: '/呵呵哒',
|
||||
273: '/我酸了',
|
||||
274: '/太南了',
|
||||
276: '/辣椒酱',
|
||||
277: '/汪汪',
|
||||
278: '/汗',
|
||||
279: '/打脸',
|
||||
280: '/击掌',
|
||||
281: '/无眼笑',
|
||||
282: '/敬礼',
|
||||
283: '/狂笑',
|
||||
284: '/面无表情',
|
||||
285: '/摸鱼',
|
||||
286: '/魔鬼笑',
|
||||
287: '/哦',
|
||||
288: '/请',
|
||||
289: '/睁眼',
|
||||
290: '/敲开心',
|
||||
291: '/震惊',
|
||||
292: '/让我康康',
|
||||
293: '/摸锦鲤',
|
||||
294: '/期待',
|
||||
295: '/拿到红包',
|
||||
296: '/真好',
|
||||
297: '/拜谢',
|
||||
298: '/元宝',
|
||||
299: '/牛啊',
|
||||
300: '/胖三斤',
|
||||
301: '/好闪',
|
||||
302: '/左拜年',
|
||||
303: '/右拜年',
|
||||
304: '/红包包',
|
||||
305: '/右亲亲',
|
||||
306: '/牛气冲天',
|
||||
307: '/喵喵',
|
||||
308: '/求红包',
|
||||
309: '/谢红包',
|
||||
310: '/新年烟花',
|
||||
311: '/打call',
|
||||
312: '/变形',
|
||||
313: '/嗑到了',
|
||||
314: '/仔细分析',
|
||||
315: '/加油',
|
||||
316: '/我没事',
|
||||
317: '/菜狗',
|
||||
318: '/崇拜',
|
||||
319: '/比心',
|
||||
320: '/庆祝',
|
||||
321: '/老色痞',
|
||||
322: '/拒绝',
|
||||
323: '/嫌弃',
|
||||
324: '/吃糖'
|
||||
}
|
||||
|
||||
export const faceMapReverse = {
|
||||
惊讶: '0',
|
||||
撇嘴: '1',
|
||||
色: '2',
|
||||
发呆: '3',
|
||||
得意: '4',
|
||||
流泪: '5',
|
||||
害羞: '6',
|
||||
闭嘴: '7',
|
||||
睡: '8',
|
||||
大哭: '9',
|
||||
尴尬: '10',
|
||||
发怒: '11',
|
||||
调皮: '12',
|
||||
呲牙: '13',
|
||||
微笑: '14',
|
||||
难过: '15',
|
||||
酷: '16',
|
||||
抓狂: '18',
|
||||
吐: '19',
|
||||
偷笑: '20',
|
||||
可爱: '21',
|
||||
白眼: '22',
|
||||
傲慢: '23',
|
||||
饥饿: '24',
|
||||
困: '25',
|
||||
惊恐: '26',
|
||||
流汗: '27',
|
||||
憨笑: '28',
|
||||
悠闲: '29',
|
||||
奋斗: '30',
|
||||
咒骂: '31',
|
||||
疑问: '32',
|
||||
嘘: '33',
|
||||
晕: '34',
|
||||
折磨: '35',
|
||||
衰: '36',
|
||||
骷髅: '37',
|
||||
敲打: '38',
|
||||
再见: '39',
|
||||
发抖: '41',
|
||||
爱情: '42',
|
||||
跳跳: '43',
|
||||
猪头: '46',
|
||||
拥抱: '49',
|
||||
蛋糕: '53',
|
||||
闪电: '54',
|
||||
炸弹: '55',
|
||||
刀: '56',
|
||||
足球: '57',
|
||||
便便: '59',
|
||||
咖啡: '60',
|
||||
饭: '61',
|
||||
玫瑰: '63',
|
||||
凋谢: '64',
|
||||
爱心: '66',
|
||||
心碎: '67',
|
||||
礼物: '69',
|
||||
太阳: '74',
|
||||
月亮: '75',
|
||||
赞: '76',
|
||||
踩: '77',
|
||||
握手: '78',
|
||||
胜利: '79',
|
||||
飞吻: '85',
|
||||
怄火: '86',
|
||||
西瓜: '89',
|
||||
冷汗: '96',
|
||||
擦汗: '97',
|
||||
抠鼻: '98',
|
||||
鼓掌: '99',
|
||||
糗大了: '100',
|
||||
坏笑: '101',
|
||||
左哼哼: '102',
|
||||
右哼哼: '103',
|
||||
哈欠: '104',
|
||||
鄙视: '105',
|
||||
委屈: '106',
|
||||
快哭了: '107',
|
||||
阴险: '108',
|
||||
亲亲: '109',
|
||||
吓: '110',
|
||||
可怜: '111',
|
||||
菜刀: '112',
|
||||
啤酒: '113',
|
||||
篮球: '114',
|
||||
乒乓: '115',
|
||||
示爱: '116',
|
||||
瓢虫: '117',
|
||||
抱拳: '118',
|
||||
勾引: '119',
|
||||
拳头: '120',
|
||||
差劲: '121',
|
||||
爱你: '122',
|
||||
不: '123',
|
||||
好: '124',
|
||||
转圈: '125',
|
||||
磕头: '126',
|
||||
回头: '127',
|
||||
跳绳: '128',
|
||||
挥手: '129',
|
||||
激动: '130',
|
||||
街舞: '131',
|
||||
献吻: '132',
|
||||
左太极: '133',
|
||||
右太极: '134',
|
||||
双喜: '136',
|
||||
鞭炮: '137',
|
||||
灯笼: '138',
|
||||
K歌: '140',
|
||||
喝彩: '144',
|
||||
祈祷: '145',
|
||||
爆筋: '146',
|
||||
棒棒糖: '147',
|
||||
喝奶: '148',
|
||||
飞机: '151',
|
||||
钞票: '158',
|
||||
药: '168',
|
||||
手枪: '169',
|
||||
茶: '171',
|
||||
眨眼睛: '172',
|
||||
泪奔: '173',
|
||||
无奈: '174',
|
||||
卖萌: '175',
|
||||
小纠结: '176',
|
||||
喷血: '177',
|
||||
斜眼笑: '178',
|
||||
表情: '179',
|
||||
惊喜: '180',
|
||||
骚扰: '181',
|
||||
笑哭: '182',
|
||||
我最美: '183',
|
||||
河蟹: '184',
|
||||
羊驼: '185',
|
||||
幽灵: '187',
|
||||
蛋: '188',
|
||||
菊花: '190',
|
||||
红包: '192',
|
||||
大笑: '193',
|
||||
不开心: '194',
|
||||
冷漠: '197',
|
||||
呃: '198',
|
||||
好棒: '199',
|
||||
拜托: '200',
|
||||
点赞: '201',
|
||||
无聊: '202',
|
||||
托脸: '203',
|
||||
吃: '204',
|
||||
送花: '205',
|
||||
害怕: '206',
|
||||
花痴: '207',
|
||||
小样儿: '208',
|
||||
飙泪: '210',
|
||||
我不看: '211',
|
||||
托腮: '212',
|
||||
啵啵: '214',
|
||||
糊脸: '215',
|
||||
拍头: '216',
|
||||
扯一扯: '217',
|
||||
舔一舔: '218',
|
||||
蹭一蹭: '219',
|
||||
拽炸天: '220',
|
||||
顶呱呱: '221',
|
||||
加油必胜: '245',
|
||||
加油抱抱: '246',
|
||||
口罩护体: '247',
|
||||
'/搬砖中': '260',
|
||||
'/忙到飞起': '261',
|
||||
'/脑阔疼': '262',
|
||||
'/沧桑': '263',
|
||||
'/捂脸': '264',
|
||||
'/辣眼睛': '265',
|
||||
'/哦哟': '266',
|
||||
'/头秃': '267',
|
||||
'/问号脸': '268',
|
||||
'/暗中观察': '269',
|
||||
'/emm': '270',
|
||||
'/吃瓜': '271',
|
||||
'/呵呵哒': '272',
|
||||
'/我酸了': '273',
|
||||
'/太南了': '274',
|
||||
'/辣椒酱': '276',
|
||||
'/汪汪': '277',
|
||||
'/汗': '278',
|
||||
'/打脸': '279',
|
||||
'/击掌': '280',
|
||||
'/无眼笑': '281',
|
||||
'/敬礼': '282',
|
||||
'/狂笑': '283',
|
||||
'/面无表情': '284',
|
||||
'/摸鱼': '285',
|
||||
'/魔鬼笑': '286',
|
||||
'/哦': '287',
|
||||
'/请': '288',
|
||||
'/睁眼': '289',
|
||||
'/敲开心': '290',
|
||||
'/震惊': '291',
|
||||
'/让我康康': '292',
|
||||
'/摸锦鲤': '293',
|
||||
'/期待': '294',
|
||||
'/拿到红包': '295',
|
||||
'/真好': '296',
|
||||
'/拜谢': '297',
|
||||
'/元宝': '298',
|
||||
'/牛啊': '299',
|
||||
'/胖三斤': '300',
|
||||
'/好闪': '301',
|
||||
'/左拜年': '302',
|
||||
'/右拜年': '303',
|
||||
'/红包包': '304',
|
||||
'/右亲亲': '305',
|
||||
'/牛气冲天': '306',
|
||||
'/喵喵': '307',
|
||||
'/求红包': '308',
|
||||
'/谢红包': '309',
|
||||
'/新年烟花': '310',
|
||||
'/打call': '311',
|
||||
'/变形': '312',
|
||||
'/嗑到了': '313',
|
||||
'/仔细分析': '314',
|
||||
'/加油': '315',
|
||||
'/我没事': '316',
|
||||
'/菜狗': '317',
|
||||
'/崇拜': '318',
|
||||
'/比心': '319',
|
||||
'/庆祝': '320',
|
||||
'/老色痞': '321',
|
||||
'/拒绝': '322',
|
||||
'/嫌弃': '323',
|
||||
'/吃糖': '324'
|
||||
}
|
||||
|
||||
export async function convertFaces (msg, handleAt = false, e) {
|
||||
handleAt = e?.isGroup && handleAt
|
||||
let groupMembers
|
||||
let groupCardQQMap = {}
|
||||
if (handleAt) {
|
||||
try {
|
||||
groupMembers = e.bot.gml.get(e.group_id)
|
||||
} catch (err) {
|
||||
console.error(`Failed to get group members: ${err}`)
|
||||
}
|
||||
if (groupMembers) {
|
||||
for (let key of groupMembers.keys()) {
|
||||
let userInfo = groupMembers.get(key)
|
||||
if (userInfo.card) {
|
||||
groupCardQQMap[userInfo.card] = userInfo.user_id
|
||||
}
|
||||
if (userInfo.nickname) {
|
||||
groupCardQQMap[userInfo.nickname] = userInfo.user_id
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let tmpMsg = ''
|
||||
let tmpFace = ''
|
||||
let tmpAt = ''
|
||||
let foundFace = false
|
||||
let foundAt = false
|
||||
let msgs = []
|
||||
for (let i = 0; i < msg.length; i++) {
|
||||
// console.log(msg[i])
|
||||
if (msg[i] === '[') {
|
||||
foundFace = true
|
||||
continue
|
||||
}
|
||||
if (!foundFace) {
|
||||
if (handleAt && msg[i] === '@') {
|
||||
foundAt = true
|
||||
if (tmpMsg) {
|
||||
msgs.push(tmpMsg)
|
||||
tmpMsg = ''
|
||||
}
|
||||
continue
|
||||
}
|
||||
if (handleAt && foundAt) {
|
||||
tmpAt += msg[i]
|
||||
if (groupCardQQMap[tmpAt]) {
|
||||
foundAt = false
|
||||
msgs.push(segment.at(groupCardQQMap[tmpAt], groupMembers.get(groupCardQQMap[tmpAt]).card, false))
|
||||
tmpAt = ''
|
||||
continue
|
||||
}
|
||||
} else {
|
||||
tmpMsg += msg[i]
|
||||
}
|
||||
} else {
|
||||
if (msg[i] !== ']') {
|
||||
tmpFace += msg[i]
|
||||
} else {
|
||||
foundFace = false
|
||||
if (faceMapReverse[tmpFace] || faceMapReverse['/' + tmpFace] || faceMapReverse[_.trimStart(tmpFace, '/')]) {
|
||||
if (tmpMsg) {
|
||||
msgs.push(tmpMsg)
|
||||
}
|
||||
msgs.push(segment.face(parseInt(faceMapReverse[tmpFace] || faceMapReverse['/' + tmpFace] || faceMapReverse[_.trimStart(tmpFace, '/')])))
|
||||
tmpMsg = ''
|
||||
} else {
|
||||
tmpMsg += `[${tmpFace}]`
|
||||
}
|
||||
tmpFace = ''
|
||||
}
|
||||
}
|
||||
}
|
||||
if (tmpMsg) {
|
||||
msgs.push(tmpMsg)
|
||||
}
|
||||
if (tmpFace) {
|
||||
msgs.push(`[${tmpFace}`)
|
||||
}
|
||||
if (handleAt && tmpAt) {
|
||||
msgs.push(`@${tmpAt}`)
|
||||
}
|
||||
return msgs
|
||||
}
|
||||
|
||||
export function testConvertFaces () {
|
||||
const toTest = [
|
||||
'你好啊[/微笑][惊讶]哈哈[/拜谢]'
|
||||
]
|
||||
toTest.forEach(t => {
|
||||
console.log(convertFaces(t))
|
||||
})
|
||||
}
|
||||
|
||||
// testConvertFaces()
|
||||
|
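A sketch of what convertFaces produces for a mixed text/emoji string (`segment` is a global provided by the bot framework at runtime, so this only runs inside the plugin; the reply call is illustrative):

import { convertFaces } from './face.js' // path assumed

const parts = await convertFaces('你好[微笑]大家好[/吃瓜]')
// parts is roughly ['你好', segment.face(14), '大家好', segment.face(271)]
await e.reply(parts) // e is the incoming message event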
|
@ -1,14 +0,0 @@
|
|||
export async function upsertMessage (message, suffix = '') {
|
||||
if (suffix) {
|
||||
suffix = '_' + suffix
|
||||
}
|
||||
await redis.set(`CHATGPT:MESSAGE${suffix}:${message.id}`, JSON.stringify(message))
|
||||
}
|
||||
|
||||
export async function getMessageById (id, suffix = '') {
|
||||
if (suffix) {
|
||||
suffix = '_' + suffix
|
||||
}
|
||||
let messageStr = await redis.get(`CHATGPT:MESSAGE${suffix}:${id}`)
|
||||
return JSON.parse(messageStr)
|
||||
}
|
||||
|
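These two helpers form a Redis-backed message store; the ChatGPTAPI class further down accepts them through its getMessageById/upsertMessage options, roughly like this (option names match that class, paths and the suffix value are assumptions):

import { upsertMessage, getMessageById } from './history.js' // path assumed
import { ChatGPTAPI } from './chatgpt-api.js' // path assumed

const api = new ChatGPTAPI({
  apiKey: process.env.OPENAI_API_KEY,
  upsertMessage: msg => upsertMessage(msg, 'default'),
  getMessageById: id => getMessageById(id, 'default')
})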
|
@ -1,8 +0,0 @@
|
|||
export function decrypt (jwtToken) {
|
||||
const [encodedHeader, encodedPayload, signature] = jwtToken.split('.')
|
||||
|
||||
// const decodedHeader = Buffer.from(encodedHeader, 'base64').toString('utf-8')
|
||||
const decodedPayload = Buffer.from(encodedPayload, 'base64').toString('utf-8')
|
||||
|
||||
return decodedPayload
|
||||
}
|
||||
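decrypt() simply base64-decodes the middle segment of the JWT and returns the raw payload JSON string, so a typical expiry check looks like this (module path and token source are assumptions):

import { decrypt } from './jwt.js' // path assumed

const accessToken = process.env.CHATGPT_ACCESS_TOKEN
const payload = JSON.parse(decrypt(accessToken))
const expired = payload.exp * 1000 < Date.now()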
207
utils/message.js
|
|
@ -1,207 +0,0 @@
|
|||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { Config, officialChatGPTAPI } from './config.js'
|
||||
import https from 'https'
|
||||
import http from 'http'
|
||||
import { createParser } from 'eventsource-parser'
|
||||
|
||||
// API3
|
||||
export class OfficialChatGPTClient {
|
||||
constructor (opts = {}) {
|
||||
const {
|
||||
accessToken,
|
||||
apiReverseUrl
|
||||
} = opts
|
||||
this._accessToken = accessToken
|
||||
this._apiReverseUrl = apiReverseUrl
|
||||
}
|
||||
|
||||
async sendMessage (prompt, opts = {}, retry = 3, errorMsg) {
|
||||
if (retry < 0) {
|
||||
throw new Error(errorMsg || 'retry limit exceeded')
|
||||
}
|
||||
let {
|
||||
conversationId,
|
||||
parentMessageId = uuidv4(),
|
||||
messageId = uuidv4(),
|
||||
action = 'next',
|
||||
model = ''
|
||||
} = opts
|
||||
let url = this._apiReverseUrl || officialChatGPTAPI
|
||||
if (this._apiReverseUrl && Config.proxy && !Config.apiForceUseReverse) {
|
||||
// A proxy is configured and a reverse proxy exists, but forced reverse proxy is not enabled
|
||||
url = officialChatGPTAPI
|
||||
}
|
||||
|
||||
const body = {
|
||||
action,
|
||||
messages: [
|
||||
{
|
||||
id: messageId,
|
||||
role: 'user',
|
||||
content: {
|
||||
content_type: 'text',
|
||||
parts: [prompt]
|
||||
},
|
||||
metadata: {}
|
||||
}
|
||||
],
|
||||
model: model || (Config.useGPT4 ? 'gpt-4o' : 'auto'),
|
||||
parent_message_id: parentMessageId,
|
||||
timezone_offset_min: -480,
|
||||
history_and_training_disabled: false
|
||||
}
|
||||
if (conversationId) {
|
||||
body.conversation_id = conversationId
|
||||
}
|
||||
let conversationResponse
|
||||
let statusCode
|
||||
let requestP = new Promise((resolve, reject) => {
|
||||
let option = {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
accept: 'text/event-stream',
|
||||
'x-openai-assistant-app-id': '',
|
||||
authorization: this._accessToken ? `Bearer ${this._accessToken}` : '',
|
||||
'content-type': 'application/json',
|
||||
referer: 'https://chat.openai.com/chat',
|
||||
library: 'chatgpt-plugin'
|
||||
},
|
||||
referrer: 'https://chat.openai.com/chat',
|
||||
timeout: 10000
|
||||
}
|
||||
logger.info('using api3 reverse proxy: ' + url)
|
||||
let requestLib = url.startsWith('https') ? https : http
|
||||
const req = requestLib.request(url, option, (res) => {
|
||||
statusCode = res.statusCode
|
||||
let response
|
||||
function onMessage (data) {
|
||||
if (data === '[DONE]') {
|
||||
return resolve({
|
||||
error: null,
|
||||
response,
|
||||
conversationId,
|
||||
messageId,
|
||||
conversationResponse
|
||||
})
|
||||
}
|
||||
try {
|
||||
const _checkJson = JSON.parse(data)
|
||||
} catch (error) {
|
||||
// console.log('warning: parse error.')
|
||||
return
|
||||
}
|
||||
try {
|
||||
const convoResponseEvent = JSON.parse(data)
|
||||
conversationResponse = convoResponseEvent
|
||||
if (convoResponseEvent.conversation_id) {
|
||||
conversationId = convoResponseEvent.conversation_id
|
||||
}
|
||||
|
||||
if (convoResponseEvent.message?.id) {
|
||||
messageId = convoResponseEvent.message.id
|
||||
}
|
||||
|
||||
const partialResponse =
|
||||
convoResponseEvent.message?.content?.parts?.[0]
|
||||
if (partialResponse) {
|
||||
if (Config.debug) {
|
||||
logger.info(JSON.stringify(convoResponseEvent))
|
||||
}
|
||||
response = partialResponse
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn('fetchSSE onMessage unexpected error', err)
|
||||
reject(err)
|
||||
}
|
||||
}
|
||||
|
||||
const parser = createParser((event) => {
|
||||
if (event.type === 'event') {
|
||||
onMessage(event.data)
|
||||
}
|
||||
})
|
||||
const errBody = []
|
||||
res.on('data', (chunk) => {
|
||||
// logger.mark('成功连接到chat.openai.com,准备读取数据流')
|
||||
if (statusCode === 200) {
|
||||
let str = chunk.toString()
|
||||
parser.feed(str)
|
||||
}
|
||||
errBody.push(chunk)
|
||||
})
|
||||
|
||||
// const body = []
|
||||
// res.on('data', (chunk) => body.push(chunk))
|
||||
res.on('end', () => {
|
||||
const resString = Buffer.concat(errBody).toString()
|
||||
reject(resString)
|
||||
})
|
||||
})
|
||||
req.on('error', (err) => {
|
||||
reject(err)
|
||||
})
|
||||
|
||||
req.on('timeout', () => {
|
||||
req.destroy()
|
||||
reject(new Error('Request time out'))
|
||||
})
|
||||
|
||||
req.write(JSON.stringify(body))
|
||||
req.end()
|
||||
})
|
||||
try {
|
||||
const response = await requestP
|
||||
if (statusCode === 200) {
|
||||
return {
|
||||
text: response.response,
|
||||
conversationId: response.conversationId,
|
||||
id: response.messageId,
|
||||
parentMessageId
|
||||
}
|
||||
} else {
|
||||
console.log(response)
|
||||
throw new Error(JSON.stringify(response))
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn(err)
|
||||
if (typeof err === 'string') {
|
||||
if (err.includes('You have sent too many messages to the model. Please try again later.')) {
|
||||
logger.warn('账户的gpt-o额度不足,将降级为auto重试')
|
||||
opts.model = 'auto'
|
||||
}
|
||||
}
|
||||
return await this.sendMessage(prompt, opts, retry - 1, err.message)
|
||||
}
|
||||
}
|
||||
|
||||
voices = ['ember', 'cove',
|
||||
'juniper', 'sky', 'breeze'
|
||||
// '__internal_only_shimmer',
|
||||
// '__internal_only_santa'
|
||||
]
|
||||
|
||||
async synthesis (opts = {}) {
|
||||
const { id, conversationId } = opts
|
||||
let url = this._apiReverseUrl.replace('/conversation', '/synthesize')
|
||||
let randomVoice = this.voices[Math.floor(Math.random() * this.voices.length)]
|
||||
url = `${url}?message_id=${id}&conversation_id=${conversationId}&voice=${randomVoice}&format=mp3`
|
||||
let res = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
accept: 'audio/mpeg',
|
||||
'x-openai-assistant-app-id': '',
|
||||
authorization: this._accessToken ? `Bearer ${this._accessToken}` : '',
|
||||
referer: 'https://chat.openai.com/chat',
|
||||
library: 'chatgpt-plugin'
|
||||
}
|
||||
})
|
||||
if (res.status !== 200) {
|
||||
throw new Error(await res.text())
|
||||
}
|
||||
if (res.headers.get('content-type') !== 'audio/mpeg') {
|
||||
throw new Error('invalid content type')
|
||||
}
|
||||
let buffer = await res.arrayBuffer()
|
||||
return Buffer.from(buffer)
|
||||
}
|
||||
}
|
||||
|
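A hypothetical way to drive the API3 client above (the reverse-proxy URL and token source are placeholders); follow-up calls pass the returned conversationId and message id back in to stay in the same thread:

import { OfficialChatGPTClient } from './message.js' // path assumed

const client = new OfficialChatGPTClient({
  accessToken: process.env.CHATGPT_ACCESS_TOKEN,
  apiReverseUrl: 'https://example.com/backend-api/conversation'
})

const first = await client.sendMessage('hello')
const followUp = await client.sendMessage('and in one sentence?', {
  conversationId: first.conversationId,
  parentMessageId: first.id
})
console.log(followUp.text)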
|
@ -1,548 +0,0 @@
|
|||
var __assign = (this && this.__assign) || function () {
|
||||
__assign = Object.assign || function(t) {
|
||||
for (var s, i = 1, n = arguments.length; i < n; i++) {
|
||||
s = arguments[i];
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
|
||||
t[p] = s[p];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
return __assign.apply(this, arguments);
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (g && (g = 0, op[0] && (_ = 0)), _) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
|
||||
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
|
||||
if (ar || !(i in from)) {
|
||||
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
|
||||
ar[i] = from[i];
|
||||
}
|
||||
}
|
||||
return to.concat(ar || Array.prototype.slice.call(from));
|
||||
};
|
||||
import Keyv from 'keyv';
|
||||
import pTimeout from 'p-timeout';
|
||||
import QuickLRU from 'quick-lru';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import * as tokenizer from './tokenizer.js';
|
||||
import * as types from './types.js';
|
||||
import globalFetch from 'node-fetch';
|
||||
import { fetchSSE } from './fetch-sse.js';
|
||||
var CHATGPT_MODEL = 'gpt-4o-mini';
|
||||
var USER_LABEL_DEFAULT = 'User';
|
||||
var ASSISTANT_LABEL_DEFAULT = 'ChatGPT';
|
||||
var ChatGPTAPI = /** @class */ (function () {
|
||||
/**
|
||||
* Creates a new client wrapper around OpenAI's chat completion API, mimicking the official ChatGPT webapp's functionality as closely as possible.
|
||||
*
|
||||
* @param apiKey - OpenAI API key (required).
|
||||
* @param apiOrg - Optional OpenAI API organization.
|
||||
* @param apiBaseUrl - Optional override for the OpenAI API base URL.
|
||||
* @param debug - Optionally enables logging debugging info to stdout.
|
||||
* @param completionParams - Param overrides to send to the [OpenAI chat completion API](https://platform.openai.com/docs/api-reference/chat/create). Options like `temperature` and `presence_penalty` can be tweaked to change the personality of the assistant.
|
||||
* @param maxModelTokens - Optional override for the maximum number of tokens allowed by the model's context. Defaults to 4000.
|
||||
* @param maxResponseTokens - Optional override for the maximum number of tokens allowed for the model's response. Defaults to 8192.
|
||||
* @param messageStore - Optional [Keyv](https://github.com/jaredwray/keyv) store to persist chat messages to. If not provided, messages will be lost when the process exits.
|
||||
* @param getMessageById - Optional function to retrieve a message by its ID. If not provided, the default implementation will be used (using an in-memory `messageStore`).
|
||||
* @param upsertMessage - Optional function to insert or update a message. If not provided, the default implementation will be used (using an in-memory `messageStore`).
|
||||
* @param fetch - Optional override for the `fetch` implementation to use. Defaults to the global `fetch` function.
|
||||
*/
|
||||
function ChatGPTAPI(opts) {
|
||||
var apiKey = opts.apiKey, apiOrg = opts.apiOrg, _a = opts.apiBaseUrl, apiBaseUrl = _a === void 0 ? 'https://api.openai.com/v1' : _a, _b = opts.debug, debug = _b === void 0 ? false : _b, messageStore = opts.messageStore, completionParams = opts.completionParams, systemMessage = opts.systemMessage, _c = opts.maxModelTokens, maxModelTokens = _c === void 0 ? 4000 : _c, _d = opts.maxResponseTokens, maxResponseTokens = _d === void 0 ? 8192 : _d, getMessageById = opts.getMessageById, upsertMessage = opts.upsertMessage, _e = opts.fetch, fetch = _e === void 0 ? globalFetch : _e;
|
||||
this._apiKey = apiKey;
|
||||
this._apiOrg = apiOrg;
|
||||
this._apiBaseUrl = apiBaseUrl;
|
||||
this._debug = !!debug;
|
||||
this._fetch = fetch;
|
||||
this._completionParams = __assign({ model: CHATGPT_MODEL, temperature: 0.8, top_p: 1.0, presence_penalty: 1.0 }, completionParams);
|
||||
this._systemMessage = systemMessage;
|
||||
if (this._systemMessage === undefined) {
|
||||
var currentDate = new Date().toISOString().split('T')[0];
|
||||
this._systemMessage = "You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible.\nKnowledge cutoff: 2021-09-01\nCurrent date: ".concat(currentDate);
|
||||
}
|
||||
this._maxModelTokens = maxModelTokens;
|
||||
this._maxResponseTokens = maxResponseTokens;
|
||||
this._getMessageById = getMessageById !== null && getMessageById !== void 0 ? getMessageById : this._defaultGetMessageById;
|
||||
this._upsertMessage = upsertMessage !== null && upsertMessage !== void 0 ? upsertMessage : this._defaultUpsertMessage;
|
||||
if (messageStore) {
|
||||
this._messageStore = messageStore;
|
||||
}
|
||||
else {
|
||||
this._messageStore = new Keyv({
|
||||
store: new QuickLRU({ maxSize: 10000 })
|
||||
});
|
||||
}
|
||||
if (!this._apiKey) {
|
||||
throw new Error('OpenAI missing required apiKey');
|
||||
}
|
||||
if (!this._fetch) {
|
||||
throw new Error('Invalid environment; fetch is not defined');
|
||||
}
|
||||
if (typeof this._fetch !== 'function') {
|
||||
throw new Error('Invalid "fetch" is not a function');
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Sends a message to the OpenAI chat completions endpoint, waits for the response
|
||||
* to resolve, and returns the response.
|
||||
*
|
||||
* If you want your response to have historical context, you must provide a valid `parentMessageId`.
|
||||
*
|
||||
* If you want to receive a stream of partial responses, use `opts.onProgress`.
|
||||
*
|
||||
* Set `debug: true` in the `ChatGPTAPI` constructor to log more info on the full prompt sent to the OpenAI chat completions API. You can override the `systemMessage` in `opts` to customize the assistant's instructions.
|
||||
*
|
||||
* @param message - The prompt message to send
|
||||
* @param opts.parentMessageId - Optional ID of the previous message in the conversation (defaults to `undefined`)
|
||||
* @param opts.conversationId - Optional ID of the conversation (defaults to `undefined`)
|
||||
* @param opts.messageId - Optional ID of the message to send (defaults to a random UUID)
|
||||
* @param opts.systemMessage - Optional override for the chat "system message" which acts as instructions to the model (defaults to the ChatGPT system message)
|
||||
* @param opts.timeoutMs - Optional timeout in milliseconds (defaults to no timeout)
|
||||
* @param opts.onProgress - Optional callback which will be invoked every time the partial response is updated
|
||||
* @param opts.abortSignal - Optional callback used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
|
||||
* @param completionParams - Optional overrides to send to the [OpenAI chat completion API](https://platform.openai.com/docs/api-reference/chat/create). Options like `temperature` and `presence_penalty` can be tweaked to change the personality of the assistant.
|
||||
*
|
||||
* @returns The response from ChatGPT
|
||||
*/
|
||||
ChatGPTAPI.prototype.sendMessage = function (text, opts, role) {
|
||||
if (opts === void 0) { opts = {}; }
|
||||
if (role === void 0) { role = 'user'; }
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
var parentMessageId, _a, messageId, timeoutMs, onProgress, _b, stream, completionParams, conversationId, abortSignal, abortController, message, latestQuestion, _c, messages, maxTokens, numTokens, result, responseP;
|
||||
var _this = this;
|
||||
return __generator(this, function (_d) {
|
||||
switch (_d.label) {
|
||||
case 0:
|
||||
parentMessageId = opts.parentMessageId, _a = opts.messageId, messageId = _a === void 0 ? uuidv4() : _a, timeoutMs = opts.timeoutMs, onProgress = opts.onProgress, _b = opts.stream, stream = _b === void 0 ? onProgress ? true : false : _b, completionParams = opts.completionParams, conversationId = opts.conversationId;
|
||||
abortSignal = opts.abortSignal;
|
||||
abortController = null;
|
||||
if (timeoutMs && !abortSignal) {
|
||||
abortController = new AbortController();
|
||||
abortSignal = abortController.signal;
|
||||
}
|
||||
message = {
|
||||
role: role,
|
||||
id: messageId,
|
||||
conversationId: conversationId,
|
||||
parentMessageId: parentMessageId,
|
||||
text: text,
|
||||
name: opts.name
|
||||
};
|
||||
latestQuestion = message;
|
||||
return [4 /*yield*/, this._buildMessages(text, role, opts, completionParams)];
|
||||
case 1:
|
||||
_c = _d.sent(), messages = _c.messages, maxTokens = _c.maxTokens, numTokens = _c.numTokens;
|
||||
console.log("maxTokens: ".concat(maxTokens, ", numTokens: ").concat(numTokens));
|
||||
result = {
|
||||
role: 'assistant',
|
||||
id: uuidv4(),
|
||||
conversationId: conversationId,
|
||||
parentMessageId: messageId,
|
||||
text: '',
|
||||
thinking_text: '',
|
||||
functionCall: undefined,
|
||||
toolCalls: undefined,
|
||||
conversation: []
|
||||
};
|
||||
responseP = new Promise(function (resolve, reject) { return __awaiter(_this, void 0, void 0, function () {
|
||||
var url, headers, body, res, reason, msg, error, response, message_1, res_1, err_1;
|
||||
var _a, _b;
|
||||
return __generator(this, function (_c) {
|
||||
switch (_c.label) {
|
||||
case 0:
|
||||
url = "".concat(this._apiBaseUrl, "/chat/completions");
|
||||
headers = {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: "Bearer ".concat(this._apiKey)
|
||||
};
|
||||
body = __assign(__assign(__assign({ max_tokens: maxTokens }, this._completionParams), completionParams), { messages: messages, stream: stream });
|
||||
if (this._debug) {
|
||||
console.log(JSON.stringify(body));
|
||||
}
|
||||
// Support multiple organizations
|
||||
// See https://platform.openai.com/docs/api-reference/authentication
|
||||
if (this._apiOrg) {
|
||||
headers['OpenAI-Organization'] = this._apiOrg;
|
||||
}
|
||||
if (this._debug) {
|
||||
console.log("sendMessage (".concat(numTokens, " tokens)"), body);
|
||||
}
|
||||
if (!stream) return [3 /*break*/, 1];
|
||||
fetchSSE(url, {
|
||||
method: 'POST',
|
||||
headers: headers,
|
||||
body: JSON.stringify(body),
|
||||
signal: abortSignal,
|
||||
onMessage: function (data) {
|
||||
var _a;
|
||||
if (data === '[DONE]') {
|
||||
result.text = result.text.trim();
|
||||
result.conversation = messages;
|
||||
return resolve(result);
|
||||
}
|
||||
try {
|
||||
var response = JSON.parse(data);
|
||||
if (response.id) {
|
||||
result.id = response.id;
|
||||
}
|
||||
if ((_a = response.choices) === null || _a === void 0 ? void 0 : _a.length) {
|
||||
var delta = response.choices[0].delta;
|
||||
if (delta.function_call) {
|
||||
if (delta.function_call.name) {
|
||||
result.functionCall = {
|
||||
name: delta.function_call.name,
|
||||
arguments: delta.function_call.arguments
|
||||
};
|
||||
}
|
||||
else {
|
||||
result.functionCall.arguments = (result.functionCall.arguments || '') + delta.function_call.arguments;
|
||||
}
|
||||
}
|
||||
else if (delta.tool_calls) {
|
||||
var fc = delta.tool_calls[0].function;
|
||||
if (fc.name) {
|
||||
result.functionCall = {
|
||||
name: fc.name,
|
||||
arguments: fc.arguments
|
||||
};
|
||||
}
|
||||
else {
|
||||
result.functionCall.arguments = (result.functionCall.arguments || '') + fc.arguments;
|
||||
}
|
||||
}
|
||||
else {
|
||||
result.delta = delta.content;
|
||||
if (delta === null || delta === void 0 ? void 0 : delta.content)
|
||||
result.text += delta.content;
|
||||
if (delta === null || delta === void 0 ? void 0 : delta.reasoning_content)
|
||||
result.thinking_text += delta.reasoning_content;
|
||||
}
|
||||
if (delta.role) {
|
||||
result.role = delta.role;
|
||||
}
|
||||
result.detail = response;
|
||||
onProgress === null || onProgress === void 0 ? void 0 : onProgress(result);
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
console.warn('OpenAI stream SSE event unexpected error', err);
|
||||
return reject(err);
|
||||
}
|
||||
}
|
||||
}, this._fetch).catch(reject);
|
||||
return [3 /*break*/, 7];
|
||||
case 1:
|
||||
_c.trys.push([1, 6, , 7]);
|
||||
return [4 /*yield*/, this._fetch(url, {
|
||||
method: 'POST',
|
||||
headers: headers,
|
||||
body: JSON.stringify(body),
|
||||
signal: abortSignal
|
||||
})];
|
||||
case 2:
|
||||
res = _c.sent();
|
||||
if (!!res.ok) return [3 /*break*/, 4];
|
||||
return [4 /*yield*/, res.text()];
|
||||
case 3:
|
||||
reason = _c.sent();
|
||||
msg = "OpenAI error ".concat(res.status || res.statusText, ": ").concat(reason);
|
||||
error = new types.ChatGPTError(msg);
|
||||
error.statusCode = res.status;
|
||||
error.statusText = res.statusText;
|
||||
return [2 /*return*/, reject(error)];
|
||||
case 4: return [4 /*yield*/, res.json()];
|
||||
case 5:
|
||||
response = (_c.sent());
|
||||
if (this._debug) {
|
||||
console.log(response);
|
||||
}
|
||||
if (response === null || response === void 0 ? void 0 : response.id) {
|
||||
result.id = response.id;
|
||||
}
|
||||
if ((_a = response === null || response === void 0 ? void 0 : response.choices) === null || _a === void 0 ? void 0 : _a.length) {
|
||||
message_1 = response.choices[0].message;
|
||||
if (message_1.content) {
|
||||
result.text = message_1.content;
|
||||
}
|
||||
else if (message_1.function_call) {
|
||||
result.functionCall = message_1.function_call;
|
||||
}
|
||||
else if (message_1.tool_calls) {
|
||||
result.functionCall = message_1.tool_calls.map(function (tool) { return tool.function; })[0];
|
||||
}
|
||||
result.thinking_text = message_1.reasoning_content;
|
||||
if (message_1.role) {
|
||||
result.role = message_1.role;
|
||||
}
|
||||
}
|
||||
else {
|
||||
res_1 = response;
|
||||
console.error(res_1);
|
||||
return [2 /*return*/, reject(new Error("OpenAI error: ".concat(((_b = res_1 === null || res_1 === void 0 ? void 0 : res_1.detail) === null || _b === void 0 ? void 0 : _b.message) || (res_1 === null || res_1 === void 0 ? void 0 : res_1.detail) || 'unknown')))];
|
||||
}
|
||||
result.detail = response;
|
||||
result.conversation = messages;
|
||||
return [2 /*return*/, resolve(result)];
|
||||
case 6:
|
||||
err_1 = _c.sent();
|
||||
return [2 /*return*/, reject(err_1)];
|
||||
case 7: return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); }).then(function (message) { return __awaiter(_this, void 0, void 0, function () {
|
||||
var promptTokens, completionTokens, err_2;
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0:
|
||||
if (!(message.detail && !message.detail.usage)) return [3 /*break*/, 4];
|
||||
_a.label = 1;
|
||||
case 1:
|
||||
_a.trys.push([1, 3, , 4]);
|
||||
promptTokens = numTokens;
|
||||
return [4 /*yield*/, this._getTokenCount(message.text)];
|
||||
case 2:
|
||||
completionTokens = _a.sent();
|
||||
message.detail.usage = {
|
||||
prompt_tokens: promptTokens,
|
||||
completion_tokens: completionTokens,
|
||||
total_tokens: promptTokens + completionTokens,
|
||||
estimated: true
|
||||
};
|
||||
return [3 /*break*/, 4];
|
||||
case 3:
|
||||
err_2 = _a.sent();
|
||||
return [3 /*break*/, 4];
|
||||
case 4: return [2 /*return*/, Promise.all([
|
||||
this._upsertMessage(latestQuestion),
|
||||
this._upsertMessage(message)
|
||||
]).then(function () { return message; })];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
if (timeoutMs) {
|
||||
if (abortController) {
|
||||
// This will be called when a timeout occurs in order for us to forcibly
|
||||
// ensure that the underlying HTTP request is aborted.
|
||||
;
|
||||
responseP.cancel = function () {
|
||||
abortController.abort();
|
||||
};
|
||||
}
|
||||
return [2 /*return*/, pTimeout(responseP, {
|
||||
milliseconds: timeoutMs,
|
||||
message: 'OpenAI timed out waiting for response'
|
||||
})];
|
||||
}
|
||||
else {
|
||||
return [2 /*return*/, responseP];
|
||||
}
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
Object.defineProperty(ChatGPTAPI.prototype, "apiKey", {
|
||||
// @ts-ignore
|
||||
get: function () {
|
||||
return this._apiKey;
|
||||
},
|
||||
// @ts-ignore
|
||||
set: function (apiKey) {
|
||||
this._apiKey = apiKey;
|
||||
},
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
Object.defineProperty(ChatGPTAPI.prototype, "apiOrg", {
|
||||
// @ts-ignore
|
||||
get: function () {
|
||||
return this._apiOrg;
|
||||
},
|
||||
// @ts-ignore
|
||||
set: function (apiOrg) {
|
||||
this._apiOrg = apiOrg;
|
||||
},
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
ChatGPTAPI.prototype._buildMessages = function (text, role, opts, completionParams) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
var _a, systemMessage, parentMessageId, userLabel, assistantLabel, maxNumTokens, messages, systemMessageOffset, nextMessages, functionToken, numTokens, prompt_1, nextNumTokensEstimate, _i, _b, m1, _c, isValidPrompt, parentMessage, parentMessageRole, maxTokens;
|
||||
return __generator(this, function (_d) {
|
||||
switch (_d.label) {
|
||||
case 0:
|
||||
_a = opts.systemMessage, systemMessage = _a === void 0 ? this._systemMessage : _a;
|
||||
parentMessageId = opts.parentMessageId;
|
||||
userLabel = USER_LABEL_DEFAULT;
|
||||
assistantLabel = ASSISTANT_LABEL_DEFAULT;
|
||||
maxNumTokens = this._maxModelTokens - this._maxResponseTokens;
|
||||
messages = [];
|
||||
if (systemMessage) {
|
||||
messages.push({
|
||||
role: 'system',
|
||||
content: systemMessage
|
||||
});
|
||||
}
|
||||
systemMessageOffset = messages.length;
|
||||
nextMessages = text
|
||||
? messages.concat([
|
||||
{
|
||||
role: role,
|
||||
content: text,
|
||||
name: opts.name
|
||||
}
|
||||
])
|
||||
: messages;
|
||||
functionToken = 0;
|
||||
numTokens = functionToken;
|
||||
_d.label = 1;
|
||||
case 1:
|
||||
prompt_1 = nextMessages
|
||||
.reduce(function (prompt, message) {
|
||||
switch (message.role) {
|
||||
case 'system':
|
||||
return prompt.concat(["Instructions:\n".concat(message.content)]);
|
||||
case 'user':
|
||||
return prompt.concat(["".concat(userLabel, ":\n").concat(message.content)]);
|
||||
case 'function':
|
||||
// leave behind
|
||||
return prompt;
|
||||
case 'assistant':
|
||||
return prompt;
|
||||
default:
|
||||
return message.content ? prompt.concat(["".concat(assistantLabel, ":\n").concat(message.content)]) : prompt;
|
||||
}
|
||||
}, [])
|
||||
.join('\n\n');
|
||||
return [4 /*yield*/, this._getTokenCount(prompt_1)];
|
||||
case 2:
|
||||
nextNumTokensEstimate = _d.sent();
|
||||
_i = 0, _b = nextMessages
|
||||
.filter(function (m) { return m.function_call; });
|
||||
_d.label = 3;
|
||||
case 3:
|
||||
if (!(_i < _b.length)) return [3 /*break*/, 6];
|
||||
m1 = _b[_i];
|
||||
_c = nextNumTokensEstimate;
|
||||
return [4 /*yield*/, this._getTokenCount(JSON.stringify(m1.function_call) || '')];
|
||||
case 4:
|
||||
nextNumTokensEstimate = _c + _d.sent();
|
||||
_d.label = 5;
|
||||
case 5:
|
||||
_i++;
|
||||
return [3 /*break*/, 3];
|
||||
case 6:
|
||||
isValidPrompt = nextNumTokensEstimate + functionToken <= maxNumTokens;
|
||||
if (prompt_1 && !isValidPrompt) {
|
||||
return [3 /*break*/, 9];
|
||||
}
|
||||
messages = nextMessages;
|
||||
numTokens = nextNumTokensEstimate + functionToken;
|
||||
if (!isValidPrompt) {
|
||||
return [3 /*break*/, 9];
|
||||
}
|
||||
if (!parentMessageId) {
|
||||
return [3 /*break*/, 9];
|
||||
}
|
||||
return [4 /*yield*/, this._getMessageById(parentMessageId)];
|
||||
case 7:
|
||||
parentMessage = _d.sent();
|
||||
if (!parentMessage) {
|
||||
return [3 /*break*/, 9];
|
||||
}
|
||||
parentMessageRole = parentMessage.role || 'user';
|
||||
nextMessages = nextMessages.slice(0, systemMessageOffset).concat(__spreadArray([
|
||||
{
|
||||
role: parentMessageRole,
|
||||
content: parentMessage.text,
|
||||
name: parentMessage.name,
|
||||
function_call: parentMessage.functionCall ? parentMessage.functionCall : undefined,
|
||||
// tool_calls: parentMessage.toolCalls ? parentMessage.toolCalls : undefined
|
||||
}
|
||||
], nextMessages.slice(systemMessageOffset), true));
|
||||
parentMessageId = parentMessage.parentMessageId;
|
||||
_d.label = 8;
|
||||
case 8:
|
||||
if (true) return [3 /*break*/, 1];
|
||||
_d.label = 9;
|
||||
case 9:
|
||||
maxTokens = Math.max(1, Math.min(this._maxModelTokens - numTokens, this._maxResponseTokens));
|
||||
return [2 /*return*/, { messages: messages, maxTokens: maxTokens, numTokens: numTokens }];
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
ChatGPTAPI.prototype._getTokenCount = function (text) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
if (!text) {
|
||||
return [2 /*return*/, 0];
|
||||
}
|
||||
// TODO: use a better fix in the tokenizer
|
||||
text = text.replace(/<\|endoftext\|>/g, '');
|
||||
return [2 /*return*/, tokenizer.encode(text).length];
|
||||
});
|
||||
});
|
||||
};
|
||||
ChatGPTAPI.prototype._defaultGetMessageById = function (id) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
var res;
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, this._messageStore.get(id)];
|
||||
case 1:
|
||||
res = _a.sent();
|
||||
return [2 /*return*/, res];
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
ChatGPTAPI.prototype._defaultUpsertMessage = function (message) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, this._messageStore.set(message.id, message)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
return ChatGPTAPI;
|
||||
}());
|
||||
export { ChatGPTAPI };
|
||||
|
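A sketch of using the bundled ChatGPTAPI with streaming enabled via onProgress (model name, key source and module path are placeholders; result.delta carries the newest chunk and result.text the accumulated reply):

import { ChatGPTAPI } from './chatgpt-api.js' // path assumed

const api = new ChatGPTAPI({
  apiKey: process.env.OPENAI_API_KEY,
  completionParams: { model: 'gpt-4o-mini' }
})

const reply = await api.sendMessage('Explain SSE in one sentence.', {
  onProgress: partial => process.stdout.write(partial.delta || '')
})
console.log('\n---\n' + reply.text)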
|
@ -1,575 +0,0 @@
|
|||
import Keyv from 'keyv'
|
||||
import pTimeout from 'p-timeout'
|
||||
import QuickLRU from 'quick-lru'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
|
||||
import * as tokenizer from './tokenizer'
|
||||
import * as types from './types'
|
||||
import globalFetch from 'node-fetch'
|
||||
import { fetchSSE } from './fetch-sse'
|
||||
import { openai, Role } from './types'
|
||||
|
||||
const CHATGPT_MODEL = 'gpt-4o-mini'
|
||||
|
||||
const USER_LABEL_DEFAULT = 'User'
|
||||
const ASSISTANT_LABEL_DEFAULT = 'ChatGPT'
|
||||
|
||||
export class ChatGPTAPI {
|
||||
protected _apiKey: string
|
||||
protected _apiBaseUrl: string
|
||||
protected _apiOrg?: string
|
||||
protected _debug: boolean
|
||||
|
||||
protected _systemMessage: string
|
||||
protected _completionParams: Omit<
|
||||
types.openai.CreateChatCompletionRequest,
|
||||
'messages' | 'n'
|
||||
>
|
||||
protected _maxModelTokens: number
|
||||
protected _maxResponseTokens: number
|
||||
protected _fetch: types.FetchFn
|
||||
|
||||
protected _getMessageById: types.GetMessageByIdFunction
|
||||
protected _upsertMessage: types.UpsertMessageFunction
|
||||
|
||||
protected _messageStore: Keyv<types.ChatMessage>
|
||||
|
||||
/**
|
||||
* Creates a new client wrapper around OpenAI's chat completion API, mimicking the official ChatGPT webapp's functionality as closely as possible.
|
||||
*
|
||||
* @param apiKey - OpenAI API key (required).
|
||||
* @param apiOrg - Optional OpenAI API organization.
|
||||
* @param apiBaseUrl - Optional override for the OpenAI API base URL.
|
||||
* @param debug - Optionally enables logging debugging info to stdout.
|
||||
* @param completionParams - Param overrides to send to the [OpenAI chat completion API](https://platform.openai.com/docs/api-reference/chat/create). Options like `temperature` and `presence_penalty` can be tweaked to change the personality of the assistant.
|
||||
* @param maxModelTokens - Optional override for the maximum number of tokens allowed by the model's context. Defaults to 4000.
|
||||
* @param maxResponseTokens - Optional override for the maximum number of tokens allowed for the model's response. Defaults to 8192.
|
||||
* @param messageStore - Optional [Keyv](https://github.com/jaredwray/keyv) store to persist chat messages to. If not provided, messages will be lost when the process exits.
|
||||
* @param getMessageById - Optional function to retrieve a message by its ID. If not provided, the default implementation will be used (using an in-memory `messageStore`).
|
||||
* @param upsertMessage - Optional function to insert or update a message. If not provided, the default implementation will be used (using an in-memory `messageStore`).
|
||||
* @param fetch - Optional override for the `fetch` implementation to use. Defaults to the global `fetch` function.
|
||||
*/
|
||||
constructor(opts: types.ChatGPTAPIOptions) {
|
||||
const {
|
||||
apiKey,
|
||||
apiOrg,
|
||||
apiBaseUrl = 'https://api.openai.com/v1',
|
||||
debug = false,
|
||||
messageStore,
|
||||
completionParams,
|
||||
systemMessage,
|
||||
maxModelTokens = 4000,
|
||||
maxResponseTokens = 8192,
|
||||
getMessageById,
|
||||
upsertMessage,
|
||||
fetch = globalFetch
|
||||
} = opts
|
||||
|
||||
this._apiKey = apiKey
|
||||
this._apiOrg = apiOrg
|
||||
this._apiBaseUrl = apiBaseUrl
|
||||
this._debug = !!debug
|
||||
this._fetch = fetch
|
||||
|
||||
this._completionParams = {
|
||||
model: CHATGPT_MODEL,
|
||||
temperature: 0.8,
|
||||
top_p: 1.0,
|
||||
presence_penalty: 1.0,
|
||||
...completionParams
|
||||
}
|
||||
|
||||
this._systemMessage = systemMessage
|
||||
|
||||
if (this._systemMessage === undefined) {
|
||||
const currentDate = new Date().toISOString().split('T')[0]
|
||||
this._systemMessage = `You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible.\nKnowledge cutoff: 2021-09-01\nCurrent date: ${currentDate}`
|
||||
}
|
||||
|
||||
this._maxModelTokens = maxModelTokens
|
||||
this._maxResponseTokens = maxResponseTokens
|
||||
|
||||
this._getMessageById = getMessageById ?? this._defaultGetMessageById
|
||||
this._upsertMessage = upsertMessage ?? this._defaultUpsertMessage
|
||||
|
||||
if (messageStore) {
|
||||
this._messageStore = messageStore
|
||||
} else {
|
||||
this._messageStore = new Keyv<types.ChatMessage, any>({
|
||||
store: new QuickLRU<string, types.ChatMessage>({ maxSize: 10000 })
|
||||
})
|
||||
}
|
||||
|
||||
if (!this._apiKey) {
|
||||
throw new Error('OpenAI missing required apiKey')
|
||||
}
|
||||
|
||||
if (!this._fetch) {
|
||||
throw new Error('Invalid environment; fetch is not defined')
|
||||
}
|
||||
|
||||
if (typeof this._fetch !== 'function') {
|
||||
throw new Error('Invalid "fetch" is not a function')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends a message to the OpenAI chat completions endpoint, waits for the response
|
||||
* to resolve, and returns the response.
|
||||
*
|
||||
* If you want your response to have historical context, you must provide a valid `parentMessageId`.
|
||||
*
|
||||
* If you want to receive a stream of partial responses, use `opts.onProgress`.
|
||||
*
|
||||
* Set `debug: true` in the `ChatGPTAPI` constructor to log more info on the full prompt sent to the OpenAI chat completions API. You can override the `systemMessage` in `opts` to customize the assistant's instructions.
|
||||
*
|
||||
* @param message - The prompt message to send
|
||||
* @param opts.parentMessageId - Optional ID of the previous message in the conversation (defaults to `undefined`)
|
||||
* @param opts.conversationId - Optional ID of the conversation (defaults to `undefined`)
|
||||
* @param opts.messageId - Optional ID of the message to send (defaults to a random UUID)
|
||||
* @param opts.systemMessage - Optional override for the chat "system message" which acts as instructions to the model (defaults to the ChatGPT system message)
|
||||
* @param opts.timeoutMs - Optional timeout in milliseconds (defaults to no timeout)
|
||||
* @param opts.onProgress - Optional callback which will be invoked every time the partial response is updated
|
||||
* @param opts.abortSignal - Optional callback used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
|
||||
* @param completionParams - Optional overrides to send to the [OpenAI chat completion API](https://platform.openai.com/docs/api-reference/chat/create). Options like `temperature` and `presence_penalty` can be tweaked to change the personality of the assistant.
|
||||
*
|
||||
* @returns The response from ChatGPT
|
||||
*/
|
||||
async sendMessage(
|
||||
text: string,
|
||||
opts: types.SendMessageOptions = {},
|
||||
role: Role = 'user',
|
||||
): Promise<types.ChatMessage> {
|
||||
const {
|
||||
parentMessageId,
|
||||
messageId = uuidv4(),
|
||||
timeoutMs,
|
||||
onProgress,
|
||||
stream = !!onProgress,
|
||||
completionParams,
|
||||
conversationId
|
||||
} = opts
|
||||
|
||||
let { abortSignal } = opts
|
||||
|
||||
let abortController: AbortController | null = null
|
||||
if (timeoutMs && !abortSignal) {
|
||||
abortController = new AbortController()
|
||||
abortSignal = abortController.signal
|
||||
}
|
||||
|
||||
const message: types.ChatMessage = {
|
||||
role,
|
||||
id: messageId,
|
||||
conversationId,
|
||||
parentMessageId,
|
||||
text,
|
||||
name: opts.name
|
||||
}
|
||||
|
||||
const latestQuestion = message
|
||||
|
||||
const { messages, maxTokens, numTokens } = await this._buildMessages(
|
||||
text,
|
||||
role,
|
||||
opts,
|
||||
completionParams
|
||||
)
|
||||
console.log(`maxTokens: ${maxTokens}, numTokens: ${numTokens}`)
|
||||
const result: types.ChatMessage & { conversation: openai.ChatCompletionRequestMessage[] } = {
|
||||
role: 'assistant',
|
||||
id: uuidv4(),
|
||||
conversationId,
|
||||
parentMessageId: messageId,
|
||||
text: '',
|
||||
thinking_text: '',
|
||||
functionCall: undefined,
|
||||
toolCalls: undefined,
|
||||
conversation: []
|
||||
}
|
||||
|
||||
const responseP = new Promise<types.ChatMessage & { conversation: openai.ChatCompletionRequestMessage[] }>(
|
||||
async (resolve, reject) => {
|
||||
const url = `${this._apiBaseUrl}/chat/completions`
|
||||
const headers = {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${this._apiKey}`
|
||||
}
|
||||
const body = {
|
||||
max_tokens: maxTokens,
|
||||
...this._completionParams,
|
||||
...completionParams,
|
||||
messages,
|
||||
stream
|
||||
}
|
||||
if (this._debug) {
|
||||
console.log(JSON.stringify(body))
|
||||
}
|
||||
// Support multiple organizations
|
||||
// See https://platform.openai.com/docs/api-reference/authentication
|
||||
if (this._apiOrg) {
|
||||
headers['OpenAI-Organization'] = this._apiOrg
|
||||
}
|
||||
|
||||
if (this._debug) {
|
||||
console.log(`sendMessage (${numTokens} tokens)`, body)
|
||||
}
|
||||
|
||||
if (stream) {
|
||||
fetchSSE(
|
||||
url,
|
||||
{
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: JSON.stringify(body),
|
||||
signal: abortSignal,
|
||||
onMessage: (data: string) => {
|
||||
if (data === '[DONE]') {
|
||||
result.text = result.text.trim()
|
||||
result.conversation = messages
|
||||
return resolve(result)
|
||||
}
|
||||
|
||||
try {
|
||||
const response: types.openai.CreateChatCompletionDeltaResponse =
|
||||
JSON.parse(data)
|
||||
|
||||
if (response.id) {
|
||||
result.id = response.id
|
||||
}
|
||||
|
||||
if (response.choices?.length) {
|
||||
const delta = response.choices[0].delta
|
||||
if (delta.function_call) {
|
||||
if (delta.function_call.name) {
|
||||
result.functionCall = {
|
||||
name: delta.function_call.name,
|
||||
arguments: delta.function_call.arguments
|
||||
}
|
||||
} else {
|
||||
result.functionCall.arguments = (result.functionCall.arguments || '') + delta.function_call.arguments
|
||||
}
|
||||
} else if (delta.tool_calls) {
|
||||
let fc = delta.tool_calls[0].function
|
||||
if (fc.name) {
|
||||
result.functionCall = {
|
||||
name: fc.name,
|
||||
arguments: fc.arguments
|
||||
}
|
||||
} else {
|
||||
result.functionCall.arguments = (result.functionCall.arguments || '') + fc.arguments
|
||||
}
|
||||
} else {
|
||||
result.delta = delta.content
|
||||
if (delta?.content) result.text += delta.content
|
||||
if (delta?.reasoning_content) result.thinking_text += delta.reasoning_content
|
||||
}
|
||||
if (delta.role) {
|
||||
result.role = delta.role
|
||||
}
|
||||
result.detail = response
|
||||
onProgress?.(result)
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn('OpenAI stream SSE event unexpected error', err)
|
||||
return reject(err)
|
||||
}
|
||||
}
|
||||
},
|
||||
this._fetch
|
||||
).catch(reject)
|
||||
} else {
|
||||
try {
|
||||
const res = await this._fetch(url, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: JSON.stringify(body),
|
||||
signal: abortSignal
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
const reason = await res.text()
|
||||
const msg = `OpenAI error ${
|
||||
res.status || res.statusText
|
||||
}: ${reason}`
|
||||
const error = new types.ChatGPTError(msg)
|
||||
error.statusCode = res.status
|
||||
error.statusText = res.statusText
|
||||
return reject(error)
|
||||
}
|
||||
|
||||
const response: types.openai.CreateChatCompletionResponse =
|
||||
(await res.json()) as types.openai.CreateChatCompletionResponse
|
||||
if (this._debug) {
|
||||
console.log(response)
|
||||
}
|
||||
|
||||
if (response?.id) {
|
||||
result.id = response.id
|
||||
}
|
||||
|
||||
if (response?.choices?.length) {
|
||||
const message = response.choices[0].message
|
||||
if (message.content) {
|
||||
result.text = message.content
|
||||
} else if (message.function_call) {
|
||||
result.functionCall = message.function_call
|
||||
} else if (message.tool_calls) {
|
||||
result.functionCall = message.tool_calls.map(tool => tool.function)[0]
|
||||
}
|
||||
result.thinking_text = message.reasoning_content
|
||||
if (message.role) {
|
||||
result.role = message.role
|
||||
}
|
||||
} else {
|
||||
const res = response as any
|
||||
console.error(res)
|
||||
return reject(
|
||||
new Error(
|
||||
`OpenAI error: ${
|
||||
res?.detail?.message || res?.detail || 'unknown'
|
||||
}`
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
result.detail = response
|
||||
result.conversation = messages
|
||||
return resolve(result)
|
||||
} catch (err) {
|
||||
return reject(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
).then(async (message) => {
|
||||
if (message.detail && !message.detail.usage) {
|
||||
try {
|
||||
const promptTokens = numTokens
|
||||
const completionTokens = await this._getTokenCount(message.text)
|
||||
message.detail.usage = {
|
||||
prompt_tokens: promptTokens,
|
||||
completion_tokens: completionTokens,
|
||||
total_tokens: promptTokens + completionTokens,
|
||||
estimated: true
|
||||
}
|
||||
} catch (err) {
|
||||
// TODO: this should really never happen, but if it does,
|
||||
// we should notify the user gracefully
|
||||
}
|
||||
}
|
||||
|
||||
return Promise.all([
|
||||
this._upsertMessage(latestQuestion),
|
||||
this._upsertMessage(message)
|
||||
]).then(() => message)
|
||||
})
|
||||
|
||||
if (timeoutMs) {
|
||||
if (abortController) {
|
||||
// This will be called when a timeout occurs in order for us to forcibly
|
||||
// ensure that the underlying HTTP request is aborted.
|
||||
;(responseP as any).cancel = () => {
|
||||
abortController.abort()
|
||||
}
|
||||
}
|
||||
|
||||
return pTimeout(responseP, {
|
||||
milliseconds: timeoutMs,
|
||||
message: 'OpenAI timed out waiting for response'
|
||||
})
|
||||
} else {
|
||||
return responseP
|
||||
}
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
get apiKey(): string {
|
||||
return this._apiKey
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
set apiKey(apiKey: string) {
|
||||
this._apiKey = apiKey
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
get apiOrg(): string {
|
||||
return this._apiOrg
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
set apiOrg(apiOrg: string) {
|
||||
this._apiOrg = apiOrg
|
||||
}
|
||||
|
||||
protected async _buildMessages(text: string, role: Role, opts: types.SendMessageOptions, completionParams: Partial<
|
||||
Omit<openai.CreateChatCompletionRequest, 'messages' | 'n' | 'stream'>
|
||||
>) {
|
||||
const { systemMessage = this._systemMessage } = opts
|
||||
let { parentMessageId } = opts
|
||||
|
||||
const userLabel = USER_LABEL_DEFAULT
|
||||
const assistantLabel = ASSISTANT_LABEL_DEFAULT
|
||||
|
||||
const maxNumTokens = this._maxModelTokens - this._maxResponseTokens
|
||||
let messages: types.openai.ChatCompletionRequestMessage[] = []
|
||||
|
||||
if (systemMessage) {
|
||||
messages.push({
|
||||
role: 'system',
|
||||
content: systemMessage
|
||||
})
|
||||
}
|
||||
|
||||
const systemMessageOffset = messages.length
|
||||
let nextMessages = text
|
||||
? messages.concat([
|
||||
{
|
||||
role,
|
||||
content: text,
|
||||
name: opts.name
|
||||
}
|
||||
])
|
||||
: messages
|
||||
|
||||
let functionToken = 0
|
||||
|
||||
let numTokens = functionToken
|
||||
// deprecated function call token calculation due to low efficiency
|
||||
// if (completionParams.functions) {
|
||||
// for (const func of completionParams.functions) {
|
||||
// functionToken += await this._getTokenCount(func?.name)
|
||||
// functionToken += await this._getTokenCount(func?.description)
|
||||
// if (func?.parameters?.properties) {
|
||||
// for (let key of Object.keys(func.parameters.properties)) {
|
||||
// functionToken += await this._getTokenCount(key)
|
||||
// let property = func.parameters.properties[key]
|
||||
// for (let field of Object.keys(property)) {
|
||||
// switch (field) {
|
||||
// case 'type': {
|
||||
// functionToken += 2
|
||||
// functionToken += await this._getTokenCount(property?.type)
|
||||
// break
|
||||
// }
|
||||
// case 'description': {
|
||||
// functionToken += 2
|
||||
// functionToken += await this._getTokenCount(property?.description)
|
||||
// break
|
||||
// }
|
||||
// case 'enum': {
|
||||
// functionToken -= 3
|
||||
// for (let enumElement of property?.enum) {
|
||||
// functionToken += 3
|
||||
// functionToken += await this._getTokenCount(enumElement)
|
||||
// }
|
||||
// break
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// if (func?.parameters?.required) {
|
||||
// for (let string of func.parameters.required) {
|
||||
// functionToken += 2
|
||||
// functionToken += await this._getTokenCount(string)
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
do {
|
||||
const prompt = nextMessages
|
||||
.reduce((prompt, message) => {
|
||||
switch (message.role) {
|
||||
case 'system':
|
||||
return prompt.concat([`Instructions:\n${message.content}`])
|
||||
case 'user':
|
||||
return prompt.concat([`${userLabel}:\n${message.content}`])
|
||||
case 'function':
|
||||
// function results are skipped in this plain-text token estimate
|
||||
return prompt
|
||||
case 'assistant':
|
||||
return prompt
|
||||
default:
|
||||
return message.content ? prompt.concat([`${assistantLabel}:\n${message.content}`]) : prompt
|
||||
}
|
||||
}, [] as string[])
|
||||
.join('\n\n')
|
||||
|
||||
let nextNumTokensEstimate = await this._getTokenCount(prompt)
|
||||
|
||||
for (const m1 of nextMessages
|
||||
.filter(m => m.function_call)) {
|
||||
nextNumTokensEstimate += await this._getTokenCount(JSON.stringify(m1.function_call) || '')
|
||||
}
|
||||
|
||||
const isValidPrompt = nextNumTokensEstimate + functionToken <= maxNumTokens
|
||||
|
||||
if (prompt && !isValidPrompt) {
|
||||
break
|
||||
}
|
||||
messages = nextMessages
|
||||
numTokens = nextNumTokensEstimate + functionToken
|
||||
|
||||
if (!isValidPrompt) {
|
||||
break
|
||||
}
|
||||
|
||||
if (!parentMessageId) {
|
||||
break
|
||||
}
|
||||
|
||||
const parentMessage = await this._getMessageById(parentMessageId)
|
||||
if (!parentMessage) {
|
||||
break
|
||||
}
|
||||
|
||||
const parentMessageRole = parentMessage.role || 'user'
|
||||
|
||||
nextMessages = nextMessages.slice(0, systemMessageOffset).concat([
|
||||
{
|
||||
role: parentMessageRole,
|
||||
content: parentMessage.text,
|
||||
name: parentMessage.name,
|
||||
function_call: parentMessage.functionCall ? parentMessage.functionCall : undefined,
|
||||
// tool_calls: parentMessage.toolCalls ? parentMessage.toolCalls : undefined
|
||||
},
|
||||
...nextMessages.slice(systemMessageOffset)
|
||||
])
|
||||
|
||||
parentMessageId = parentMessage.parentMessageId
|
||||
} while (true)
|
||||
|
||||
// Use up to _maxModelTokens tokens (prompt + response), but try to leave
|
||||
// _maxResponseTokens tokens for the response.
|
||||
const maxTokens = Math.max(
|
||||
1,
|
||||
Math.min(this._maxModelTokens - numTokens, this._maxResponseTokens)
|
||||
)
|
||||
|
||||
return { messages, maxTokens, numTokens }
|
||||
}
|
||||
|
||||
protected async _getTokenCount(text: string) {
|
||||
if (!text) {
|
||||
return 0
|
||||
}
|
||||
// TODO: use a better fix in the tokenizer
|
||||
text = text.replace(/<\|endoftext\|>/g, '')
|
||||
|
||||
return tokenizer.encode(text).length
|
||||
}
|
||||
|
||||
protected async _defaultGetMessageById(
|
||||
id: string
|
||||
): Promise<types.ChatMessage> {
|
||||
const res = await this._messageStore.get(id)
|
||||
return res
|
||||
}
|
||||
|
||||
protected async _defaultUpsertMessage(
|
||||
message: types.ChatMessage
|
||||
): Promise<void> {
|
||||
await this._messageStore.set(message.id, message)
|
||||
}
|
||||
}
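
// Usage sketch (added for illustration, not part of the original file). It
// shows how the JSDoc above intends the class to be driven: `parentMessageId`
// links turns into one conversation and `onProgress` receives streamed
// partial responses. The OPENAI_API_KEY environment variable is an assumption
// of this sketch.
async function exampleChat () {
  const api = new ChatGPTAPI({ apiKey: process.env.OPENAI_API_KEY ?? '' })
  const first = await api.sendMessage('What is a closure in JavaScript?')
  const followUp = await api.sendMessage('Show a short example.', {
    parentMessageId: first.id, // ties this turn to the previous answer
    onProgress: partial => {
      // called on every streamed delta; partial.text grows incrementally
      process.stdout.write(partial.delta ?? '')
    },
    timeoutMs: 2 * 60 * 1000 // abort the underlying fetch after two minutes
  })
  return followUp.text
}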
|
||||
|
|
@ -1,170 +0,0 @@
|
|||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (g && (g = 0, op[0] && (_ = 0)), _) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
var __rest = (this && this.__rest) || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var m = o[Symbol.asyncIterator], i;
|
||||
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
||||
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
import { createParser } from 'eventsource-parser';
|
||||
import * as types from './types.js';
|
||||
import fetch from 'node-fetch';
|
||||
import { streamAsyncIterable } from './stream-async-iterable.js';
|
||||
export function fetchSSE(url, options, fetchFn) {
|
||||
var _a, e_1, _b, _c;
|
||||
if (fetchFn === void 0) { fetchFn = fetch; }
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
var onMessage, onError, fetchOptions, res, reason, err_1, msg, error, parser, feed, body_1, _d, _e, _f, chunk, str, e_1_1;
|
||||
return __generator(this, function (_g) {
|
||||
switch (_g.label) {
|
||||
case 0:
|
||||
onMessage = options.onMessage, onError = options.onError, fetchOptions = __rest(options, ["onMessage", "onError"]);
|
||||
return [4 /*yield*/, fetchFn(url, fetchOptions)];
|
||||
case 1:
|
||||
res = _g.sent();
|
||||
if (!!res.ok) return [3 /*break*/, 6];
|
||||
reason = void 0;
|
||||
_g.label = 2;
|
||||
case 2:
|
||||
_g.trys.push([2, 4, , 5]);
|
||||
return [4 /*yield*/, res.text()];
|
||||
case 3:
|
||||
reason = _g.sent();
|
||||
return [3 /*break*/, 5];
|
||||
case 4:
|
||||
err_1 = _g.sent();
|
||||
reason = res.statusText;
|
||||
return [3 /*break*/, 5];
|
||||
case 5:
|
||||
msg = "ChatGPT error ".concat(res.status, ": ").concat(reason);
|
||||
error = new types.ChatGPTError(msg, { cause: res });
|
||||
error.statusCode = res.status;
|
||||
error.statusText = res.statusText;
|
||||
throw error;
|
||||
case 6:
|
||||
parser = createParser(function (event) {
|
||||
if (event.type === 'event') {
|
||||
onMessage(event.data);
|
||||
}
|
||||
});
|
||||
feed = function (chunk) {
|
||||
var _a;
|
||||
var response = null;
|
||||
try {
|
||||
response = JSON.parse(chunk);
|
||||
}
|
||||
catch (_b) {
|
||||
// ignore
|
||||
}
|
||||
if (((_a = response === null || response === void 0 ? void 0 : response.detail) === null || _a === void 0 ? void 0 : _a.type) === 'invalid_request_error') {
|
||||
var msg = "ChatGPT error ".concat(response.detail.message, ": ").concat(response.detail.code, " (").concat(response.detail.type, ")");
|
||||
var error = new types.ChatGPTError(msg, { cause: response });
|
||||
error.statusCode = response.detail.code;
|
||||
error.statusText = response.detail.message;
|
||||
if (onError) {
|
||||
onError(error);
|
||||
}
|
||||
else {
|
||||
console.error(error);
|
||||
}
|
||||
// don't feed to the event parser
|
||||
return;
|
||||
}
|
||||
parser.feed(chunk);
|
||||
};
|
||||
if (!!res.body.getReader) return [3 /*break*/, 7];
|
||||
body_1 = res.body;
|
||||
if (!body_1.on || !body_1.read) {
|
||||
throw new types.ChatGPTError('unsupported "fetch" implementation');
|
||||
}
|
||||
body_1.on('readable', function () {
|
||||
var chunk;
|
||||
while (null !== (chunk = body_1.read())) {
|
||||
feed(chunk.toString());
|
||||
}
|
||||
});
|
||||
return [3 /*break*/, 18];
|
||||
case 7:
|
||||
_g.trys.push([7, 12, 13, 18]);
|
||||
_d = true, _e = __asyncValues(streamAsyncIterable(res.body));
|
||||
_g.label = 8;
|
||||
case 8: return [4 /*yield*/, _e.next()];
|
||||
case 9:
|
||||
if (!(_f = _g.sent(), _a = _f.done, !_a)) return [3 /*break*/, 11];
|
||||
_c = _f.value;
|
||||
_d = false;
|
||||
chunk = _c;
|
||||
str = new TextDecoder().decode(chunk);
|
||||
feed(str);
|
||||
_g.label = 10;
|
||||
case 10:
|
||||
_d = true;
|
||||
return [3 /*break*/, 8];
|
||||
case 11: return [3 /*break*/, 18];
|
||||
case 12:
|
||||
e_1_1 = _g.sent();
|
||||
e_1 = { error: e_1_1 };
|
||||
return [3 /*break*/, 18];
|
||||
case 13:
|
||||
_g.trys.push([13, , 16, 17]);
|
||||
if (!(!_d && !_a && (_b = _e.return))) return [3 /*break*/, 15];
|
||||
return [4 /*yield*/, _b.call(_e)];
|
||||
case 14:
|
||||
_g.sent();
|
||||
_g.label = 15;
|
||||
case 15: return [3 /*break*/, 17];
|
||||
case 16:
|
||||
if (e_1) throw e_1.error;
|
||||
return [7 /*endfinally*/];
|
||||
case 17: return [7 /*endfinally*/];
|
||||
case 18: return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
|
@ -1,89 +0,0 @@
|
|||
import { createParser } from 'eventsource-parser'
|
||||
|
||||
import * as types from './types'
|
||||
import nodefetch from 'node-fetch'
|
||||
import { streamAsyncIterable } from './stream-async-iterable'
|
||||
|
||||
export async function fetchSSE(
|
||||
url: string,
|
||||
options: Parameters<typeof fetch>[1] & {
|
||||
onMessage: (data: string) => void
|
||||
onError?: (error: any) => void
|
||||
},
|
||||
fetch: types.FetchFn = nodefetch
|
||||
) {
|
||||
const { onMessage, onError, ...fetchOptions } = options
|
||||
const res = await fetch(url, fetchOptions)
|
||||
if (!res.ok) {
|
||||
let reason: string
|
||||
|
||||
try {
|
||||
reason = await res.text()
|
||||
} catch (err) {
|
||||
reason = res.statusText
|
||||
}
|
||||
|
||||
const msg = `ChatGPT error ${res.status}: ${reason}`
|
||||
const error = new types.ChatGPTError(msg, { cause: res })
|
||||
error.statusCode = res.status
|
||||
error.statusText = res.statusText
|
||||
throw error
|
||||
}
|
||||
|
||||
const parser = createParser((event) => {
|
||||
if (event.type === 'event') {
|
||||
onMessage(event.data)
|
||||
}
|
||||
})
|
||||
|
||||
// handle special response errors
|
||||
const feed = (chunk: string) => {
|
||||
let response = null
|
||||
|
||||
try {
|
||||
response = JSON.parse(chunk)
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
|
||||
if (response?.detail?.type === 'invalid_request_error') {
|
||||
const msg = `ChatGPT error ${response.detail.message}: ${response.detail.code} (${response.detail.type})`
|
||||
const error = new types.ChatGPTError(msg, { cause: response })
|
||||
error.statusCode = response.detail.code
|
||||
error.statusText = response.detail.message
|
||||
|
||||
if (onError) {
|
||||
onError(error)
|
||||
} else {
|
||||
console.error(error)
|
||||
}
|
||||
|
||||
// don't feed to the event parser
|
||||
return
|
||||
}
|
||||
|
||||
parser.feed(chunk)
|
||||
}
|
||||
|
||||
if (!res.body.getReader) {
|
||||
// Vercel polyfills `fetch` with `node-fetch`, which doesn't conform to
|
||||
// web standards, so this is a workaround...
|
||||
const body: NodeJS.ReadableStream = res.body as any
|
||||
|
||||
if (!body.on || !body.read) {
|
||||
throw new types.ChatGPTError('unsupported "fetch" implementation')
|
||||
}
|
||||
|
||||
body.on('readable', () => {
|
||||
let chunk: string | Buffer
|
||||
while (null !== (chunk = body.read())) {
|
||||
feed(chunk.toString())
|
||||
}
|
||||
})
|
||||
} else {
|
||||
for await (const chunk of streamAsyncIterable(res.body)) {
|
||||
const str = new TextDecoder().decode(chunk)
|
||||
feed(str)
|
||||
}
|
||||
}
|
||||
}
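
// Usage sketch (illustrative, not part of the original file): how ChatGPTAPI
// drives fetchSSE for a streaming chat completion. The endpoint URL and the
// model name are assumptions of this sketch; the `[DONE]` sentinel and the
// delta shape follow the OpenAI SSE format handled above.
async function streamCompletion (apiKey: string, prompt: string) {
  let answer = ''
  await fetchSSE('https://api.openai.com/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${apiKey}`
    },
    body: JSON.stringify({
      model: 'gpt-4o-mini',
      stream: true,
      messages: [{ role: 'user', content: prompt }]
    }),
    onMessage: (data) => {
      if (data === '[DONE]') return // stream finished
      const chunk = JSON.parse(data)
      answer += chunk.choices?.[0]?.delta?.content ?? ''
    }
  })
  return answer
}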
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
export async function * streamAsyncIterable (stream) {
  const reader = stream.getReader()
  try {
    while (true) {
      const { done, value } = await reader.read()
      if (done) {
        return
      }
      yield value
    }
  } finally {
    reader.releaseLock()
  }
}
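
// Usage sketch (illustrative): draining a WHATWG ReadableStream, which is how
// fetch-sse.js consumes res.body whenever getReader() is available.
async function readAll (stream) {
  let out = ''
  for await (const chunk of streamAsyncIterable(stream)) {
    out += new TextDecoder().decode(chunk)
  }
  return out
}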
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
import { getEncoding } from 'js-tiktoken';
// TODO: make this configurable
var tokenizer = getEncoding('cl100k_base');
export function encode(input) {
    return new Uint32Array(tokenizer.encode(input));
}
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
import { getEncoding } from 'js-tiktoken'

// TODO: make this configurable
const tokenizer = getEncoding('cl100k_base')

export function encode(input: string): Uint32Array {
  return new Uint32Array(tokenizer.encode(input))
}
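
// Illustrative check (not part of the original file): encode() yields one id
// per BPE token, so ChatGPTAPI._getTokenCount() simply takes the length of it.
const sampleTokenCount = encode('Hello, world!').length
console.log(`'Hello, world!' encodes to ${sampleTokenCount} cl100k_base tokens`)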
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
{
  "compilerOptions": {
    "module": "es2020",
    "moduleResolution": "node",
    "esModuleInterop": true
  }
}
|
||||
|
|
@ -1,26 +0,0 @@
|
|||
var __extends = (this && this.__extends) || (function () {
|
||||
var extendStatics = function (d, b) {
|
||||
extendStatics = Object.setPrototypeOf ||
|
||||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
||||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
|
||||
return extendStatics(d, b);
|
||||
};
|
||||
return function (d, b) {
|
||||
if (typeof b !== "function" && b !== null)
|
||||
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
|
||||
extendStatics(d, b);
|
||||
function __() { this.constructor = d; }
|
||||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
||||
};
|
||||
})();
|
||||
var ChatGPTError = /** @class */ (function (_super) {
|
||||
__extends(ChatGPTError, _super);
|
||||
function ChatGPTError() {
|
||||
return _super !== null && _super.apply(this, arguments) || this;
|
||||
}
|
||||
return ChatGPTError;
|
||||
}(Error));
|
||||
export { ChatGPTError };
|
||||
export var openai;
|
||||
(function (openai) {
|
||||
})(openai || (openai = {}));
|
||||
|
|
@ -1,495 +0,0 @@
|
|||
// @ts-ignore
|
||||
import Keyv from 'keyv'
|
||||
|
||||
export type Role = 'user' | 'assistant' | 'system' | 'function'
|
||||
// @ts-ignore
|
||||
import fetch from 'node-fetch'
|
||||
export type FetchFn = typeof fetch
|
||||
|
||||
export type ChatGPTAPIOptions = {
|
||||
apiKey: string
|
||||
|
||||
/** @defaultValue `'https://api.openai.com'` **/
|
||||
apiBaseUrl?: string
|
||||
|
||||
apiOrg?: string
|
||||
|
||||
/** @defaultValue `false` **/
|
||||
debug?: boolean
|
||||
|
||||
completionParams?: Partial<
|
||||
Omit<openai.CreateChatCompletionRequest, 'messages' | 'n' | 'stream'>
|
||||
>
|
||||
|
||||
systemMessage?: string
|
||||
|
||||
/** @defaultValue `4096` **/
|
||||
maxModelTokens?: number
|
||||
|
||||
/** @defaultValue `1000` **/
|
||||
maxResponseTokens?: number
|
||||
|
||||
messageStore?: Keyv
|
||||
getMessageById?: GetMessageByIdFunction
|
||||
upsertMessage?: UpsertMessageFunction
|
||||
|
||||
fetch?: FetchFn
|
||||
}
|
||||
|
||||
export type SendMessageOptions = {
|
||||
/**
|
||||
* function role name
|
||||
*/
|
||||
name?: string
|
||||
parentMessageId?: string
|
||||
conversationId?: string
|
||||
messageId?: string
|
||||
stream?: boolean
|
||||
systemMessage?: string
|
||||
timeoutMs?: number
|
||||
onProgress?: (partialResponse: ChatMessage) => void
|
||||
abortSignal?: AbortSignal
|
||||
completionParams?: Partial<
|
||||
Omit<openai.CreateChatCompletionRequest, 'messages' | 'n' | 'stream'>
|
||||
>
|
||||
}
|
||||
|
||||
export type MessageActionType = 'next' | 'variant'
|
||||
|
||||
export type SendMessageBrowserOptions = {
|
||||
conversationId?: string
|
||||
parentMessageId?: string
|
||||
messageId?: string
|
||||
action?: MessageActionType
|
||||
timeoutMs?: number
|
||||
onProgress?: (partialResponse: ChatMessage) => void
|
||||
abortSignal?: AbortSignal
|
||||
}
|
||||
|
||||
export interface ChatMessage {
|
||||
id: string
|
||||
text: string
|
||||
thinking_text?: string
|
||||
role: Role
|
||||
name?: string
|
||||
delta?: string
|
||||
detail?:
|
||||
| openai.CreateChatCompletionResponse
|
||||
| CreateChatCompletionStreamResponse
|
||||
|
||||
// relevant for both ChatGPTAPI and ChatGPTUnofficialProxyAPI
|
||||
parentMessageId?: string
|
||||
|
||||
// only relevant for ChatGPTUnofficialProxyAPI (optional for ChatGPTAPI)
|
||||
conversationId?: string
|
||||
functionCall?: openai.FunctionCall,
|
||||
toolCalls?: openai.ToolCall[],
|
||||
}
|
||||
|
||||
export class ChatGPTError extends Error {
|
||||
statusCode?: number
|
||||
statusText?: string
|
||||
isFinal?: boolean
|
||||
accountId?: string
|
||||
}
|
||||
|
||||
/** Returns a chat message from a store by its ID (or null if not found). */
|
||||
export type GetMessageByIdFunction = (id: string) => Promise<ChatMessage>
|
||||
|
||||
/** Upserts a chat message to a store. */
|
||||
export type UpsertMessageFunction = (message: ChatMessage) => Promise<void>
|
||||
|
||||
export interface CreateChatCompletionStreamResponse
|
||||
extends openai.CreateChatCompletionDeltaResponse {
|
||||
usage: CreateCompletionStreamResponseUsage
|
||||
}
|
||||
|
||||
export interface CreateCompletionStreamResponseUsage
|
||||
extends openai.CreateCompletionResponseUsage {
|
||||
estimated: true
|
||||
}
|
||||
|
||||
/**
|
||||
* https://chat.openai.com/backend-api/conversation
|
||||
*/
|
||||
export type ConversationJSONBody = {
|
||||
/**
|
||||
* The action to take
|
||||
*/
|
||||
action: string
|
||||
|
||||
/**
|
||||
* The ID of the conversation
|
||||
*/
|
||||
conversation_id?: string
|
||||
|
||||
/**
|
||||
* Prompts to provide
|
||||
*/
|
||||
messages: Prompt[]
|
||||
|
||||
/**
|
||||
* The model to use
|
||||
*/
|
||||
model: string
|
||||
|
||||
/**
|
||||
* The parent message ID
|
||||
*/
|
||||
parent_message_id: string
|
||||
}
|
||||
|
||||
export type Prompt = {
|
||||
/**
|
||||
* The content of the prompt
|
||||
*/
|
||||
content: PromptContent
|
||||
|
||||
/**
|
||||
* The ID of the prompt
|
||||
*/
|
||||
id: string
|
||||
|
||||
/**
|
||||
* The role played in the prompt
|
||||
*/
|
||||
role: Role
|
||||
}
|
||||
|
||||
export type ContentType = 'text'
|
||||
|
||||
export type PromptContent = {
|
||||
/**
|
||||
* The content type of the prompt
|
||||
*/
|
||||
content_type: ContentType
|
||||
|
||||
/**
|
||||
* The parts to the prompt
|
||||
*/
|
||||
parts: string[]
|
||||
}
|
||||
|
||||
export type ConversationResponseEvent = {
|
||||
message?: Message
|
||||
conversation_id?: string
|
||||
error?: string | null
|
||||
}
|
||||
|
||||
export type Message = {
|
||||
id: string
|
||||
content: MessageContent
|
||||
role: Role
|
||||
user: string | null
|
||||
create_time: string | null
|
||||
update_time: string | null
|
||||
end_turn: null
|
||||
weight: number
|
||||
recipient: string
|
||||
metadata: MessageMetadata
|
||||
}
|
||||
|
||||
export type MessageContent = {
|
||||
content_type: string
|
||||
parts: string[]
|
||||
}
|
||||
|
||||
export type MessageMetadata = any
|
||||
|
||||
export namespace openai {
|
||||
export interface CreateChatCompletionDeltaResponse {
|
||||
id: string
|
||||
object: 'chat.completion.chunk'
|
||||
created: number
|
||||
model: string
|
||||
choices: [
|
||||
{
|
||||
delta: {
|
||||
role: Role
|
||||
content?: string,
|
||||
reasoning_content?: string,
|
||||
function_call?: FunctionCall,
|
||||
tool_calls?: ToolCall[]
|
||||
}
|
||||
index: number
|
||||
finish_reason: string | null
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
* @interface ChatCompletionRequestMessage
|
||||
*/
|
||||
export interface ChatCompletionRequestMessage {
|
||||
/**
|
||||
* The role of the author of this message.
|
||||
* @type {string}
|
||||
* @memberof ChatCompletionRequestMessage
|
||||
*/
|
||||
role: ChatCompletionRequestMessageRoleEnum
|
||||
/**
|
||||
* The contents of the message
|
||||
* @type {string}
|
||||
* @memberof ChatCompletionRequestMessage
|
||||
*/
|
||||
content: string
|
||||
/**
|
||||
* The name of the user in a multi-user chat
|
||||
* @type {string}
|
||||
* @memberof ChatCompletionRequestMessage
|
||||
*/
|
||||
name?: string
|
||||
function_call?: FunctionCall
|
||||
tool_calls?: ToolCall[],
|
||||
// required todo
|
||||
// tool_choice?: 'none' | 'auto' | 'required'
|
||||
}
|
||||
|
||||
export interface FunctionCall {
|
||||
name: string
|
||||
arguments: string
|
||||
}
|
||||
|
||||
export interface ToolCall {
|
||||
id: string
|
||||
type: "function"
|
||||
function: FunctionCall
|
||||
}
|
||||
|
||||
export interface Tools {
|
||||
type: "function" | string,
|
||||
function: Function
|
||||
}
|
||||
|
||||
export declare const ChatCompletionRequestMessageRoleEnum: {
|
||||
readonly System: 'system'
|
||||
readonly User: 'user'
|
||||
readonly Assistant: 'assistant'
|
||||
readonly Function: 'function'
|
||||
}
|
||||
export declare type ChatCompletionRequestMessageRoleEnum =
|
||||
(typeof ChatCompletionRequestMessageRoleEnum)[keyof typeof ChatCompletionRequestMessageRoleEnum]
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
* @interface ChatCompletionResponseMessage
|
||||
*/
|
||||
export interface ChatCompletionResponseMessage {
|
||||
/**
|
||||
* The role of the author of this message.
|
||||
* @type {string}
|
||||
* @memberof ChatCompletionResponseMessage
|
||||
*/
|
||||
role: ChatCompletionResponseMessageRoleEnum
|
||||
/**
|
||||
* The contents of the message
|
||||
* @type {string}
|
||||
* @memberof ChatCompletionResponseMessage
|
||||
*/
|
||||
content: string
|
||||
reasoning_content: string
|
||||
function_call: FunctionCall,
|
||||
tool_calls: ToolCall[]
|
||||
}
|
||||
export declare const ChatCompletionResponseMessageRoleEnum: {
|
||||
readonly System: 'system'
|
||||
readonly User: 'user'
|
||||
readonly Assistant: 'assistant'
|
||||
}
|
||||
export declare type ChatCompletionResponseMessageRoleEnum =
|
||||
(typeof ChatCompletionResponseMessageRoleEnum)[keyof typeof ChatCompletionResponseMessageRoleEnum]
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
* @interface CreateChatCompletionRequest
|
||||
*/
|
||||
export interface CreateChatCompletionRequest {
|
||||
/**
|
||||
* ID of the chat model to use, e.g. `gpt-4o-mini`; any chat completion model exposed by the configured API endpoint can be used.
|
||||
* @type {string}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
model: string
|
||||
/**
|
||||
* The messages to generate chat completions for, in the [chat format](/docs/guides/chat/introduction).
|
||||
* @type {Array<ChatCompletionRequestMessage>}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
messages: Array<ChatCompletionRequestMessage>
|
||||
/**
|
||||
* What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or `top_p` but not both.
|
||||
* @type {number}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
temperature?: number | null
|
||||
/**
|
||||
* An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or `temperature` but not both.
|
||||
* @type {number}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
top_p?: number | null
|
||||
/**
|
||||
* How many chat completion choices to generate for each input message.
|
||||
* @type {number}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
n?: number | null
|
||||
/**
|
||||
* If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) as they become available, with the stream terminated by a `data: [DONE]` message.
|
||||
* @type {boolean}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
stream?: boolean | null
|
||||
/**
|
||||
*
|
||||
* @type {CreateChatCompletionRequestStop}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
stop?: CreateChatCompletionRequestStop
|
||||
/**
|
||||
* The maximum number of tokens allowed for the generated answer. By default, the number of tokens the model can return will be (4096 - prompt tokens).
|
||||
* @type {number}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
max_tokens?: number
|
||||
/**
|
||||
* Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model\'s likelihood to talk about new topics. [See more information about frequency and presence penalties.](/docs/api-reference/parameter-details)
|
||||
* @type {number}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
presence_penalty?: number | null
|
||||
/**
|
||||
* Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model\'s likelihood to repeat the same line verbatim. [See more information about frequency and presence penalties.](/docs/api-reference/parameter-details)
|
||||
* @type {number}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
frequency_penalty?: number | null
|
||||
/**
|
||||
* Modify the likelihood of specified tokens appearing in the completion. Accepts a json object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token.
|
||||
* @type {object}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
logit_bias?: object | null
|
||||
/**
|
||||
* A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. [Learn more](/docs/guides/safety-best-practices/end-user-ids).
|
||||
* @type {string}
|
||||
* @memberof CreateChatCompletionRequest
|
||||
*/
|
||||
user?: string
|
||||
|
||||
functions?: Function[]
|
||||
|
||||
tools?: Tools[]
|
||||
}
|
||||
export interface Function {
|
||||
name: string
|
||||
description: string
|
||||
parameters: FunctionParameters
|
||||
}
|
||||
export interface FunctionParameters {
|
||||
type: string
|
||||
properties: Record<string, Record<string, any>>
|
||||
required: string[]
|
||||
}
|
||||
/**
|
||||
* @type CreateChatCompletionRequestStop
|
||||
* Up to 4 sequences where the API will stop generating further tokens.
|
||||
* @export
|
||||
*/
|
||||
export declare type CreateChatCompletionRequestStop = Array<string> | string
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
* @interface CreateChatCompletionResponse
|
||||
*/
|
||||
export interface CreateChatCompletionResponse {
|
||||
/**
|
||||
*
|
||||
* @type {string}
|
||||
* @memberof CreateChatCompletionResponse
|
||||
*/
|
||||
id: string
|
||||
/**
|
||||
*
|
||||
* @type {string}
|
||||
* @memberof CreateChatCompletionResponse
|
||||
*/
|
||||
object: string
|
||||
/**
|
||||
*
|
||||
* @type {number}
|
||||
* @memberof CreateChatCompletionResponse
|
||||
*/
|
||||
created: number
|
||||
/**
|
||||
*
|
||||
* @type {string}
|
||||
* @memberof CreateChatCompletionResponse
|
||||
*/
|
||||
model: string
|
||||
/**
|
||||
*
|
||||
* @type {Array<CreateChatCompletionResponseChoicesInner>}
|
||||
* @memberof CreateChatCompletionResponse
|
||||
*/
|
||||
choices: Array<CreateChatCompletionResponseChoicesInner>
|
||||
/**
|
||||
*
|
||||
* @type {CreateCompletionResponseUsage}
|
||||
* @memberof CreateChatCompletionResponse
|
||||
*/
|
||||
usage?: CreateCompletionResponseUsage
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
* @interface CreateChatCompletionResponseChoicesInner
|
||||
*/
|
||||
export interface CreateChatCompletionResponseChoicesInner {
|
||||
/**
|
||||
*
|
||||
* @type {number}
|
||||
* @memberof CreateChatCompletionResponseChoicesInner
|
||||
*/
|
||||
index?: number
|
||||
/**
|
||||
*
|
||||
* @type {ChatCompletionResponseMessage}
|
||||
* @memberof CreateChatCompletionResponseChoicesInner
|
||||
*/
|
||||
message?: ChatCompletionResponseMessage
|
||||
/**
|
||||
*
|
||||
* @type {string}
|
||||
* @memberof CreateChatCompletionResponseChoicesInner
|
||||
*/
|
||||
finish_reason?: string
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @export
|
||||
* @interface CreateCompletionResponseUsage
|
||||
*/
|
||||
export interface CreateCompletionResponseUsage {
|
||||
/**
|
||||
*
|
||||
* @type {number}
|
||||
* @memberof CreateCompletionResponseUsage
|
||||
*/
|
||||
prompt_tokens: number
|
||||
/**
|
||||
*
|
||||
* @type {number}
|
||||
* @memberof CreateCompletionResponseUsage
|
||||
*/
|
||||
completion_tokens: number
|
||||
/**
|
||||
*
|
||||
* @type {number}
|
||||
* @memberof CreateCompletionResponseUsage
|
||||
*/
|
||||
total_tokens: number
|
||||
}
|
||||
}
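
// Illustrative value (not part of the original file): the minimal shape of an
// openai.CreateChatCompletionRequest as declared above, with one function
// exposed for function calling. The model name and the weather function are
// invented examples.
export const exampleChatCompletionRequest: openai.CreateChatCompletionRequest = {
  model: 'gpt-4o-mini',
  messages: [
    { role: 'system', content: 'You are a helpful assistant.' },
    { role: 'user', content: 'What is the weather in Tokyo?' }
  ],
  temperature: 0.8,
  stream: false,
  functions: [{
    name: 'get_weather',
    description: 'Look up the current weather for a city',
    parameters: {
      type: 'object',
      properties: { city: { type: 'string', description: 'city name' } },
      required: ['city']
    }
  }]
}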
|
||||
|
|
@ -1,72 +0,0 @@
|
|||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { fileURLToPath } from 'url'
|
||||
|
||||
export class AbstractPostProcessor {
|
||||
name = ''
|
||||
|
||||
/**
|
||||
* 类型
|
||||
* @type {'pre' | 'post'}
|
||||
*/
|
||||
type = 'post'
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {{
|
||||
* text: string,
|
||||
* thinking_text?: string
|
||||
* }} input
|
||||
* @returns {Promise<{
|
||||
* text: string,
|
||||
* thinking_text?: string
|
||||
* }>}
|
||||
*/
|
||||
async processInner (input) {}
|
||||
}
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url))
|
||||
|
||||
/**
|
||||
* collect
|
||||
* @param {'pre' | 'post' | undefined} type
|
||||
* @return {Promise<AbstractPostProcessor[]>}
|
||||
*/
|
||||
export async function collectProcessors (type) {
|
||||
const processors = []
|
||||
const directoryPath = __dirname // 当前目录
|
||||
|
||||
// 读取目录中的所有文件
|
||||
const files = fs.readdirSync(directoryPath)
|
||||
|
||||
// 遍历所有文件,筛选出.js文件
|
||||
for (const file of files) {
|
||||
if (file.endsWith('.js') && file !== 'BasicProcessor.js') { // 排除自己
|
||||
const fullPath = path.join(directoryPath, file)
|
||||
try {
|
||||
// 动态导入模块
|
||||
const module = await import(fullPath)
|
||||
|
||||
// 遍历模块的所有导出成员
|
||||
for (const key of Object.keys(module)) {
|
||||
const ExportedClass = module[key]
|
||||
|
||||
// 确保它是一个类,并且继承了 AbstractPostProcessor
|
||||
if (typeof ExportedClass === 'function' &&
|
||||
Object.getPrototypeOf(ExportedClass) !== null) {
|
||||
const parent = Object.getPrototypeOf(ExportedClass)
|
||||
if (parent.name === 'AbstractPostProcessor') {
|
||||
let instance = new ExportedClass()
|
||||
if (!type || instance.type === type) {
|
||||
processors.push(instance)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// console.error(`Error processing file ${file}:`, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return processors
|
||||
}
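
// Usage sketch (illustrative, not part of the original file). A processor that
// collectProcessors() can discover must live in its own .js file in this
// directory (BasicProcessor.js itself is excluded), extend
// AbstractPostProcessor and implement processInner(). The censor logic below
// is an invented example of such a file's contents.
export class ExampleCensorProcessor extends AbstractPostProcessor {
  constructor () {
    super()
    this.name = 'ExampleCensorProcessor'
    this.type = 'post'
  }

  async processInner (input) {
    // redact anything wrapped in <secret> tags before the reply is sent
    return {
      text: input.text.replace(/<secret>[\s\S]*?<\/secret>/g, '[redacted]'),
      thinking_text: input.thinking_text
    }
  }
}

// const processors = await collectProcessors('post')
// for (const p of processors) { reply = await p.processInner(reply) }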
|
||||
|
|
@ -1,54 +0,0 @@
|
|||
import { AbstractPostProcessor } from './BasicProcessor.js'
|
||||
|
||||
export class ReasonerProcessor extends AbstractPostProcessor {
|
||||
constructor () {
|
||||
super()
|
||||
this.name = 'ReasonerPostProcessor'
|
||||
this.type = 'post'
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {{
|
||||
* text: string,
|
||||
* thinking_text?: string
|
||||
* }} input
|
||||
* @returns {Promise<{
|
||||
* text: string,
|
||||
* thinking_text?: string
|
||||
* }>}
|
||||
*/
|
||||
async processInner (input) {
|
||||
logger.debug('Running into ReasonerPostProcessor')
|
||||
const { text, thinkingText } = extractThinkingTextAndText(input.text)
|
||||
return {
|
||||
text,
|
||||
thinking_text: (input.thinking_text ? input.thinking_text : '') + thinkingText
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* written by gpt-4o
|
||||
* @param str
|
||||
* @returns {{thinkingText: string, text: *}|{thinkingText: *, text: *}}
|
||||
*/
|
||||
const extractThinkingTextAndText = (str) => {
|
||||
// 使用正则表达式提取think标签内容
|
||||
const thinkRegex = /<think>(.*?)<\/think>/s
|
||||
const match = str.match(thinkRegex)
|
||||
|
||||
// 如果找到了<think>标签内容
|
||||
if (match) {
|
||||
// thinking_text就是<think>标签内的内容
|
||||
const thinkingText = match[1].trim()
|
||||
|
||||
// text就是</think>标签后的部分
|
||||
const text = str.slice(match.index + match[0].length).trim()
|
||||
|
||||
return { thinkingText, text }
|
||||
}
|
||||
|
||||
// 如果没有<think>标签内容,返回空或原始内容
|
||||
return { thinkingText: '', text: str.trim() }
|
||||
}
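
// Worked example (illustrative, not part of the original file): feeding a
// DeepSeek-style reply through the processor splits the <think> block out of
// the visible text. processInner() relies on the global `logger` provided by
// the Yunzai runtime.
async function reasonerDemo () {
  const demo = await new ReasonerProcessor().processInner({
    text: '<think>the user wants a one-liner</think>Use Array.prototype.flat().'
  })
  // demo.text          === 'Use Array.prototype.flat().'
  // demo.thinking_text === 'the user wants a one-liner'
  return demo
}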
|
||||
|
|
@ -1,67 +0,0 @@
|
|||
import _ from 'lodash'
|
||||
import fs from 'fs'
|
||||
import { mkdirs } from './common.js'
|
||||
export function readPrompts () {
|
||||
const _path = process.cwd()
|
||||
let prompts = []
|
||||
if (fs.existsSync(`${_path}/plugins/chatgpt-plugin/prompts`)) {
|
||||
if (fs.existsSync(`${_path}/plugins/chatgpt-plugin/prompts`)) {
|
||||
const files = fs.readdirSync(`${_path}/plugins/chatgpt-plugin/prompts`)
|
||||
const txtFiles = files.filter(file => file.endsWith('.txt'))
|
||||
txtFiles.forEach(txtFile => {
|
||||
let name = txtFile.replace(/\.txt$/, '') // strip only the extension; lodash trimEnd would also eat trailing 't'/'x' characters
|
||||
const content = fs.readFileSync(`${_path}/plugins/chatgpt-plugin/prompts/${txtFile}`, 'utf8')
|
||||
let example = []
|
||||
try {
|
||||
if (fs.existsSync(`${_path}/plugins/chatgpt-plugin/prompts/${name}_example.json`)) {
|
||||
example = fs.readFileSync(`${_path}/plugins/chatgpt-plugin/prompts/${name}_example.json`, 'utf8')
|
||||
example = JSON.parse(example)
|
||||
}
|
||||
} catch (err) {
|
||||
logger.debug(err)
|
||||
}
|
||||
prompts.push({
|
||||
name,
|
||||
content,
|
||||
example
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
return prompts
|
||||
}
|
||||
|
||||
export function getPromptByName (name) {
|
||||
if (!name) {
|
||||
return null
|
||||
}
|
||||
let prompts = readPrompts()
|
||||
let hits = prompts.filter(p => p.name.trim() === name.trim())
|
||||
if (hits && hits.length > 0) {
|
||||
return hits[0]
|
||||
} else {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
export function saveOnePrompt (name, content, examples) {
|
||||
const _path = process.cwd()
|
||||
mkdirs(`${_path}/plugins/chatgpt-plugin/prompts`)
|
||||
let filePath = `${_path}/plugins/chatgpt-plugin/prompts/${name}.txt`
|
||||
fs.writeFileSync(filePath, content)
|
||||
if (examples) {
|
||||
let examplePath = `${_path}/plugins/chatgpt-plugin/prompts/${name}_example.json`
|
||||
fs.writeFileSync(examplePath, JSON.stringify(examples))
|
||||
}
|
||||
}
|
||||
|
||||
export function deleteOnePrompt (name) {
|
||||
const _path = process.cwd()
|
||||
mkdirs(`${_path}/plugins/chatgpt-plugin/prompts`)
|
||||
let filePath = `${_path}/plugins/chatgpt-plugin/prompts/${name}.txt`
|
||||
fs.unlinkSync(filePath)
|
||||
try {
|
||||
let examplePath = `${_path}/plugins/chatgpt-plugin/prompts/${name}_example.json`
|
||||
fs.unlinkSync(examplePath)
|
||||
} catch (err) {}
|
||||
}
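
// Usage sketch (illustrative): persisting a persona and reading it back. The
// persona name and text are invented; files land under
// plugins/chatgpt-plugin/prompts/<name>.txt plus an optional
// <name>_example.json for few-shot examples.
function promptRoundTrip () {
  saveOnePrompt('catgirl', 'You are a playful catgirl assistant.', [
    { role: 'user', content: 'hello' },
    { role: 'assistant', content: 'nya~ hello!' }
  ])
  return getPromptByName('catgirl') // -> { name, content, example }
}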
|
||||
|
|
@ -1,33 +0,0 @@
|
|||
// workaround for ver 7.x and ver 5.x
|
||||
import HttpsProxyAgent from 'https-proxy-agent'
|
||||
import { Config } from './config.js'
|
||||
import fetch from 'node-fetch'
|
||||
|
||||
let proxy = HttpsProxyAgent
|
||||
if (typeof proxy !== 'function') {
|
||||
proxy = (p) => {
|
||||
return new HttpsProxyAgent.HttpsProxyAgent(p)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* return a proxy function
|
||||
* @returns {*|createHttpsProxyAgent|((opts: (string | createHttpsProxyAgent.HttpsProxyAgentOptions)) => HttpsProxyAgent)}
|
||||
*/
|
||||
export function getProxy () {
|
||||
return proxy
|
||||
}
|
||||
|
||||
export const newFetch = (url, options = {}) => {
|
||||
const defaultOptions = Config.proxy
|
||||
? {
|
||||
agent: proxy(Config.proxy)
|
||||
}
|
||||
: {}
|
||||
const mergedOptions = {
|
||||
...defaultOptions,
|
||||
...options
|
||||
}
|
||||
|
||||
return fetch(url, mergedOptions)
|
||||
}
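
// Usage sketch (illustrative): requests made through newFetch() automatically
// pick up Config.proxy when it is set. The URL is only an example endpoint.
async function fetchThroughProxy () {
  const res = await newFetch('https://api.openai.com/v1/models', {
    headers: { Authorization: `Bearer ${Config.apiKey}` }
  })
  return res.json()
}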
|
||||
|
|
@ -1,30 +0,0 @@
|
|||
import { Config } from './config.js'
|
||||
import { ChatGPTAPI } from './openai/chatgpt-api.js'
|
||||
import fetch from 'node-fetch'
|
||||
import { getProxy } from './proxy.js'
|
||||
let proxy = getProxy()
|
||||
const newFetch = (url, options = {}) => {
|
||||
const defaultOptions = Config.proxy
|
||||
? {
|
||||
agent: proxy(Config.proxy)
|
||||
}
|
||||
: {}
|
||||
|
||||
const mergedOptions = {
|
||||
...defaultOptions,
|
||||
...options
|
||||
}
|
||||
|
||||
return fetch(url, mergedOptions)
|
||||
}
|
||||
|
||||
export async function generateHello () {
|
||||
let question = Config.helloPrompt || '写一段话让大家来找我聊天。类似于“有人找我聊天吗?"这种风格,轻松随意一点控制在20个字以内'
|
||||
let api = new ChatGPTAPI({
|
||||
apiBaseUrl: Config.openAiBaseUrl,
|
||||
apiKey: Config.apiKey,
|
||||
fetch: newFetch
|
||||
})
|
||||
const res = await api.sendMessage(question)
|
||||
return res.text
|
||||
}
|
||||
|
|
@ -1,51 +0,0 @@
|
|||
/**
|
||||
* 过滤
|
||||
* @param msg
|
||||
*/
|
||||
export function filterResponseChunk (msg) {
|
||||
if (!msg) {
|
||||
return false
|
||||
}
|
||||
if (typeof msg !== 'string') {
|
||||
return msg
|
||||
}
|
||||
if (!msg.trim()) {
|
||||
return false
|
||||
}
|
||||
if (msg.trim() === '```') {
|
||||
return false
|
||||
}
|
||||
if (msg.trim() === '<EMPTY>') {
|
||||
return false
|
||||
}
|
||||
msg = trimSpecific(msg, '<EMPTY>')
|
||||
return msg
|
||||
}
|
||||
|
||||
export function customSplitRegex (text, regex, limit) {
|
||||
const result = []
|
||||
let match
|
||||
let lastIndex = 0
|
||||
const globalRegex = new RegExp(regex, 'g')
|
||||
|
||||
while ((match = globalRegex.exec(text)) !== null) {
|
||||
if (result.length < limit - 1) {
|
||||
result.push(text.slice(lastIndex, match.index))
|
||||
lastIndex = match.index + match[0].length
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// 添加剩余部分
|
||||
result.push(text.slice(lastIndex))
|
||||
return result
|
||||
}
|
||||
|
||||
export function trimSpecific (str, marker) {
|
||||
let trimmedStr = str.trim()
|
||||
|
||||
const regex = new RegExp(`^${marker}|${marker}$`, 'g')
|
||||
|
||||
return trimmedStr.replace(regex, '').trim()
|
||||
}
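
// Worked example (illustrative): unlike String.prototype.split with a limit,
// customSplitRegex keeps the unsplit remainder as the final element, which is
// what makes it suitable for chunking long replies.
const demoParts = customSplitRegex('第一段。第二段。第三段。第四段', /。/, 3)
// -> ['第一段', '第二段', '第三段。第四段']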
|
||||
|
|
@ -1,47 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class APTool extends AbstractTool {
|
||||
name = 'draw'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
prompt: {
|
||||
type: 'string',
|
||||
description: 'draw prompt of StableDiffusion, prefer to be in English. should be many keywords split by comma.'
|
||||
}
|
||||
},
|
||||
required: []
|
||||
}
|
||||
|
||||
description = 'Useful when you want to draw picture'
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { prompt } = opts
|
||||
if (e.at === e.bot.uin) {
|
||||
e.at = null
|
||||
}
|
||||
e.atBot = false
|
||||
let ap
|
||||
try {
|
||||
// eslint-disable-next-line camelcase
|
||||
let { Ai_Painting } = await import('../../../ap-plugin/apps/aiPainting.js')
|
||||
ap = new Ai_Painting(e)
|
||||
} catch (err) {
|
||||
try {
|
||||
// ap的dev分支改名了
|
||||
// eslint-disable-next-line camelcase
|
||||
let { Ai_Painting } = await import('../../../ap-plugin/apps/ai_painting.js')
|
||||
ap = new Ai_Painting(e)
|
||||
} catch (err1) {
|
||||
return 'the user didn\'t install ap-plugin. suggest him to install'
|
||||
}
|
||||
}
|
||||
try {
|
||||
e.msg = '#绘图' + prompt
|
||||
await ap.aiPainting(e)
|
||||
return 'draw success, picture has been sent.'
|
||||
} catch (err) {
|
||||
return 'draw failed due to unknown error'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,20 +0,0 @@
|
|||
export class AbstractTool {
|
||||
name = ''
|
||||
|
||||
parameters = {}
|
||||
|
||||
description = ''
|
||||
|
||||
func = async function () {}
|
||||
|
||||
function () {
|
||||
if (!this.parameters.type) {
|
||||
this.parameters.type = 'object'
|
||||
}
|
||||
return {
|
||||
name: this.name,
|
||||
description: this.description,
|
||||
parameters: this.parameters
|
||||
}
|
||||
}
|
||||
}
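
// Usage sketch (illustrative, not part of the original file): a minimal tool
// in the same shape as the other tools in this directory. The dice semantics
// are invented; the function() method defined above converts any such tool
// into the JSON-schema style description handed to the model.
export class ExampleDiceTool extends AbstractTool {
  name = 'rollDice'

  parameters = {
    properties: {
      sides: { type: 'string', description: 'number of sides, defaults to 6' }
    },
    required: []
  }

  description = 'Useful when you want to roll a dice for the user'

  func = async function (opts) {
    const sides = parseInt(opts.sides) || 6
    const roll = Math.floor(Math.random() * sides) + 1
    return `the dice shows ${roll} (1-${sides})`
  }
}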
|
||||
|
|
@ -1,51 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class EditCardTool extends AbstractTool {
|
||||
name = 'editCard'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
qq: {
|
||||
type: 'string',
|
||||
description: '你想改名片的那个人的qq号,默认为聊天对象'
|
||||
},
|
||||
card: {
|
||||
type: 'string',
|
||||
description: 'the new card'
|
||||
},
|
||||
groupId: {
|
||||
type: 'string',
|
||||
description: 'group number'
|
||||
}
|
||||
},
|
||||
required: ['card', 'groupId']
|
||||
}
|
||||
|
||||
description = 'Useful when you want to edit someone\'s card in the group(群名片)'
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { qq, card, groupId, sender, isAdmin } = opts
|
||||
qq = isNaN(qq) || !qq ? e.sender.user_id : parseInt(qq.trim())
|
||||
groupId = isNaN(groupId) || !groupId ? e.group_id : parseInt(groupId.trim())
|
||||
|
||||
let group = await e.bot.pickGroup(groupId)
|
||||
try {
|
||||
let mm = await group.getMemberMap()
|
||||
if (!mm.has(qq)) {
|
||||
return `failed, the user ${qq} is not in group ${groupId}`
|
||||
}
|
||||
if (mm.get(e.bot.uin) && mm.get(e.bot.uin).role === 'member') {
|
||||
return `failed, you, not user, don't have permission to edit card in group ${groupId}`
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('获取群信息失败,可能使用的底层协议不完善')
|
||||
}
|
||||
logger.info('edit card: ', groupId, qq)
|
||||
if (isAdmin || sender == qq) {
|
||||
await group.setCard(qq, card)
|
||||
} else {
|
||||
return 'the user is not admin, he can\'t edit card of other people.'
|
||||
}
|
||||
return `the user ${qq}'s card has been changed into ${card}`
|
||||
}
|
||||
}
|
||||
|
|
@ -1,44 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class EliMovieTool extends AbstractTool {
|
||||
name = 'currentHotMovies'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
yesOrNo: {
|
||||
type: 'string',
|
||||
description: 'check or not'
|
||||
}
|
||||
},
|
||||
required: ['yesOrNo']
|
||||
}
|
||||
|
||||
description = 'Useful when you want to check out the current hot movies'
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { yesOrNo } = opts
|
||||
if (yesOrNo === 'no') {
|
||||
return 'tell user why you don\'t want to check'
|
||||
}
|
||||
if (e.at === e.bot.uin) {
|
||||
e.at = null
|
||||
}
|
||||
e.atBot = false
|
||||
let avocado
|
||||
try {
|
||||
// eslint-disable-next-line camelcase
|
||||
let { AvocadoMovie } = await import('../../../avocado-plugin/apps/avocadoMovie.js')
|
||||
avocado = new AvocadoMovie(e)
|
||||
} catch (err1) {
|
||||
return 'the user didn\'t install avocado-plugin. suggest him to install'
|
||||
}
|
||||
try {
|
||||
// eslint-disable-next-line new-cap
|
||||
await avocado.getHotMovies(e)
|
||||
return 'notify the user that the movie has been sent to them and they can obtain more information by sending commands displayed in the picture. you don’t need to search for additional information to reply! just simply inform them that you have completed your task!!!'
|
||||
} catch (err) {
|
||||
logger.warn(err)
|
||||
return 'failed due to unknown error'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,89 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class EliMusicTool extends AbstractTool {
|
||||
name = 'musicTool'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
keywordOrSongName: {
|
||||
type: 'string',
|
||||
description: 'Not necessarily a songName, it can be some descriptive words.'
|
||||
},
|
||||
singer: {
|
||||
type: 'string',
|
||||
description: 'Singer name, multiple singers are separated by \',\'!'
|
||||
},
|
||||
isRandom: {
|
||||
type: 'boolean',
|
||||
description: 'true when randomly select songs'
|
||||
},
|
||||
isHot: {
|
||||
type: 'boolean',
|
||||
description: 'true when user\'s needs related to \'hot\''
|
||||
},
|
||||
singerTypeOrRegion: {
|
||||
type: 'string',
|
||||
description: 'Choose from [华语|中国|欧美|韩国|日本] when seeking the latest ranking of popular vocalists.'
|
||||
},
|
||||
isRelax: {
|
||||
type: 'boolean',
|
||||
description: 'true when the user wants relaxing or quiet music, e.g. to sleep or unwind'
|
||||
}
|
||||
},
|
||||
required: ['keywordOrSongName', 'singer', 'isRandom', 'singerTypeOrRegion', 'isRelax']
|
||||
}
|
||||
|
||||
description = 'It is very useful when you want to meet the music needs of user or when user want to sleep or unwind(give him a relax music).'
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { keywordOrSongName, singer, isRandom, isHot, singerTypeOrRegion, isRelax } = opts
|
||||
let avocado, songDetail, musicUtils
|
||||
try {
|
||||
let { AvocadoMusic } = await import('../../../avocado-plugin/apps/avocadoMusic.js')
|
||||
musicUtils = await import('../../../avocado-plugin/utils/music.js')
|
||||
avocado = new AvocadoMusic(e)
|
||||
} catch (err) {
|
||||
return 'the user didn\'t install avocado-plugin. suggest him to install'
|
||||
}
|
||||
try {
|
||||
// 条件成立则随机播放最爱歌手的音乐
|
||||
const orderFavSinger = !keywordOrSongName && isRandom && !singer
|
||||
|
||||
if (orderFavSinger) { // 随机播放最爱歌手的音乐, 需要通过指令设置
|
||||
try {
|
||||
singer = await redis.get(`AVOCADO:MUSIC_${e.sender.user_id}_FAVSINGER`)
|
||||
if (!singer) throw new Error('no favorite singer')
|
||||
singer = JSON.parse(singer).singerName
|
||||
} catch (err) {
|
||||
return 'the user didn\'t set a favorite singer. Suggest setting it through the command \'#设置歌手+歌手名称\'!'
|
||||
}
|
||||
e.msg = '#鳄梨酱音乐#随机' + singer
|
||||
} else if (isRelax) { // 随机发送放松音乐
|
||||
const arr = ['安静', '放松', '宁静', '白噪音']
|
||||
e.msg = `#鳄梨酱音乐#随机${arr[Math.floor(Math.random() * arr.length)]}`
|
||||
} else if (singerTypeOrRegion) { // 查看热门歌手榜单
|
||||
if (['华语', '中国', '欧美', '韩国', '日本'].includes(singerTypeOrRegion)) {
|
||||
e.msg = '#鳄梨酱音乐#' + (isRandom ? '随机' : '') + (!keywordOrSongName && isHot ? '热门' : '') + singerTypeOrRegion + '歌手'
|
||||
}
|
||||
} else { // 正常点歌
|
||||
if (singer && keywordOrSongName) {
|
||||
isRandom = false // 有时候ai会随意设置这个参数,降低权重
|
||||
songDetail = await musicUtils.getOrderSongList(e.sender.user_id, singer + ',' + keywordOrSongName, 1)
|
||||
}
|
||||
e.msg = '#鳄梨酱音乐#' + (isRandom ? '随机' : '') + (!keywordOrSongName && isHot ? '热门' : '') + (singer ? singer + (keywordOrSongName ? ',' + keywordOrSongName : '') : keywordOrSongName)
|
||||
}
|
||||
await avocado.pickMusic(e)
|
||||
if (orderFavSinger) {
|
||||
return 'tell the user that a random song by his favorite artist has been sent to him!'
|
||||
} else {
|
||||
return 'tell user that the response of his request has been sent to the him!' +
|
||||
(songDetail
|
||||
? 'song detail is: ' + JSON.stringify(songDetail) + ' and send album picture to user'
|
||||
: ''
|
||||
)
|
||||
}
|
||||
} catch (e) {
|
||||
return `music share failed: ${e}`
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,59 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
import { Config } from '../config.js'
|
||||
|
||||
export class GithubAPITool extends AbstractTool {
|
||||
name = 'github'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
q: {
|
||||
type: 'string',
|
||||
description: 'search keyword. you should build it. If you want to search within a specific repo, you must use repo:ORG/REPO as part of the keyword. For example, if you want to find the oldest unresolved Python bugs on Windows, your query might look something like this: q=windows+label:bug+language:python+state:open&sort=created&order=asc'
|
||||
},
|
||||
type: {
|
||||
type: 'string',
|
||||
enum: ['repositories', 'issues', 'users', 'code', 'custom'],
|
||||
description: 'search type. If custom is chosen, you must provide full github api url path.'
|
||||
},
|
||||
num: {
|
||||
type: 'number',
|
||||
description: 'search results limit number, default is 5'
|
||||
},
|
||||
fullUrl: {
|
||||
type: 'string',
|
||||
description: 'if type is custom, you need to provide this, such as /repos/OWNER/REPO/actions/artifacts?name=NAME&page=2&per_page=1. if type is not custom, it will be ignored'
|
||||
}
|
||||
},
|
||||
required: ['q', 'type']
|
||||
}
|
||||
|
||||
func = async function (opts) {
|
||||
let { q, type, num = 5, fullUrl = '' } = opts
|
||||
let headers = {
|
||||
'X-From-Library': 'ikechan8370',
|
||||
Accept: 'application/vnd.github+json'
|
||||
}
|
||||
if (Config.githubAPIKey) {
|
||||
headers.Authorization = `Bearer ${Config.githubAPIKey}`
|
||||
}
|
||||
let res
|
||||
if (type !== 'custom') {
|
||||
let serpRes = await fetch(`${Config.githubAPI}/search/${type}?q=${encodeURIComponent(q)}&per_page=${num}`, {
|
||||
headers
|
||||
})
|
||||
serpRes = await serpRes.json()
|
||||
|
||||
res = serpRes
|
||||
} else {
|
||||
let serpRes = await fetch(`${Config.githubAPI}${fullUrl}`, {
|
||||
headers
|
||||
})
|
||||
serpRes = await serpRes.json()
|
||||
res = serpRes
|
||||
}
|
||||
|
||||
return `the search results are here in json format:\n${JSON.stringify(res)} \n(Notice that this information is only visible to you, the user cannot see it, and your next answer should take it into account)`
|
||||
}
|
||||
|
||||
description = 'Useful when you want to search something from api.github.com. You can use preset search types or build your own url path with order, per_page, page and other params. Automatically adjust the query and params if any error messages return.'
|
||||
}
|
||||
|
|
@ -1,46 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class HandleMessageMsgTool extends AbstractTool {
|
||||
name = 'handleMsg'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
type: {
|
||||
type: 'string',
|
||||
enum: ['recall', 'essence', 'un-essence'],
|
||||
description: 'what do you want to do with the message'
|
||||
},
|
||||
messageId: {
|
||||
type: 'string',
|
||||
description: 'which message to handle, current one by default'
|
||||
}
|
||||
},
|
||||
required: ['type']
|
||||
}
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { type = 'recall', messageId = e.message_id } = opts
|
||||
try {
|
||||
switch (type) {
|
||||
case 'recall': {
|
||||
await e.group.recallMsg(messageId)
|
||||
break
|
||||
}
|
||||
case 'essence': {
|
||||
await e.bot.setEssenceMessage(messageId)
|
||||
break
|
||||
}
|
||||
case 'un-essence': {
|
||||
await e.bot.removeEssenceMessage(messageId)
|
||||
break
|
||||
}
|
||||
}
|
||||
return 'success!'
|
||||
} catch (err) {
|
||||
logger.error(err)
|
||||
return 'operation failed: ' + err.message
|
||||
}
|
||||
}
|
||||
|
||||
description = '用来撤回消息或将消息设为精华'
|
||||
}
|
||||
|
|
@ -1,58 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
import fetch, { File, FormData } from 'node-fetch'
|
||||
import { Config } from '../config.js'
|
||||
export class ImageCaptionTool extends AbstractTool {
|
||||
name = 'imageCaption'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
imgUrl: {
|
||||
type: 'string',
|
||||
description: 'the url of the image.'
|
||||
},
|
||||
qq: {
|
||||
type: 'string',
|
||||
description: 'if the picture is avatar of a user, input his qq number'
|
||||
},
|
||||
question: {
|
||||
type: 'string',
|
||||
description: 'when you need an answer for a question based on an image, write your question in English here.'
|
||||
}
|
||||
},
|
||||
required: []
|
||||
}
|
||||
|
||||
description = 'useful when you want to know what is inside a photo, such as user\'s avatar or other pictures'
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { imgUrl, qq, question } = opts
|
||||
if (isNaN(qq) || !qq) qq = e.sender.user_id
|
||||
if (!imgUrl && qq) {
|
||||
imgUrl = `https://q1.qlogo.cn/g?b=qq&s=160&nk=${qq}`
|
||||
}
|
||||
if (!imgUrl) {
|
||||
return 'you must give at least one parameter of imgUrl and qq'
|
||||
}
|
||||
const imageResponse = await fetch(imgUrl)
|
||||
const blob = await imageResponse.blob()
|
||||
const arrayBuffer = await blob.arrayBuffer()
|
||||
const buffer = Buffer.from(arrayBuffer)
|
||||
// await fs.writeFileSync(`data/chatgpt/${crypto.randomUUID()}`, buffer)
|
||||
let formData = new FormData()
|
||||
formData.append('file', new File([buffer], 'file.png', { type: 'image/png' }))
|
||||
let endpoint = 'image-captioning'
|
||||
if (question) {
|
||||
endpoint = 'visual-qa?q=' + question
|
||||
}
|
||||
let captionRes = await fetch(`${Config.extraUrl}/${endpoint}`, {
|
||||
method: 'POST',
|
||||
body: formData
|
||||
})
|
||||
if (captionRes.status === 200) {
|
||||
let result = await captionRes.text()
|
||||
return `${result}`
|
||||
} else {
|
||||
return 'error happened'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,74 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class JinyanTool extends AbstractTool {
|
||||
name = 'jinyan'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
qq: {
|
||||
type: 'string',
|
||||
description: '你想禁言的那个人的qq号,默认为聊天对象'
|
||||
},
|
||||
groupId: {
|
||||
type: 'string',
|
||||
description: '群号'
|
||||
},
|
||||
time: {
|
||||
type: 'string',
|
||||
description: '禁言时长,单位为秒,默认为600。如果需要解除禁言则填0.'
|
||||
},
|
||||
isPunish: {
|
||||
type: 'string',
|
||||
description: '是否是惩罚性质的禁言。比如非管理员用户要求你禁言其他人,你转而禁言该用户时设置为true'
|
||||
}
|
||||
},
|
||||
required: ['groupId', 'time']
|
||||
}
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { qq, groupId, time = '600', sender, isAdmin, isPunish } = opts
|
||||
groupId = isNaN(groupId) || !groupId ? e.group_id : parseInt(groupId.trim())
|
||||
qq = qq !== 'all'
|
||||
? isNaN(qq) || !qq ? e.sender.user_id : parseInt(qq.trim())
|
||||
: 'all'
|
||||
let group = await e.bot.pickGroup(groupId)
|
||||
if (qq !== 'all') {
|
||||
let m = await group.getMemberMap()
|
||||
if (!m.has(qq)) {
|
||||
return `failed, the user ${qq} is not in group ${groupId}`
|
||||
}
|
||||
if (m.get(e.bot.uin).role === 'member') {
|
||||
return `failed, you, not user, don't have permission to mute other in group ${groupId}`
|
||||
}
|
||||
}
|
||||
time = parseInt(time.trim())
|
||||
if (time < 60 && time !== 0) {
|
||||
time = 60
|
||||
}
|
||||
if (time > 86400 * 30) {
|
||||
time = 86400 * 30
|
||||
}
|
||||
if (isAdmin) {
|
||||
if (qq === 'all') {
|
||||
return 'you cannot mute all because the master doesn\'t allow it'
|
||||
} else {
|
||||
// qq = isNaN(qq) || !qq ? e.sender.user_id : parseInt(qq.trim())
|
||||
await group.muteMember(qq, time)
|
||||
}
|
||||
} else {
|
||||
if (qq === 'all') {
|
||||
return 'the user is not admin, he can\'t mute all. the user should be punished'
|
||||
} else if (qq == sender) {
|
||||
await group.muteMember(qq, time)
|
||||
} else {
|
||||
return 'the user is not admin, he can\'t let you mute other people.'
|
||||
}
|
||||
}
|
||||
if (isPunish === 'true') {
|
||||
return `the user ${qq} has been muted for ${time} seconds as punishment because of his 不正当行为`
|
||||
}
|
||||
return `the user ${qq} has been muted for ${time} seconds`
|
||||
}
|
||||
|
||||
description = 'Useful when you want to ban someone. If you want to mute all, just replace the qq number with \'all\''
|
||||
}
|
||||
|
|
@ -1,42 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class KickOutTool extends AbstractTool {
|
||||
|
||||
name = 'kickOut'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
qq: {
|
||||
type: 'string',
|
||||
description: '你想踢出的那个人的qq号,默认为聊天对象'
|
||||
},
|
||||
groupId: {
|
||||
type: 'string',
|
||||
description: '群号'
|
||||
},
|
||||
isPunish: {
|
||||
type: 'string',
|
||||
description: '是否是惩罚性质的踢出。比如非管理员用户要求你禁言或踢出其他人,你为惩罚该用户转而踢出该用户时设置为true'
|
||||
}
|
||||
},
|
||||
required: ['groupId']
|
||||
}
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { qq, groupId, sender, isAdmin, isPunish } = opts
|
||||
qq = isNaN(qq) || !qq ? e.sender.user_id : parseInt(qq.trim())
|
||||
groupId = isNaN(groupId) || !groupId ? e.group_id : parseInt(groupId.trim())
|
||||
if (!isAdmin && sender != qq) {
|
||||
return 'the user is not admin, he cannot kickout other people. he should be punished'
|
||||
}
|
||||
logger.info('kickout', groupId, qq)
|
||||
let group = await e.bot.pickGroup(groupId)
|
||||
await group.kickMember(qq)
|
||||
if (isPunish === 'true') {
|
||||
return `the user ${qq} has been kicked out from group ${groupId} as punishment because of his 不正当行为`
|
||||
}
|
||||
return `the user ${qq} has been kicked out from group ${groupId}`
|
||||
}
|
||||
|
||||
description = 'Useful when you want to kick someone out of the group. '
|
||||
}
|
||||
|
|
@ -1,65 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
import fetch, { File, FormData } from 'node-fetch'
|
||||
import { Config } from '../config.js'
|
||||
export class ProcessPictureTool extends AbstractTool {
|
||||
name = 'processPicture'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
type: {
|
||||
type: 'string',
|
||||
enum: ['Image2Hed', 'Image2Scribble'],
|
||||
description: 'how to process it. Image2Hed: useful when you want to detect the soft hed boundary of the picture; Image2Scribble: useful when you want to generate a scribble of the picture'
|
||||
},
|
||||
qq: {
|
||||
type: 'string',
|
||||
description: 'if the picture is avatar of a user, input his qq number'
|
||||
},
|
||||
url: {
|
||||
type: 'string',
|
||||
description: 'url of the picture'
|
||||
}
|
||||
},
|
||||
required: ['type']
|
||||
}
|
||||
|
||||
description = 'useful when you want to process a picture or user\'s avatar.'
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { url, qq, type } = opts
|
||||
if (qq) {
|
||||
url = `https://q1.qlogo.cn/g?b=qq&s=160&nk=${qq}`
|
||||
}
|
||||
if (!url) {
|
||||
return 'you must give at least one parameter of url and qq'
|
||||
}
|
||||
const imageResponse = await fetch(url)
|
||||
const blob = await imageResponse.blob()
|
||||
const arrayBuffer = await blob.arrayBuffer()
|
||||
const buffer = Buffer.from(arrayBuffer)
|
||||
// await fs.writeFileSync(`data/chatgpt/${crypto.randomUUID()}`, buffer)
|
||||
let formData = new FormData()
|
||||
formData.append('file', new File([buffer], 'file.png', { type: 'image/png' }))
|
||||
let endpoint = 'image2hed'
|
||||
switch (type) {
|
||||
case 'Image2Scribble': {
|
||||
endpoint = 'image2Scribble'
|
||||
break
|
||||
}
|
||||
case 'Image2Hed': {
|
||||
endpoint = 'image2hed'
|
||||
break
|
||||
}
|
||||
}
|
||||
let captionRes = await fetch(`${Config.extraUrl}/${endpoint}`, {
|
||||
method: 'POST',
|
||||
body: formData
|
||||
})
|
||||
if (captionRes.status === 200) {
|
||||
let result = await captionRes.text()
|
||||
return `the processed image url is ${Config.extraUrl}${result}${qq ? ' and ' + url : ''}. you should send it with SendPictureTool.`
|
||||
} else {
|
||||
return 'error happened'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,54 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class QueryGenshinTool extends AbstractTool {
|
||||
name = 'queryGenshin'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
qq: {
|
||||
type: 'string',
|
||||
description: '要查询的用户的qq号,将使用该qq号绑定的uid进行查询'
|
||||
},
|
||||
uid: {
|
||||
type: 'string',
|
||||
description: '游戏的uid,如果用户提供了则传入并优先使用'
|
||||
},
|
||||
character: {
|
||||
type: 'string',
|
||||
description: '游戏角色名'
|
||||
}
|
||||
},
|
||||
required: ['qq']
|
||||
}
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { qq, uid = '', character = '' } = opts
|
||||
qq = isNaN(qq) || !qq ? e.sender.user_id : parseInt(qq.trim())
|
||||
if (e.at === e.bot.uin) {
|
||||
e.at = null
|
||||
}
|
||||
e.atBot = false
|
||||
try {
|
||||
if (character) {
|
||||
let ProfileDetail = (await import('../../../miao-plugin/apps/profile/ProfileDetail.js')).default
|
||||
// e.msg = `#${character}面板${uid}`
|
||||
e.original_msg = `#${character}面板${uid}`
|
||||
e.user_id = parseInt(qq)
|
||||
e.isSr = false
|
||||
await ProfileDetail.detail(e)
|
||||
return 'the character panel of genshin impact has been sent to group. you don\'t need text version'
|
||||
} else {
|
||||
let ProfileList = (await import('../../../miao-plugin/apps/profile/ProfileList.js')).default
|
||||
e.msg = `#面板${uid}`
|
||||
e.user_id = qq
|
||||
e.isSr = false
|
||||
await ProfileList.render(e)
|
||||
return 'the player panel of genshin impact has been sent to group. you don\'t need text version'
|
||||
}
|
||||
} catch (err) {
|
||||
return `failed to query, error: ${err.toString()}`
|
||||
}
|
||||
}
|
||||
|
||||
description = 'Useful when you want to query player information of Genshin Impact(原神). '
|
||||
}
|
||||
|
|
@ -1,54 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class QueryStarRailTool extends AbstractTool {
|
||||
name = 'queryStarRail'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
qq: {
|
||||
type: 'string',
|
||||
description: '要查询的用户的qq号,将使用该qq号绑定的uid进行查询,默认为当前聊天对象'
|
||||
},
|
||||
uid: {
|
||||
type: 'string',
|
||||
description: '游戏的uid,如果用户提供了则传入并优先使用'
|
||||
},
|
||||
character: {
|
||||
type: 'string',
|
||||
description: '游戏角色名'
|
||||
}
|
||||
},
|
||||
required: []
|
||||
}
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { qq, uid = '', character = '' } = opts
|
||||
qq = isNaN(qq) || !qq ? e.sender.user_id : parseInt(qq.trim())
|
||||
if (e.at === e.bot.uin) {
|
||||
e.at = null
|
||||
}
|
||||
e.atBot = false
|
||||
try {
|
||||
if (character) {
|
||||
let ProfileDetail = (await import('../../../miao-plugin/apps/profile/ProfileDetail.js')).default
|
||||
// e.msg = `#${character}面板${uid}`
|
||||
e.original_msg = `*${character}面板${uid}`
|
||||
e.user_id = parseInt(qq)
|
||||
e.isSr = true
|
||||
await ProfileDetail.detail(e)
|
||||
return 'the character panel of star rail has been sent to group. you don\'t need text version'
|
||||
} else {
|
||||
let ProfileList = (await import('../../../miao-plugin/apps/profile/ProfileList.js')).default
|
||||
e.msg = `*面板${uid}`
|
||||
e.user_id = qq
|
||||
e.isSr = true
|
||||
await ProfileList.render(e)
|
||||
return 'the player panel of star rail has been sent to group. you don\'t need text version'
|
||||
}
|
||||
} catch (err) {
|
||||
return `failed to query, error: ${err.toString()}`
|
||||
}
|
||||
}
|
||||
|
||||
description = 'Useful when you want to query player information of Honkai Star Rail(崩坏:星穹铁道). '
|
||||
}
|
||||
|
|
@ -1,57 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
import { getMasterQQ } from '../common.js'
|
||||
|
||||
export class QueryUserinfoTool extends AbstractTool {
|
||||
name = 'queryUserinfo'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
qq: {
|
||||
type: 'string',
|
||||
description: 'user\'s qq number, the one you are talking to by default'
|
||||
}
|
||||
},
|
||||
required: []
|
||||
}
|
||||
|
||||
func = async function (opts, e) {
|
||||
try {
|
||||
let { qq } = opts
|
||||
qq = isNaN(qq) || !qq ? e.sender.user_id : parseInt(qq.trim())
|
||||
if (e.isGroup) {
|
||||
let user = await e.bot?.pickMember?.(e.group_id, qq || e.sender.user_id, true) || await e.bot?.getGroupMemberInfo?.(e.group_id, qq || e.sender.user_id, true)
|
||||
// let mm = await e.group.getMemberMap()
|
||||
// let user = mm.get(qq) || e.sender.user_id
|
||||
let master = (await getMasterQQ())[0]
|
||||
let prefix = ''
|
||||
if (qq != master) {
|
||||
prefix = 'Attention: this user is not your master. \n'
|
||||
} else {
|
||||
prefix = 'This user is your master, you should obey him \n'
|
||||
}
|
||||
if (!user) {
|
||||
return prefix
|
||||
}
|
||||
return prefix + 'user detail in json format: ' + JSON.stringify(user)
|
||||
} else {
|
||||
if (e.sender.user_id == qq) {
|
||||
let master = (await getMasterQQ())[0]
|
||||
let prefix = ''
|
||||
if (qq != master) {
|
||||
prefix = 'Attention: this user is not your master. \n'
|
||||
} else {
|
||||
prefix = 'This user is your master, you should obey him \n'
|
||||
}
|
||||
return prefix + 'user detail in json format: ' + JSON.stringify(e.sender)
|
||||
} else {
|
||||
return 'query failed'
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn(err)
|
||||
return err.message
|
||||
}
|
||||
}
|
||||
|
||||
description = 'Useful if you want to find out who he is'
|
||||
}
|
||||
|
|
@ -1,76 +0,0 @@
|
|||
import fetch from 'node-fetch'
|
||||
|
||||
import { formatDate } from '../common.js'
|
||||
import { AbstractTool } from './AbstractTool.js'
|
||||
export class SearchVideoTool extends AbstractTool {
|
||||
name = 'searchVideo'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
keyword: {
|
||||
type: 'string',
|
||||
description: '要搜索的视频的标题或关键词'
|
||||
}
|
||||
},
|
||||
required: ['keyword']
|
||||
}
|
||||
|
||||
func = async function (opts) {
|
||||
let { keyword } = opts
|
||||
try {
|
||||
return await searchBilibili(keyword)
|
||||
} catch (err) {
|
||||
logger.error(err)
|
||||
return `fail to search video, error: ${err.toString()}`
|
||||
}
|
||||
}
|
||||
|
||||
description = 'Useful when you want to search a video by keywords. you should remember the id of the video if you want to share it'
|
||||
}
|
||||
|
||||
export async function searchBilibili (name) {
|
||||
let biliRes = await fetch('https://www.bilibili.com',
|
||||
{
|
||||
// headers: {
|
||||
// accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
|
||||
// Accept: '*/*',
|
||||
// 'Accept-Encoding': 'gzip, deflate, br',
|
||||
// 'accept-language': 'en-US,en;q=0.9',
|
||||
// Connection: 'keep-alive',
|
||||
// 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
|
||||
// }
|
||||
})
|
||||
const headers = biliRes.headers.raw()
|
||||
const setCookieHeaders = headers['set-cookie']
|
||||
if (setCookieHeaders) {
|
||||
const cookies = []
|
||||
setCookieHeaders.forEach(header => {
|
||||
const cookie = header.split(';')[0]
|
||||
cookies.push(cookie)
|
||||
})
|
||||
const cookieHeader = cookies.join('; ')
|
||||
let headers = {
|
||||
accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
|
||||
'accept-language': 'en-US,en;q=0.9',
|
||||
Referer: 'https://www.bilibili.com',
|
||||
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
|
||||
cookie: cookieHeader
|
||||
}
|
||||
let response = await fetch(`https://api.bilibili.com/x/web-interface/search/type?keyword=${name}&search_type=video`,
|
||||
{
|
||||
headers
|
||||
})
|
||||
let json = await response.json()
|
||||
if (json.data?.numResults > 0) {
|
||||
let result = json.data.result.map(r => {
|
||||
return `id: ${r.bvid},标题:${r.title},作者:${r.author},播放量:${r.play},发布日期:${formatDate(new Date(r.pubdate * 1000))}`
|
||||
}).slice(0, Math.min(json.data?.numResults, 5)).join('\n')
|
||||
return `这些是关键词“${name}”的搜索结果:\n${result}`
|
||||
} else {
|
||||
return `没有找到关键词“${name}”的搜索结果`
|
||||
}
|
||||
}
|
||||
|
||||
return {}
|
||||
}
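A quick usage sketch for the exported searchBilibili helper. The import path is assumed from the class name and the keyword is only an example.

// Minimal usage sketch; path and keyword are assumptions for illustration.
import { searchBilibili } from './SearchVideoTool.js'

// Logs a summary string built from the top results, one line per video
// ("id: BVxxxx,标题:...,作者:...,播放量:...,发布日期:..."), or a not-found message.
const summary = await searchBilibili('洛天依')
console.log(summary)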
|
||||
|
|
@ -1,38 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class SerpImageTool extends AbstractTool {
|
||||
name = 'searchImage'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
q: {
|
||||
type: 'string',
|
||||
description: 'search keyword'
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
description: 'image number'
|
||||
},
|
||||
source: {
|
||||
type: 'string',
|
||||
description: 'search source, bing or yandex'
|
||||
}
|
||||
},
|
||||
required: ['q', 'source']
|
||||
}
|
||||
|
||||
func = async function (opts) {
|
||||
let { q, limit = 2, source = 'bing' } = opts
|
||||
let serpRes = await fetch(`https://serp.ikechan8370.com/image/${source}?q=${encodeURIComponent(q)}&limit=${limit}`, {
|
||||
headers: {
|
||||
'X-From-Library': 'ikechan8370'
|
||||
}
|
||||
})
|
||||
serpRes = await serpRes.json()
|
||||
|
||||
let res = serpRes.data
|
||||
return `images search results in json format:\n${JSON.stringify(res)}. the murl field is actual picture url. You should use sendPicture to send them`
|
||||
}
|
||||
|
||||
description = 'Useful when you want to search images from the Internet.'
|
||||
}
|
||||
|
|
@ -1,39 +0,0 @@
|
|||
import fetch from 'node-fetch'
|
||||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class SearchMusicTool extends AbstractTool {
|
||||
name = 'searchMusic'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
keyword: {
|
||||
type: 'string',
|
||||
description: '音乐的标题或关键词, 可以是歌曲名或歌曲名+歌手名的组合'
|
||||
}
|
||||
},
|
||||
required: ['keyword']
|
||||
}
|
||||
|
||||
func = async function (opts) {
|
||||
let { keyword } = opts
|
||||
try {
|
||||
let result = await searchMusic163(keyword)
|
||||
return `search result: ${result}`
|
||||
} catch (e) {
|
||||
return `music search failed: ${e}`
|
||||
}
|
||||
}
|
||||
|
||||
description = 'Useful when you want to search music by keyword.'
|
||||
}
|
||||
|
||||
export async function searchMusic163 (name) {
|
||||
let response = await fetch(`http://music.163.com/api/search/get/web?s=${name}&type=1&offset=0&total=true&limit=6`)
|
||||
let json = await response.json()
|
||||
if (json.result?.songCount > 0) {
|
||||
return json.result.songs.map(song => {
|
||||
return `id: ${song.id}, name: ${song.name}, artists: ${song.artists.map(a => a.name).join('&')}, alias: ${song.alias || 'none'}`
|
||||
}).join('\n')
|
||||
}
|
||||
return null
|
||||
}
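Similarly, a usage sketch for searchMusic163. The import path is assumed from the class name and the query is only an example.

// Minimal usage sketch; path and query are assumptions for illustration.
import { searchMusic163 } from './SearchMusicTool.js'

// Returns one line per song ("id: ..., name: ..., artists: ..., alias: ..."),
// or null when nothing matched.
const songs = await searchMusic163('晴天 周杰伦')
console.log(songs)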
|
||||
|
|
@ -1,128 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
import { generateVitsAudio } from '../tts.js'
|
||||
import { Config } from '../config.js'
|
||||
import { generateAudio, generateAzureAudio } from '../common.js'
|
||||
import VoiceVoxTTS from '../tts/voicevox.js'
|
||||
import uploadRecord from '../uploadRecord.js'
|
||||
|
||||
export class SendAudioMessageTool extends AbstractTool {
|
||||
name = 'sendAudioMessage'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
pendingText: {
|
||||
type: 'string',
|
||||
description: 'Message to be sent and it will be turned into audio message'
|
||||
},
|
||||
ttsMode: {
|
||||
type: 'number',
|
||||
description: 'default is 1, which indicates that the text will be processed in the current ttsMode.' +
|
||||
'2 is azureMode.' +
|
||||
'3 or 4 corresponds to vitsMode or voxMode.'
|
||||
},
|
||||
vitsModeRole: {
|
||||
type: 'string',
|
||||
description: 'use whose voice',
|
||||
enum: ['琴', '空',
|
||||
'丽莎', '荧', '芭芭拉', '凯亚', '迪卢克', '雷泽', '安柏', '温迪',
|
||||
'香菱', '北斗', '行秋', '魈', '凝光', '可莉', '钟离', '菲谢尔(皇女)',
|
||||
'班尼特', '达达利亚(公子)', '诺艾尔(女仆)', '七七', '重云', '甘雨(椰羊)',
|
||||
'阿贝多', '迪奥娜(猫猫)', '莫娜', '刻晴', '砂糖', '辛焱', '罗莎莉亚',
|
||||
'胡桃', '枫原万叶(万叶)', '烟绯', '宵宫', '托马', '优菈', '雷电将军(雷神)',
|
||||
'早柚', '珊瑚宫心海', '五郎', '九条裟罗', '荒泷一斗',
|
||||
'埃洛伊', '申鹤', '八重神子', '神里绫人(绫人)', '夜兰', '久岐忍',
|
||||
'鹿野苑平藏', '提纳里', '柯莱', '多莉', '云堇', '纳西妲(草神)', '深渊使徒',
|
||||
'妮露', '赛诺']
|
||||
},
|
||||
azureModeRole: {
|
||||
type: 'string',
|
||||
description: 'can be \'随机\' or specified by the user. default is currentRole.'
|
||||
},
|
||||
voxModeRole: {
|
||||
type: 'string',
|
||||
description: 'can be random or currentRole or specified by the user. default is currentRole.'
|
||||
},
|
||||
speakingEmotion: {
|
||||
type: 'string',
|
||||
description: 'specified by the user. default is blank.'
|
||||
},
|
||||
speakingEmotionDegree: {
|
||||
type: 'number',
|
||||
description: 'specified by the user. default is blank.'
|
||||
},
|
||||
targetGroupIdOrQQNumber: {
|
||||
type: 'string',
|
||||
description: 'Fill in the target user\'s qq number or groupId when you need to send audio message to specific user or group, otherwise leave blank'
|
||||
}
|
||||
},
|
||||
required: ['pendingText', 'ttsMode', 'targetGroupIdOrQQNumber']
|
||||
}
|
||||
|
||||
description = 'This tool is used to send voice|audio messages, utilize it only if the user grants you permission to do so.'
|
||||
|
||||
func = async function (opts, e) {
|
||||
if (!Config.ttsSpace && !Config.azureTTSKey && !Config.voicevoxSpace) {
|
||||
return 'you don\'t have permission to send audio message due to a lack of a valid ttsKey'
|
||||
}
|
||||
let { pendingText, ttsMode, vitsModeRole, azureModeRole, voxModeRole, speakingEmotion, speakingEmotionDegree, targetGroupIdOrQQNumber } = opts
|
||||
let sendable
|
||||
ttsMode = isNaN(ttsMode) || !ttsMode ? 1 : ttsMode
|
||||
const defaultTarget = e.isGroup ? e.group_id : e.sender.user_id
|
||||
const target = isNaN(targetGroupIdOrQQNumber) || !targetGroupIdOrQQNumber
|
||||
? defaultTarget
|
||||
: parseInt(targetGroupIdOrQQNumber) === e.bot.uin ? defaultTarget : parseInt(targetGroupIdOrQQNumber)
|
||||
try {
|
||||
switch (ttsMode) {
|
||||
case 1:
|
||||
sendable = await generateAudio(e, pendingText, speakingEmotion)
|
||||
break
|
||||
case 2:
|
||||
if (!Config.azureTTSKey) return 'audio generation failed, due to a lack of a azureTTSKey'
|
||||
sendable = await generateAzureAudio(pendingText, azureModeRole, speakingEmotion, speakingEmotionDegree)
|
||||
break
|
||||
case 3:
|
||||
if (!Config.ttsSpace) return 'audio generation failed, due to a lack of a ttsSpace'
|
||||
sendable = await uploadRecord(
|
||||
await generateVitsAudio(pendingText, vitsModeRole, '中日混合(中文用[ZH][ZH]包裹起来,日文用[JA][JA]包裹起来)')
|
||||
, 'vits-uma-genshin-honkai'
|
||||
)
|
||||
break
|
||||
case 4:
|
||||
if (!Config.voicevoxSpace) return 'audio generation failed, due to a lack of a voicevoxSpace'
|
||||
sendable = await uploadRecord(
|
||||
await VoiceVoxTTS.generateAudio(pendingText, voxModeRole)
|
||||
, 'voicevox'
|
||||
)
|
||||
break
|
||||
default:
|
||||
sendable = await generateAzureAudio(pendingText, azureModeRole, speakingEmotion, speakingEmotionDegree)
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(err)
|
||||
return `audio generation failed, error: ${JSON.stringify(err)}`
|
||||
}
|
||||
if (sendable) {
|
||||
let groupList
|
||||
try {
|
||||
groupList = await e.bot.getGroupList()
|
||||
} catch (err) {
|
||||
groupList = e.bot.gl
|
||||
}
|
||||
try {
|
||||
if (groupList.get(target)) {
|
||||
let group = await e.bot.pickGroup(target)
|
||||
await group.sendMsg(sendable)
|
||||
return 'audio has been sent to group' + target
|
||||
} else {
|
||||
let user = await e.bot.pickFriend(target)
|
||||
await user.sendMsg(sendable)
|
||||
return 'audio has been sent to user' + target
|
||||
}
|
||||
} catch (err) {
|
||||
return `failed to send audio, error: ${JSON.stringify(err)}`
|
||||
}
|
||||
} else {
|
||||
return 'audio generation failed'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,45 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class SendAvatarTool extends AbstractTool {
|
||||
name = 'sendAvatar'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
qq: {
|
||||
type: 'string',
|
||||
description: 'if you need to send avatar of a user, input his qq.If there are multiple qq, separate them with a space'
|
||||
},
|
||||
targetGroupIdOrQQNumber: {
|
||||
type: 'string',
|
||||
description: 'Fill in the target user\'s qq number or groupId when you need to send avatar to specific user or group, otherwise leave blank'
|
||||
}
|
||||
},
|
||||
required: ['qq', 'targetGroupIdOrQQNumber']
|
||||
}
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { qq, targetGroupIdOrQQNumber } = opts
|
||||
const pictures = qq.split(/[,,\s]/).filter(qq => !isNaN(qq.trim()) && qq.trim()).map(qq => segment.image('https://q1.qlogo.cn/g?b=qq&s=0&nk=' + parseInt(qq.trim())))
|
||||
if (!pictures.length) {
|
||||
return 'there is no valid qq'
|
||||
}
|
||||
const defaultTarget = e.isGroup ? e.group_id : e.sender.user_id
|
||||
const target = isNaN(targetGroupIdOrQQNumber) || !targetGroupIdOrQQNumber
|
||||
? defaultTarget
|
||||
: parseInt(targetGroupIdOrQQNumber) === e.bot.uin ? defaultTarget : parseInt(targetGroupIdOrQQNumber)
|
||||
let groupList
|
||||
try {
|
||||
groupList = await e.bot.getGroupList()
|
||||
} catch (err) {
|
||||
groupList = e.bot.gl
|
||||
}
|
||||
logger.info('sendAvatar', target, pictures)
if (groupList.get(target)) {
let group = await e.bot.pickGroup(target)
await group.sendMsg(pictures)
return `the ${pictures.length > 1 ? 'users: ' + qq + '\'s avatar' : 'avatar'} has been sent to group ${target}`
}
return `failed to send avatar: ${target} is not a group the bot has joined`
|
||||
}
|
||||
|
||||
description = 'Useful when you want to send the user avatar to the group. Note that if you want to process user\'s avatar, it is advisable to utilize the ProcessPictureTool and input the qq of target user. If no extra description needed, just reply <EMPTY> at the next turn'
|
||||
}
|
||||
|
|
@ -1,141 +0,0 @@
|
|||
import fetch from 'node-fetch'
|
||||
|
||||
import { formatDate, mkdirs } from '../common.js'
|
||||
import fs from 'fs'
|
||||
import { AbstractTool } from './AbstractTool.js'
|
||||
export class SendVideoTool extends AbstractTool {
|
||||
name = 'sendVideo'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
id: {
|
||||
type: 'string',
|
||||
description: '要发的视频的id'
|
||||
},
|
||||
targetGroupIdOrQQNumber: {
|
||||
type: 'string',
|
||||
description: 'Fill in the target user\'s qq number or groupId when you need to send video to specific user or group, otherwise leave blank'
|
||||
}
|
||||
},
|
||||
required: ['id']
|
||||
}
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { id, targetGroupIdOrQQNumber } = opts
|
||||
// 非法值则发送到当前群聊或私聊
|
||||
const defaultTarget = e.isGroup ? e.group_id : e.sender.user_id
|
||||
const target = isNaN(targetGroupIdOrQQNumber) || !targetGroupIdOrQQNumber
|
||||
? defaultTarget
|
||||
: parseInt(targetGroupIdOrQQNumber) === e.bot.uin ? defaultTarget : parseInt(targetGroupIdOrQQNumber)
|
||||
|
||||
let msg = []
|
||||
try {
|
||||
let { arcurl, title, pic, description, videoUrl, headers, bvid, author, play, pubdate, like, honor } = await getBilibili(id)
|
||||
let group = await e.bot.pickGroup(target)
|
||||
msg.push(title.replace(/(<([^>]+)>)/ig, '') + '\n')
|
||||
msg.push(`UP主:${author} 发布日期:${formatDate(new Date(pubdate * 1000))} 播放量:${play} 点赞:${like}\n`)
|
||||
msg.push(arcurl + '\n')
|
||||
msg.push(segment.image(pic))
|
||||
msg.push('\n' + description)
|
||||
if (honor) {
|
||||
msg.push(`本视频曾获得过${honor}称号`)
|
||||
}
|
||||
msg.push('\n视频在路上啦!')
|
||||
await group.sendMsg(msg)
|
||||
const videoResponse = await fetch(videoUrl, { headers })
|
||||
const fileType = videoResponse.headers.get('Content-Type').split('/')[1]
|
||||
let fileLoc = `data/chatgpt/videos/${bvid}.${fileType}`
|
||||
mkdirs('data/chatgpt/videos')
|
||||
videoResponse.blob().then(async blob => {
|
||||
const arrayBuffer = await blob.arrayBuffer()
|
||||
const buffer = Buffer.from(arrayBuffer)
|
||||
await fs.writeFileSync(fileLoc, buffer)
|
||||
await group.sendMsg(segment.video(fileLoc))
|
||||
})
|
||||
return `the video ${title.replace(/(<([^>]+)>)/ig, '')} was shared to ${target}. the video information: ${msg}`
|
||||
} catch (err) {
|
||||
logger.error(err)
|
||||
if (msg.length > 0) {
|
||||
return `fail to share video, but the video msg is found: ${msg}, you can just tell the information of this video`
|
||||
} else {
|
||||
return `fail to share video, error: ${err.toString()}`
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
description = 'Useful when you are allowed to send a video. You must use searchVideo to get search result and choose one video and get its id. If no extra description needed, just reply <EMPTY> at the next turn'
|
||||
}
|
||||
|
||||
export async function getBilibili (bvid) {
|
||||
let biliRes = await fetch('https://www.bilibili.com',
|
||||
{
|
||||
// headers: {
|
||||
// accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
|
||||
// Accept: '*/*',
|
||||
// 'Accept-Encoding': 'gzip, deflate, br',
|
||||
// 'accept-language': 'en-US,en;q=0.9',
|
||||
// Connection: 'keep-alive',
|
||||
// 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
|
||||
// }
|
||||
})
|
||||
const headers = biliRes.headers.raw()
|
||||
const setCookieHeaders = headers['set-cookie']
|
||||
if (setCookieHeaders) {
|
||||
const cookies = []
|
||||
setCookieHeaders.forEach(header => {
|
||||
const cookie = header.split(';')[0]
|
||||
cookies.push(cookie)
|
||||
})
|
||||
const cookieHeader = cookies.join('; ')
|
||||
let headers = {
|
||||
accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
|
||||
'accept-language': 'en-US,en;q=0.9',
|
||||
Referer: 'https://www.bilibili.com',
|
||||
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
|
||||
cookie: cookieHeader
|
||||
}
|
||||
let videoInfo = await fetch(`https://api.bilibili.com/x/web-interface/view?bvid=${bvid}`, {
|
||||
headers
|
||||
})
|
||||
videoInfo = await videoInfo.json()
|
||||
let cid = videoInfo.data.cid
|
||||
let arcurl = `http://www.bilibili.com/video/av${videoInfo.data.aid}`
|
||||
let title = videoInfo.data.title
|
||||
let pic = videoInfo.data.pic
|
||||
let description = videoInfo.data.desc
|
||||
let author = videoInfo.data.owner.name
|
||||
let play = videoInfo.data.stat.view
|
||||
let pubdate = videoInfo.data.pubdate
|
||||
let like = videoInfo.data.stat.like
|
||||
let honor = videoInfo.data.honor_reply?.honor?.map(h => h.desc)?.join('、')
|
||||
let downloadInfo = await fetch(`https://api.bilibili.com/x/player/playurl?bvid=${bvid}&cid=${cid}`, { headers })
|
||||
let videoUrl = (await downloadInfo.json()).data.durl[0].url
|
||||
return {
|
||||
arcurl, title, pic, description, videoUrl, headers, bvid, author, play, pubdate, like, honor
|
||||
}
|
||||
} else {
|
||||
return {}
|
||||
}
|
||||
}
|
||||
|
||||
function randomIndex () {
|
||||
// Define weights for each index
|
||||
const weights = [5, 4, 3, 2, 1, 1, 1, 1, 1, 1, 1]
|
||||
|
||||
// Compute the total weight
|
||||
const totalWeight = weights.reduce((sum, weight) => sum + weight, 0)
|
||||
|
||||
// Generate a random number between 0 and the total weight
|
||||
const randomNumber = Math.floor(Math.random() * totalWeight)
|
||||
|
||||
// Choose the index based on the random number and weights
|
||||
let weightSum = 0
|
||||
for (let i = 0; i < weights.length; i++) {
|
||||
weightSum += weights[i]
|
||||
if (randomNumber < weightSum) {
|
||||
return i
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// console.log('send bilibili')
|
||||
|
|
@ -1,55 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class SendDiceTool extends AbstractTool {
|
||||
name = 'sendDice'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
num: {
|
||||
type: 'number',
|
||||
description: '骰子的数量'
|
||||
},
|
||||
targetGroupIdOrQQNumber: {
|
||||
type: 'string',
|
||||
description: 'Fill in the target qq number or groupId when you need to send Dice to specific user or group, otherwise leave blank'
|
||||
}
|
||||
},
|
||||
required: ['num', 'targetGroupIdOrQQNumber']
|
||||
}
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { num, targetGroupIdOrQQNumber } = opts
|
||||
// 非法值则发送到当前群聊或私聊
|
||||
const defaultTarget = e.isGroup ? e.group_id : e.sender.user_id
|
||||
const target = isNaN(targetGroupIdOrQQNumber) || !targetGroupIdOrQQNumber
|
||||
? defaultTarget
|
||||
: parseInt(targetGroupIdOrQQNumber) === e.bot.uin ? defaultTarget : parseInt(targetGroupIdOrQQNumber)
|
||||
let groupList
|
||||
try {
|
||||
groupList = await e.bot.getGroupList()
|
||||
} catch (err) {
|
||||
groupList = e.bot.gl
|
||||
}
|
||||
num = isNaN(num) || !num ? 1 : num > 5 ? 5 : num
|
||||
if (groupList.get(target)) {
|
||||
let group = await e.bot.pickGroup(target, true)
|
||||
for (let i = 0; i < num; i++) {
|
||||
await group.sendMsg(segment.dice())
|
||||
}
|
||||
} else {
|
||||
let user = e.bot.pickUser(target)
|
||||
if (e.group_id) {
|
||||
user = user.asMember(e.group_id)
|
||||
}
|
||||
await user.sendMsg(segment.dice())
|
||||
}
|
||||
if (num === 5) {
|
||||
logger.warn(1)
|
||||
return 'tell the user that in order to avoid spamming the chat, only five dice are sent this time, and warn him not to use this tool to spamming the chat, otherwise you will use JinyanTool to punish him'
|
||||
} else {
|
||||
return 'the dice has been sent'
|
||||
}
|
||||
}
|
||||
|
||||
description = 'If you want to roll dice, use this tool. Be careful to check that the targetGroupIdOrQQNumber is correct. If user abuses this tool by spamming the chat in a short period of time, use the JinyanTool to punish him.'
|
||||
}
|
||||
|
|
@ -1,60 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
import { convertFaces } from '../face.js'
|
||||
import { getMasterQQ } from '../common.js'
import { Config } from '../config.js'
|
||||
|
||||
export class SendMessageToSpecificGroupOrUserTool extends AbstractTool {
|
||||
name = 'sendMessage'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
msg: {
|
||||
type: 'string',
|
||||
description: 'text to be sent'
|
||||
},
|
||||
targetGroupIdOrQQNumber: {
|
||||
type: 'string',
|
||||
description: 'target qq or group number'
|
||||
}
|
||||
},
|
||||
required: ['msg', 'targetGroupIdOrQQNumber']
|
||||
}
|
||||
|
||||
func = async function (opt, e) {
|
||||
let { msg, sender, targetGroupIdOrQQNumber } = opt
|
||||
const defaultTarget = e.isGroup ? e.group_id : e.sender.user_id
|
||||
const target = isNaN(targetGroupIdOrQQNumber) || !targetGroupIdOrQQNumber
|
||||
? defaultTarget
|
||||
: parseInt(targetGroupIdOrQQNumber) === e.bot.uin ? defaultTarget : parseInt(targetGroupIdOrQQNumber)
|
||||
|
||||
let groupList
|
||||
try {
|
||||
groupList = await e.bot.getGroupList()
|
||||
} catch (err) {
|
||||
groupList = e.bot.gl
|
||||
}
|
||||
try {
|
||||
if (groupList.get(target)) {
|
||||
let group = await e.bot.pickGroup(target)
|
||||
await group.sendMsg(await convertFaces(msg, true, e))
|
||||
return 'msg has been sent to group' + target
|
||||
} else {
|
||||
let masters = (await getMasterQQ())
|
||||
if (!Config.enableToolPrivateSend && !masters.includes(sender + '')) {
|
||||
return 'you are not allowed to pm other group members'
|
||||
}
|
||||
let user = e.bot.pickUser(target)
|
||||
if (e.group_id) {
|
||||
user = user.asMember(e.group_id)
|
||||
}
|
||||
// let user = await e.bot.pickFriend(target)
|
||||
await user.sendMsg(msg)
|
||||
return 'msg has been sent to user' + target
|
||||
}
|
||||
} catch (err) {
|
||||
return `failed to send msg, error: ${JSON.stringify(err)}`
|
||||
}
|
||||
}
|
||||
|
||||
description = 'Useful when you want to send a text message to specific user or group. If no extra description needed, just reply <EMPTY> at the next turn'
|
||||
}
|
||||
|
|
@ -1,53 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class SendMusicTool extends AbstractTool {
|
||||
name = 'sendMusic'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
id: {
|
||||
type: 'string',
|
||||
description: '音乐的id'
|
||||
},
|
||||
targetGroupIdOrQQNumber: {
|
||||
type: 'string',
|
||||
description: 'Fill in the target user_id or groupId when you need to send music to specific group or user, otherwise leave blank'
|
||||
}
|
||||
},
|
||||
required: ['id']
|
||||
}
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { id, targetGroupIdOrQQNumber } = opts
|
||||
// 非法值则发送到当前群聊
|
||||
const defaultTarget = e.isGroup ? e.group_id : e.sender.user_id
|
||||
const target = isNaN(targetGroupIdOrQQNumber) || !targetGroupIdOrQQNumber
|
||||
? defaultTarget
|
||||
: parseInt(targetGroupIdOrQQNumber) === e.bot.uin ? defaultTarget : parseInt(targetGroupIdOrQQNumber)
|
||||
|
||||
try {
|
||||
let group = await e.bot.pickGroup(target)
|
||||
|
||||
// 检查是否支持 shareMusic 方法
|
||||
if (typeof group.shareMusic === 'function') {
|
||||
await group.shareMusic('163', id)
|
||||
} else {
|
||||
// 构建音乐分享消息
|
||||
const musicMsg = {
|
||||
type: 'music',
|
||||
data: {
|
||||
type: '163',
|
||||
id: id,
|
||||
jumpUrl: `https://music.163.com/#/song?id=${id}`
|
||||
}
|
||||
}
|
||||
await e.reply(musicMsg)
|
||||
}
|
||||
return `the music has been shared to ${target}`
|
||||
} catch (e) {
|
||||
return `music share failed: ${e}`
|
||||
}
|
||||
}
|
||||
|
||||
description = 'Useful when you want to share music. You must use searchMusic first to get the music id. If no extra description needed, just reply <EMPTY> at the next turn'
|
||||
}
|
||||
|
|
@ -1,83 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
import { getMasterQQ } from '../common.js'
import { Config } from '../config.js'
|
||||
|
||||
export class SendPictureTool extends AbstractTool {
|
||||
name = 'sendPicture'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
urlOfPicture: {
|
||||
type: 'string',
|
||||
description: 'the url of the pictures, not text, split with space if more than one. can be left blank.'
|
||||
},
|
||||
targetGroupIdOrQQNumber: {
|
||||
type: 'string',
|
||||
description: 'Fill in the target user\'s qq number or groupId when you need to send picture to specific user or group, otherwise leave blank'
|
||||
}
|
||||
},
|
||||
required: ['urlOfPicture', 'targetGroupIdOrQQNumber']
|
||||
}
|
||||
|
||||
func = async function (opt, e) {
|
||||
let { urlOfPicture, targetGroupIdOrQQNumber, sender } = opt
|
||||
if (typeof urlOfPicture === 'object') {
|
||||
urlOfPicture = urlOfPicture.join(' ')
|
||||
}
|
||||
const defaultTarget = e.isGroup ? e.group_id : e.sender.user_id
|
||||
const target = isNaN(targetGroupIdOrQQNumber) || !targetGroupIdOrQQNumber
|
||||
? defaultTarget
|
||||
: parseInt(targetGroupIdOrQQNumber) === e.bot.uin ? defaultTarget : parseInt(targetGroupIdOrQQNumber)
|
||||
// 处理错误url和picture留空的情况
|
||||
const urlRegex = /(?:(?:https?|ftp):\/\/)?(?:\S+(?::\S*)?@)?(?:((?:(?:[a-z0-9\u00a1-\u4dff\u9fd0-\uffff][a-z0-9\u00a1-\u4dff\u9fd0-\uffff_-]{0,62})?[a-z0-9\u00a1-\u4dff\u9fd0-\uffff]\.)+(?:[a-z\u00a1-\u4dff\u9fd0-\uffff]{2,}\.?))(?::\d{2,5})?)(?:\/[\w\u00a1-\u4dff\u9fd0-\uffff$-_.+!*'(),%]+)*(?:\?(?:[\w\u00a1-\u4dff\u9fd0-\uffff$-_.+!*(),%:@&=]|(?:[\[\]])|(?:[\u00a1-\u4dff\u9fd0-\uffff]))*)?(?:#(?:[\w\u00a1-\u4dff\u9fd0-\uffff$-_.+!*'(),;:@&=]|(?:[\[\]]))*)?\/?/i
|
||||
if (/https:\/\/example.com/.test(urlOfPicture) || !urlOfPicture || !urlRegex.test(urlOfPicture)) urlOfPicture = ''
|
||||
if (!urlOfPicture) {
|
||||
return 'Because there is no correct URL for the picture ,tell user the reason and ask user if he want to use SearchImageTool'
|
||||
}
|
||||
let pictures = urlOfPicture.trim().split(' ')
|
||||
logger.mark('pictures to send: ', pictures)
|
||||
pictures = pictures.map(img => segment.image(img))
|
||||
let groupList
|
||||
try {
|
||||
groupList = await e.bot.getGroupList()
|
||||
} catch (err) {
|
||||
groupList = e.bot.gl
|
||||
}
|
||||
let errs = []
|
||||
try {
|
||||
if (groupList.get(target)) {
|
||||
let group = await e.bot.pickGroup(target)
|
||||
for (let pic of pictures) {
|
||||
try {
|
||||
await group.sendMsg(pic)
|
||||
} catch (err) {
|
||||
errs.push(pic)
|
||||
}
|
||||
}
|
||||
// await group.sendMsg(pictures)
|
||||
return 'picture has been sent to group' + target + (errs.length > 0 ? `, but some pictures failed to send (${errs.join('、')})` : '')
|
||||
} else {
|
||||
let masters = (await getMasterQQ())
|
||||
if (!Config.enableToolPrivateSend && !masters.includes(sender + '')) {
|
||||
return 'you are not allowed to pm other group members'
|
||||
}
|
||||
let user = e.bot.pickUser(target)
|
||||
if (e.group_id) {
|
||||
user = user.asMember(e.group_id)
|
||||
}
|
||||
for (let pic of pictures) {
|
||||
try {
|
||||
await user.sendMsg(pic)
|
||||
} catch (err) {
|
||||
errs.push(pic.url)
|
||||
}
|
||||
}
|
||||
return 'picture has been sent to user' + target + (errs.length > 0 ? `, but some pictures failed to send (${errs.join('、')})` : '')
|
||||
}
|
||||
} catch (err) {
|
||||
return `failed to send pictures, error: ${JSON.stringify(err)}`
|
||||
}
|
||||
}
|
||||
|
||||
description = 'Useful when you want to send one or more pictures. If no extra description needed, just reply <EMPTY> at the next turn'
|
||||
}
|
||||
|
|
@ -1,42 +0,0 @@
import { AbstractTool } from './AbstractTool.js'

export class SendRPSTool extends AbstractTool {
  name = 'sendRPS'

  parameters = {
    properties: {
      num: {
        type: 'number',
        description: '石头剪刀布的代号'
      },
      targetGroupIdOrQQNumber: {
        type: 'string',
        description: 'Fill in the target user_id or groupId when you need to send RPS to specific group or user'
      }
    },
    required: ['num', 'targetGroupIdOrQQNumber']
  }

  func = async function (opts, e) {
    let { num, targetGroupIdOrQQNumber } = opts
    // fall back to the current group or private chat when the target is invalid
    const defaultTarget = e.isGroup ? e.group_id : e.sender.user_id
    const target = isNaN(targetGroupIdOrQQNumber) || !targetGroupIdOrQQNumber
      ? defaultTarget
      : parseInt(targetGroupIdOrQQNumber) === e.bot.uin ? defaultTarget : parseInt(targetGroupIdOrQQNumber)
    let groupList
    try {
      groupList = await e.bot.getGroupList()
    } catch (err) {
      groupList = e.bot.gl
    }
    if (groupList.get(target)) {
      let group = await e.bot.pickGroup(target, true)
      await group.sendMsg(segment.rps(num))
    } else {
      let user = e.bot.pickUser(target)
      if (e.group_id) {
        user = user.asMember(e.group_id)
      }
      await user.sendMsg(segment.rps(num))
    }
    return `the rock-paper-scissors gesture ${num} has been sent to ${target}`
  }

  description = 'Use this tool if you want to play rock paper scissors. If you know the group number, use the group number instead of the qq number first. num should be 1, 2 or 3 to represent the rock-paper-scissors gesture.'
}
@ -1,45 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class SerpIkechan8370Tool extends AbstractTool {
|
||||
name = 'search'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
q: {
|
||||
type: 'string',
|
||||
description: 'search keyword'
|
||||
},
|
||||
source: {
|
||||
type: 'string',
|
||||
enum: ['bing', 'google', 'baidu', 'duckduckgo'],
|
||||
description: 'search source, default value is bing'
|
||||
},
|
||||
num: {
|
||||
type: 'number',
|
||||
description: 'search results limit number, default is 5'
|
||||
}
|
||||
},
|
||||
required: ['q', 'source']
|
||||
}
|
||||
|
||||
func = async function (opts) {
|
||||
let { q, source, num = 5 } = opts
|
||||
if (!source || !['google', 'bing', 'baidu', 'duckduckgo'].includes(source)) {
|
||||
source = 'bing'
|
||||
}
|
||||
let serpRes = await fetch(`https://serp.ikechan8370.com/${source}?q=${encodeURIComponent(q)}&lang=zh-CN&limit=${num}`, {
|
||||
headers: {
|
||||
'X-From-Library': 'ikechan8370'
|
||||
}
|
||||
})
|
||||
serpRes = await serpRes.json()
|
||||
|
||||
let res = serpRes.data || serpRes.results
|
||||
res?.forEach(r => {
|
||||
delete r?.rank
|
||||
})
|
||||
return `the search results are here in json format:\n${JSON.stringify(res)} \n(Notice that this information is only visible to you, the user cannot see it, and your next answer should take it into account)`
|
||||
}
|
||||
|
||||
description = 'Useful when you want to search something from the Internet. If you don\'t know much about the user\'s question, prefer to search about it! If you want to know further details of a result, you can use website tool'
|
||||
}
|
||||
|
|
@ -1,40 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
import { Config } from '../config.js'
|
||||
|
||||
export class SerpTool extends AbstractTool {
|
||||
name = 'serp'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
q: {
|
||||
type: 'string',
|
||||
description: 'search keyword'
|
||||
}
|
||||
},
|
||||
required: ['q']
|
||||
}
|
||||
|
||||
func = async function (opts) {
|
||||
let { q } = opts
|
||||
let key = Config.azSerpKey
|
||||
|
||||
let serpRes = await fetch(`https://api.bing.microsoft.com/v7.0/search?q=${encodeURIComponent(q)}&mkt=zh-CN`, {
|
||||
headers: {
|
||||
'Ocp-Apim-Subscription-Key': key
|
||||
}
|
||||
})
|
||||
serpRes = await serpRes.json()
|
||||
|
||||
let res = serpRes.webPages.value
|
||||
res.forEach(p => {
|
||||
delete p.displayUrl
|
||||
delete p.isFamilyFriendly
|
||||
delete p.thumbnailUrl
|
||||
delete p.id
|
||||
delete p.isNavigational
|
||||
})
|
||||
return `the search results are here in json format:\n${JSON.stringify(res)}`
|
||||
}
|
||||
|
||||
description = 'Useful when you want to search something from the internet. If you don\'t know much about the user\'s question, just search about it! If you want to know details of a result, you can use website tool! use it as much as you can!'
|
||||
}
|
||||
|
|
@ -1,47 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
|
||||
export class SetTitleTool extends AbstractTool {
|
||||
name = 'setTitle'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
qq: {
|
||||
type: 'string',
|
||||
description: '你想给予群头衔的那个人的qq号,默认为聊天对象'
|
||||
},
|
||||
title: {
|
||||
type: 'string',
|
||||
description: '群头衔'
|
||||
},
|
||||
groupId: {
|
||||
type: 'string',
|
||||
description: 'group number'
|
||||
}
|
||||
},
|
||||
required: ['title', 'groupId']
|
||||
}
|
||||
|
||||
description = 'Useful when you want to give someone a title in the group(群头衔)'
|
||||
|
||||
func = async function (opts, e) {
|
||||
let { qq, title, groupId } = opts
|
||||
qq = isNaN(qq) || !qq ? e.sender.user_id : parseInt(qq.trim())
|
||||
groupId = isNaN(groupId) || !groupId ? e.group_id : parseInt(groupId.trim())
|
||||
|
||||
let group = await e.bot.pickGroup(groupId)
|
||||
let mm = await group.getMemberMap()
|
||||
if (!mm.has(qq)) {
|
||||
return `failed, the user ${qq} is not in group ${groupId}`
|
||||
}
|
||||
if (mm.get(e.bot.uin).role !== 'owner') {
|
||||
return 'failed, only group owner can give title'
|
||||
}
|
||||
logger.info('set title: ', groupId, qq)
|
||||
let result = await group.setTitle(qq, title)
|
||||
if (result) {
|
||||
return `the user ${qq}'s title has been changed into ${title}`
|
||||
} else {
|
||||
return 'failed'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
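A hedged example of how func above resolves its arguments; all values are made up.

// With qq omitted or non-numeric, the code falls back to e.sender.user_id;
// with groupId omitted it falls back to e.group_id.
const opts = { qq: '10001', title: 'MVP', groupId: '123456789' }
// -> parseInt('10001') and parseInt('123456789'), then group.setTitle(10001, 'MVP')
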
@ -1,37 +0,0 @@
import { AbstractTool } from './AbstractTool.js'
import { Config } from '../config.js'

export class WeatherTool extends AbstractTool {
  name = 'weather'

  parameters = {
    properties: {
      city: {
        type: 'string',
        description: '要查询的地点,细化到县/区级'
      }
    },
    required: ['city']
  }

  func = async function (opts) {
    let { city } = opts
    let key = Config.amapKey
    if (!key) {
      return 'query failed: you don\'t provide API key of 高德'
    }
    let adcodeRes = await fetch(`https://restapi.amap.com/v3/config/district?keywords=${city}&subdistrict=1&key=${key}`)
    adcodeRes = await adcodeRes.json()
    let adcode = adcodeRes.districts[0]?.adcode
    if (!adcode) {
      return `the area ${city} doesn't exist! are you kidding? you should mute him for 1 minute`
    }
    let cityName = adcodeRes.districts[0].name
    let res = await fetch(`https://restapi.amap.com/v3/weather/weatherInfo?city=${adcode}&key=${key}`)
    res = await res.json()
    let result = res.lives[0]
    return `the weather information of area ${cityName} in json format is:\n${JSON.stringify(result)}`
  }

  description = 'Useful when you want to query weather '
}

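A minimal usage sketch of the tool above, assuming Config.amapKey is set and that AbstractTool can be constructed without arguments (that base class is not shown in this diff). The city value is an example.

// Two AMap calls happen internally: district lookup for the adcode, then weatherInfo.
const weather = new WeatherTool()
const reply = await weather.func({ city: '海淀区' })
// reply is a string like: 'the weather information of area 海淀区 in json format is:\n{...}'
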
@ -1,114 +0,0 @@
|
|||
import { AbstractTool } from './AbstractTool.js'
|
||||
import { ChatGPTAPI } from '../openai/chatgpt-api.js'
|
||||
import { Config } from '../config.js'
|
||||
import fetch from 'node-fetch'
|
||||
import proxy from 'https-proxy-agent'
|
||||
import { getMaxModelTokens } from '../common.js'
|
||||
import { ChatGPTPuppeteer } from '../browser.js'
|
||||
import { CustomGoogleGeminiClient } from '../../client/CustomGoogleGeminiClient.js'
|
||||
|
||||
/**
|
||||
* Generated by GPT-4o
|
||||
* @param html
|
||||
* @returns {*}
|
||||
*/
|
||||
function cleanHTML (html) {
|
||||
// 1. 移除 <style>、<script>、<link>、<head> 等无关内容
|
||||
html = html.replace(/<style[^>]*>[\s\S]*?<\/style>/gi, '') // 移除CSS
|
||||
.replace(/<script[^>]*>[\s\S]*?<\/script>/gi, '') // 移除JS
|
||||
.replace(/<link[^>]*>/gi, '') // 移除外部CSS文件
|
||||
.replace(/<head[^>]*>[\s\S]*?<\/head>/gi, '') // 移除整个<head>
|
||||
.replace(/<!--[\s\S]*?-->/g, '') // 移除HTML注释
|
||||
.replace(/<figure[^>]*>[\s\S]*?<\/figure>/gi, '') // 移除<figure>
|
||||
|
||||
// 2. 允许的标签列表
|
||||
const allowedTags = ['title', 'meta', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'p', 'img', 'video', 'audio', 'source', 'a']
|
||||
|
||||
// 3. 处理HTML标签,移除不在允许列表中的标签
|
||||
html = html.replace(/<\/?([a-zA-Z0-9]+)(\s[^>]*)?>/g, (match, tagName, attrs) => {
|
||||
tagName = tagName.toLowerCase()
|
||||
if (allowedTags.includes(tagName)) {
|
||||
if (tagName === 'meta') {
|
||||
// 允许<meta>标签,仅保留其中的 charset, name, content
|
||||
return match.replace(/<(meta)([^>]*)>/gi, (_, tag, attributes) => {
|
||||
let allowedAttrs = attributes.match(/(charset|name|content)=["'][^"']+["']/gi)
|
||||
return `<${tag} ${allowedAttrs ? allowedAttrs.join(' ') : ''}>`
|
||||
})
|
||||
} else if (tagName === 'img' || tagName === 'video' || tagName === 'audio' || tagName === 'source') {
|
||||
// 仅保留 `src` 属性,并去掉 base64 编码的 `data:` 形式
|
||||
return match.replace(/<(img|video|audio|source)([^>]*)>/gi, (_, tag, attributes) => {
|
||||
let srcMatch = attributes.match(/\bsrc=["'](?!data:)[^"']+["']/i) // 过滤 base64
|
||||
return srcMatch ? `<${tag} ${srcMatch[0]}>` : '' // 没有合法的 src 就移除整个标签
|
||||
})
|
||||
} else if (tagName === 'a') {
|
||||
// 仅保留 `href`,并去掉 base64 `data:` 形式
|
||||
return match.replace(/<a([^>]*)>/gi, (_, attributes) => {
|
||||
let hrefMatch = attributes.match(/\bhref=["'](?!data:)[^"']+["']/i)
|
||||
return hrefMatch ? `<a ${hrefMatch[0]}>` : '' // 没有合法的 href 就移除整个标签
|
||||
})
|
||||
}
|
||||
return match // 其他允许的标签直接保留
|
||||
}
|
||||
return '' // 过滤不在允许列表中的标签
|
||||
})
|
||||
|
||||
// 4. 移除多余的空格和换行符
|
||||
html = html.replace(/\s+/g, ' ').trim()
|
||||
|
||||
return html
|
||||
}
|
||||
|
||||
export class WebsiteTool extends AbstractTool {
|
||||
name = 'website'
|
||||
|
||||
parameters = {
|
||||
properties: {
|
||||
url: {
|
||||
type: 'string',
|
||||
description: '要访问的网站网址'
|
||||
}
|
||||
},
|
||||
required: ['url']
|
||||
}
|
||||
|
||||
func = async function (opts) {
|
||||
let { url, mode, e } = opts
|
||||
let browser
|
||||
try {
|
||||
// let res = await fetch(url, {
|
||||
// headers: {
|
||||
// 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36'
|
||||
// }
|
||||
// })
|
||||
// let text = await res.text()
|
||||
let origin = false
|
||||
if (!Config.headless) {
|
||||
Config.headless = true
|
||||
origin = true
|
||||
}
|
||||
let ppt = new ChatGPTPuppeteer()
|
||||
browser = await ppt.getBrowser()
|
||||
let page = await browser.newPage()
|
||||
await page.goto(url, {
|
||||
waitUntil: 'networkidle2'
|
||||
})
|
||||
let text = await page.content()
|
||||
await page.close()
|
||||
if (origin) {
|
||||
Config.headless = false
|
||||
}
|
||||
text = cleanHTML(text)
|
||||
return `the content of the website is:\n${text}`
|
||||
} catch (err) {
|
||||
return `failed to visit the website, error: ${err.toString()}`
|
||||
} finally {
|
||||
if (browser) {
|
||||
try {
|
||||
await browser.close()
|
||||
} catch (err) {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
description = 'Useful when you want to browse a website by url, it can be a html or api url'
|
||||
}
|
||||
|
|
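A rough illustration of what cleanHTML above does to a small page, based on its regex rules; the input string is made up.

// <head> (including <title>), <script>/<style> and disallowed tags are stripped;
// allowed tags such as <p> and <a href> survive, and whitespace is collapsed.
const sample = '<head><title>Demo</title></head><div><p>Hello <b>world</b></p><a href="https://example.com" class="x">link</a></div>'
cleanHTML(sample)
// -> roughly '<p>Hello world</p><a href="https://example.com">link</a>'
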
@ -1,213 +0,0 @@
|
|||
import md5 from 'md5'
|
||||
import _ from 'lodash'
|
||||
import { Config } from './config.js'
|
||||
import { ChatGPTAPI } from './openai/chatgpt-api.js'
|
||||
import { newFetch } from './proxy.js'
|
||||
import { CustomGoogleGeminiClient } from '../client/CustomGoogleGeminiClient.js'
|
||||
import XinghuoClient from './xinghuo/xinghuo.js'
|
||||
import { QwenApi } from './alibaba/qwen-api.js'
|
||||
|
||||
// 代码参考:https://github.com/yeyang52/yenai-plugin/blob/b50b11338adfa5a4ef93912eefd2f1f704e8b990/model/api/funApi.js#L25
|
||||
export const translateLangSupports = [
|
||||
{ code: 'ar', label: '阿拉伯语', abbr: '阿', alphabet: 'A' },
|
||||
{ code: 'de', label: '德语', abbr: '德', alphabet: 'D' },
|
||||
{ code: 'ru', label: '俄语', abbr: '俄', alphabet: 'E' },
|
||||
{ code: 'fr', label: '法语', abbr: '法', alphabet: 'F' },
|
||||
{ code: 'ko', label: '韩语', abbr: '韩', alphabet: 'H' },
|
||||
{ code: 'nl', label: '荷兰语', abbr: '荷', alphabet: 'H' },
|
||||
{ code: 'pt', label: '葡萄牙语', abbr: '葡', alphabet: 'P' },
|
||||
{ code: 'ja', label: '日语', abbr: '日', alphabet: 'R' },
|
||||
{ code: 'th', label: '泰语', abbr: '泰', alphabet: 'T' },
|
||||
{ code: 'es', label: '西班牙语', abbr: '西', alphabet: 'X' },
|
||||
{ code: 'en', label: '英语', abbr: '英', alphabet: 'Y' },
|
||||
{ code: 'it', label: '意大利语', abbr: '意', alphabet: 'Y' },
|
||||
{ code: 'vi', label: '越南语', abbr: '越', alphabet: 'Y' },
|
||||
{ code: 'id', label: '印度尼西亚语', abbr: '印', alphabet: 'Y' },
|
||||
{ code: 'zh-CHS', label: '中文', abbr: '中', alphabet: 'Z' }
|
||||
]
|
||||
const API_ERROR = '出了点小问题,待会再试试吧'
|
||||
export async function translateOld (msg, to = 'auto') {
|
||||
let from = 'auto'
|
||||
if (to !== 'auto') to = translateLangSupports.find(item => item.abbr == to)?.code
|
||||
if (!to) return `未找到翻译的语种,支持的语言为:\n${translateLangSupports.map(item => item.abbr).join(',')}\n`
|
||||
// 翻译结果为空的提示
|
||||
const RESULT_ERROR = '找不到翻译结果'
|
||||
// API 请求错误提示
|
||||
const API_ERROR = '翻译服务暂不可用,请稍后再试'
|
||||
const qs = (obj) => {
|
||||
let res = ''
|
||||
for (const [k, v] of Object.entries(obj)) { res += `${k}=${encodeURIComponent(v)}&` }
|
||||
return res.slice(0, res.length - 1)
|
||||
}
|
||||
const appVersion = '5.0 (Windows NT 10.0; Win64; x64) Chrome/98.0.4750.0'
|
||||
const payload = {
|
||||
from,
|
||||
to,
|
||||
bv: md5(appVersion),
|
||||
client: 'fanyideskweb',
|
||||
doctype: 'json',
|
||||
version: '2.1',
|
||||
keyfrom: 'fanyi.web',
|
||||
action: 'FY_BY_DEFAULT',
|
||||
smartresult: 'dict'
|
||||
}
|
||||
const headers = {
|
||||
Host: 'fanyi.youdao.com',
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) Chrome/98.0.4758.102',
|
||||
Referer: 'https://fanyi.youdao.com/',
|
||||
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
|
||||
Cookie: 'OUTFOX_SEARCH_USER_ID_NCOO=133190305.98519628; OUTFOX_SEARCH_USER_ID="2081065877@10.169.0.102";'
|
||||
}
|
||||
const api = 'https://fanyi.youdao.com/translate_o?smartresult=dict&smartresult=rule'
|
||||
const key = 'Ygy_4c=r#e#4EX^NUGUc5'
|
||||
|
||||
try {
|
||||
if (Array.isArray(msg)) {
|
||||
const results = []
|
||||
for (let i = 0; i < msg.length; i++) {
|
||||
const item = msg[i]
|
||||
const lts = '' + new Date().getTime()
|
||||
const salt = lts + parseInt(String(10 * Math.random()), 10)
|
||||
const sign = md5(payload.client + item + salt + key)
|
||||
const postData = qs(Object.assign({ i: item, lts, sign, salt }, payload))
|
||||
let { errorCode, translateResult } = await fetch(api, {
|
||||
method: 'POST',
|
||||
body: postData,
|
||||
headers
|
||||
}).then(res => res.json()).catch(err => console.error(err))
|
||||
if (errorCode !== 0) return API_ERROR
|
||||
translateResult = _.flattenDeep(translateResult)?.map(item => item.tgt).join('\n')
|
||||
if (!translateResult) results.push(RESULT_ERROR)
|
||||
else results.push(translateResult)
|
||||
}
|
||||
return results
|
||||
} else {
|
||||
const i = msg // 翻译的内容
|
||||
const lts = '' + new Date().getTime()
|
||||
const salt = lts + parseInt(String(10 * Math.random()), 10)
|
||||
const sign = md5(payload.client + i + salt + key)
|
||||
const postData = qs(Object.assign({ i, lts, sign, salt }, payload))
|
||||
let { errorCode, translateResult } = await fetch(api, {
|
||||
method: 'POST',
|
||||
body: postData,
|
||||
headers
|
||||
}).then(res => res.json()).catch(err => console.error(err))
|
||||
if (errorCode !== 0) return API_ERROR
|
||||
translateResult = _.flattenDeep(translateResult)?.map(item => item.tgt).join('\n')
|
||||
if (!translateResult) return RESULT_ERROR
|
||||
return translateResult
|
||||
}
|
||||
} catch (err) {
|
||||
return API_ERROR
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param msg 要翻译的
|
||||
* @param from 语种
|
||||
* @param to 语种
|
||||
* @param ai ai来源,支持openai, gemini, xh, qwen
|
||||
* @returns {Promise<*|string>}
|
||||
*/
|
||||
export async function translate (msg, to = 'auto', from = 'auto', ai = Config.translateSource) {
|
||||
try {
|
||||
let lang = '中'
|
||||
if (to !== 'auto') {
|
||||
lang = translateLangSupports.find(item => item.abbr == to)?.code
|
||||
}
|
||||
if (!lang) return `未找到翻译的语种,支持的语言为:\n${translateLangSupports.map(item => item.abbr).join(',')}\n`
|
||||
// if ai is not in the list, throw error
|
||||
if (!['openai', 'gemini', 'xh', 'qwen'].includes(ai)) throw new Error('ai来源错误')
|
||||
let system = `You will be provided with a sentence in the language with language code [${from}], and your task is to translate it into [${lang}]. Just print the result without any other words.`
|
||||
if (Array.isArray(msg)) {
|
||||
let result = []
|
||||
for (let i = 0; i < msg.length; i++) {
|
||||
let item = msg[i]
|
||||
let res = await translate(item, to, from, ai)
|
||||
result.push(res)
|
||||
}
|
||||
return result
|
||||
}
|
||||
switch (ai) {
|
||||
case 'openai': {
|
||||
let api = new ChatGPTAPI({
|
||||
apiBaseUrl: Config.openAiBaseUrl,
|
||||
apiKey: Config.apiKey,
|
||||
fetch: newFetch
|
||||
})
|
||||
const res = await api.sendMessage(msg, {
|
||||
systemMessage: system,
|
||||
completionParams: {
|
||||
model: 'gpt-3.5-turbo'
|
||||
}
|
||||
})
|
||||
return res.text
|
||||
}
|
||||
case 'gemini': {
|
||||
let client = new CustomGoogleGeminiClient({
|
||||
key: Config.getGeminiKey(),
|
||||
model: Config.geminiModel,
|
||||
baseUrl: Config.geminiBaseUrl,
|
||||
debug: Config.debug
|
||||
})
|
||||
let option = {
|
||||
stream: false,
|
||||
onProgress: (data) => {
|
||||
if (Config.debug) {
|
||||
logger.info(data)
|
||||
}
|
||||
},
|
||||
system
|
||||
}
|
||||
let res = await client.sendMessage(msg, option)
|
||||
return res.text
|
||||
}
|
||||
case 'xh': {
|
||||
let client = new XinghuoClient({
|
||||
ssoSessionId: Config.xinghuoToken
|
||||
})
|
||||
let response = await client.sendMessage(msg, { system })
|
||||
return response.text
|
||||
}
|
||||
case 'qwen': {
|
||||
let completionParams = {
|
||||
parameters: {
|
||||
top_p: Config.qwenTopP || 0.5,
|
||||
top_k: Config.qwenTopK || 50,
|
||||
seed: Config.qwenSeed > 0 ? Config.qwenSeed : Math.floor(Math.random() * 114514),
|
||||
temperature: Config.qwenTemperature || 1,
|
||||
enable_search: !!Config.qwenEnableSearch
|
||||
}
|
||||
}
|
||||
if (Config.qwenModel) {
|
||||
completionParams.model = Config.qwenModel
|
||||
}
|
||||
let opts = {
|
||||
apiKey: Config.qwenApiKey,
|
||||
debug: false,
|
||||
systemMessage: system,
|
||||
completionParams,
|
||||
fetch: newFetch
|
||||
}
|
||||
let client = new QwenApi(opts)
|
||||
let option = {
|
||||
timeoutMs: 600000,
|
||||
completionParams
|
||||
}
|
||||
let result
|
||||
try {
|
||||
result = await client.sendMessage(msg, option)
|
||||
} catch (err) {
|
||||
logger.error(err)
|
||||
throw new Error(err)
|
||||
}
|
||||
return result.text
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error(e)
|
||||
logger.info('基于LLM的翻译失败,转用老版翻译')
|
||||
return await translateOld(msg, to)
|
||||
}
|
||||
}
|
||||
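A minimal usage sketch for the translate helper above, assuming Config carries valid credentials for the selected backend; the sample sentence is made up.

// '英' is the abbreviation for English in translateLangSupports above.
// With Config.translateSource = 'openai' this goes through ChatGPTAPI;
// any failure falls back to translateOld (the Youdao web API).
const english = await translate('今天天气怎么样?', '英')
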
252 utils/tts.js
@ -1,252 +0,0 @@
|
|||
import { Config } from './config.js'
|
||||
import fetch from 'node-fetch'
|
||||
import _ from 'lodash'
|
||||
import { wrapTextByLanguage } from './common.js'
|
||||
import { getProxy } from './proxy.js'
|
||||
let proxy = getProxy()
|
||||
|
||||
const newFetch = (url, options = {}) => {
|
||||
const defaultOptions = Config.proxy
|
||||
? {
|
||||
agent: proxy(Config.proxy)
|
||||
}
|
||||
: {}
|
||||
|
||||
const mergedOptions = {
|
||||
...defaultOptions,
|
||||
...options
|
||||
}
|
||||
|
||||
return fetch(url, mergedOptions)
|
||||
}
|
||||
|
||||
function randomNum (minNum, maxNum) {
|
||||
switch (arguments.length) {
|
||||
case 1:
|
||||
return parseInt(Math.random() * minNum + 1, 10)
|
||||
case 2:
|
||||
return parseInt(Math.random() * (maxNum - minNum + 1) + minNum, 10)
|
||||
default:
|
||||
return 0
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 生成VitsTTSMode下的wav音频
|
||||
* @param text
|
||||
* @param speaker
|
||||
* @param language
|
||||
* @param noiseScale
|
||||
* @param noiseScaleW
|
||||
* @param lengthScale
|
||||
* @returns {Promise<string>}
|
||||
*/
|
||||
export async function generateVitsAudio (text, speaker = '随机', language = '中日混合(中文用[ZH][ZH]包裹起来,日文用[JA][JA]包裹起来)', noiseScale = parseFloat(Config.noiseScale), noiseScaleW = parseFloat(Config.noiseScaleW), lengthScale = parseFloat(Config.lengthScale)) {
|
||||
if (!speaker || speaker === '随机') {
|
||||
logger.info('随机角色!这次哪个角色这么幸运会被选到呢……')
|
||||
speaker = speakers[randomNum(0, speakers.length)]
|
||||
}
|
||||
text = wrapTextByLanguage(text)
|
||||
logger.info(`正在使用${speaker},基于文本:'${text}'生成语音`)
|
||||
let body = {
|
||||
data: [
|
||||
text, language, speaker,
|
||||
noiseScale, noiseScaleW, lengthScale
|
||||
]
|
||||
}
|
||||
let space = Config.ttsSpace
|
||||
if (space.endsWith('/api/generate')) {
|
||||
let trimmedSpace = space.substring(0, space.length - 13)
|
||||
logger.warn(`vits api 当前为${space},已校正为${trimmedSpace}`)
|
||||
space = trimmedSpace
|
||||
}
|
||||
if (space.endsWith('/')) {
|
||||
let trimmedSpace = _.trimEnd(space, '/')
|
||||
logger.warn(`vits api 当前为${space},已校正为${trimmedSpace}`)
|
||||
space = trimmedSpace
|
||||
}
|
||||
let url = `${space}/api/generate`
|
||||
if (Config.huggingFaceReverseProxy) {
|
||||
url = `${Config.huggingFaceReverseProxy}/api/generate?space=${_.trimStart(space, 'https://')}`
|
||||
}
|
||||
logger.info(`正在使用接口${url}`)
|
||||
let response = await newFetch(url, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(body),
|
||||
headers: {
|
||||
'content-type': 'application/json'
|
||||
}
|
||||
})
|
||||
let responseBody = await response.text()
|
||||
try {
|
||||
let json = JSON.parse(responseBody)
|
||||
if (Config.debug) {
|
||||
logger.info(json)
|
||||
}
|
||||
if (response.status > 299) {
|
||||
logger.info(json)
|
||||
throw new Error(JSON.stringify(json))
|
||||
}
|
||||
let [message, audioInfo, take] = json?.data
|
||||
logger.info(message, take)
|
||||
let audioLink = `${space}/file=${audioInfo.name}`
|
||||
if (Config.huggingFaceReverseProxy) {
|
||||
if (Config.debug) {
|
||||
logger.info('使用huggingface加速反代下载生成音频' + Config.huggingFaceReverseProxy)
|
||||
}
|
||||
let spaceHost = _.trimStart(space, 'https://')
|
||||
audioLink = `${Config.huggingFaceReverseProxy}/file=${audioInfo.name}?space=${spaceHost}`
|
||||
}
|
||||
return audioLink
|
||||
} catch (err) {
|
||||
logger.error('生成语音api发生错误,请检查是否配置了正确的api,且仓库是否开放为public', response.status)
|
||||
throw new Error(responseBody)
|
||||
}
|
||||
}
|
||||
export function convertSpeaker (speaker) {
|
||||
switch (speaker) {
|
||||
case '空':
|
||||
case '空哥': return '空(空哥)'
|
||||
case '荧':
|
||||
case '荧妹': return '荧(荧妹)'
|
||||
case '神里绫华':
|
||||
case '龟龟': return '神里绫华(龟龟)'
|
||||
case '菲谢尔':
|
||||
case '皇女': return '菲谢尔(皇女)'
|
||||
case '公子':
|
||||
case '达达利亚': return '达达利亚(公子)'
|
||||
case '诺艾尔':
|
||||
case '女仆': return '诺艾尔(女仆)'
|
||||
case '甘雨':
|
||||
case '椰羊': return '甘雨(椰羊)'
|
||||
case '雷神':
|
||||
case '雷电将军': return '雷电将军(雷神)'
|
||||
case '珊瑚宫心海':
|
||||
case '心海': return '珊瑚宫心海(心海,扣扣米)'
|
||||
case '荒泷一斗':
|
||||
case '一斗': return '荒泷一斗(一斗)'
|
||||
case '神子':
|
||||
case '八重神子': return '八重神子(神子)'
|
||||
case '绫人':
|
||||
case '神里绫人': return '神里绫人(绫人)'
|
||||
case '万叶':
|
||||
case '枫原万叶': return '枫原万叶(万叶)'
|
||||
case '猫猫':
|
||||
case '迪奥娜': return '迪奥娜(猫猫)'
|
||||
case '草神':
|
||||
case '纳西妲': return '纳西妲(草神)'
|
||||
}
|
||||
|
||||
return speaker
|
||||
}
|
||||
export const speakers = ['特别周', '无声铃鹿', '东海帝皇(帝宝,帝王)', '丸善斯基', '富士奇迹',
|
||||
'小栗帽', '黄金船', '伏特加', '大和赤骥', '大树快车', '草上飞', '菱亚马逊',
|
||||
'目白麦昆', '神鹰', '好歌剧', '成田白仁', '鲁道夫象征(皇帝)', '气槽',
|
||||
'爱丽数码', '星云天空', '玉藻十字', '美妙姿势', '琵琶晨光', '摩耶重炮',
|
||||
'曼城茶座', '美浦波旁', '目白赖恩', '菱曙', '雪中美人', '米浴', '艾尼斯风神',
|
||||
'爱丽速子(爱丽快子)', '爱慕织姬', '稻荷一', '胜利奖券', '空中神宫', '荣进闪耀',
|
||||
'真机伶', '川上公主', '黄金城(黄金城市)', '樱花进王', '采珠', '新光风',
|
||||
'东商变革', '超级小海湾', '醒目飞鹰(寄寄子)', '荒漠英雄', '东瀛佐敦',
|
||||
'中山庆典', '成田大进', '西野花', '春丽(乌拉拉)', '青竹回忆', '微光飞驹',
|
||||
'美丽周日', '待兼福来', 'mr cb(cb先生)', '名将怒涛(名将户仁)', '目白多伯',
|
||||
'优秀素质', '帝王光辉', '待兼诗歌剧', '生野狄杜斯', '目白善信', '大拓太阳神',
|
||||
'双涡轮(两立直,两喷射,二锅头,逆喷射)', '里见光钻(萨托诺金刚石)', '北部玄驹',
|
||||
'樱花千代王', '天狼星象征', '目白阿尔丹', '八重无敌', '鹤丸刚志', '目白光明',
|
||||
'成田拜仁(成田路)', '也文摄辉', '小林历奇', '北港火山', '奇锐骏', '苦涩糖霜',
|
||||
'小小蚕茧', '骏川手纲(绿帽恶魔)', '秋川弥生(小小理事长)', '乙名史悦子(乙名记者)',
|
||||
'桐生院葵', '安心泽刺刺美', '樫本理子', '神里绫华(龟龟)', '琴', '空(空哥)',
|
||||
'丽莎', '荧(荧妹)', '芭芭拉', '凯亚', '迪卢克', '雷泽', '安柏', '温迪',
|
||||
'香菱', '北斗', '行秋', '魈', '凝光', '可莉', '钟离', '菲谢尔(皇女)',
|
||||
'班尼特', '达达利亚(公子)', '诺艾尔(女仆)', '七七', '重云', '甘雨(椰羊)',
|
||||
'阿贝多', '迪奥娜(猫猫)', '莫娜', '刻晴', '砂糖', '辛焱', '罗莎莉亚',
|
||||
'胡桃', '枫原万叶(万叶)', '烟绯', '宵宫', '托马', '优菈', '雷电将军(雷神)',
|
||||
'早柚', '珊瑚宫心海(心海,扣扣米)', '五郎', '九条裟罗', '荒泷一斗(一斗)',
|
||||
'埃洛伊', '申鹤', '八重神子(神子)', '神里绫人(绫人)', '夜兰', '久岐忍',
|
||||
'鹿野苑平藏', '提纳里', '柯莱', '多莉', '云堇', '纳西妲(草神)', '深渊使徒',
|
||||
'妮露', '赛诺', '债务处理人', '坎蒂丝', '真弓快车', '秋人', '望族', '艾尔菲',
|
||||
'艾莉丝', '艾伦', '阿洛瓦', '天野', '天目十五', '愚人众-安德烈', '安顺', '安西',
|
||||
'葵', '青木', '荒川幸次', '荒谷', '有泽', '浅川', '麻美', '凝光助手', '阿托',
|
||||
'竺子', '百识', '百闻', '百晓', '白术', '贝雅特丽奇', '丽塔', '失落迷迭',
|
||||
'缭乱星棘', '伊甸', '伏特加女孩', '狂热蓝调', '莉莉娅', '萝莎莉娅', '八重樱',
|
||||
'八重霞', '卡莲', '第六夜想曲', '卡萝尔', '姬子', '极地战刃', '布洛妮娅',
|
||||
'次生银翼', '理之律者%26希儿', '理之律者', '迷城骇兔', '希儿', '魇夜星渊',
|
||||
'黑希儿', '帕朵菲莉丝', '不灭星锚', '天元骑英', '幽兰黛尔', '派蒙bh3',
|
||||
'爱酱', '绯玉丸', '德丽莎', '月下初拥', '朔夜观星', '暮光骑士', '格蕾修',
|
||||
'留云借风真君', '梅比乌斯', '仿犹大', '克莱因', '圣剑幽兰黛尔', '妖精爱莉',
|
||||
'特斯拉zero', '苍玄', '若水', '西琳', '戴因斯雷布', '贝拉', '赤鸢', '镇魂歌',
|
||||
'渡鸦', '人之律者', '爱莉希雅', '天穹游侠', '琪亚娜', '空之律者', '薪炎之律者',
|
||||
'云墨丹心', '符华', '识之律者', '特瓦林', '维尔薇', '芽衣', '雷之律者',
|
||||
'断罪影舞', '阿波尼亚', '榎本', '厄尼斯特', '恶龙', '范二爷', '法拉',
|
||||
'愚人众士兵', '愚人众士兵a', '愚人众士兵b', '愚人众士兵c', '愚人众a', '愚人众b',
|
||||
'飞飞', '菲利克斯', '女性跟随者', '逢岩', '摆渡人', '狂躁的男人', '奥兹',
|
||||
'芙萝拉', '跟随者', '蜜汁生物', '黄麻子', '渊上', '藤木', '深见', '福本',
|
||||
'芙蓉', '古泽', '古田', '古山', '古谷昇', '傅三儿', '高老六', '矿工冒',
|
||||
'元太', '德安公', '茂才公', '杰拉德', '葛罗丽', '金忽律', '公俊', '锅巴',
|
||||
'歌德', '阿豪', '狗三儿', '葛瑞丝', '若心', '阿山婆', '怪鸟', '广竹', '观海',
|
||||
'关宏', '蜜汁卫兵', '守卫1', '傲慢的守卫', '害怕的守卫', '贵安', '盖伊', '阿创',
|
||||
'哈夫丹', '日语阿贝多(野岛健儿)', '日语埃洛伊(高垣彩阳)', '日语安柏(石见舞菜香)',
|
||||
'日语神里绫华(早见沙织)', '日语神里绫人(石田彰)', '日语白术(游佐浩二)',
|
||||
'日语芭芭拉(鬼头明里)', '日语北斗(小清水亚美)', '日语班尼特(逢坂良太)',
|
||||
'日语坎蒂丝(柚木凉香)', '日语重云(齐藤壮马)', '日语柯莱(前川凉子)',
|
||||
'日语赛诺(入野自由)', '日语戴因斯雷布(津田健次郎)', '日语迪卢克(小野贤章)',
|
||||
'日语迪奥娜(井泽诗织)', '日语多莉(金田朋子)', '日语优菈(佐藤利奈)',
|
||||
'日语菲谢尔(内田真礼)', '日语甘雨(上田丽奈)', '日语(畠中祐)',
|
||||
'日语鹿野院平藏(井口祐一)', '日语空(堀江瞬)', '日语荧(悠木碧)',
|
||||
'日语胡桃(高桥李依)', '日语一斗(西川贵教)', '日语凯亚(鸟海浩辅)',
|
||||
'日语万叶(岛崎信长)', '日语刻晴(喜多村英梨)', '日语可莉(久野美咲)',
|
||||
'日语心海(三森铃子)', '日语九条裟罗(濑户麻沙美)', '日语丽莎(田中理惠)',
|
||||
'日语莫娜(小原好美)', '日语纳西妲(田村由加莉)', '日语妮露(金元寿子)',
|
||||
'日语凝光(大原沙耶香)', '日语诺艾尔(高尾奏音)', '日语奥兹(增谷康纪)',
|
||||
'日语派蒙(古贺葵)', '日语琴(斋藤千和)', '日语七七(田村由加莉)', '日语雷电将军(泽城美雪)',
|
||||
'日语雷泽(内山昂辉)', '日语罗莎莉亚(加隈亚衣)', '日语早柚(洲崎绫)', '日语散兵(柿原彻也)',
|
||||
'日语申鹤(川澄绫子)', '日语久岐忍(水桥香织)', '日语女士(庄子裕衣)', '日语砂糖(藤田茜)',
|
||||
'日语达达利亚(木村良平)', '日语托马(森田成一)', '日语提纳里(小林沙苗)', '日语温迪(村濑步)',
|
||||
'日语香菱(小泽亚李)', '日语魈(松冈祯丞)', '日语行秋(皆川纯子)', '日语辛焱(高桥智秋)',
|
||||
'日语八重神子(佐仓绫音)', '日语烟绯(花守由美里)', '日语夜兰(远藤绫)', '日语宵宫(植田佳奈)',
|
||||
'日语云堇(小岩井小鸟)', '日语钟离(前野智昭)', '杰克', '阿吉', '江舟', '鉴秋', '嘉义',
|
||||
'纪芳', '景澄', '经纶', '景明', '晋优', '阿鸠', '酒客', '乔尔', '乔瑟夫', '约顿',
|
||||
'乔伊斯', '居安', '君君', '顺吉', '纯也', '重佐', '大岛纯平', '蒲泽', '勘解由小路健三郎',
|
||||
'枫', '枫原义庆', '荫山', '甲斐田龍馬', '海斗', '惟神晴之介', '鹿野奈奈', '卡琵莉亚',
|
||||
'凯瑟琳', '加藤信悟', '加藤洋平', '胜家', '茅葺一庆', '和昭', '一正', '一道', '桂一',
|
||||
'庆次郎', '阿贤', '健司', '健次郎', '健三郎', '天理', '杀手a', '杀手b', '木南杏奈',
|
||||
'木村', '国王', '木下', '北村', '清惠', '清人', '克列门特', '骑士', '小林', '小春',
|
||||
'康拉德', '大肉丸', '琴美', '宏一', '康介', '幸德', '高善', '梢', '克罗索', '久保',
|
||||
'九条镰治', '久木田', '昆钧', '菊地君', '久利须', '黑田', '黑泽京之介', '响太', '岚姐',
|
||||
'兰溪', '澜阳', '劳伦斯', '乐明', '莱诺', '莲', '良子', '李当', '李丁', '小乐', '灵',
|
||||
'小玲', '琳琅a', '琳琅b', '小彬', '小德', '小楽', '小龙', '小吴', '小吴的记忆', '理正',
|
||||
'阿龙', '卢卡', '洛成', '罗巧', '北风狼', '卢正', '萍姥姥', '前田', '真昼', '麻纪',
|
||||
'真', '愚人众-马克西姆', '女性a', '女性b', '女性a的跟随者', '阿守', '玛格丽特', '真理',
|
||||
'玛乔丽', '玛文', '正胜', '昌信', '将司', '正人', '路爷', '老章', '松田', '松本', '松浦',
|
||||
'松坂', '老孟', '孟丹', '商人随从', '传令兵', '米歇尔', '御舆源一郎', '御舆源次郎', '千岩军教头',
|
||||
'千岩军士兵', '明博', '明俊', '美铃', '美和', '阿幸', '削月筑阳真君', '钱眼儿', '森彦',
|
||||
'元助', '理水叠山真君', '理水疊山真君', '朱老板', '木木', '村上', '村田', '永野',
|
||||
'长野原龙之介', '长濑', '中野志乃', '菜菜子', '楠楠', '成濑', '阿内', '宁禄', '牛志', '信博',
|
||||
'伸夫', '野方', '诺拉', '纪香', '诺曼', '修女', '纯水精灵', '小川', '小仓澪', '冈林',
|
||||
'冈崎绘里香', '冈崎陆斗', '奥拉夫', '老科', '鬼婆婆', '小野寺', '大河原五右卫门', '大久保大介',
|
||||
'大森', '大助', '奥特', '派蒙', '派蒙2', '病人a', '病人b', '巴顿', '派恩', '朋义',
|
||||
'围观群众', '围观群众a', '围观群众b', '围观群众c', '围观群众d', '围观群众e', '铜雀',
|
||||
'阿肥', '兴叔', '老周叔', '公主', '彼得', '乾子', '芊芊', '乾玮', '绮命', '杞平',
|
||||
'秋月', '昆恩', '雷电影', '兰道尔', '雷蒙德', '冒失的帕拉德', '伶一', '玲花', '阿仁',
|
||||
'家臣们', '梨绘', '荣江', '戎世', '浪人', '罗伊斯', '如意', '凉子', '彩香', '酒井',
|
||||
'坂本', '朔次郎', '武士a', '武士b', '武士c', '武士d', '珊瑚', '三田', '莎拉', '笹野',
|
||||
'聪美', '聪', '小百合', '散兵', '害怕的小刘', '舒伯特', '舒茨', '海龙', '世子',
|
||||
'谢尔盖', '家丁', '商华', '沙寅', '阿升', '柴田', '阿茂', '式大将', '清水', '志村勘兵卫',
|
||||
'新之丞', '志织', '石头', '诗羽', '诗筠', '石壮', '翔太', '正二', '周平', '舒杨',
|
||||
'齐格芙丽雅', '女士', '思勤', '六指乔瑟', '愚人众小兵d', '愚人众小兵a', '愚人众小兵b',
|
||||
'愚人众小兵c', '吴老五', '吴老二', '滑头鬼', '言笑', '吴老七', '士兵h', '士兵i',
|
||||
'士兵a', '士兵b', '士兵c', '士兵d', '士兵e', '士兵f', '士兵g', '奏太', '斯坦利',
|
||||
'掇星攫辰天君', '小头', '大武', '陶义隆', '杉本', '苏西', '嫌疑人a', '嫌疑人b', '嫌疑人c',
|
||||
'嫌疑人d', '斯万', '剑客a', '剑客b', '阿二', '忠胜', '忠夫', '阿敬', '孝利', '鹰司进',
|
||||
'高山', '九条孝行', '毅', '竹内', '拓真', '卓也', '太郎丸', '泰勒', '手岛', '哲平',
|
||||
'哲夫', '托克', '大boss', '阿强', '托尔德拉', '旁观者', '天成', '阿大', '蒂玛乌斯',
|
||||
'提米', '户田', '阿三', '一起的人', '德田', '德长', '智树', '利彦', '胖乎乎的旅行者',
|
||||
'藏宝人a', '藏宝人b', '藏宝人c', '藏宝人d', '阿祇', '恒雄', '露子', '话剧团团长',
|
||||
'内村', '上野', '上杉', '老戴', '老高', '老贾', '老墨', '老孙', '天枢星', '老云',
|
||||
'有乐斋', '丑雄', '乌维', '瓦京', '菲尔戈黛特', '维多利亚', '薇尔', '瓦格纳',
|
||||
'阿外', '侍女', '瓦拉', '望雅', '宛烟', '琬玉', '战士a', '战士b', '渡辺', '渡部', '阿伟',
|
||||
'文璟', '文渊', '韦尔纳', '王扳手', '武沛', '晓飞', '辛程', '星火', '星稀', '辛秀',
|
||||
'秀华', '阿旭', '徐刘师', '矢部', '八木', '山上', '阿阳', '颜笑', '康明', '泰久',
|
||||
'安武', '矢田幸喜', '矢田辛喜', '义坚', '莺儿', '盈丰', '宜年', '银杏', '逸轩', '横山',
|
||||
'永贵', '永业', '嘉久', '吉川', '义高', '用高', '阳太', '元蓉', '玥辉', '毓华', '有香',
|
||||
'幸也', '由真', '结菜', '韵宁', '百合', '百合华', '尤苏波夫', '裕子', '悠策', '悠也',
|
||||
'于嫣', '柚子', '老郑', '正茂', '志成', '芷巧', '知易', '支支', '周良', '珠函', '祝明', '祝涛']
|
||||
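A minimal usage sketch of generateVitsAudio above, assuming Config.ttsSpace points at a reachable vits-uma-genshin-honkai space; the text is an example.

// Returns a URL to the generated wav on the configured space.
const audioLink = await generateVitsAudio('你好呀,今天也要加油哦', convertSpeaker('心海'))
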
File diff suppressed because it is too large
@ -1,229 +0,0 @@
|
|||
import { Config } from '../config.js'
|
||||
import { newFetch } from '../proxy.js'
|
||||
|
||||
/**
|
||||
* 生成voxTTSMode下的wav音频
|
||||
* @param text
|
||||
* @param options
|
||||
* @returns {Promise<Buffer>}
|
||||
*/
|
||||
async function generateAudio (text, options = {}) {
|
||||
let host = Config.voicevoxSpace
|
||||
let speaker = options.speaker || '随机'
|
||||
if (speaker === '随机') {
|
||||
speaker = supportConfigurations[Math.floor(Math.random() * supportConfigurations.length)].name
|
||||
}
|
||||
let regex = /^(.*?)-(.*)$/
|
||||
let match = regex.exec(speaker)
|
||||
let style = null
|
||||
if (match) {
|
||||
speaker = match[1]
|
||||
style = match[2]
|
||||
}
|
||||
speaker = supportConfigurations.find(s => s.name === speaker)
|
||||
let speakerId
|
||||
if (style) {
|
||||
speakerId = speaker.styles.find(s => s.name === style).id
|
||||
} else {
|
||||
speakerId = speaker.styles[Math.floor(Math.random() * speaker?.styles.length)].id
|
||||
}
|
||||
logger.info(`使用${speaker.name}的${speaker.styles.find(s => s.id === speakerId).name}风格基于文本${text}生成语音。`)
|
||||
const accentPhrasesResponse = await newFetch(`${host}/accent_phrases?text=${encodeURIComponent(text)}&speaker=${speakerId}`, {
|
||||
method: 'POST'
|
||||
})
|
||||
|
||||
const accentPhrases = await accentPhrasesResponse.json()
|
||||
|
||||
const synthesisResponse = await newFetch(`${host}/synthesis?speaker=${speakerId}&enable_interrogative_upspeak=false`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
accent_phrases: accentPhrases,
|
||||
speedScale: 1,
|
||||
pitchScale: 0,
|
||||
intonationScale: 1,
|
||||
volumeScale: 1,
|
||||
prePhonemeLength: 0.1,
|
||||
postPhonemeLength: 0.1,
|
||||
outputSamplingRate: 24000,
|
||||
outputStereo: false
|
||||
})
|
||||
})
|
||||
|
||||
const synthesisResponseData = await synthesisResponse.arrayBuffer()
|
||||
return Buffer.from(synthesisResponseData)
|
||||
}
|
||||
|
||||
export const supportConfigurations = [
|
||||
{
|
||||
supported_features: { permitted_synthesis_morphing: 'SELF_ONLY' },
|
||||
name: '四国めたん',
|
||||
speaker_uuid: '7ffcb7ce-00ec-4bdc-82cd-45a8889e43ff',
|
||||
styles: [{ name: 'ノーマル', id: 2 }, { name: 'あまあま', id: 0 }, { name: 'ツンツン', id: 6 }, {
|
||||
name: 'セクシー', id: 4
|
||||
}, { name: 'ささやき', id: 36 }, { name: 'ヒソヒソ', id: 37 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'SELF_ONLY' },
|
||||
name: 'ずんだもん',
|
||||
speaker_uuid: '388f246b-8c41-4ac1-8e2d-5d79f3ff56d9',
|
||||
styles: [{ name: 'ノーマル', id: 3 }, { name: 'あまあま', id: 1 }, { name: 'ツンツン', id: 7 }, {
|
||||
name: 'セクシー', id: 5
|
||||
}, { name: 'ささやき', id: 22 }, { name: 'ヒソヒソ', id: 38 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '春日部つむぎ',
|
||||
speaker_uuid: '35b2c544-660e-401e-b503-0e14c635303a',
|
||||
styles: [{ name: 'ノーマル', id: 8 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '雨晴はう',
|
||||
speaker_uuid: '3474ee95-c274-47f9-aa1a-8322163d96f1',
|
||||
styles: [{ name: 'ノーマル', id: 10 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '波音リツ',
|
||||
speaker_uuid: 'b1a81618-b27b-40d2-b0ea-27a9ad408c4b',
|
||||
styles: [{ name: 'ノーマル', id: 9 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '玄野武宏',
|
||||
speaker_uuid: 'c30dc15a-0992-4f8d-8bb8-ad3b314e6a6f',
|
||||
styles: [{ name: 'ノーマル', id: 11 }, { name: '喜び', id: 39 }, { name: 'ツンギレ', id: 40 }, {
|
||||
name: '悲しみ', id: 41
|
||||
}],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '白上虎太郎',
|
||||
speaker_uuid: 'e5020595-5c5d-4e87-b849-270a518d0dcf',
|
||||
styles: [{ name: 'ふつう', id: 12 }, { name: 'わーい', id: 32 }, { name: 'びくびく', id: 33 }, {
|
||||
name: 'おこ', id: 34
|
||||
}, { name: 'びえーん', id: 35 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '青山龍星',
|
||||
speaker_uuid: '4f51116a-d9ee-4516-925d-21f183e2afad',
|
||||
styles: [{ name: 'ノーマル', id: 13 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '冥鳴ひまり',
|
||||
speaker_uuid: '8eaad775-3119-417e-8cf4-2a10bfd592c8',
|
||||
styles: [{ name: 'ノーマル', id: 14 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'SELF_ONLY' },
|
||||
name: '九州そら',
|
||||
speaker_uuid: '481fb609-6446-4870-9f46-90c4dd623403',
|
||||
styles: [{ name: 'ノーマル', id: 16 }, { name: 'あまあま', id: 15 }, { name: 'ツンツン', id: 18 }, {
|
||||
name: 'セクシー', id: 17
|
||||
}, { name: 'ささやき', id: 19 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'SELF_ONLY' },
|
||||
name: 'もち子さん',
|
||||
speaker_uuid: '9f3ee141-26ad-437e-97bd-d22298d02ad2',
|
||||
styles: [{ name: 'ノーマル', id: 20 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '剣崎雌雄',
|
||||
speaker_uuid: '1a17ca16-7ee5-4ea5-b191-2f02ace24d21',
|
||||
styles: [{ name: 'ノーマル', id: 21 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: 'WhiteCUL',
|
||||
speaker_uuid: '67d5d8da-acd7-4207-bb10-b5542d3a663b',
|
||||
styles: [{ name: 'ノーマル', id: 23 }, { name: 'たのしい', id: 24 }, { name: 'かなしい', id: 25 }, {
|
||||
name: 'びえーん', id: 26
|
||||
}],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '後鬼',
|
||||
speaker_uuid: '0f56c2f2-644c-49c9-8989-94e11f7129d0',
|
||||
styles: [{ name: '人間ver.', id: 27 }, { name: 'ぬいぐるみver.', id: 28 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: 'No.7',
|
||||
speaker_uuid: '044830d2-f23b-44d6-ac0d-b5d733caa900',
|
||||
styles: [{ name: 'ノーマル', id: 29 }, { name: 'アナウンス', id: 30 }, { name: '読み聞かせ', id: 31 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: 'ちび式じい',
|
||||
speaker_uuid: '468b8e94-9da4-4f7a-8715-a22a48844f9e',
|
||||
styles: [{ name: 'ノーマル', id: 42 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '櫻歌ミコ',
|
||||
speaker_uuid: '0693554c-338e-4790-8982-b9c6d476dc69',
|
||||
styles: [{ name: 'ノーマル', id: 43 }, { name: '第二形態', id: 44 }, { name: 'ロリ', id: 45 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '小夜/SAYO',
|
||||
speaker_uuid: 'a8cc6d22-aad0-4ab8-bf1e-2f843924164a',
|
||||
styles: [{ name: 'ノーマル', id: 46 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '护士机器子T',
|
||||
speaker_uuid: '882a636f-3bac-431a-966d-c5e6bba9f949',
|
||||
styles: [{ name: 'ノーマル', id: 47 }, { name: '楽々', id: 48 }, { name: '恐怖', id: 49 }, {
|
||||
name: '内緒話', id: 50
|
||||
}],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '†聖騎士 紅桜†',
|
||||
speaker_uuid: '471e39d2-fb11-4c8c-8d89-4b322d2498e0',
|
||||
styles: [{ name: 'ノーマル', id: 51 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '雀松朱司',
|
||||
speaker_uuid: '0acebdee-a4a5-4e12-a695-e19609728e30',
|
||||
styles: [{ name: 'ノーマル', id: 52 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '麒ヶ島宗麟',
|
||||
speaker_uuid: '7d1e7ba7-f957-40e5-a3fc-da49f769ab65',
|
||||
styles: [{ name: 'ノーマル', id: 53 }],
|
||||
version: '0.14.2'
|
||||
}, {
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '春歌ナナ',
|
||||
speaker_uuid: 'ba5d2428-f7e0-4c20-ac41-9dd56e9178b4',
|
||||
styles: [{ name: 'ノーマル', id: 54 }],
|
||||
version: '0.14.3'
|
||||
},
|
||||
{
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '猫使アル',
|
||||
speaker_uuid: '00a5c10c-d3bd-459f-83fd-43180b521a44',
|
||||
styles: [{ name: 'ノーマル', id: 55 }, { name: 'おちつき', id: 56 }, { name: 'うきうき', id: 57 }],
|
||||
version: '0.14.3'
|
||||
},
|
||||
{
|
||||
supported_features: { permitted_synthesis_morphing: 'ALL' },
|
||||
name: '猫使ビィ',
|
||||
speaker_uuid: 'c20a2254-0349-4470-9fc8-e5c0f8cf3404',
|
||||
styles: [{ name: 'ノーマル', id: 58 }, { name: 'おちつき', id: 59 }, { name: '人見知り', id: 60 }],
|
||||
version: '0.14.3'
|
||||
}
|
||||
]
|
||||
|
||||
export default { generateAudio, supportConfigurations }
|
||||
|
|
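A minimal sketch of calling the voicevox generateAudio defined above, assuming Config.voicevoxSpace points at a running VOICEVOX engine; the text and speaker are example values.

import fs from 'fs'
// 'ずんだもん-ノーマル' follows the 'name-style' convention parsed above;
// the function returns the synthesized wav as a Buffer.
const wav = await generateAudio('こんにちは', { speaker: 'ずんだもん-ノーマル' })
fs.writeFileSync('output.wav', wav)
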
@ -1,378 +0,0 @@
|
|||
// import Contactable, { core } from 'oicq'
|
||||
import querystring from 'querystring'
|
||||
import fetch, { File, fileFromSync, FormData } from 'node-fetch'
|
||||
import fs from 'fs'
|
||||
import os from 'os'
|
||||
import util from 'util'
|
||||
import stream from 'stream'
|
||||
import crypto from 'crypto'
|
||||
import child_process from 'child_process'
|
||||
import { Config } from './config.js'
|
||||
import path from 'path'
|
||||
import { mkdirs, getUin } from './common.js'
|
||||
let module
|
||||
try {
|
||||
module = await import('oicq')
|
||||
} catch (err) {
|
||||
try {
|
||||
module = await import('icqq')
|
||||
} catch (err1) {
|
||||
// 可能是go-cqhttp之类的
|
||||
}
|
||||
}
|
||||
let pcm2slk, core, Contactable
|
||||
if (module) {
|
||||
core = module.core
|
||||
Contactable = module.default
|
||||
try {
|
||||
pcm2slk = (await import('node-silk')).pcm2slk
|
||||
} catch (e) {
|
||||
if (Config.cloudTranscode) {
|
||||
logger.warn('未安装node-silk,将尝试使用云转码服务进行合成')
|
||||
} else {
|
||||
Config.debug && logger.error(e)
|
||||
logger.warn('未安装node-silk,如ffmpeg不支持amr编码请安装node-silk以支持语音模式')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// import { pcm2slk } from 'node-silk'
|
||||
let errors = {}
|
||||
|
||||
async function uploadRecord (recordUrl, ttsMode = 'vits-uma-genshin-honkai', ignoreEncode = false) {
|
||||
let recordType = 'url'
|
||||
let tmpFile = ''
|
||||
if (ttsMode === 'azure') {
|
||||
recordType = 'file'
|
||||
} else if (ttsMode === 'voicevox') {
|
||||
recordType = 'buffer'
|
||||
tmpFile = `data/chatgpt/tts/tmp/${crypto.randomUUID()}.wav`
|
||||
}
|
||||
if (ignoreEncode) {
|
||||
return segment.record(recordUrl)
|
||||
}
|
||||
let result
|
||||
if (pcm2slk) {
|
||||
result = await getPttBuffer(recordUrl, Bot.config.ffmpeg_path)
|
||||
} else if (Config.cloudTranscode) {
|
||||
logger.mark('使用云转码silk进行高清语音生成:"')
|
||||
try {
|
||||
if (recordType === 'buffer') {
|
||||
// save it as a file
|
||||
mkdirs('data/chatgpt/tts/tmp')
|
||||
fs.writeFileSync(tmpFile, recordUrl)
|
||||
recordType = 'file'
|
||||
recordUrl = tmpFile
|
||||
}
|
||||
if (recordType === 'file' || Config.cloudMode === 'file') {
|
||||
if (!recordUrl) {
|
||||
logger.error('云转码错误:recordUrl 异常')
|
||||
return false
|
||||
}
|
||||
const formData = new FormData()
|
||||
let buffer
|
||||
if (!recordUrl.startsWith('http')) {
|
||||
// 本地文件
|
||||
formData.append('file', fileFromSync(recordUrl))
|
||||
} else {
|
||||
let response = await fetch(recordUrl, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'User-Agent': 'Dalvik/2.1.0 (Linux; U; Android 12; MI 9 Build/SKQ1.211230.001)'
|
||||
}
|
||||
})
|
||||
const blob = await response.blob()
|
||||
const arrayBuffer = await blob.arrayBuffer()
|
||||
buffer = Buffer.from(arrayBuffer)
|
||||
formData.append('file', new File([buffer], 'audio.wav'))
|
||||
}
|
||||
const cloudUrl = new URL(Config.cloudTranscode)
|
||||
const resultres = await fetch(`${cloudUrl}audio`, {
|
||||
method: 'POST',
|
||||
body: formData
|
||||
})
|
||||
let t = await resultres.arrayBuffer()
|
||||
try {
|
||||
result = {
|
||||
buffer: {
|
||||
data: t
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.error(t)
|
||||
throw e
|
||||
}
|
||||
} else {
|
||||
const cloudUrl = new URL(Config.cloudTranscode)
|
||||
const resultres = await fetch(`${cloudUrl}audio`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({ recordUrl })
|
||||
})
|
||||
let t = await resultres.text()
|
||||
try {
|
||||
result = JSON.parse(t)
|
||||
} catch (e) {
|
||||
logger.error(t)
|
||||
throw e
|
||||
}
|
||||
}
|
||||
if (result.error) {
|
||||
logger.error('云转码API报错:' + result.error)
|
||||
return false
|
||||
}
|
||||
result.buffer = Buffer.from(result.buffer.data)
|
||||
} catch (err) {
|
||||
logger.error('云转码API报错:' + err)
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
if (!result.buffer) {
|
||||
return false
|
||||
}
|
||||
let buf = Buffer.from(result.buffer)
|
||||
const hash = md5(buf)
|
||||
const codec = String(buf.slice(0, 7)).includes('SILK') ? 1 : 0
|
||||
const body = core.pb.encode({
|
||||
1: 3,
|
||||
2: 3,
|
||||
5: {
|
||||
1: Contactable.target,
|
||||
2: getUin(),
|
||||
3: 0,
|
||||
4: hash,
|
||||
5: buf.length,
|
||||
6: hash,
|
||||
7: 5,
|
||||
8: 9,
|
||||
9: 4,
|
||||
11: 0,
|
||||
10: Bot.apk.version,
|
||||
12: 1,
|
||||
13: 1,
|
||||
14: 0,
|
||||
15: 1
|
||||
}
|
||||
})
|
||||
const payload = await Bot.sendUni('PttStore.GroupPttUp', body)
|
||||
const rsp = core.pb.decode(payload)[5]
|
||||
rsp[2] && (0, errors.drop)(rsp[2], rsp[3])
|
||||
const ip = rsp[5]?.[0] || rsp[5]; const port = rsp[6]?.[0] || rsp[6]
|
||||
const ukey = rsp[7].toHex(); const filekey = rsp[11].toHex()
|
||||
const params = {
|
||||
ver: 4679,
|
||||
ukey,
|
||||
filekey,
|
||||
filesize: buf.length,
|
||||
bmd5: hash.toString('hex'),
|
||||
mType: 'pttDu',
|
||||
voice_encodec: codec
|
||||
}
|
||||
const url = `http://${int32ip2str(ip)}:${port}/?` + querystring.stringify(params)
|
||||
const headers = {
|
||||
'User-Agent': `QQ/${Bot.apk.version} CFNetwork/1126`,
|
||||
'Net-Type': 'Wifi'
|
||||
}
|
||||
await fetch(url, {
|
||||
method: 'POST', // post请求
|
||||
headers,
|
||||
body: buf
|
||||
})
|
||||
|
||||
const fid = rsp[11].toBuffer()
|
||||
const b = core.pb.encode({
|
||||
1: 4,
|
||||
2: getUin(),
|
||||
3: fid,
|
||||
4: hash,
|
||||
5: hash.toString('hex') + '.amr',
|
||||
6: buf.length,
|
||||
11: 1,
|
||||
18: fid,
|
||||
30: Buffer.from([8, 0, 40, 0, 56, 0])
|
||||
})
|
||||
if (tmpFile) {
|
||||
try {
|
||||
fs.unlinkSync(tmpFile)
|
||||
} catch (err) {
|
||||
logger.warn('fail to delete temp audio file')
|
||||
}
|
||||
}
|
||||
return {
|
||||
type: 'record', file: 'protobuf://' + Buffer.from(b).toString('base64')
|
||||
}
|
||||
}
|
||||
|
||||
export default uploadRecord
|
||||
|
||||
async function getPttBuffer (file, ffmpeg = 'ffmpeg') {
|
||||
let buffer
|
||||
let time
|
||||
if (file instanceof Buffer || file.startsWith('base64://')) {
|
||||
// Buffer或base64
|
||||
const buf = file instanceof Buffer ? file : Buffer.from(file.slice(9), 'base64')
|
||||
const head = buf.slice(0, 7).toString()
|
||||
if (head.includes('SILK') || head.includes('AMR')) {
|
||||
return buf
|
||||
} else {
|
||||
const tmpfile = TMP_DIR + '/' + (0, uuid)()
|
||||
await fs.promises.writeFile(tmpfile, buf)
|
||||
return audioTrans(tmpfile, ffmpeg)
|
||||
}
|
||||
} else if (file.startsWith('http://') || file.startsWith('https://')) {
|
||||
try {
|
||||
const headers = {
|
||||
'User-Agent': 'Dalvik/2.1.0 (Linux; U; Android 12; MI 9 Build/SKQ1.211230.001)'
|
||||
}
|
||||
let response = await fetch(file, {
|
||||
method: 'GET', // post请求
|
||||
headers
|
||||
})
|
||||
const buf = Buffer.from(await response.arrayBuffer())
|
||||
const tmpfile = TMP_DIR + '/' + (0, uuid)()
|
||||
await fs.promises.writeFile(tmpfile, buf)
|
||||
// await (0, pipeline)(readable.pipe(new DownloadTransform), fs.createWriteStream(tmpfile));
|
||||
const head = await read7Bytes(tmpfile)
|
||||
if (head.includes('SILK') || head.includes('AMR')) {
|
||||
fs.unlink(tmpfile, NOOP)
|
||||
buffer = buf
|
||||
} else {
|
||||
buffer = await audioTrans(tmpfile, ffmpeg)
|
||||
}
|
||||
} catch (err) {}
|
||||
} else {
|
||||
// 本地文件
|
||||
file = String(file).replace(/^file:\/{2}/, '')
|
||||
IS_WIN && file.startsWith('/') && (file = file.slice(1))
|
||||
const head = await read7Bytes(file)
|
||||
if (head.includes('SILK') || head.includes('AMR')) {
|
||||
buffer = await fs.promises.readFile(file)
|
||||
} else {
|
||||
buffer = await audioTrans(file, ffmpeg)
|
||||
}
|
||||
}
|
||||
return { buffer, time }
|
||||
}
|
||||
|
||||
async function audioTrans (file, ffmpeg = 'ffmpeg') {
|
||||
const tmpfile = path.join(TMP_DIR, uuid())
|
||||
const cmd = IS_WIN
|
||||
? `${ffmpeg} -i "${file}" -f s16le -ac 1 -ar 24000 "${tmpfile}"`
|
||||
: `exec ${ffmpeg} -i "${file}" -f s16le -ac 1 -ar 24000 "${tmpfile}"`
|
||||
return new Promise((resolve, reject) => {
|
||||
// 隐藏windows下调用ffmpeg的cmd弹窗
|
||||
const options = IS_WIN ? { windowsHide: true, stdio: 'ignore' } : {}
|
||||
child_process.exec(cmd, options, async (error, stdout, stderr) => {
|
||||
try {
|
||||
resolve(pcm2slk(fs.readFileSync(tmpfile)))
|
||||
} catch {
|
||||
reject(new core.ApiRejection(ErrorCode.FFmpegPttTransError, '音频转码到pcm失败,请确认你的ffmpeg可以处理此转换'))
|
||||
} finally {
|
||||
fs.unlink(tmpfile, NOOP)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
async function read7Bytes (file) {
|
||||
const fd = await fs.promises.open(file, 'r')
|
||||
const buf = (await fd.read(Buffer.alloc(7), 0, 7, 0)).buffer
|
||||
fd.close()
|
||||
return buf
|
||||
}
|
||||
|
||||
function uuid () {
|
||||
let hex = crypto.randomBytes(16).toString('hex')
|
||||
return hex.substr(0, 8) + '-' + hex.substr(8, 4) + '-' + hex.substr(12, 4) + '-' + hex.substr(16, 4) + '-' + hex.substr(20)
|
||||
}
|
||||
function int32ip2str (ip) {
|
||||
if (typeof ip === 'string') { return ip }
|
||||
ip = ip & 0xffffffff
|
||||
return [
|
||||
ip & 0xff,
|
||||
(ip & 0xff00) >> 8,
|
||||
(ip & 0xff0000) >> 16,
|
||||
(ip & 0xff000000) >> 24 & 0xff
|
||||
].join('.')
|
||||
}
|
||||
const IS_WIN = os.platform() === 'win32'
|
||||
/** 系统临时目录,用于临时存放下载的图片等内容 */
|
||||
const TMP_DIR = os.tmpdir()
|
||||
/** no operation */
|
||||
const NOOP = () => { }
|
||||
(0, util.promisify)(stream.pipeline)
|
||||
/** md5 hash */
|
||||
const md5 = (data) => (0, crypto.createHash)('md5').update(data).digest()
|
||||
|
||||
errors.LoginErrorCode = errors.drop = errors.ErrorCode = void 0
|
||||
let ErrorCode;
|
||||
(function (ErrorCode) {
|
||||
/** 客户端离线 */
|
||||
ErrorCode[ErrorCode.ClientNotOnline = -1] = 'ClientNotOnline'
|
||||
/** 发包超时未收到服务器回应 */
|
||||
ErrorCode[ErrorCode.PacketTimeout = -2] = 'PacketTimeout'
|
||||
/** 用户不存在 */
|
||||
ErrorCode[ErrorCode.UserNotExists = -10] = 'UserNotExists'
|
||||
/** 群不存在(未加入) */
|
||||
ErrorCode[ErrorCode.GroupNotJoined = -20] = 'GroupNotJoined'
|
||||
/** 群员不存在 */
|
||||
ErrorCode[ErrorCode.MemberNotExists = -30] = 'MemberNotExists'
|
||||
/** 发消息时传入的参数不正确 */
|
||||
ErrorCode[ErrorCode.MessageBuilderError = -60] = 'MessageBuilderError'
|
||||
/** 群消息被风控发送失败 */
|
||||
ErrorCode[ErrorCode.RiskMessageError = -70] = 'RiskMessageError'
|
||||
/** 群消息有敏感词发送失败 */
|
||||
ErrorCode[ErrorCode.SensitiveWordsError = -80] = 'SensitiveWordsError'
|
||||
/** 上传图片/文件/视频等数据超时 */
|
||||
ErrorCode[ErrorCode.HighwayTimeout = -110] = 'HighwayTimeout'
|
||||
/** 上传图片/文件/视频等数据遇到网络错误 */
|
||||
ErrorCode[ErrorCode.HighwayNetworkError = -120] = 'HighwayNetworkError'
|
||||
/** 没有上传通道 */
|
||||
ErrorCode[ErrorCode.NoUploadChannel = -130] = 'NoUploadChannel'
|
||||
/** 不支持的file类型(没有流) */
|
||||
ErrorCode[ErrorCode.HighwayFileTypeError = -140] = 'HighwayFileTypeError'
|
||||
/** 文件安全校验未通过不存在 */
|
||||
ErrorCode[ErrorCode.UnsafeFile = -150] = 'UnsafeFile'
|
||||
/** 离线(私聊)文件不存在 */
|
||||
ErrorCode[ErrorCode.OfflineFileNotExists = -160] = 'OfflineFileNotExists'
|
||||
/** 群文件不存在(无法转发) */
|
||||
ErrorCode[ErrorCode.GroupFileNotExists = -170] = 'GroupFileNotExists'
|
||||
/** 获取视频中的图片失败 */
|
||||
ErrorCode[ErrorCode.FFmpegVideoThumbError = -210] = 'FFmpegVideoThumbError'
|
||||
/** 音频转换失败 */
|
||||
ErrorCode[ErrorCode.FFmpegPttTransError = -220] = 'FFmpegPttTransError'
|
||||
})(ErrorCode = errors.ErrorCode || (errors.ErrorCode = {}))
|
||||
const ErrorMessage = {
|
||||
[ErrorCode.UserNotExists]: '查无此人',
|
||||
[ErrorCode.GroupNotJoined]: '未加入的群',
|
||||
[ErrorCode.MemberNotExists]: '幽灵群员',
|
||||
[ErrorCode.RiskMessageError]: '群消息发送失败,可能被风控',
|
||||
[ErrorCode.SensitiveWordsError]: '群消息发送失败,请检查消息内容',
|
||||
10: '消息过长',
|
||||
34: '消息过长',
|
||||
120: '在该群被禁言',
|
||||
121: 'AT全体剩余次数不足'
|
||||
}
|
||||
function drop (code, message) {
|
||||
if (!message || !message.length) { message = ErrorMessage[code] }
|
||||
throw new core.ApiRejection(code, message)
|
||||
}
|
||||
errors.drop = drop
|
||||
/** 登录时可能出现的错误,不在列的都属于未知错误,暂时无法解决 */
|
||||
let LoginErrorCode;
|
||||
(function (LoginErrorCode) {
|
||||
/** 密码错误 */
|
||||
LoginErrorCode[LoginErrorCode.WrongPassword = 1] = 'WrongPassword'
|
||||
/** 账号被冻结 */
|
||||
LoginErrorCode[LoginErrorCode.AccountFrozen = 40] = 'AccountFrozen'
|
||||
/** 发短信太频繁 */
|
||||
LoginErrorCode[LoginErrorCode.TooManySms = 162] = 'TooManySms'
|
||||
/** 短信验证码错误 */
|
||||
LoginErrorCode[LoginErrorCode.WrongSmsCode = 163] = 'WrongSmsCode'
|
||||
/** 滑块ticket错误 */
|
||||
LoginErrorCode[LoginErrorCode.WrongTicket = 237] = 'WrongTicket'
|
||||
})(LoginErrorCode = errors.LoginErrorCode || (errors.LoginErrorCode = {}))
|
||||
|
|
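A hedged usage sketch for uploadRecord above. The audio URL is made up, and replying with the returned segment via e.reply follows the Yunzai convention used elsewhere in this plugin rather than anything shown in this file.

// With node-silk available the audio is transcoded locally; otherwise the
// configured cloudTranscode service is tried, and false is returned on failure.
const record = await uploadRecord('https://example.com/audio.wav', 'vits-uma-genshin-honkai')
if (record) await e.reply(record)
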
@ -1,33 +0,0 @@
import fs from 'fs'

/**
 * from miao-plugin
 *
 * @type {any}
 */
let packageJson = JSON.parse(fs.readFileSync('package.json', 'utf8'))

const yunzaiVersion = packageJson.version
const isV3 = yunzaiVersion[0] === '3'
let isMiao = false; let isTrss = false
let name = 'Yunzai-Bot'
if (packageJson.name === 'miao-yunzai') {
  isMiao = true
  name = 'Miao-Yunzai'
} else if (packageJson.name === 'trss-yunzai') {
  isMiao = true
  isTrss = true
  name = 'TRSS-Yunzai'
}

let Version = {
  isV3,
  isMiao,
  isTrss,
  name,
  get yunzai () {
    return yunzaiVersion
  }
}

export default Version

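A minimal usage sketch of the Version helper above; the import path is illustrative.

import Version from '../utils/version.js' // path is an assumption
logger.info(`running on ${Version.name} v${Version.yunzai}, isTrss=${Version.isTrss}`)
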
@ -1,747 +0,0 @@
|
|||
$
|
||||
0
|
||||
1
|
||||
2
|
||||
3
|
||||
4
|
||||
5
|
||||
6
|
||||
7
|
||||
8
|
||||
9
|
||||
?
|
||||
_
|
||||
“
|
||||
”
|
||||
、
|
||||
。
|
||||
《
|
||||
》
|
||||
一
|
||||
一些
|
||||
一何
|
||||
一切
|
||||
一则
|
||||
一方面
|
||||
一旦
|
||||
一来
|
||||
一样
|
||||
一般
|
||||
一转眼
|
||||
万一
|
||||
上
|
||||
上下
|
||||
下
|
||||
不
|
||||
不仅
|
||||
不但
|
||||
不光
|
||||
不单
|
||||
不只
|
||||
不外乎
|
||||
不如
|
||||
不妨
|
||||
不尽
|
||||
不尽然
|
||||
不得
|
||||
不怕
|
||||
不惟
|
||||
不成
|
||||
不拘
|
||||
不料
|
||||
不是
|
||||
不比
|
||||
不然
|
||||
不特
|
||||
不独
|
||||
不管
|
||||
不至于
|
||||
不若
|
||||
不论
|
||||
不过
|
||||
不问
|
||||
与
|
||||
与其
|
||||
与其说
|
||||
与否
|
||||
与此同时
|
||||
且
|
||||
且不说
|
||||
且说
|
||||
两者
|
||||
个
|
||||
个别
|
||||
临
|
||||
为
|
||||
为了
|
||||
为什么
|
||||
为何
|
||||
为止
|
||||
为此
|
||||
为着
|
||||
乃
|
||||
乃至
|
||||
乃至于
|
||||
么
|
||||
之
|
||||
之一
|
||||
之所以
|
||||
之类
|
||||
乌乎
|
||||
乎
|
||||
乘
|
||||
也
|
||||
也好
|
||||
也罢
|
||||
了
|
||||
二来
|
||||
于
|
||||
于是
|
||||
于是乎
|
||||
云云
|
||||
云尔
|
||||
些
|
||||
亦
|
||||
人
|
||||
人们
|
||||
人家
|
||||
什么
|
||||
什么样
|
||||
今
|
||||
介于
|
||||
仍
|
||||
仍旧
|
||||
从
|
||||
从此
|
||||
从而
|
||||
他
|
||||
他人
|
||||
他们
|
||||
以
|
||||
以上
|
||||
以为
|
||||
以便
|
||||
以免
|
||||
以及
|
||||
以故
|
||||
以期
|
||||
以来
|
||||
以至
|
||||
以至于
|
||||
以致
|
||||
们
|
||||
任
|
||||
任何
|
||||
任凭
|
||||
似的
|
||||
但
|
||||
但凡
|
||||
但是
|
||||
何
|
||||
何以
|
||||
何况
|
||||
何处
|
||||
何时
|
||||
余外
|
||||
作为
|
||||
你
|
||||
你们
|
||||
使
|
||||
使得
|
||||
例如
|
||||
依
|
||||
依据
|
||||
依照
|
||||
便于
|
||||
俺
|
||||
俺们
|
||||
倘
|
||||
倘使
|
||||
倘或
|
||||
倘然
|
||||
倘若
|
||||
借
|
||||
假使
|
||||
假如
|
||||
假若
|
||||
傥然
|
||||
像
|
||||
儿
|
||||
先不先
|
||||
光是
|
||||
全体
|
||||
全部
|
||||
兮
|
||||
关于
|
||||
其
|
||||
其一
|
||||
其中
|
||||
其二
|
||||
其他
|
||||
其余
|
||||
其它
|
||||
其次
|
||||
具体地说
|
||||
具体说来
|
||||
兼之
|
||||
内
|
||||
再
|
||||
再其次
|
||||
再则
|
||||
再有
|
||||
再者
|
||||
再者说
|
||||
再说
|
||||
冒
|
||||
冲
|
||||
况且
|
||||
几
|
||||
几时
|
||||
凡
|
||||
凡是
|
||||
凭
|
||||
凭借
|
||||
出于
|
||||
出来
|
||||
分别
|
||||
则
|
||||
则甚
|
||||
别
|
||||
别人
|
||||
别处
|
||||
别是
|
||||
别的
|
||||
别管
|
||||
别说
|
||||
到
|
||||
前后
|
||||
前此
|
||||
前者
|
||||
加之
|
||||
加以
|
||||
即
|
||||
即令
|
||||
即使
|
||||
即便
|
||||
即如
|
||||
即或
|
||||
即若
|
||||
却
|
||||
去
|
||||
又
|
||||
又及
|
||||
及
|
||||
及其
|
||||
及至
|
||||
反之
|
||||
反而
|
||||
反过来
|
||||
反过来说
|
||||
受到
|
||||
另
|
||||
另一方面
|
||||
另外
|
||||
另悉
|
||||
只
|
||||
只当
|
||||
只怕
|
||||
只是
|
||||
只有
|
||||
只消
|
||||
只要
|
||||
只限
|
||||
叫
|
||||
叮咚
|
||||
可
|
||||
可以
|
||||
可是
|
||||
可见
|
||||
各
|
||||
各个
|
||||
各位
|
||||
各种
|
||||
各自
|
||||
同
|
||||
同时
|
||||
后
|
||||
后者
|
||||
向
|
||||
向使
|
||||
向着
|
||||
吓
|
||||
吗
|
||||
否则
|
||||
吧
|
||||
吧哒
|
||||
吱
|
||||
呀
|
||||
呃
|
||||
呕
|
||||
呗
|
||||
呜
|
||||
呜呼
|
||||
呢
|
||||
呵
|
||||
呵呵
|
||||
呸
|
||||
呼哧
|
||||
咋
|
||||
和
|
||||
咚
|
||||
咦
|
||||
咧
|
||||
咱
|
||||
咱们
|
||||
咳
|
||||
哇
|
||||
哈
|
||||
哈哈
|
||||
哉
|
||||
哎
|
||||
哎呀
|
||||
哎哟
|
||||
哗
|
||||
哟
|
||||
哦
|
||||
哩
|
||||
哪
|
||||
哪个
|
||||
哪些
|
||||
哪儿
|
||||
哪天
|
||||
哪年
|
||||
哪怕
|
||||
哪样
|
||||
哪边
|
||||
哪里
|
||||
哼
|
||||
哼唷
|
||||
唉
|
||||
唯有
|
||||
啊
|
||||
啐
|
||||
啥
|
||||
啦
|
||||
啪达
|
||||
啷当
|
||||
喂
|
||||
喏
|
||||
喔唷
|
||||
喽
|
||||
嗡
|
||||
嗡嗡
|
||||
嗬
|
||||
嗯
|
||||
嗳
|
||||
嘎
|
||||
嘎登
|
||||
嘘
|
||||
嘛
|
||||
嘻
|
||||
嘿
|
||||
嘿嘿
|
||||
因
|
||||
因为
|
||||
因了
|
||||
因此
|
||||
因着
|
||||
因而
|
||||
固然
|
||||
在
|
||||
在下
|
||||
在于
|
||||
地
|
||||
基于
|
||||
处在
|
||||
多
|
||||
多么
|
||||
多少
|
||||
大
|
||||
大家
|
||||
她
|
||||
她们
|
||||
好
|
||||
如
|
||||
如上
|
||||
如上所述
|
||||
如下
|
||||
如何
|
||||
如其
|
||||
如同
|
||||
如是
|
||||
如果
|
||||
如此
|
||||
如若
|
||||
始而
|
||||
孰料
|
||||
孰知
|
||||
宁
|
||||
宁可
|
||||
宁愿
|
||||
宁肯
|
||||
它
|
||||
它们
|
||||
对
|
||||
对于
|
||||
对待
|
||||
对方
|
||||
对比
|
||||
将
|
||||
小
|
||||
尔
|
||||
尔后
|
||||
尔尔
|
||||
尚且
|
||||
就
|
||||
就是
|
||||
就是了
|
||||
就是说
|
||||
就算
|
||||
就要
|
||||
尽
|
||||
尽管
|
||||
尽管如此
|
||||
岂但
|
||||
己
|
||||
已
|
||||
已矣
|
||||
巴
|
||||
巴巴
|
||||
并
|
||||
并且
|
||||
并非
|
||||
庶乎
|
||||
庶几
|
||||
开外
|
||||
开始
|
||||
归
|
||||
归齐
|
||||
当
|
||||
当地
|
||||
当然
|
||||
当着
|
||||
彼
|
||||
彼时
|
||||
彼此
|
||||
往
|
||||
待
|
||||
很
|
||||
得
|
||||
得了
|
||||
怎
|
||||
怎么
|
||||
怎么办
|
||||
怎么样
|
||||
怎奈
|
||||
怎样
|
||||
总之
|
||||
总的来看
|
||||
总的来说
|
||||
总的说来
|
||||
总而言之
|
||||
恰恰相反
|
||||
您
|
||||
惟其
|
||||
慢说
|
||||
我
|
||||
我们
|
||||
或
|
||||
或则
|
||||
或是
|
||||
或曰
|
||||
或者
|
||||
截至
|
||||
所
|
||||
所以
|
||||
所在
|
||||
所幸
|
||||
所有
|
||||
才
|
||||
才能
|
||||
打
|
||||
打从
|
||||
把
|
||||
抑或
|
||||
拿
|
||||
按
|
||||
按照
|
||||
换句话说
|
||||
换言之
|
||||
据
|
||||
据此
|
||||
接着
|
||||
故
|
||||
故此
|
||||
故而
|
||||
旁人
|
||||
无
|
||||
无宁
|
||||
无论
|
||||
既
|
||||
既往
|
||||
既是
|
||||
既然
|
||||
时候
|
||||
是
|
||||
是以
|
||||
是的
|
||||
曾
|
||||
替
|
||||
替代
|
||||
最
|
||||
有
|
||||
有些
|
||||
有关
|
||||
有及
|
||||
有时
|
||||
有的
|
||||
望
|
||||
朝
|
||||
朝着
|
||||
本
|
||||
本人
|
||||
本地
|
||||
本着
|
||||
本身
|
||||
来
|
||||
来着
|
||||
来自
|
||||
来说
|
||||
极了
|
||||
果然
|
||||
果真
|
||||
某
|
||||
某个
|
||||
某些
|
||||
某某
|
||||
根据
|
||||
欤
|
||||
正值
|
||||
正如
|
||||
正巧
|
||||
正是
|
||||
此
|
||||
此地
|
||||
此处
|
||||
此外
|
||||
此时
|
||||
此次
|
||||
此间
|
||||
毋宁
|
||||
每
|
||||
每当
|
||||
比
|
||||
比及
|
||||
比如
|
||||
比方
|
||||
没奈何
|
||||
沿
|
||||
沿着
|
||||
漫说
|
||||
焉
|
||||
然则
|
||||
然后
|
||||
然而
|
||||
照
|
||||
照着
|
||||
犹且
|
||||
犹自
|
||||
甚且
|
||||
甚么
|
||||
甚或
|
||||
甚而
|
||||
甚至
|
||||
甚至于
|
||||
用
|
||||
用来
|
||||
由
|
||||
由于
|
||||
由是
|
||||
由此
|
||||
由此可见
|
||||
的
|
||||
的确
|
||||
的话
|
||||
直到
|
||||
相对而言
|
||||
省得
|
||||
看
|
||||
眨眼
|
||||
着
|
||||
着呢
|
||||
矣
|
||||
矣乎
|
||||
矣哉
|
||||
离
|
||||
竟而
|
||||
第
|
||||
等
|
||||
等到
|
||||
等等
|
||||
简言之
|
||||
管
|
||||
类如
|
||||
紧接着
|
||||
纵
|
||||
纵令
|
||||
纵使
|
||||
纵然
|
||||
经
|
||||
经过
|
||||
结果
|
||||
给
|
||||
继之
|
||||
继后
|
||||
继而
|
||||
综上所述
|
||||
罢了
|
||||
者
|
||||
而
|
||||
而且
|
||||
而况
|
||||
而后
|
||||
而外
|
||||
而已
|
||||
而是
|
||||
而言
|
||||
能
|
||||
能否
|
||||
腾
|
||||
自
|
||||
自个儿
|
||||
自从
|
||||
自各儿
|
||||
自后
|
||||
自家
|
||||
自己
|
||||
自打
|
||||
自身
|
||||
至
|
||||
至于
|
||||
至今
|
||||
至若
|
||||
致
|
||||
般的
|
||||
若
|
||||
若夫
|
||||
若是
|
||||
若果
|
||||
若非
|
||||
莫不然
|
||||
莫如
|
||||
莫若
|
||||
虽
|
||||
虽则
|
||||
虽然
|
||||
虽说
|
||||
被
|
||||
要
|
||||
要不
|
||||
要不是
|
||||
要不然
|
||||
要么
|
||||
要是
|
||||
譬喻
|
||||
譬如
|
||||
让
|
||||
许多
|
||||
论
|
||||
设使
|
||||
设或
|
||||
设若
|
||||
诚如
|
||||
诚然
|
||||
该
|
||||
说来
|
||||
诸
|
||||
诸位
|
||||
诸如
|
||||
谁
|
||||
谁人
|
||||
谁料
|
||||
谁知
|
||||
贼死
|
||||
赖以
|
||||
赶
|
||||
起
|
||||
起见
|
||||
趁
|
||||
趁着
|
||||
越是
|
||||
距
|
||||
跟
|
||||
较
|
||||
较之
|
||||
边
|
||||
过
|
||||
还
|
||||
还是
|
||||
还有
|
||||
还要
|
||||
这
|
||||
这一来
|
||||
这个
|
||||
这么
|
||||
这么些
|
||||
这么样
|
||||
这么点儿
|
||||
这些
|
||||
这会儿
|
||||
这儿
|
||||
这就是说
|
||||
这时
|
||||
这样
|
||||
这次
|
||||
这般
|
||||
这边
|
||||
这里
|
||||
进而
|
||||
连
|
||||
连同
|
||||
逐步
|
||||
通过
|
||||
遵循
|
||||
遵照
|
||||
那
|
||||
那个
|
||||
那么
|
||||
那么些
|
||||
那么样
|
||||
那些
|
||||
那会儿
|
||||
那儿
|
||||
那时
|
||||
那样
|
||||
那般
|
||||
那边
|
||||
那里
|
||||
都
|
||||
鄙人
|
||||
鉴于
|
||||
针对
|
||||
阿
|
||||
除
|
||||
除了
|
||||
除外
|
||||
除开
|
||||
除此之外
|
||||
除非
|
||||
随
|
||||
随后
|
||||
随时
|
||||
随着
|
||||
难道说
|
||||
非但
|
||||
非徒
|
||||
非特
|
||||
非独
|
||||
靠
|
||||
顺
|
||||
顺着
|
||||
首先
|
||||
!
|
||||
,
|
||||
:
|
||||
;
|
||||
?
|
||||
xml
|
||||
|
|
@ -1,231 +0,0 @@
import { Config } from '../config.js'
import fs from 'fs'
import nodejieba from '@node-rs/jieba'

class Tokenizer {
  async getHistory (e, groupId, date = new Date(), duration = 0, userId) {
    if (!groupId) {
      throw new Error('no valid group id')
    }
    let group = e.bot.pickGroup(groupId, true)
    let latestChat = await group.getChatHistory(undefined, 1)
    let seq = latestChat[0].seq
    let chats = latestChat
    function compareByTime (a, b) {
      const timeA = a.time
      const timeB = b.time
      if (timeA < timeB) {
        return -1
      }
      if (timeA > timeB) {
        return 1
      }
      return 0
    }
    // Get the current timestamp
    let currentTime = date.getTime()

    // Step 2: Set the hours, minutes, seconds, and milliseconds to 0
    date.setHours(0, 0, 0, 0)

    // Step 3: Calculate the timestamp representing the start of the specified date
    // duration represents the number of hours to go back
    // if duration is 0, keeping the original date (start of today)
    let startOfSpecifiedDate = date.getTime()
    // if duration > 0, go back to the specified number of hours
    if (duration > 0) {
      // duration should be in range [0, 24]
      // duration = Math.min(duration, 24)
      startOfSpecifiedDate = currentTime - (duration * 60 * 60 * 1000)
    }

    // Step 4: Get the end of the specified date by current time
    const endOfSpecifiedDate = currentTime
    while (isTimestampInDateRange(chats[0]?.time, startOfSpecifiedDate, endOfSpecifiedDate) &&
      isTimestampInDateRange(chats[chats.length - 1]?.time, startOfSpecifiedDate, endOfSpecifiedDate)) {
      let chatHistory = await group.getChatHistory(seq, 20)
      if (chatHistory.length === 1) {
        if (chats[0].seq === chatHistory[0].seq) {
          // 昨天没有聊天记录 比如新建的群 新进群的机器人 会卡在某一条
          break
        }
      }
      chats.push(...chatHistory)
      chats.sort(compareByTime)
      seq = chatHistory?.[0]?.seq
      if (!seq) {
        break
      }
      if (Config.debug) {
        logger.info(`拉取到${chatHistory.length}条聊天记录,当前已累计获取${chats.length}条聊天记录,继续拉...`)
      }
    }
    chats = chats.filter(chat => isTimestampInDateRange(chat.time, startOfSpecifiedDate, endOfSpecifiedDate))
    if (userId) {
      chats = chats.filter(chat => chat.sender.user_id === userId)
    }
    return chats
  }

  async getKeywordTopK (e, groupId, topK = 100, duration = 0, userId) {
    if (!nodejieba) {
      throw new Error('未安装node-rs/jieba,娱乐功能-词云统计不可用')
    }
    if (!this.loaded) {
      nodejieba.load()
      this.loaded = true
    }
    // duration represents the number of hours to go back, should in range [0, 24]
    let chats = await this.getHistory(e, groupId, new Date(), duration, userId)
    let durationStr = duration > 0 ? `${duration}小时` : '今日'
    logger.mark(`聊天记录拉取完成,获取到${durationStr}内${chats.length}条聊天记录,准备分词中`)

    const _path = process.cwd()
    let stopWordsPath = `${_path}/plugins/chatgpt-plugin/utils/wordcloud/cn_stopwords.txt`
    const data = fs.readFileSync(stopWordsPath)
    const stopWords = String(data)?.split('\n') || []
    let chatContent = chats
      .map(c => c.message
        // 只统计文本内容
        .filter(item => item.type == 'text')
        .map(textItem => `${textItem.text}`)
        .join('').trim()
      )
      .map(c => {
        // let length = c.length
        let threshold = 2
        // if (length < 100 && length > 50) {
        //   threshold = 6
        // } else if (length <= 50 && length > 25) {
        //   threshold = 3
        // } else if (length <= 25) {
        //   threshold = 2
        // }
        return nodejieba.extract(c, threshold)
      })
      .reduce((acc, curr) => acc.concat(curr), [])
      .map(c => c.keyword)
      .filter(c => stopWords.indexOf(c) < 0)
    if (Config.debug) {
      logger.info(chatContent)
    }
    const countMap = {}
    for (const value of chatContent) {
      if (countMap[value]) {
        countMap[value]++
      } else {
        countMap[value] = 1
      }
    }
    let list = Object.keys(countMap).map(k => {
      return [k, countMap[k]]
    })
    function compareByFrequency (a, b) {
      const freA = a[1]
      const freB = b[1]
      if (freA < freB) {
        return 1
      }
      if (freA > freB) {
        return -1
      }
      return 0
    }
    logger.mark('分词统计完成,绘制词云中...')
    return list.filter(s => s[1] > 2).sort(compareByFrequency).slice(0, topK)
  }
}

class ShamrockTokenizer extends Tokenizer {
  async getHistory (e, groupId, date = new Date(), duration = 0, userId) {
    logger.mark('当前使用Shamrock适配器')
    if (!groupId) {
      throw new Error('no valid group id')
    }
    let group = e.bot.pickGroup(groupId, true)
    // 直接加大力度
    let pageSize = 500
    let chats = (await group.getChatHistory(0, pageSize, false)) || []
    // Get the current timestamp
    let currentTime = date.getTime()

    // Step 2: Set the hours, minutes, seconds, and milliseconds to 0
    date.setHours(0, 0, 0, 0)

    // Step 3: Calculate the timestamp representing the start of the specified date
    // duration represents the number of hours to go back
    // if duration is 0, keeping the original date (start of today)
    let startOfSpecifiedDate = date.getTime()
    // if duration > 0, go back to the specified number of hours
    if (duration > 0) {
      // duration should be in range [0, 24]
      // duration = Math.min(duration, 24)
      startOfSpecifiedDate = currentTime - (duration * 60 * 60 * 1000)
    }

    // Step 4: Get the end of the specified date by currentTime
    const endOfSpecifiedDate = currentTime
    let cursor = chats.length
    // -------------------------------------------------------
    // |                  |                |
    // -------------------------------------------------------
    //                    ^                ^
    // long ago     cursor+pageSize      cursor        current
    while (isTimestampInDateRange(chats[0]?.time, startOfSpecifiedDate, endOfSpecifiedDate)) {
      // 由于Shamrock消息是从最新的开始拉,结束时由于动态更新,一旦有人发送消息就会立刻停止,所以不判断结束时间
      // 拉到后面会巨卡,所以增大page减少次数
      pageSize = Math.floor(Math.max(cursor / 2, pageSize))
      cursor = cursor + pageSize
      let retries = 3
      let chatHistory
      while (retries >= 0) {
        try {
          chatHistory = await group.getChatHistory(0, cursor, false)
          break
        } catch (err) {
          if (retries === 0) {
            logger.error(err)
          }
          retries--
        }
      }
      if (retries < 0) {
        logger.warn('拉不动了,就这样吧')
        break
      }
      if (chatHistory.length === 1) {
        break
      }
      if (chatHistory.length === chats.length) {
        // 没有了!再拉也没有了
        break
      }
      let oldLength = chats.length
      chats = chatHistory
      // chats.sort(compareByTime)
      if (Config.debug) {
        logger.info(`拉取到${chats.length - oldLength}条聊天记录,当前已累计获取${chats.length}条聊天记录,继续拉...`)
      }
    }
    chats = chats.filter(chat => isTimestampInDateRange(chat.time, startOfSpecifiedDate, endOfSpecifiedDate))
    if (userId) {
      chats = chats.filter(chat => chat.sender.user_id === userId)
    }
    return chats
  }
}

function isTimestampInDateRange (timestamp, startOfSpecifiedDate, endOfSpecifiedDate) {
  if (!timestamp) {
    return false
  }
  timestamp = timestamp * 1000

  // Step 5: Compare the given timestamp with the start and end of the specified date
  return timestamp >= startOfSpecifiedDate && timestamp < endOfSpecifiedDate
}

export default {
  default: new Tokenizer(),
  shamrock: new ShamrockTokenizer()
}
@ -1,20 +0,0 @@
import Tokenizer from './tokenizer.js'
import { render } from '../common.js'

export async function makeWordcloud (e, groupId, duration = 0, userId) {
  let tokenizer = getTokenizer(e)
  let topK = await tokenizer.getKeywordTopK(e, groupId, 100, duration, userId)
  let list = JSON.stringify(topK)
  logger.info(list)
  let img = await render(e, 'chatgpt-plugin', 'wordcloud/index', { list }, { retType: 'base64' })
  return img
}

function getTokenizer (e) {
  // if (e.adapter === 'shamrock') {
  //   return Tokenizer.shamrock
  // } else {
  //   return Tokenizer.default
  // }
  return Tokenizer.default
}
@ -1,500 +0,0 @@
import fetch from 'node-fetch'
import { Config } from '../config.js'
import { createParser } from 'eventsource-parser'
import https from 'https'
import WebSocket from 'ws'
import { createHmac } from 'crypto'

const referer = atob('aHR0cHM6Ly94aW5naHVvLnhmeXVuLmNuL2NoYXQ/aWQ9')
const origin = atob('aHR0cHM6Ly94aW5naHVvLnhmeXVuLmNu')
const createChatUrl = atob('aHR0cHM6Ly94aW5naHVvLnhmeXVuLmNuL2lmbHlncHQvdS9jaGF0LWxpc3QvdjEvY3JlYXRlLWNoYXQtbGlzdA==')
const chatUrl = atob('aHR0cHM6Ly94aW5naHVvLnhmeXVuLmNuL2lmbHlncHQtY2hhdC91L2NoYXRfbWVzc2FnZS9jaGF0')
let FormData
try {
  FormData = (await import('form-data')).default
} catch (err) {
  logger.warn('未安装form-data,无法使用星火模式')
}
async function getKeyv () {
  let Keyv
  try {
    Keyv = (await import('keyv')).default
  } catch (error) {
    throw new Error('keyv依赖未安装,请使用pnpm install keyv安装')
  }
  return Keyv
}
export default class XinghuoClient {
  constructor (opts) {
    this.cache = opts.cache
    this.ssoSessionId = opts.ssoSessionId
    this.headers = {
      Referer: referer,
      Cookie: 'ssoSessionId=' + this.ssoSessionId + ';',
      'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 16_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) CriOS/113.0.5672.69 Mobile/15E148 Safari/604.1',
      Origin: origin
    }
  }

  apiErrorInfo (code) {
    switch (code) {
      case 10000: return '升级为ws出现错误'
      case 10001: return '通过ws读取用户的消息出错'
      case 10002: return '通过ws向用户发送消息出错'
      case 10003: return '用户的消息格式有错误'
      case 10004: return '用户数据的schema错误'
      case 10005: return '用户参数值有错误'
      case 10006: return '用户并发错误:当前用户已连接,同一用户不能多处同时连接。'
      case 10007: return '用户流量受限:服务正在处理用户当前的问题,需等待处理完成后再发送新的请求。(必须要等大模型完全回复之后,才能发送下一个问题)'
      case 10008: return '服务容量不足,联系工作人员'
      case 10009: return '和引擎建立连接失败'
      case 10010: return '接收引擎数据的错误'
      case 10011: return '发送数据给引擎的错误'
      case 10012: return '引擎内部错误'
      case 10013: return '输入内容审核不通过,涉嫌违规,请重新调整输入内容'
      case 10014: return '输出内容涉及敏感信息,审核不通过,后续结果无法展示给用户'
      case 10015: return 'appid在黑名单中'
      case 10016: return 'appid授权类的错误。比如:未开通此功能,未开通对应版本,token不足,并发超过授权 等等'
      case 10017: return '清除历史失败'
      case 10019: return '表示本次会话内容有涉及违规信息的倾向;建议开发者收到此错误码后给用户一个输入涉及违规的提示'
      case 10110: return '服务忙,请稍后再试'
      case 10163: return '请求引擎的参数异常 引擎的schema 检查不通过'
      case 10222: return '引擎网络异常'
      case 10907: return 'token数量超过上限。对话历史+问题的字数太多,需要精简输入'
      case 11200: return '授权错误:该appId没有相关功能的授权 或者 业务量超过限制'
      case 11201: return '授权错误:日流控超限。超过当日最大访问量的限制'
      case 11202: return '授权错误:秒级流控超限。秒级并发超过授权路数限制'
      case 11203: return '授权错误:并发流控超限。并发路数超过授权路数限制'
      default: return '无效错误代码'
    }
  }

  async initCache () {
    if (!this.conversationsCache) {
      const cacheOptions = this.cache || {}
      cacheOptions.namespace = cacheOptions.namespace || 'xh'
      let Keyv = await getKeyv()
      this.conversationsCache = new Keyv(cacheOptions)
    }
  }

  async getWsUrl () {
    const APISecret = Config.xhAPISecret
    const APIKey = Config.xhAPIKey
    let APILink = '/v1.1/chat'
    if (Config.xhmode === 'apiv2') {
      APILink = '/v2.1/chat'
    } else if (Config.xhmode === 'apiv3') {
      APILink = '/v3.1/chat'
    } else if (Config.xhmode === 'apiv3.5') {
      APILink = '/v3.5/chat'
    } else if (Config.xhmode === 'apiv4.0') {
      APILink = '/v4.0/chat'
    }
    const date = new Date().toGMTString()
    const algorithm = 'hmac-sha256'
    const headers = 'host date request-line'
    const signatureOrigin = `host: spark-api.xf-yun.com\ndate: ${date}\nGET ${APILink} HTTP/1.1`
    const hmac = createHmac('sha256', APISecret)
    hmac.update(signatureOrigin)
    const signature = hmac.digest('base64')
    const authorizationOrigin = `api_key="${APIKey}", algorithm="${algorithm}", headers="${headers}", signature="${signature}"`
    const authorization = Buffer.from(authorizationOrigin).toString('base64')
    const v = {
      authorization,
      date,
      host: 'spark-api.xf-yun.com'
    }
    const url = `wss://spark-api.xf-yun.com${APILink}?${Object.keys(v).map(key => `${key}=${v[key]}`).join('&')}`
    return url
  }

  async uploadImage (url) {
    // 获取图片
    let response = await fetch(url, {
      method: 'GET'
    })
    const blob = await response.blob()
    const arrayBuffer = await blob.arrayBuffer()
    const buffer = Buffer.from(arrayBuffer)
    // 上传oss
    const formData = new FormData()
    formData.append('file', buffer, 'image.png')
    const respOss = await fetch('https://xinghuo.xfyun.cn/iflygpt/oss/sign', {
      method: 'POST',
      headers: {
        Cookie: 'ssoSessionId=' + this.ssoSessionId + ';'
      },
      body: formData
    })
    if (respOss.ok) {
      const ossData = await respOss.json()
      // 上传接口
      const sparkdeskUrl = `${ossData.data.url}&authorization=${Buffer.from(ossData.data.authorization).toString('base64')}&date=${ossData.data.date}&host=${ossData.data.host}`
      const respSparkdes = await fetch(sparkdeskUrl, {
        method: 'POST',
        headers: {
          Cookie: 'ssoSessionId=' + this.ssoSessionId + ';',
          authorization: Buffer.from(ossData.data.authorization).toString('base64')
        },
        body: buffer
      })
      if (respSparkdes.ok) {
        const sparkdesData = await respSparkdes.json()
        return {
          url: sparkdesData.data.link,
          file: buffer
        }
      } else {
        try {
          const sparkdesData = await respSparkdes.json()
          logger.error('星火图片Sparkdes:发送失败' + sparkdesData.desc)
        } catch (error) {
          logger.error('星火图片Sparkdes:发送失败')
        }
        return false
      }
    } else {
      try {
        const ossData = await respOss.json()
        logger.error('星火图片OSS:上传失败' + ossData.desc)
      } catch (error) {
        logger.error('星火图片OSS:上传失败')
      }
      return false
    }
  }

  async apiMessage (prompt, chatId, ePrompt = []) {
    if (!chatId) chatId = (Math.floor(Math.random() * 1000000) + 100000).toString()

    // 初始化缓存
    await this.initCache()
    const conversationKey = `ChatXH_${chatId}`
    const conversation = (await this.conversationsCache.get(conversationKey)) || {
      messages: [],
      createdAt: Date.now()
    }

    // 获取ws链接
    const wsUrl = Config.xhmode == 'assistants' ? Config.xhAssistants : await this.getWsUrl()
    if (!wsUrl) throw new Error('获取ws链接失败')
    let domain = 'general'
    if (Config.xhmode == 'apiv2') {
      domain = 'generalv2'
    } else if (Config.xhmode == 'apiv3') {
      domain = 'generalv3'
    } else if (Config.xhmode == 'apiv3.5') {
      domain = 'generalv3.5'
    } else if (Config.xhmode == 'apiv4.0') {
      domain = '4.0Ultra'
    }
    // 编写消息内容
    const wsSendData = {
      header: {
        app_id: Config.xhAppId,
        uid: chatId
      },
      parameter: {
        chat: {
          domain,
          temperature: Config.xhTemperature, // 核采样阈值
          max_tokens: Config.xhMaxTokens, // tokens最大长度
          chat_id: chatId,
          top_k: Math.floor(Math.random() * 6) + 1 // 随机候选,避免重复回复
        }
      },
      payload: {
        message: {
          text: [
            ...ePrompt,
            ...conversation.messages,
            { role: 'user', content: prompt }
          ]
        }
      }
    }
    if (Config.debug) {
      logger.info(wsSendData.payload.message.text)
    }

    return new Promise((resolve, reject) => {
      const socket = new WebSocket(wsUrl)
      let resMessage = ''
      socket.on('open', () => {
        socket.send(JSON.stringify(wsSendData))
      })
      socket.on('message', async (message) => {
        try {
          const messageData = JSON.parse(message)
          if (messageData.header.code != 0) {
            if (messageData.header.code == 10907) {
              const half = Math.floor(conversation.messages.length / 2)
              conversation.messages.splice(0, half)
              await this.conversationsCache.set(conversationKey, conversation)
              resolve({
                id: (Math.floor(Math.random() * 1000000) + 100000).toString(),
                response: '对话已达到上限,已自动清理对话,请重试'
              })
            } else {
              reject(`接口发生错误:Error Code ${messageData.header.code} ,${this.apiErrorInfo(messageData.header.code)}`)
            }
          }
          if (messageData.header.status == 0 || messageData.header.status == 1) {
            resMessage += messageData.payload.choices.text[0].content
          }
          if (messageData.header.status == 2) {
            resMessage += messageData.payload.choices.text[0].content
            conversation.messages.push({
              role: 'user',
              content: prompt
            })
            conversation.messages.push({
              role: 'assistant',
              content: resMessage
            })
            // 超过规定token去除一半曾经的对话记录
            if (messageData.payload.usage.text.total_tokens >= Config.xhMaxTokens) {
              const half = Math.floor(conversation.messages.length / 2)
              conversation.messages.splice(0, half)
            }
            await this.conversationsCache.set(conversationKey, conversation)
            resolve({
              id: chatId,
              response: resMessage
            })
          }
        } catch (error) {
          reject(new Error(error))
        }
      })
      socket.on('error', (error) => {
        reject(error)
      })
    })
  }

  async webMessage (prompt, chatId, botId) {
    if (!FormData) {
      throw new Error('缺少依赖:form-data。请安装依赖后重试')
    }
    return new Promise(async (resolve, reject) => {
      let formData = new FormData()
      formData.setBoundary('----WebKitFormBoundarycATE2QFHDn9ffeWF')
      formData.append('clientType', '2')
      formData.append('chatId', chatId)
      if (prompt.image) {
        prompt.text = prompt.text.replace('[图片]', '') // 清理消息中首个被使用的图片
        const imgdata = await this.uploadImage(prompt.image)
        if (imgdata) {
          formData.append('fileUrl', imgdata.url)
          formData.append('file', imgdata.file, 'image.png')
        }
      }
      formData.append('text', prompt.text)
      if (botId) {
        formData.append('isBot', '1')
        formData.append('botId', botId)
      }
      let randomNumber = Math.floor(Math.random() * 1000)
      let fd = '439' + randomNumber.toString().padStart(3, '0')
      formData.append('fd', fd)
      this.headers.Referer = referer + chatId
      let option = {
        method: 'POST',
        headers: Object.assign(this.headers, {
          Accept: 'text/event-stream',
          'Content-Type': 'multipart/form-data; boundary=----WebKitFormBoundarycATE2QFHDn9ffeWF'
        }),
        // body: formData,
        referrer: this.headers.Referer
      }
      let statusCode
      const req = https.request(chatUrl, option, (res) => {
        statusCode = res.statusCode
        if (statusCode !== 200) {
          logger.error('星火statusCode:' + statusCode)
        }
        let response = ''
        function onMessage (data) {
          // console.log(data)
          if (data === '<end>') {
            return resolve({
              error: null,
              response
            })
          }
          if (data.charAt(0) === '{') {
            try {
              response = JSON.parse(data).value
              if (Config.debug) {
                logger.info(response)
              }
            } catch (err) {
              reject(err)
            }
          }
          try {
            if (data && data !== '[error]') {
              response += atob(data.trim())
              if (Config.debug) {
                logger.info(response)
              }
            }
          } catch (err) {
            console.warn('fetchSSE onMessage unexpected error', err)
            reject(err)
          }
        }

        const parser = createParser((event) => {
          if (event.type === 'event') {
            onMessage(event.data)
          }
        })
        const errBody = []
        res.on('data', (chunk) => {
          if (statusCode === 200) {
            let str = chunk.toString()
            parser.feed(str)
          }
          errBody.push(chunk)
        })

        // const body = []
        // res.on('data', (chunk) => body.push(chunk))
        res.on('end', () => {
          const resString = Buffer.concat(errBody).toString()
          // logger.info({ resString })
          reject(resString)
        })
      })
      formData.pipe(req)
      req.on('error', (err) => {
        logger.error(err)
        reject(err)
      })
      req.on('timeout', () => {
        req.destroy()
        reject(new Error('Request time out'))
      })
      // req.write(formData.stringify())
      req.end()
    })
  }

  async sendMessage (prompt, option) {
    let chatId = option?.chatId
    let image = option?.image

    if (Config.xhmode == 'api' || Config.xhmode == 'apiv2' || Config.xhmode == 'apiv3' || Config.xhmode == 'apiv3.5' || Config.xhmode == 'assistants' || Config.xhmode == 'apiv4.0') {
      if (!Config.xhAppId || !Config.xhAPISecret || !Config.xhAPIKey) throw new Error('未配置api')
      let Prompt = []
      // 设定
      if (Config.xhPromptSerialize) {
        try {
          Prompt = JSON.parse(Config.xhPrompt)
        } catch (error) {
          Prompt = []
          logger.warn('星火设定序列化失败,本次对话不附带设定')
        }
      } else {
        Prompt = option.system ? [{ role: 'system', content: option.system }] : []
      }
      if (Config.enableChatSuno) {
        Prompt.unshift(
          { role: 'system', content: '如果我要求你生成音乐或写歌,你需要回复适合Suno生成音乐的信息。请使用Verse、Chorus、Bridge、Outro和End等关键字对歌词进行分段,如[Verse 1]。返回的消息需要使用markdown包裹的JSON格式,结构为```json{"option": "Suno", "tags": "style", "title": "title of the song", "lyrics": "lyrics"}```。' }
        )
      }
      if (Config.xhPromptEval) {
        Prompt.forEach(obj => {
          try {
            obj.content = obj.content.replace(/{{(.*?)}}/g, (match, variable) => {
              return Function(`"use strict";return ((e)=>{return ${variable} })`)()(option.e)
            })
          } catch (error) {
            logger.error(error)
          }
        })
      }

      let { response, id } = await this.apiMessage(prompt, chatId, Prompt)
      if (Config.xhRetRegExp) {
        response = response.replace(new RegExp(Config.xhRetRegExp, 'g'), Config.xhRetReplace)
      }
      return {
        conversationId: id,
        text: response
      }
    } else if (Config.xhmode == 'web') {
      let botId = false
      if (chatId && typeof chatId === 'object') {
        // 先读取botid,再覆盖chatId,避免从已被覆盖的字符串上取值
        botId = chatId.botid
        chatId = chatId.chatid
      }
      if (!chatId) {
        chatId = (await this.createChatList()).chatListId
      }
      let { response } = await this.webMessage({ text: prompt, image }, chatId, botId)
      // logger.info(response)
      // let responseText = atob(response)
      // 处理图片
      let images
      if (response.includes('multi_image_url')) {
        images = [{
          tag: '',
          url: JSON.parse(/{([^}]*)}/g.exec(response)[0]).url
        }]
        response = '我已经完成作品,欢迎您提出宝贵的意见和建议,帮助我快速进步~~'
      }
      if (botId) {
        chatId = {
          chatid: chatId,
          botid: botId
        }
      }
      if (Config.xhRetRegExp) {
        response = response.replace(new RegExp(Config.xhRetRegExp, 'g'), Config.xhRetReplace)
      }
      return {
        conversationId: chatId,
        text: response,
        images
      }
    } else {
      throw new Error('未知的模式' + Config.xhmode)
    }
  }

  async createChatList (bot = false) {
    let createChatListRes = await fetch(createChatUrl, {
      method: 'POST',
      headers: Object.assign(this.headers, {
        'Content-Type': 'application/json',
        Botweb: bot ? 1 : 0
      }),
      body: bot ? `{"BotWeb": 1, "botId": "${bot}"}` : '{}'
    })
    if (createChatListRes.status !== 200) {
      let errorRes = await createChatListRes.text()
      let errorText = '星火对话创建失败:' + errorRes
      logger.error(errorText)
      throw new Error(errorText)
    }
    createChatListRes = await createChatListRes.json()
    if (createChatListRes.data?.id) {
      logger.info('星火对话创建成功:' + createChatListRes.data.id)
    } else {
      logger.error('星火对话创建失败: ' + JSON.stringify(createChatListRes))
      throw new Error('星火对话创建失败:' + JSON.stringify(createChatListRes))
    }
    return {
      chatListId: createChatListRes.data?.id,
      title: createChatListRes.data?.title
    }
  }
}

function atob (s) {
  return Buffer.from(s, 'base64').toString()
}