This commit is contained in:
ikechan8370 2023-07-29 20:54:01 +08:00
commit 407b3752a4
11 changed files with 245 additions and 183 deletions

View file

@@ -41,7 +41,7 @@ async function getKeyv () {
const genRanHex = (size) => [...Array(size)].map(() => Math.floor(Math.random() * 16).toString(16)).join('')
export default class SydneyAIClient {
constructor (opts) {
constructor(opts) {
this.opts = {
...opts,
host: opts.host || Config.sydneyReverseProxy || 'https://edgeservices.bing.com/edgesvc'
@@ -52,7 +52,7 @@ export default class SydneyAIClient {
this.debug = opts.debug
}
async initCache () {
async initCache() {
if (!this.conversationsCache) {
const cacheOptions = this.opts.cache || {}
cacheOptions.namespace = cacheOptions.namespace || 'bing'
@@ -61,7 +61,7 @@ export default class SydneyAIClient {
}
}
async createNewConversation () {
async createNewConversation() {
await this.initCache()
const fetchOptions = {
headers: {
@@ -126,7 +126,7 @@ export default class SydneyAIClient {
}
}
async createWebSocketConnection () {
async createWebSocketConnection() {
await this.initCache()
// let WebSocket = await getWebSocket()
return new Promise((resolve, reject) => {
@@ -190,13 +190,13 @@
})
}
async cleanupWebSocketConnection (ws) {
async cleanupWebSocketConnection(ws) {
clearInterval(ws.bingPingInterval)
ws.close()
ws.removeAllListeners()
}
async sendMessage (
async sendMessage(
message,
opts = {}
) {
@@ -364,7 +364,6 @@ export default class SydneyAIClient {
let maxConv = Config.maxNumUserMessagesInConversation
const currentDate = moment().format('YYYY-MM-DDTHH:mm:ssZ')
const imageDate = await this.kblobImage(opts.imageUrl)
// console.log(imageDate)
const obj = {
arguments: [
{
@@ -573,9 +572,9 @@ export default class SydneyAIClient {
const message = messages.length
? messages[messages.length - 1]
: {
adaptiveCards: adaptiveCardsSoFar,
text: replySoFar.join('')
}
adaptiveCards: adaptiveCardsSoFar,
text: replySoFar.join('')
}
if (messages[0].contentOrigin === 'Apology') {
console.log('Apology found')
if (!replySoFar[0]) {
@@ -640,9 +639,9 @@ export default class SydneyAIClient {
const message = messages.length
? messages[messages.length - 1]
: {
adaptiveCards: adaptiveCardsSoFar,
text: replySoFar.join('')
}
adaptiveCards: adaptiveCardsSoFar,
text: replySoFar.join('')
}
// 获取到图片内容
if (message.contentType === 'IMAGE') {
message.imageTag = messages.filter(m => m.contentType === 'IMAGE').map(m => m.text).join('')
@@ -808,7 +807,6 @@ export default class SydneyAIClient {
return false
}
}
/**
* Iterate through messages, building an array based on the parentMessageId.
* Each message has an id and a parentMessageId. The parentMessageId is the id of the message that this message is a reply to.
@@ -816,7 +814,7 @@ export default class SydneyAIClient {
* @param parentMessageId
* @returns {*[]} An array containing the messages in the order they should be displayed, starting with the root message.
*/
static getMessagesForConversation (messages, parentMessageId) {
static getMessagesForConversation(messages, parentMessageId) {
const orderedMessages = []
let currentMessageId = parentMessageId
while (currentMessageId) {
@@ -832,7 +830,7 @@
}
}
async function generateRandomIP () {
async function generateRandomIP() {
let ip = await redis.get('CHATGPT:BING_IP')
if (ip) {
return ip

View file

@@ -69,11 +69,15 @@ export async function solveCaptchaOneShot (token) {
if (!solveUrl) {
throw new Error('no captcha source')
}
logger.info(`尝试解决token${token}的验证码`)
let result = await fetch(solveUrl, {
method: 'POST',
body: {
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
_U: token
}
})
})
if (result.status === 200) {
return await result.json()

View file

@@ -1,17 +1,17 @@
// import { remark } from 'remark'
// import stripMarkdown from 'strip-markdown'
import { exec } from 'child_process'
import {exec} from 'child_process'
import lodash from 'lodash'
import fs from 'node:fs'
import path from 'node:path'
import buffer from 'buffer'
import yaml from 'yaml'
import puppeteer from '../../../lib/puppeteer/puppeteer.js'
import { Config } from './config.js'
import { convertSpeaker, generateVitsAudio, speakers as vitsRoleList } from './tts.js'
import VoiceVoxTTS, { supportConfigurations as voxRoleList } from './tts/voicevox.js'
import AzureTTS, { supportConfigurations as azureRoleList } from './tts/microsoft-azure.js'
import { translate } from './translate.js'
import {Config} from './config.js'
import {convertSpeaker, generateVitsAudio, speakers as vitsRoleList} from './tts.js'
import VoiceVoxTTS, {supportConfigurations as voxRoleList} from './tts/voicevox.js'
import AzureTTS, {supportConfigurations as azureRoleList} from './tts/microsoft-azure.js'
import {translate} from './translate.js'
import uploadRecord from './uploadRecord.js'
// export function markdownToText (markdown) {
// return remark()
@@ -346,7 +346,8 @@ export async function renderUrl (e, url, renderCfg = {}) {
// 云渲染
if (Config.cloudRender) {
url = url.replace(`127.0.0.1:${Config.serverPort || 3321}`, Config.serverHost || `${await getPublicIP()}:${Config.serverPort || 3321}`)
const resultres = await fetch(`${Config.cloudTranscode}/screenshot`, {
const cloudUrl = new URL(Config.cloudTranscode)
const resultres = await fetch(`${cloudUrl.href}screenshot`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
@@ -844,9 +845,7 @@ export async function generateAudio (e, pendingText, speakingEmotion, emotionDeg
try {
try {
sendable = await uploadRecord(wav, Config.ttsMode)
if (sendable) {
await e.reply(sendable)
} else {
if (!sendable) {
// 如果合成失败尝试使用ffmpeg合成
sendable = segment.record(wav)
}
@@ -897,6 +896,7 @@ export async function generateAzureAudio (pendingText, role = '随机', speaking
let languagePrefix = azureRoleList.find(config => config.code === speaker).languageDetail.charAt(0)
languagePrefix = languagePrefix.startsWith('E') ? '英' : languagePrefix
pendingText = (await translate(pendingText, languagePrefix)).replace('\n', '')
} else {
let role, languagePrefix
role = azureRoleList[Math.floor(Math.random() * azureRoleList.length)]
@@ -938,3 +938,4 @@ export function getUserSpeaker (userSetting) {
return userSetting.ttsRoleVoiceVox || Config.voicevoxTTSSpeaker
}
}

View file

@@ -47,7 +47,7 @@ function randomNum (minNum, maxNum) {
* @param lengthScale
* @returns {Promise<string>}
*/
export async function generateVitsAudio (text, speaker = '随机', language = '中日混合(中文用[ZH][ZH]包裹起来,日文用[JA][JA]包裹起来)', noiseScale = Config.noiseScale, noiseScaleW = Config.noiseScaleW, lengthScale = Config.lengthScale) {
export async function generateVitsAudio (text, speaker = '随机', language = '中日混合(中文用[ZH][ZH]包裹起来,日文用[JA][JA]包裹起来)', noiseScale = parseFloat(Config.noiseScale), noiseScaleW = parseFloat(Config.noiseScaleW), lengthScale = parseFloat(Config.lengthScale)) {
if (!speaker || speaker === '随机') {
logger.info('随机角色!这次哪个角色这么幸运会被选到呢……')
speaker = speakers[randomNum(0, speakers.length)]

View file

@@ -83,7 +83,8 @@ async function uploadRecord (recordUrl, ttsMode = 'vits-uma-genshin-honkai') {
buffer = Buffer.from(arrayBuffer)
formData.append('file', new File([buffer], 'audio.wav'))
}
const resultres = await fetch(`${Config.cloudTranscode}/audio`, {
const cloudUrl = new URL(Config.cloudTranscode)
const resultres = await fetch(`${cloudUrl}audio`, {
method: 'POST',
body: formData
})
@@ -99,7 +100,8 @@ async function uploadRecord (recordUrl, ttsMode = 'vits-uma-genshin-honkai') {
throw e
}
} else {
const resultres = await fetch(`${Config.cloudTranscode}/audio`, {
const cloudUrl = new URL(Config.cloudTranscode)
const resultres = await fetch(`${cloudUrl}audio`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'