import { ipcMain, shell, IpcMainEvent, IpcMainInvokeEvent, dialog } from 'electron'
import Constants from './utils/Constants'
import Store from 'electron-store'
import axios from 'axios'
import OpenAI from 'openai'

const store = new Store()

/*
 * IPC Communications
 * */

export default class IPCs {
  static initialize(): void {
    // Get application version
    ipcMain.handle('msgRequestGetVersion', () => {
      return Constants.APP_VERSION
    })

    // Read a value from the persistent store (null when the key is missing)
    ipcMain.handle('store-get', (event, key) => {
      return store.get(key) ?? null
    })

    // Write a value to the persistent store
    ipcMain.handle('store-set', (event, key, value) => {
      store.set(key, value)
    })

    // Fetch the list of available models from the remote endpoint
    ipcMain.handle('fetch-models', async () => {
      try {
        const response = await axios.get('https://get-model-list-vcwjgnvcld.cn-hangzhou.fcapp.run')
        console.log('Fetched model list:', response.data)
        return response.data.data
      } catch (error) {
        console.error('Failed to fetch model list:', error)
        throw error
      }
    })

    // Resolve the API base URL from the remote endpoint
    ipcMain.handle('getBaseUrl', async () => {
      try {
        const response = await axios.get('https://aqq-jbjsjuxivc.cn-hangzhou.fcapp.run')
        console.log('Fetched baseUrl:', response.data)
        return response.data.data
      } catch (error) {
        console.error('Failed to fetch baseUrl:', error)
        throw error
      }
    })

    // Rewrite the source article `count` times in parallel, streaming partial output to the renderer
    ipcMain.handle(
      'call-openai',
      async (event, baseURL: string, apiKey: string, model: string, count: number, rawArticle: string) => {
        try {
          const client = new OpenAI({
            apiKey: apiKey,
            baseURL: baseURL
          })

          const requests = Array.from({ length: count }, (_, index) => {
            return client.chat.completions
              .create({
                model: model,
                messages: [
                  {
                    role: 'system',
                    // System prompt (kept in Chinese): rewrite the user's input as a Xiaohongshu-style
                    // post with similar content but different wording and an attention-grabbing hook;
                    // output plain text only, no extra commentary, at most 900 characters.
                    content:
                      '你是一个小红书文案写手,能够熟练地根据用户的输入,改写成内容相近,但表达方式不同的新文案。你的文案中需要具备吸人眼球的钩子,能够牢牢抓住用户的注意力。请直接输出新的文案,不要输出其他任何提示性词语, 以纯文本的形式输出。注意:输出的文案不要超过900字。'
                  },
                  { role: 'user', content: rawArticle }
                ],
                max_tokens: 1800, // roughly 900 Chinese characters
                stream: true
              })
              .then(async (stream) => {
                let fullResponse = ''
                for await (const chunk of stream) {
                  const content = chunk.choices[0]?.delta?.content ?? ''
                  fullResponse += content
                  // Push each partial chunk to the renderer as it arrives
                  event.sender.send('openai-partial-response', { index, content })
                }
                // Resolve with the final text for this request
                return { index, response: fullResponse }
              })
          })

          // Run all requests concurrently
          const results = await Promise.all(requests)

          // Return the final results of all requests
          return results
        } catch (error) {
          console.error('Error:', error)
          throw error // Propagate the error to the caller
        }
      }
    )

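    /*
     * Renderer-side sketch (assumption, not part of this file): one way to consume the streaming
     * handler above. Only the channel names come from this module; exposing ipcRenderer to the
     * renderer (directly or via a preload bridge) is an assumption about this app's setup.
     *
     *   import { ipcRenderer } from 'electron'
     *
     *   ipcRenderer.on('openai-partial-response', (_event, { index, content }) => {
     *     // Append `content` to the output area for request `index` as chunks arrive
     *   })
     *
     *   const results = await ipcRenderer.invoke('call-openai', baseURL, apiKey, model, 3, rawArticle)
     *   // results: Array<{ index: number; response: string }>
     */
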
    // Generate `count` alternative titles in parallel, streaming partial output to the renderer
    ipcMain.handle(
      'call-openai-title',
      async (event, baseUrl: string, apiKey: string, model: string, count: number, text: string) => {
        try {
          const client = new OpenAI({
            baseURL: baseUrl,
            apiKey: apiKey,
            maxRetries: 2,
            timeout: 30000
          })

          const requests = Array.from({ length: count }, (_, index) => {
            return client.chat.completions
              .create({
                model: model,
                messages: [
                  {
                    role: 'system',
                    // System prompt (kept in Chinese): rewrite the user's input as a Xiaohongshu-style
                    // title with similar content but different wording and an attention-grabbing hook;
                    // output plain text only, no extra commentary, at most 20 characters.
                    content:
                      '你是一个小红书标题写手,能够熟练地根据用户的输入,改写成内容相近,但表达方式不同的新标题。你的标题中需要具备吸人眼球的钩子,能够牢牢抓住用户的注意力。请直接输出新的标题,不要输出其他任何提示性词语, 以纯文本的形式输出。注意:输出的标题不要超过20字。'
                  },
                  { role: 'user', content: text }
                ],
                max_tokens: 50, // roughly 20 Chinese characters
                temperature: 0.8,
                stream: true
              })
              .then(async (stream) => {
                let fullResponse = ''
                for await (const chunk of stream) {
                  const content = chunk.choices[0]?.delta?.content ?? ''
                  fullResponse += content
                  // Push each partial chunk to the renderer as it arrives
                  event.sender.send('openai-partial-response', { index, content })
                }
                // Resolve with the final (trimmed) title for this request
                return { index, response: fullResponse.trim() }
              })
          })

          // Run all requests concurrently
          const results = await Promise.all(requests)

          // Return the final results of all requests
          return results
        } catch (error) {
          console.error('Failed to generate titles:', error)
          throw error
        }
      }
    )

    // Read the stored OpenAI API key (null when not set)
    ipcMain.handle('get-api-key', () => {
      return store.get('api-key') ?? null
    })

    // Persist the OpenAI API key; an empty value removes it
    ipcMain.handle('set-api-key', (_, apiKey) => {
      if (!apiKey) {
        store.delete('api-key')
      } else {
        store.set('api-key', apiKey)
      }
    })

    // Open url via web browser
    ipcMain.on('msgOpenExternalLink', async (event: IpcMainEvent, url: string) => {
      await shell.openExternal(url)
    })

    // Open file
    ipcMain.handle('msgOpenFile', async (event: IpcMainInvokeEvent, filter: string) => {
      const filters: { name: string; extensions: string[] }[] = []
      if (filter === 'text') {
        filters.push({ name: 'Text', extensions: ['txt', 'json'] })
      } else if (filter === 'zip') {
        filters.push({ name: 'Zip', extensions: ['zip'] })
      }
      const dialogResult = await dialog.showOpenDialog({
        properties: ['openFile'],
        filters
      })
      return dialogResult
    })
  }
}
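
/*
 * Usage sketch (assumption): with contextIsolation enabled, a preload script would expose these
 * channels to the renderer. The API shape below ("mainApi") is hypothetical; only the channel
 * names come from the handlers above.
 *
 *   // preload.ts
 *   import { contextBridge, ipcRenderer } from 'electron'
 *
 *   contextBridge.exposeInMainWorld('mainApi', {
 *     getVersion: () => ipcRenderer.invoke('msgRequestGetVersion'),
 *     fetchModels: () => ipcRenderer.invoke('fetch-models'),
 *     getBaseUrl: () => ipcRenderer.invoke('getBaseUrl'),
 *     getApiKey: () => ipcRenderer.invoke('get-api-key'),
 *     setApiKey: (key: string) => ipcRenderer.invoke('set-api-key', key),
 *     callOpenAI: (baseURL: string, apiKey: string, model: string, count: number, article: string) =>
 *       ipcRenderer.invoke('call-openai', baseURL, apiKey, model, count, article),
 *     onPartialResponse: (cb: (payload: { index: number; content: string }) => void) =>
 *       ipcRenderer.on('openai-partial-response', (_e, payload) => cb(payload))
 *   })
 */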