chore: restructure project and add i18n support

- Reorganize project structure with new electron and shared directories
- Add comprehensive i18n support with Chinese, English, and Japanese locales
- Update build configurations and TypeScript paths for new structure
- Add various UI components including chat interface and task management
- Include Windows release binaries and localization files
- Update dependencies and fix import paths throughout the codebase
This commit is contained in:
duanshuwen
2026-04-06 14:39:06 +08:00
parent e76b034d50
commit 6615d11dd6
311 changed files with 823682 additions and 4460 deletions

View File

@@ -0,0 +1,4 @@
/**
 * Abstract base class for LLM chat providers.
 * Concrete providers (e.g. OpenAIProvider) implement `chat` to return a
 * stream of normalized UniversalChunk objects for a conversation.
 *
 * NOTE(review): `DialogueMessageProps` and `UniversalChunk` are not imported
 * here — presumably ambient/global project types; confirm.
 */
export abstract class BaseProvider {
  // Resolves once the upstream request is accepted; chunks then arrive
  // lazily through the returned async iterable.
  abstract chat(messages: DialogueMessageProps[], modelName: string): Promise<AsyncIterable<UniversalChunk>>
}

View File

@@ -0,0 +1,58 @@
import { BaseProvider } from "./BaseProvider";
import OpenAI from "openai";
import logManager from "@electron/service/logger"
/**
 * Convert a raw OpenAI streaming chunk into the project's UniversalChunk.
 *
 * @param chunk - SSE delta chunk emitted by the OpenAI SDK stream
 * @returns normalized chunk: `result` is the delta text (`''` when the
 *          delta carries no content), `isEnd` is true once the model
 *          reports any finish_reason.
 */
function _transformChunk(chunk: OpenAI.Chat.Completions.ChatCompletionChunk): UniversalChunk {
  const choice = chunk.choices[0];
  return {
    // Fix: previously only 'stop' set isEnd, so streams terminated by
    // 'length', 'content_filter', etc. were never flagged as finished.
    // Any non-null finish_reason marks the final chunk.
    isEnd: choice?.finish_reason != null,
    result: choice?.delta?.content ?? '',
  }
}
/**
 * Chat provider backed by any OpenAI-compatible HTTP endpoint
 * (the base URL and API key are supplied per provider at construction).
 */
export class OpenAIProvider extends BaseProvider {
  private client: OpenAI;

  constructor(apiKey: string, baseURL: string) {
    super();
    this.client = new OpenAI({ apiKey, baseURL });
  }

  /**
   * Start a streaming chat completion.
   *
   * Logs the request (with a truncated preview of the last message) and the
   * response status/latency, then returns an async iterable that converts
   * SDK chunks to UniversalChunk as they arrive.
   *
   * @param messages - full dialogue history sent to the model
   * @param model - model identifier understood by the endpoint
   * @throws re-throws any SDK error after logging it with status 500
   */
  async chat(messages: DialogueMessageProps[], model: string): Promise<AsyncIterable<UniversalChunk>> {
    const startTime = Date.now();
    const lastMessage = messages[messages.length - 1];
    // Fix: the previous preview expression produced the literal string
    // "undefined..." when content was missing (and `?.length > 100` does
    // not typecheck under strictNullChecks). Guard first, then truncate.
    const content = lastMessage?.content ?? '';
    const preview = content.length > 100 ? content.substring(0, 100) + '...' : content;
    logManager.logApiRequest('chat.completions.create', {
      model,
      lastMessage: preview,
      messageCount: messages.length,
    }, 'POST');
    try {
      const chunks = await this.client.chat.completions.create({
        model,
        messages,
        stream: true,
      });
      const responseTime = Date.now() - startTime;
      logManager.logApiResponse('chat.completions.create', { success: true }, 200, responseTime);
      // Lazily adapt each SDK chunk; an async generator replaces the
      // hand-rolled Symbol.asyncIterator object.
      return (async function* () {
        for await (const chunk of chunks) {
          yield _transformChunk(chunk);
        }
      })();
    } catch (error) {
      const responseTime = Date.now() - startTime;
      logManager.logApiResponse('chat.completions.create', { error: error instanceof Error ? error.message : String(error) }, 500, responseTime);
      throw error;
    }
  }
}

123
electron/providers/index.ts Normal file
View File

@@ -0,0 +1,123 @@
import type { Provider } from "@lib/types"
import { OpenAIProvider } from "./OpenAIProvider"
import { parseOpenAISetting } from '@lib/utils'
import { decode } from 'js-base64'
import { configManager } from '@electron/service/config-service'
import { logManager } from '@electron/service/logger'
import { CONFIG_KEYS } from "@lib/constants"
/**
 * Built-in catalogue of OpenAI-compatible providers. API keys are read from
 * environment variables at module load; missing keys default to ''.
 * NOTE(review): `createProvider` below shadows this list with the parsed
 * user config — confirm this default list is still referenced anywhere.
 */
const providerSeeds = [
  { id: 1, name: 'bigmodel', title: '智谱AI', models: ['glm-4.5-flash'], baseURL: 'https://open.bigmodel.cn/api/paas/v4', envKey: 'BIGMODEL_API_KEY' },
  { id: 2, name: 'deepseek', title: '深度求索 (DeepSeek)', models: ['deepseek-chat'], baseURL: 'https://api.deepseek.com/v1', envKey: 'DEEPSEEK_API_KEY' },
  { id: 3, name: 'siliconflow', title: '硅基流动', models: ['Qwen/Qwen3-8B', 'deepseek-ai/DeepSeek-R1-0528-Qwen3-8B'], baseURL: 'https://api.siliconflow.cn/v1', envKey: 'SILICONFLOW_API_KEY' },
  { id: 4, name: 'qianfan', title: '百度千帆', models: ['ernie-speed-128k', 'ernie-4.0-8k', 'ernie-3.5-8k'], baseURL: 'https://qianfan.baidubce.com/v2', envKey: 'QIANFAN_API_KEY' },
];
const providers = providerSeeds.map(({ envKey, baseURL, ...rest }) => ({
  ...rest,
  openAISetting: {
    baseURL,
    apiKey: process.env[envKey] || '',
  },
  createdAt: Date.now(),
  updatedAt: Date.now(),
}));
// Provider record after normalization: the base `Provider` type allows
// `openAISetting` to be a serialized string, but once `_parseProvider` has
// run it is always the parsed credential object (or absent).
interface _Provider extends Omit<Provider, 'openAISetting'> {
  openAISetting?: {
    apiKey: string,
    baseURL: string,
  };
}
/**
 * Read and normalize the provider list from persisted config.
 *
 * The stored value may be base64-encoded JSON (preferred) or plain JSON;
 * base64 is tried first and plain JSON only on failure. Each provider's
 * `openAISetting` may itself be persisted as a serialized string and is
 * parsed into an object.
 *
 * @returns the normalized providers, or `undefined` when the config is
 *          missing, unparseable, or empty (failures are logged).
 */
const _parseProvider = (): _Provider[] | undefined => {
  const providerConfig = configManager.get(CONFIG_KEYS.PROVIDER);
  let result: Provider[] = [];
  try {
    // Preferred format: base64-wrapped JSON.
    result = JSON.parse(decode(providerConfig)) as Provider[];
  } catch (base64Error) {
    logManager.error(`parse base64 provider failed: ${base64Error}`);
    try {
      // Fallback: the value was stored as plain JSON. Nesting replaces the
      // previous isBase64Parsed flag + second top-level try.
      result = JSON.parse(providerConfig) as Provider[];
    } catch (jsonError) {
      logManager.error(`parse provider failed: ${jsonError}`);
    }
  }
  if (!result.length) return undefined;
  return result.map((provider) => ({
    ...provider,
    // Fix: the old `provider.openAISetting ?? ''` inside this branch was
    // dead — the typeof guard already proves it is a string.
    openAISetting: typeof provider.openAISetting === 'string'
      ? parseOpenAISetting(provider.openAISetting)
      : provider.openAISetting,
  })) as _Provider[];
}
const getProviderConfig = () => {
try {
return _parseProvider();
} catch (error) {
logManager.error(`get provider config failed: ${error}`);
return null;
}
}
/**
 * Build an OpenAIProvider for the named provider using the stored config.
 *
 * Returns `undefined` when no configured provider matches `name`.
 *
 * @param name - provider identifier (e.g. 'deepseek') to look up in config
 * @throws Error when the config itself is missing, or when the matched
 *         provider lacks an apiKey or baseURL
 */
export function createProvider(name: string) {
  const configured = getProviderConfig();
  if (!configured) {
    throw new Error('provider config not found');
  }
  const match = configured.find((provider) => provider.name === name);
  if (!match) return;
  const { apiKey, baseURL } = match.openAISetting ?? {};
  if (!apiKey || !baseURL) {
    throw new Error('apiKey or baseURL not found');
  }
  // TODO: visible
  return new OpenAIProvider(apiKey, baseURL);
}