Add unit tests for skill capabilities, skill planner, and UV setup

- Implement tests for random ID generation, ensuring preference for crypto.randomUUID (a rough sketch follows this list).
- Create tests for runtime context capabilities, validating the injection of enabled skill capabilities.
- Add tests for skill capability parsing, including classification and command example extraction.
- Introduce tests for the skill planner, verifying tool call planning based on user requests and attachment requirements.
- Establish tests for UV setup, ensuring proper handling of Python installation scenarios and environment checks.
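A minimal sketch of the random ID test described above, assuming a Vitest-style runner and a hypothetical generateRandomId helper (the helper name, module path, and runner are illustrative, not taken from the commit):

import { describe, expect, it, vi } from 'vitest';
import { generateRandomId } from './randomId'; // hypothetical module path

describe('generateRandomId', () => {
  it('prefers crypto.randomUUID when it is available', () => {
    const uuid = '123e4567-e89b-12d3-a456-426614174000';
    // Stub the platform API so the test can assert it is the preferred code path.
    const spy = vi.spyOn(globalThis.crypto, 'randomUUID').mockReturnValue(uuid);
    expect(generateRandomId()).toBe(uuid);
    expect(spy).toHaveBeenCalled();
    spy.mockRestore();
  });
});
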
This commit is contained in:
DEV_DSW
2026-04-24 17:02:59 +08:00
parent e11a2296cc
commit 4c61e93c3e
42 changed files with 12560 additions and 224 deletions

View File

@@ -2,11 +2,115 @@ export interface ChatOptions {
signal?: AbortSignal;
}
-export interface GatewayChatMessage {
-role: 'system' | 'user' | 'assistant' | 'tool';
-content: string;
-}
export interface GatewayTextContentBlock {
type: 'text';
text: string;
}
-export abstract class BaseProvider {
-abstract chat(messages: GatewayChatMessage[], modelName: string, options?: ChatOptions): Promise<AsyncIterable<UniversalChunk>>
-}
export interface GatewayThinkingContentBlock {
type: 'thinking';
thinking: string;
signature?: string;
}
export interface GatewayToolUseContentBlock {
type: 'tool_use';
id: string;
name: string;
input?: unknown;
summary?: string;
}
export interface GatewayToolResultContentBlock {
type: 'tool_result';
toolCallId?: string;
content?: string | GatewayChatContentBlock[];
result?: unknown;
summary?: string;
ok?: boolean;
error?: unknown;
}
export type GatewayChatContentBlock =
| GatewayTextContentBlock
| GatewayThinkingContentBlock
| GatewayToolUseContentBlock
| GatewayToolResultContentBlock;
export type GatewayChatMessageRole =
| 'system'
| 'user'
| 'assistant'
| 'tool'
| 'toolresult'
| 'tool_result';
export interface GatewayChatMessage {
role: GatewayChatMessageRole;
content: string | GatewayChatContentBlock[];
name?: string;
toolCallId?: string;
metadata?: Record<string, unknown>;
}
export interface GatewayToolDefinition {
name: string;
description?: string;
inputSchema?: unknown;
}
export type GatewayToolChoice =
| 'auto'
| 'none'
| 'required'
| {
type: 'tool';
name: string;
};
export interface ToolCapableChatOptions extends ChatOptions {
tools?: GatewayToolDefinition[];
toolChoice?: GatewayToolChoice;
metadata?: Record<string, unknown>;
}
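// One streamed fragment of a tool call; arguments arrive incrementally via argumentsDelta and are correlated by index/id.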
export interface ProviderToolCallDelta {
index?: number;
id?: string;
name?: string;
argumentsDelta?: string;
raw?: unknown;
}
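// Capability flags a provider advertises so callers know whether structured content, tool calls, tool results, and thinking blocks are supported.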
export interface ProviderCapabilities {
structuredMessages: boolean;
toolCalls: boolean;
toolResults: boolean;
thinking: boolean;
}
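// Streaming chunk shape: extends UniversalChunk with optional structured content, incremental tool-call deltas, a finish reason, and the raw provider payload.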
export interface ProviderStreamChunk extends UniversalChunk {
content?: GatewayChatContentBlock[];
toolCalls?: ProviderToolCallDelta[];
finishReason?: string | null;
raw?: unknown;
}
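// Conservative baseline; providers that support richer features override getCapabilities().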
export const DEFAULT_PROVIDER_CAPABILITIES: ProviderCapabilities = {
structuredMessages: false,
toolCalls: false,
toolResults: false,
thinking: false,
};
export abstract class BaseProvider {
getCapabilities(): ProviderCapabilities {
return DEFAULT_PROVIDER_CAPABILITIES;
}
abstract chat(
messages: GatewayChatMessage[],
modelName: string,
options?: ToolCapableChatOptions
): Promise<AsyncIterable<ProviderStreamChunk>>
}
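For illustration, a structured tool round-trip expressed with these types might look like the following sketch (the conversation values are invented; only the interfaces above come from this file):

const messages: GatewayChatMessage[] = [
  { role: 'user', content: 'List the files in the workspace.' },
  {
    role: 'assistant',
    content: [
      { type: 'text', text: 'Let me check.' },
      { type: 'tool_use', id: 'call_1', name: 'list_files', input: { path: '.' } },
    ],
  },
  {
    role: 'tool',
    toolCallId: 'call_1',
    content: [{ type: 'tool_result', toolCallId: 'call_1', content: 'README.md\nsrc/', ok: true }],
  },
];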

View File

@@ -1,15 +1,218 @@
-import { BaseProvider, ChatOptions, GatewayChatMessage } from "./BaseProvider";
import {
BaseProvider,
GatewayChatContentBlock,
GatewayChatMessage,
GatewayToolChoice,
GatewayToolDefinition,
GatewayToolResultContentBlock,
ProviderCapabilities,
ProviderStreamChunk,
ToolCapableChatOptions,
} from "./BaseProvider";
import OpenAI from "openai";
import logManager from "@electron/service/logger"
-function _transformChunk(chunk: OpenAI.Chat.Completions.ChatCompletionChunk): UniversalChunk {
const OPENAI_PROVIDER_CAPABILITIES: ProviderCapabilities = {
structuredMessages: true,
toolCalls: true,
toolResults: true,
thinking: false,
};
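// Flattens structured content blocks (text, thinking, nested tool results) into a single plain-text string.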
function _flattenContent(content: string | GatewayChatContentBlock[] | undefined): string {
if (typeof content === 'string') {
return content;
}
if (!Array.isArray(content)) {
return '';
}
return content
.map((block) => {
if (!block || typeof block !== 'object') {
return '';
}
if (block.type === 'text' && typeof block.text === 'string') {
return block.text;
}
if (block.type === 'thinking' && typeof block.thinking === 'string') {
return block.thinking;
}
if (block.type === 'tool_result') {
return _flattenContent(block.content);
}
return '';
})
.filter(Boolean)
.join('\n');
}
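// Converts tool_use blocks into OpenAI-style tool_calls entries, falling back to a positional id when a block has none.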
function _extractToolCalls(content: GatewayChatContentBlock[]): Array<Record<string, unknown>> | undefined {
const toolCalls = content
.flatMap((block, index) => {
if (block.type !== 'tool_use' || !block.name) {
return [];
}
return [{
id: block.id || `tool_call_${index}`,
type: 'function',
function: {
name: block.name,
arguments: JSON.stringify(block.input ?? {}),
},
}];
});
return toolCalls.length ? toolCalls : undefined;
}
function _findToolResultBlock(
content: GatewayChatMessage['content']
): GatewayToolResultContentBlock | undefined {
if (!Array.isArray(content)) {
return undefined;
}
return content.find(
(block): block is GatewayToolResultContentBlock => block.type === 'tool_result'
);
}
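// Maps a gateway message onto OpenAI's chat format: assistant messages keep their tool_calls, tool/tool_result roles become role 'tool' with a tool_call_id, and messages with no usable content return null so they can be dropped.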
function _transformMessage(message: GatewayChatMessage): Record<string, unknown> | null {
const normalizedRole = message.role === 'toolresult' || message.role === 'tool_result'
? 'tool'
: message.role;
if (normalizedRole === 'assistant') {
const content = Array.isArray(message.content) ? message.content : undefined;
const toolCalls = content ? _extractToolCalls(content) : undefined;
const text = _flattenContent(message.content).trim();
if (!text && !toolCalls?.length) {
return null;
}
return {
role: 'assistant',
content: text || null,
...(toolCalls?.length ? { tool_calls: toolCalls } : {}),
};
}
if (normalizedRole === 'tool') {
const resultBlock = _findToolResultBlock(message.content);
const toolCallId = message.toolCallId || resultBlock?.toolCallId;
const text = _flattenContent(resultBlock?.content ?? message.content).trim();
return {
role: 'tool',
tool_call_id: toolCallId || `${message.name || 'tool'}_call`,
content: text || resultBlock?.summary || 'Tool result',
};
}
const text = _flattenContent(message.content).trim();
if (!text) {
return null;
}
return {
role: normalizedRole,
content: text,
};
}
function _transformMessages(messages: GatewayChatMessage[]): Array<Record<string, unknown>> {
return messages
.map((message) => _transformMessage(message))
.filter((message): message is Record<string, unknown> => message !== null);
}
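// OpenAI expects a JSON Schema object for function parameters; fall back to a permissive empty object schema when none is supplied.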
function _normalizeToolSchema(inputSchema: unknown): Record<string, unknown> {
if (inputSchema && typeof inputSchema === 'object' && !Array.isArray(inputSchema)) {
return inputSchema as Record<string, unknown>;
}
return {
type: 'object',
properties: {},
additionalProperties: true,
};
}
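// Wraps gateway tool definitions in OpenAI's function-tool envelope.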
function _transformTools(tools?: GatewayToolDefinition[]): Array<Record<string, unknown>> | undefined {
if (!tools?.length) {
return undefined;
}
return tools.map((tool) => ({
type: 'function',
function: {
name: tool.name,
...(tool.description ? { description: tool.description } : {}),
parameters: _normalizeToolSchema(tool.inputSchema),
},
}));
}
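// String choices ('auto' | 'none' | 'required') pass through unchanged; a named choice becomes OpenAI's { type: 'function', function: { name } } form.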
function _transformToolChoice(choice?: GatewayToolChoice): string | Record<string, unknown> | undefined {
if (!choice) {
return undefined;
}
if (typeof choice === 'string') {
return choice;
}
return {
type: 'function',
function: {
name: choice.name,
},
};
}
function _summarizeMessage(message?: GatewayChatMessage): string {
if (!message) {
return '';
}
return _flattenContent(message.content);
}
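// Converts an OpenAI streaming chunk into a ProviderStreamChunk, surfacing the text delta, incremental tool-call arguments, usage, and finish reason.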
function _transformChunk(chunk: OpenAI.Chat.Completions.ChatCompletionChunk): ProviderStreamChunk {
const choice = chunk.choices[0];
const usage = (chunk as any).usage;
const delta = choice?.delta as any;
const result = delta?.content ?? '';
const toolCalls = Array.isArray(delta?.tool_calls)
? delta.tool_calls.map((toolCall: any) => ({
index: typeof toolCall?.index === 'number' ? toolCall.index : undefined,
id: typeof toolCall?.id === 'string' ? toolCall.id : undefined,
name: typeof toolCall?.function?.name === 'string' ? toolCall.function.name : undefined,
argumentsDelta:
typeof toolCall?.function?.arguments === 'string'
? toolCall.function.arguments
: undefined,
raw: toolCall,
}))
: undefined;
return {
isEnd: choice?.finish_reason != null || (chunk.choices.length === 0 && usage != null),
-result: choice?.delta?.content ?? '',
result,
usage: usage ?? undefined,
content: result ? [{ type: 'text', text: result }] : undefined,
toolCalls: toolCalls?.length ? toolCalls : undefined,
finishReason: choice?.finish_reason ?? null,
raw: chunk,
}
}
@@ -21,24 +224,47 @@ export class OpenAIProvider extends BaseProvider {
this.client = new OpenAI({ apiKey, baseURL, defaultHeaders: headers });
}
-async chat(messages: GatewayChatMessage[], model: string, options?: ChatOptions): Promise<AsyncIterable<UniversalChunk>> {
-const startTime = Date.now();
getCapabilities(): ProviderCapabilities {
return OPENAI_PROVIDER_CAPABILITIES;
}
async chat(
messages: GatewayChatMessage[],
model: string,
options?: ToolCapableChatOptions
): Promise<AsyncIterable<ProviderStreamChunk>> {
const startTime = Date.now();
const transformedMessages = _transformMessages(messages);
const tools = _transformTools(options?.tools);
const toolChoice = tools?.length ? _transformToolChoice(options?.toolChoice) : undefined;
const lastMessage = messages[messages.length - 1];
logManager.logApiRequest('chat.completions.create', {
model,
-lastMessage: lastMessage?.content?.substring(0, 100) + (lastMessage?.content?.length > 100 ? '...' : ''),
-messageCount: messages.length,
lastMessage: _summarizeMessage(lastMessage).substring(0, 100)
+ (_summarizeMessage(lastMessage).length > 100 ? '...' : ''),
messageCount: transformedMessages.length,
toolCount: tools?.length ?? 0,
toolChoice: typeof toolChoice === 'string' ? toolChoice : (toolChoice ? 'named' : undefined),
}, 'POST');
try {
-const chunks = await this.client.chat.completions.create({
const request: Record<string, unknown> = {
model,
-messages: messages as any,
messages: transformedMessages,
stream: true,
stream_options: { include_usage: true },
-}, {
};
if (tools?.length) {
request.tools = tools;
}
if (toolChoice) {
request.tool_choice = toolChoice;
}
const chunks = await this.client.chat.completions.create(request as any, {
signal: options?.signal,
});