feat: enhance token usage tracking and history management
- Updated HTML assets for improved loading.
- Integrated token usage tracking in chat processing, appending usage details to transcripts.
- Enhanced OpenAIProvider to include usage data in chat completion responses.
- Implemented asynchronous retrieval of recent token usage history.
- Added utility functions for managing transcript files and parsing usage data.
- Updated UI components to reflect changes in usage status handling.
- Ensured consistent usage status definitions across the application.
This commit is contained in:
File diff suppressed because it is too large
Load Diff
4
dist/index.html
vendored
4
dist/index.html
vendored
@@ -8,8 +8,8 @@
|
||||
http-equiv="Content-Security-Policy"
|
||||
content="default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data: http://8.138.234.141 https://one-feel-bucket.oss-cn-guangzhou.aliyuncs.com; connect-src 'self' http://8.138.234.141 https://api.iconify.design wss://onefeel.brother7.cn"
|
||||
/>
|
||||
<script type="module" crossorigin src="./assets/index-DVErU5RB.js"></script>
|
||||
<link rel="stylesheet" crossorigin href="./assets/index-CjolQV3k.css">
|
||||
<script type="module" crossorigin src="./assets/index-CtY0TCqT.js"></script>
|
||||
<link rel="stylesheet" crossorigin href="./assets/index-Dd5p8HHW.css">
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
|
||||
@@ -6,6 +6,7 @@ import logManager from '@electron/service/logger';
|
||||
import type { RawMessage } from '@src/pages/home/model/ChatModel';
|
||||
import { sessionStore } from '../session-store';
|
||||
import type { GatewayEvent, GatewayRpcParams, GatewayRpcReturns } from '../types';
|
||||
import { appendTranscriptLine } from '@electron/utils/token-usage-writer';
|
||||
|
||||
export interface GatewayChatMessage {
|
||||
role: 'system' | 'user' | 'assistant' | 'tool';
|
||||
@@ -34,11 +35,13 @@ async function processChatStream(
|
||||
runId: string,
|
||||
provider: BaseProvider,
|
||||
model: string,
|
||||
providerName: string,
|
||||
messages: GatewayChatMessage[],
|
||||
signal: AbortSignal,
|
||||
broadcast: (event: GatewayEvent) => void
|
||||
) {
|
||||
let assistantContent = '';
|
||||
let finalUsage: any = undefined;
|
||||
|
||||
try {
|
||||
const chunks = await provider.chat(messages, model, { signal });
|
||||
@@ -56,9 +59,12 @@ async function processChatStream(
|
||||
});
|
||||
}
|
||||
|
||||
if (chunk.isEnd) {
|
||||
break;
|
||||
if (chunk.usage !== undefined) {
|
||||
finalUsage = chunk.usage;
|
||||
}
|
||||
|
||||
// Do not break on isEnd; the iterable may still yield a trailing usage chunk.
|
||||
// The loop will finish naturally when the generator is done.
|
||||
}
|
||||
|
||||
if (!signal.aborted) {
|
||||
@@ -70,6 +76,18 @@ async function processChatStream(
|
||||
sessionStore.appendMessage(sessionKey, finalMessage);
|
||||
sessionStore.clearActiveRun(sessionKey);
|
||||
|
||||
appendTranscriptLine(sessionKey, {
|
||||
type: 'message',
|
||||
timestamp: new Date().toISOString(),
|
||||
message: {
|
||||
role: 'assistant',
|
||||
content: assistantContent,
|
||||
model,
|
||||
provider: providerName,
|
||||
usage: finalUsage,
|
||||
},
|
||||
});
|
||||
|
||||
broadcast({
|
||||
type: 'chat:final',
|
||||
sessionKey,
|
||||
@@ -96,9 +114,19 @@ export function handleChatSend(
|
||||
const runId = randomUUID();
|
||||
|
||||
// 1. Append user message
|
||||
sessionStore.appendMessage(sessionKey, {
|
||||
const userMessage: RawMessage = {
|
||||
...message,
|
||||
timestamp: message.timestamp || Date.now(),
|
||||
};
|
||||
sessionStore.appendMessage(sessionKey, userMessage);
|
||||
|
||||
appendTranscriptLine(sessionKey, {
|
||||
type: 'message',
|
||||
timestamp: new Date().toISOString(),
|
||||
message: {
|
||||
role: 'user',
|
||||
content: typeof userMessage.content === 'string' ? userMessage.content : '',
|
||||
},
|
||||
});
|
||||
|
||||
// 2. Resolve provider account
|
||||
@@ -127,7 +155,8 @@ export function handleChatSend(
|
||||
|
||||
// Run async stream processing in background
|
||||
const provider = createProvider(accountId);
|
||||
processChatStream(sessionKey, runId, provider, model, messages, abortController.signal, broadcast).catch(
|
||||
const providerName = account.vendorId || account.label || account.model || 'unknown';
|
||||
processChatStream(sessionKey, runId, provider, model, providerName, messages, abortController.signal, broadcast).catch(
|
||||
(err) => {
|
||||
logManager.error('Unexpected error in processChatStream:', err);
|
||||
sessionStore.clearActiveRun(sessionKey);
|
||||
|
||||
@@ -82,7 +82,8 @@ async function handleLocalProviderApi(path: string, method: string, body: any) {
|
||||
return { success: true, ok: true, json: result, data: result };
|
||||
}
|
||||
if (path === '/api/usage/recent-token-history' && method === 'GET') {
|
||||
return { success: true, ok: true, json: providerApiService.getUsageHistory(), data: providerApiService.getUsageHistory() };
|
||||
const usageHistory = await providerApiService.getUsageHistory();
|
||||
return { success: true, ok: true, json: usageHistory, data: usageHistory };
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -5,9 +5,11 @@ import logManager from "@electron/service/logger"
|
||||
|
||||
function _transformChunk(chunk: OpenAI.Chat.Completions.ChatCompletionChunk): UniversalChunk {
|
||||
const choice = chunk.choices[0];
|
||||
const usage = (chunk as any).usage;
|
||||
return {
|
||||
isEnd: choice?.finish_reason != null,
|
||||
isEnd: choice?.finish_reason != null || (chunk.choices.length === 0 && usage != null),
|
||||
result: choice?.delta?.content ?? '',
|
||||
usage: usage ?? undefined,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,6 +37,7 @@ export class OpenAIProvider extends BaseProvider {
|
||||
model,
|
||||
messages: messages as any,
|
||||
stream: true,
|
||||
stream_options: { include_usage: true },
|
||||
}, {
|
||||
signal: options?.signal,
|
||||
});
|
||||
|
||||
@@ -233,7 +233,8 @@ export const providerApiService = {
|
||||
return { success: true };
|
||||
},
|
||||
|
||||
getUsageHistory() {
|
||||
return [] as any[];
|
||||
async getUsageHistory(limit?: number) {
|
||||
const { getRecentTokenUsageHistory } = await import('@electron/utils/token-usage');
|
||||
return getRecentTokenUsageHistory(limit);
|
||||
},
|
||||
};
|
||||
|
||||
330
electron/utils/token-usage-core.ts
Normal file
330
electron/utils/token-usage-core.ts
Normal file
@@ -0,0 +1,330 @@
|
||||
/**
 * One normalized row of token usage parsed from a session transcript
 * (.jsonl) file, newest entries first when returned by the readers.
 */
export interface TokenUsageHistoryEntry {
  // ISO-8601 timestamp copied from the transcript line.
  timestamp: string;
  // Derived from the transcript file name (see
  // extractSessionIdFromTranscriptFileName).
  sessionId: string;
  // Directory name under <userData>/agents that contained the file.
  agentId: string;
  model?: string;
  provider?: string;
  // Normalized message text, only present when non-empty.
  content?: string;
  // 'available' = counts parsed; 'missing' = usage object had no
  // recognizable numbers; 'error' = usage was not a plain object.
  usageStatus: 'available' | 'missing' | 'error';
  inputTokens: number;
  outputTokens: number;
  cacheReadTokens: number;
  cacheWriteTokens: number;
  // Explicit total when reported, otherwise the sum of the components.
  totalTokens: number;
  // From usage.cost.total when the provider reports a cost.
  costUsd?: number;
}
|
||||
|
||||
export function extractSessionIdFromTranscriptFileName(fileName: string): string | undefined {
|
||||
if (!fileName.endsWith('.jsonl') && !fileName.includes('.jsonl.reset.')) return undefined;
|
||||
return fileName
|
||||
.replace(/\.reset\..+$/, '')
|
||||
.replace(/\.deleted\.jsonl$/, '')
|
||||
.replace(/\.jsonl$/, '');
|
||||
}
|
||||
|
||||
// Union of every usage-field spelling observed across providers and
// transcript formats. The index signature is what firstUsageNumber()
// actually probes; the named optional fields merely document the known
// variants (camelCase, snake_case, and *TokenCount forms).
interface TranscriptUsageShape {
  [key: string]: unknown;
  input?: number;
  output?: number;
  total?: number;
  cacheRead?: number;
  cacheWrite?: number;
  promptTokens?: number;
  completionTokens?: number;
  totalTokens?: number;
  input_tokens?: number;
  output_tokens?: number;
  total_tokens?: number;
  cache_read?: number;
  cache_write?: number;
  prompt_tokens?: number;
  completion_tokens?: number;
  cache_read_tokens?: number;
  cache_write_tokens?: number;
  inputTokenCount?: number;
  input_token_count?: number;
  outputTokenCount?: number;
  output_token_count?: number;
  promptTokenCount?: number;
  prompt_token_count?: number;
  completionTokenCount?: number;
  completion_token_count?: number;
  totalTokenCount?: number;
  total_token_count?: number;
  cacheReadTokenCount?: number;
  cacheReadTokens?: number;
  cache_write_token_count?: number;
  // Provider-reported cost; only `total` (USD) is consumed.
  cost?: {
    total?: number;
  };
}
|
||||
|
||||
// Outcome of parsing a usage payload:
//   'available' — at least one token/cost number was recognized
//   'missing'   — usage object existed but held no recognizable numbers
//   'error'     — usage field was not a plain object
type UsageRecordStatus = 'available' | 'missing' | 'error';

// Normalized token counts extracted from a TranscriptUsageShape.
// All counts default to 0 when the source omitted them.
interface ParsedUsageTokens {
  inputTokens: number;
  outputTokens: number;
  cacheReadTokens: number;
  cacheWriteTokens: number;
  // Explicit total when reported, otherwise the sum of the components.
  totalTokens: number;
  costUsd?: number;
  usageStatus: UsageRecordStatus;
}
|
||||
|
||||
function normalizeUsageNumber(value: unknown): number | undefined {
|
||||
if (typeof value === 'number' && Number.isFinite(value)) {
|
||||
return value;
|
||||
}
|
||||
if (typeof value === 'string') {
|
||||
const trimmed = value.trim();
|
||||
if (trimmed.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
const parsed = Number(trimmed);
|
||||
return Number.isFinite(parsed) ? parsed : undefined;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function firstUsageNumber(usage: TranscriptUsageShape | undefined, candidates: string[]): number | undefined {
|
||||
if (!usage) return undefined;
|
||||
for (const key of candidates) {
|
||||
const value = usage[key];
|
||||
const parsed = normalizeUsageNumber(value);
|
||||
if (parsed !== undefined) return parsed;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function parseUsageFromShape(usage: unknown): ParsedUsageTokens | undefined {
|
||||
if (usage === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (usage === null || typeof usage !== 'object' || Array.isArray(usage)) {
|
||||
return {
|
||||
usageStatus: 'error',
|
||||
inputTokens: 0,
|
||||
outputTokens: 0,
|
||||
cacheReadTokens: 0,
|
||||
cacheWriteTokens: 0,
|
||||
totalTokens: 0,
|
||||
};
|
||||
}
|
||||
|
||||
const usageShape = usage as TranscriptUsageShape;
|
||||
|
||||
const inputTokens = firstUsageNumber(usageShape, [
|
||||
'input',
|
||||
'promptTokens',
|
||||
'prompt_tokens',
|
||||
'input_tokens',
|
||||
'inputTokenCount',
|
||||
'input_token_count',
|
||||
'promptTokenCount',
|
||||
'prompt_token_count',
|
||||
]);
|
||||
const outputTokens = firstUsageNumber(usageShape, [
|
||||
'output',
|
||||
'completionTokens',
|
||||
'completion_tokens',
|
||||
'output_tokens',
|
||||
'outputTokenCount',
|
||||
'output_token_count',
|
||||
'completionTokenCount',
|
||||
'completion_token_count',
|
||||
]);
|
||||
const cacheReadTokens = firstUsageNumber(usageShape, [
|
||||
'cacheRead',
|
||||
'cache_read',
|
||||
'cacheReadTokens',
|
||||
'cache_read_tokens',
|
||||
'cacheReadTokenCount',
|
||||
'cache_read_token_count',
|
||||
]);
|
||||
const cacheWriteTokens = firstUsageNumber(usageShape, [
|
||||
'cacheWrite',
|
||||
'cache_write',
|
||||
'cacheWriteTokens',
|
||||
'cache_write_tokens',
|
||||
'cacheWriteTokenCount',
|
||||
'cache_write_token_count',
|
||||
]);
|
||||
const explicitTotalTokens = firstUsageNumber(usageShape, [
|
||||
'total',
|
||||
'totalTokens',
|
||||
'total_tokens',
|
||||
'totalTokenCount',
|
||||
'total_token_count',
|
||||
]);
|
||||
|
||||
const hasUsageValue =
|
||||
inputTokens !== undefined
|
||||
|| outputTokens !== undefined
|
||||
|| cacheReadTokens !== undefined
|
||||
|| cacheWriteTokens !== undefined
|
||||
|| explicitTotalTokens !== undefined
|
||||
|| normalizeUsageNumber(usageShape.cost?.total) !== undefined;
|
||||
|
||||
if (!hasUsageValue) {
|
||||
return {
|
||||
usageStatus: 'missing',
|
||||
inputTokens: 0,
|
||||
outputTokens: 0,
|
||||
cacheReadTokens: 0,
|
||||
cacheWriteTokens: 0,
|
||||
totalTokens: 0,
|
||||
};
|
||||
}
|
||||
|
||||
const totalTokens = explicitTotalTokens ?? (
|
||||
(inputTokens ?? 0)
|
||||
+ (outputTokens ?? 0)
|
||||
+ (cacheReadTokens ?? 0)
|
||||
+ (cacheWriteTokens ?? 0)
|
||||
);
|
||||
|
||||
return {
|
||||
usageStatus: 'available',
|
||||
inputTokens: inputTokens ?? 0,
|
||||
outputTokens: outputTokens ?? 0,
|
||||
cacheReadTokens: cacheReadTokens ?? 0,
|
||||
cacheWriteTokens: cacheWriteTokens ?? 0,
|
||||
totalTokens,
|
||||
costUsd: normalizeUsageNumber(usageShape.cost?.total),
|
||||
};
|
||||
}
|
||||
|
||||
// Loose shape of one JSONL transcript line, covering only the fields the
// usage parser reads. Assistant messages carry `message.usage` directly;
// toolResult messages nest usage/model/provider under `message.details`.
interface TranscriptLineShape {
  type?: string;
  // ISO-8601 string; lines without it are skipped by the parser.
  timestamp?: string;
  message?: {
    role?: string;
    model?: string;
    // Alternate model field; used as a fallback when `model` is absent.
    modelRef?: string;
    provider?: string;
    usage?: TranscriptUsageShape;
    details?: {
      provider?: string;
      model?: string;
      usage?: TranscriptUsageShape;
      content?: unknown;
      // Last-resort provider fallback for toolResult lines.
      externalContent?: {
        provider?: string;
      };
    };
  };
}
|
||||
|
||||
function normalizeUsageContent(value: unknown): string | undefined {
|
||||
if (typeof value === 'string') {
|
||||
const trimmed = value.trim();
|
||||
return trimmed.length > 0 ? trimmed : undefined;
|
||||
}
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
const chunks = value
|
||||
.map((item) => normalizeUsageContent(item))
|
||||
.filter((item): item is string => Boolean(item));
|
||||
if (chunks.length === 0) return undefined;
|
||||
return chunks.join('\n\n');
|
||||
}
|
||||
|
||||
if (value && typeof value === 'object') {
|
||||
const record = value as Record<string, unknown>;
|
||||
if (typeof record.text === 'string') {
|
||||
const trimmed = record.text.trim();
|
||||
if (trimmed.length > 0) return trimmed;
|
||||
}
|
||||
if (typeof record.content === 'string') {
|
||||
const trimmed = record.content.trim();
|
||||
if (trimmed.length > 0) return trimmed;
|
||||
}
|
||||
if (Array.isArray(record.content)) {
|
||||
return normalizeUsageContent(record.content);
|
||||
}
|
||||
if (typeof record.thinking === 'string') {
|
||||
const trimmed = record.thinking.trim();
|
||||
if (trimmed.length > 0) return trimmed;
|
||||
}
|
||||
try {
|
||||
return JSON.stringify(record, null, 2);
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export function parseUsageEntriesFromJsonl(
|
||||
content: string,
|
||||
context: { sessionId: string; agentId: string },
|
||||
limit?: number,
|
||||
): TokenUsageHistoryEntry[] {
|
||||
const entries: TokenUsageHistoryEntry[] = [];
|
||||
const lines = content.split(/\r?\n/).filter(Boolean);
|
||||
const maxEntries = typeof limit === 'number' && Number.isFinite(limit)
|
||||
? Math.max(Math.floor(limit), 0)
|
||||
: Number.POSITIVE_INFINITY;
|
||||
|
||||
for (let i = lines.length - 1; i >= 0 && entries.length < maxEntries; i -= 1) {
|
||||
let parsed: TranscriptLineShape;
|
||||
try {
|
||||
parsed = JSON.parse(lines[i]) as TranscriptLineShape;
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
|
||||
const message = parsed.message;
|
||||
if (!message || !parsed.timestamp) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (message.role === 'assistant' && 'usage' in message) {
|
||||
const usage = parseUsageFromShape(message.usage);
|
||||
if (!usage) continue;
|
||||
|
||||
const contentText = normalizeUsageContent((message as Record<string, unknown>).content);
|
||||
entries.push({
|
||||
timestamp: parsed.timestamp,
|
||||
sessionId: context.sessionId,
|
||||
agentId: context.agentId,
|
||||
model: message.model ?? message.modelRef,
|
||||
provider: message.provider,
|
||||
...(contentText ? { content: contentText } : {}),
|
||||
...usage,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
if (message.role !== 'toolResult') {
|
||||
continue;
|
||||
}
|
||||
|
||||
const details = message.details;
|
||||
if (!details || !('usage' in details)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const usage = parseUsageFromShape(details.usage);
|
||||
if (!usage) continue;
|
||||
|
||||
const provider = details.provider ?? details.externalContent?.provider ?? message.provider;
|
||||
const model = details.model ?? message.model ?? message.modelRef;
|
||||
const contentText = normalizeUsageContent(details.content)
|
||||
?? normalizeUsageContent((message as Record<string, unknown>).content);
|
||||
|
||||
entries.push({
|
||||
timestamp: parsed.timestamp,
|
||||
sessionId: context.sessionId,
|
||||
agentId: context.agentId,
|
||||
model,
|
||||
provider,
|
||||
...(contentText ? { content: contentText } : {}),
|
||||
...usage,
|
||||
});
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
34
electron/utils/token-usage-writer.ts
Normal file
34
electron/utils/token-usage-writer.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import { app } from 'electron';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
export function getTranscriptFilePath(sessionKey: string): string {
|
||||
let agentId: string;
|
||||
let sessionId: string;
|
||||
|
||||
if (sessionKey.startsWith('agent:')) {
|
||||
const parts = sessionKey.split(':');
|
||||
agentId = parts[1] ?? 'default';
|
||||
sessionId = parts.slice(2).join(':') || sessionKey;
|
||||
} else if (sessionKey.startsWith('local:')) {
|
||||
const parts = sessionKey.split(':');
|
||||
agentId = parts[1] ?? 'local';
|
||||
sessionId = parts.slice(2).join(':') || sessionKey;
|
||||
} else {
|
||||
agentId = 'default';
|
||||
sessionId = sessionKey;
|
||||
}
|
||||
|
||||
if (!sessionId) {
|
||||
sessionId = 'unknown';
|
||||
}
|
||||
|
||||
const baseDir = path.join(app.getPath('userData'), 'agents', agentId, 'sessions');
|
||||
return path.join(baseDir, `${sessionId}.jsonl`);
|
||||
}
|
||||
|
||||
export function appendTranscriptLine(sessionKey: string, lineObject: any): void {
|
||||
const filePath = getTranscriptFilePath(sessionKey);
|
||||
fs.mkdirSync(path.dirname(filePath), { recursive: true });
|
||||
fs.appendFileSync(filePath, JSON.stringify(lineObject) + '\n', 'utf-8');
|
||||
}
|
||||
101
electron/utils/token-usage.ts
Normal file
101
electron/utils/token-usage.ts
Normal file
@@ -0,0 +1,101 @@
|
||||
import { readdir, readFile, stat } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { app } from 'electron';
|
||||
import logManager from '@electron/service/logger';
|
||||
import {
|
||||
extractSessionIdFromTranscriptFileName,
|
||||
parseUsageEntriesFromJsonl,
|
||||
type TokenUsageHistoryEntry,
|
||||
} from './token-usage-core';
|
||||
|
||||
export {
|
||||
extractSessionIdFromTranscriptFileName,
|
||||
parseUsageEntriesFromJsonl,
|
||||
type TokenUsageHistoryEntry,
|
||||
} from './token-usage-core';
|
||||
|
||||
async function listAgentIdsWithSessionDirs(): Promise<string[]> {
|
||||
const agentsDir = join(app.getPath('userData'), 'agents');
|
||||
const agentIds = new Set<string>();
|
||||
|
||||
try {
|
||||
const agentEntries = await readdir(agentsDir, { withFileTypes: true });
|
||||
for (const entry of agentEntries) {
|
||||
if (entry.isDirectory()) {
|
||||
const normalized = entry.name.trim();
|
||||
if (normalized) {
|
||||
agentIds.add(normalized);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Ignore disk discovery failures and return whatever we already found.
|
||||
}
|
||||
|
||||
return [...agentIds];
|
||||
}
|
||||
|
||||
async function listRecentSessionFiles(): Promise<Array<{ filePath: string; sessionId: string; agentId: string; mtimeMs: number }>> {
|
||||
const agentsDir = join(app.getPath('userData'), 'agents');
|
||||
|
||||
try {
|
||||
const agentEntries = await listAgentIdsWithSessionDirs();
|
||||
const files: Array<{ filePath: string; sessionId: string; agentId: string; mtimeMs: number }> = [];
|
||||
|
||||
for (const agentId of agentEntries) {
|
||||
const sessionsDir = join(agentsDir, agentId, 'sessions');
|
||||
try {
|
||||
const sessionEntries = await readdir(sessionsDir);
|
||||
|
||||
for (const fileName of sessionEntries) {
|
||||
const sessionId = extractSessionIdFromTranscriptFileName(fileName);
|
||||
if (!sessionId) continue;
|
||||
const filePath = join(sessionsDir, fileName);
|
||||
try {
|
||||
const fileStat = await stat(filePath);
|
||||
files.push({
|
||||
filePath,
|
||||
sessionId,
|
||||
agentId,
|
||||
mtimeMs: fileStat.mtimeMs,
|
||||
});
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
files.sort((a, b) => b.mtimeMs - a.mtimeMs);
|
||||
return files;
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function getRecentTokenUsageHistory(limit?: number): Promise<TokenUsageHistoryEntry[]> {
|
||||
const files = await listRecentSessionFiles();
|
||||
const results: TokenUsageHistoryEntry[] = [];
|
||||
const maxEntries = typeof limit === 'number' && Number.isFinite(limit)
|
||||
? Math.max(Math.floor(limit), 0)
|
||||
: Number.POSITIVE_INFINITY;
|
||||
|
||||
for (const file of files) {
|
||||
if (results.length >= maxEntries) break;
|
||||
try {
|
||||
const content = await readFile(file.filePath, 'utf8');
|
||||
const entries = parseUsageEntriesFromJsonl(content, {
|
||||
sessionId: file.sessionId,
|
||||
agentId: file.agentId,
|
||||
}, Number.isFinite(maxEntries) ? maxEntries - results.length : undefined);
|
||||
results.push(...entries);
|
||||
} catch (error) {
|
||||
logManager.error(`Failed to read token usage transcript ${file.filePath}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
results.sort((a, b) => Date.parse(b.timestamp) - Date.parse(a.timestamp));
|
||||
return Number.isFinite(maxEntries) ? results.slice(0, maxEntries) : results;
|
||||
}
|
||||
1
global.d.ts
vendored
1
global.d.ts
vendored
@@ -201,6 +201,7 @@ declare global {
|
||||
interface UniversalChunk {
|
||||
isEnd: boolean;
|
||||
result: string;
|
||||
usage?: any;
|
||||
}
|
||||
|
||||
interface DialogueBackStream {
|
||||
|
||||
@@ -128,7 +128,7 @@
|
||||
</div>
|
||||
</div>
|
||||
<div class="mt-3 flex flex-wrap gap-x-4 gap-y-1.5 text-[12.5px] font-medium text-[#99A0AE]">
|
||||
<template v-if="entry.usageStatus === 'available' || entry.usageStatus === undefined">
|
||||
<template v-if="entry.usageStatus === 'available'">
|
||||
<span class="flex items-center gap-1.5"><div class="w-2 h-2 rounded-full bg-sky-500"></div>{{ t('models.recentTokenHistory.input', `Input: ${formatTokenCount(entry.inputTokens)}`) }}</span>
|
||||
<span class="flex items-center gap-1.5"><div class="w-2 h-2 rounded-full bg-violet-500"></div>{{ t('models.recentTokenHistory.output', `Output: ${formatTokenCount(entry.outputTokens)}`) }}</span>
|
||||
<span v-if="entry.cacheReadTokens > 0" class="flex items-center gap-1.5"><div class="w-2 h-2 rounded-full bg-amber-500"></div>{{ t('models.recentTokenHistory.cacheRead', `Cache Read: ${formatTokenCount(entry.cacheReadTokens)}`) }}</span>
|
||||
@@ -247,7 +247,7 @@ const fetchUsage = async () => {
|
||||
try {
|
||||
const entries = await hostApiFetch<UsageHistoryEntry[]>('/api/usage/recent-token-history');
|
||||
const normalized = Array.isArray(entries) ? entries : [];
|
||||
fetchState.value.stableData = resolveStableUsageHistory(fetchState.value.stableData, normalized);
|
||||
fetchState.value.stableData = resolveStableUsageHistory(fetchState.value.stableData, normalized, { preservePreviousOnEmpty: true });
|
||||
fetchState.value.data = normalized;
|
||||
fetchState.value.status = 'done';
|
||||
} catch (error) {
|
||||
|
||||
@@ -5,7 +5,7 @@ export type UsageHistoryEntry = {
|
||||
model?: string;
|
||||
provider?: string;
|
||||
content?: string;
|
||||
usageStatus?: 'available' | 'missing' | 'error';
|
||||
usageStatus: 'available' | 'missing' | 'error';
|
||||
inputTokens: number;
|
||||
outputTokens: number;
|
||||
cacheReadTokens: number;
|
||||
|
||||
Reference in New Issue
Block a user