mirror of https://github.com/rcourtman/Pulse.git, synced 2026-02-18 00:17:39 +01:00
chore: remove unused frontend API functions
Remove API functions that are defined but never called:

- ai.ts: OAuth flow, execute/executeStream, chat session sync
- charts.ts: getStorageCharts, getMetricsStoreStats
- notifications.ts: queue/DLQ management, health check
- updates.ts: update history functions

Also removes unused type definitions (MetricsStoreStats, UpdateHistoryEntry).
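For context on what callers lose, here is a minimal sketch of how the removed AIAPI.executeStream would have been consumed, based only on the signature visible in the diff below; the import path and the surrounding function are illustrative assumptions, not code from this repository.

// Hypothetical caller of the removed streaming helper (illustrative only).
// AIAPI.executeStream and AIStreamEvent appear in the diff below; '@/api/ai' is an assumed path.
import { AIAPI } from '@/api/ai';
import type { AIExecuteRequest, AIStreamEvent } from '@/types/ai';

async function runStreamingPrompt(request: AIExecuteRequest): Promise<AIStreamEvent[]> {
  const events: AIStreamEvent[] = [];
  const controller = new AbortController();

  await AIAPI.executeStream(
    request,
    (event) => { events.push(event); }, // every parsed SSE `data:` payload lands here
    controller.signal,                  // callers could abort the fetch mid-stream
  );

  // A 'done' event (possibly synthetic) is always the last thing emitted.
  return events;
}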
@@ -4,12 +4,9 @@ import type {
  AISettings,
  AISettingsUpdateRequest,
  AITestResult,
  AIExecuteRequest,
  AIExecuteResponse,
  AIStreamEvent,
  AICostSummary,
  AIChatSession,
  AIChatSessionSummary,
} from '@/types/ai';
import type {
  AnomaliesResponse,
@@ -99,38 +96,6 @@ export class AIAPI {
    return apiFetchJSON(`${this.baseUrl}/ai/intelligence/learning`) as Promise<LearningStatusResponse>;
  }

  // Start OAuth flow for Claude Pro/Max subscription
  // Returns the authorization URL to redirect the user to
  static async startOAuth(): Promise<{ auth_url: string; state: string }> {
    return apiFetchJSON(`${this.baseUrl}/ai/oauth/start`, {
      method: 'POST',
    }) as Promise<{ auth_url: string; state: string }>;
  }

  // Exchange manually-pasted authorization code for tokens
  static async exchangeOAuthCode(code: string, state: string): Promise<{ success: boolean; message: string }> {
    return apiFetchJSON(`${this.baseUrl}/ai/oauth/exchange`, {
      method: 'POST',
      body: JSON.stringify({ code, state }),
    }) as Promise<{ success: boolean; message: string }>;
  }

  // Disconnect OAuth and clear tokens
  static async disconnectOAuth(): Promise<{ success: boolean; message: string }> {
    return apiFetchJSON(`${this.baseUrl}/ai/oauth/disconnect`, {
      method: 'POST',
    }) as Promise<{ success: boolean; message: string }>;
  }

  // Execute an AI prompt
  static async execute(request: AIExecuteRequest): Promise<AIExecuteResponse> {
    return apiFetchJSON(`${this.baseUrl}/ai/execute`, {
      method: 'POST',
      body: JSON.stringify(request),
    }) as Promise<AIExecuteResponse>;
  }

  // Analyze a Kubernetes cluster with AI
  static async analyzeKubernetesCluster(clusterId: string): Promise<AIExecuteResponse> {
    return apiFetchJSON(`${this.baseUrl}/ai/kubernetes/analyze`, {
@@ -264,230 +229,6 @@ export class AIAPI {
    }
  }

  // Execute an AI prompt with streaming
  // Returns an abort function to cancel the request
  static async executeStream(
    request: AIExecuteRequest,
    onEvent: (event: AIStreamEvent) => void,
    signal?: AbortSignal
  ): Promise<void> {
    logger.debug('[AI SSE] Starting streaming request', request);

    const response = await apiFetch(`${this.baseUrl}/ai/execute/stream`, {
      method: 'POST',
      body: JSON.stringify(request),
      headers: {
        'Content-Type': 'application/json',
        'Accept': 'text/event-stream',
      },
      signal,
    });

    logger.debug('[AI SSE] Response status', { status: response.status, contentType: response.headers.get('content-type') });

    if (!response.ok) {
      const text = await response.text();
      logger.error('[AI SSE] Request failed', text);
      throw new Error(text || `Request failed with status ${response.status}`);
    }

    const reader = response.body?.getReader();
    if (!reader) {
      logger.error('[AI SSE] No response body');
      throw new Error('No response body');
    }

    const decoder = new TextDecoder();
    let buffer = '';
    let lastEventTime = Date.now();
    let receivedComplete = false;
    let receivedDone = false;

    // Timeout to detect stalled streams (5 minutes - Opus models can take a long time)
    const STREAM_TIMEOUT_MS = 300000;

    logger.debug('[AI SSE] Starting to read stream');

    try {
      for (; ;) {
        // Check for stream timeout
        if (Date.now() - lastEventTime > STREAM_TIMEOUT_MS) {
          logger.warn('[AI SSE] Stream timeout', { seconds: STREAM_TIMEOUT_MS / 1000 });
          break;
        }

        // Create a promise with timeout for the read operation
        const readPromise = reader.read();
        const timeoutPromise = new Promise<never>((_, reject) => {
          setTimeout(() => reject(new Error('Read timeout')), STREAM_TIMEOUT_MS);
        });

        let result: ReadableStreamReadResult<Uint8Array>;
        try {
          result = await Promise.race([readPromise, timeoutPromise]);
        } catch (e) {
          if ((e as Error).message === 'Read timeout') {
            logger.warn('[AI SSE] Read timeout, ending stream');
            break;
          }
          throw e;
        }

        const { done, value } = result;
        if (done) {
          logger.debug('[AI SSE] Stream ended normally');
          break;
        }

        lastEventTime = Date.now();
        const chunk = decoder.decode(value, { stream: true });

        // Log chunk info only if it's not just a heartbeat
        if (!chunk.includes(': heartbeat')) {
          logger.debug('[AI SSE] Received chunk', { bytes: chunk.length });
        }

        buffer += chunk;

        // Process complete SSE messages (separated by double newlines)
        // Handle both \n\n and \r\n\r\n for cross-platform compatibility
        const normalizedBuffer = buffer.replace(/\r\n/g, '\n');
        const messages = normalizedBuffer.split('\n\n');
        buffer = messages.pop() || ''; // Keep incomplete message in buffer

        for (const message of messages) {
          // Skip empty messages and heartbeat comments
          if (!message.trim() || message.trim().startsWith(':')) {
            if (message.includes('heartbeat')) {
              logger.debug('[AI SSE] Received heartbeat');
            }
            continue;
          }

          // Parse SSE message (can have multiple lines, look for data: prefix)
          const dataLines = message.split('\n').filter(line => line.startsWith('data: '));
          for (const line of dataLines) {
            try {
              const jsonStr = line.slice(6); // Remove 'data: ' prefix
              if (!jsonStr.trim()) continue;

              const data = JSON.parse(jsonStr);
              logger.debug('[AI SSE] Parsed event', { type: data.type, data });

              // Track completion events
              if (data.type === 'complete') {
                receivedComplete = true;
              }
              if (data.type === 'done') {
                receivedDone = true;
              }

              onEvent(data as AIStreamEvent);
            } catch (e) {
              logger.error('[AI SSE] Failed to parse event', { error: e, line });
            }
          }
        }
      }

      // Process any remaining buffer content
      if (buffer.trim() && buffer.trim().startsWith('data: ')) {
        try {
          const jsonStr = buffer.slice(6);
          if (jsonStr.trim()) {
            const data = JSON.parse(jsonStr);
            logger.debug('[AI SSE] Parsed final buffered event', { type: data.type });
            onEvent(data as AIStreamEvent);
            if (data.type === 'complete') receivedComplete = true;
            if (data.type === 'done') receivedDone = true;
          }
        } catch {
          logger.warn('[AI SSE] Could not parse remaining buffer', { preview: buffer.substring(0, 100) });
        }
      }

      // If we ended without receiving a done event, send a synthetic one
      // This ensures the UI properly clears the streaming state
      if (!receivedDone) {
        logger.warn('[AI SSE] Stream ended without done event, sending synthetic done');
        onEvent({ type: 'done', data: undefined });
      }

    } finally {
      reader.releaseLock();
      logger.debug('[AI SSE] Reader released', { receivedComplete, receivedDone });
    }
  }

  // ============================================
  // AI Chat Sessions API - sync across devices
  // ============================================

  // List all chat sessions for the current user
  static async listChatSessions(): Promise<AIChatSessionSummary[]> {
    return apiFetchJSON(`${this.baseUrl}/ai/chat/sessions`) as Promise<AIChatSessionSummary[]>;
  }

  // Get a specific chat session by ID
  static async getChatSession(sessionId: string): Promise<AIChatSession> {
    const response = await apiFetchJSON(`${this.baseUrl}/ai/chat/sessions/${sessionId}`);
    // Convert server format to client format (snake_case to camelCase)
    return this.deserializeChatSession(response);
  }

  // Save a chat session (create or update)
  static async saveChatSession(session: AIChatSession): Promise<AIChatSession> {
    const response = await apiFetchJSON(`${this.baseUrl}/ai/chat/sessions/${session.id}`, {
      method: 'PUT',
      body: JSON.stringify(this.serializeChatSession(session)),
    });
    return this.deserializeChatSession(response);
  }

  // Delete a chat session
  static async deleteChatSession(sessionId: string): Promise<void> {
    await apiFetch(`${this.baseUrl}/ai/chat/sessions/${sessionId}`, {
      method: 'DELETE',
    });
  }

  // Helper to convert server format (snake_case) to client format (camelCase)
  private static deserializeChatSession(data: any): AIChatSession {
    return {
      id: data.id,
      username: data.username || '',
      title: data.title || '',
      createdAt: new Date(data.created_at || data.createdAt),
      updatedAt: new Date(data.updated_at || data.updatedAt),
      messages: (data.messages || []).map((m: any) => ({
        id: m.id,
        role: m.role,
        content: m.content,
        timestamp: new Date(m.timestamp),
        model: m.model,
        tokens: m.tokens,
        toolCalls: m.tool_calls || m.toolCalls,
      })),
    };
  }

  // Helper to convert client format (camelCase) to server format (snake_case)
  private static serializeChatSession(session: AIChatSession): any {
    return {
      id: session.id,
      title: session.title,
      messages: session.messages.map((m) => ({
        id: m.id,
        role: m.role,
        content: m.content,
        timestamp: m.timestamp.toISOString(),
        model: m.model,
        tokens: m.tokens,
        tool_calls: m.toolCalls,
      })),
    };
  }

  // Remediation plans
  static async getRemediationPlans(): Promise<RemediationPlansResponse> {
    const data = await apiFetchJSON(`${this.baseUrl}/ai/remediation/plans`) as { plans?: RemediationPlan[]; executions?: unknown[] };
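The removed executeStream parser above splits buffered output on blank lines, skips `: heartbeat` comment lines, and forwards each `data:` payload to onEvent. A rough illustration of the framing it expects, with invented payload contents:

// Illustrative SSE framing as handled by the removed parser (payload contents are invented).
const sampleStream =
  ': heartbeat\n\n' +                                                 // comment line, skipped
  'data: {"type":"complete","data":{"text":"example answer"}}\n\n' +  // parsed and forwarded to onEvent
  'data: {"type":"done"}\n\n';                                        // marks the end of the stream

// Mirror of the parsing steps in the diff above:
const messages = sampleStream.replace(/\r\n/g, '\n').split('\n\n');
for (const message of messages) {
  if (!message.trim() || message.trim().startsWith(':')) continue;    // skip empties and heartbeats
  for (const line of message.split('\n').filter((l) => l.startsWith('data: '))) {
    console.log(JSON.parse(line.slice(6)));                           // { type: 'complete', ... } then { type: 'done' }
  }
}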
@@ -79,22 +79,6 @@ export interface AllMetricsHistoryResponse {
  source?: 'store' | 'memory' | 'live';
}

export interface MetricsStoreStats {
  enabled: boolean;
  dbPath?: string;
  dbSize?: number;
  rawCount?: number;
  minuteCount?: number;
  hourlyCount?: number;
  dailyCount?: number;
  totalWrites?: number;
  bufferSize?: number;
  lastFlush?: string;
  lastRollup?: string;
  lastRetention?: string;
  error?: string;
}

export type TimeRange = '5m' | '15m' | '30m' | '1h' | '4h' | '12h' | '24h' | '7d';

export class ChartsAPI {
@@ -109,20 +93,6 @@ export class ChartsAPI {
    return apiFetchJSON(url);
  }

  /**
   * Fetch storage-specific chart data
   * @param rangeMinutes Range in minutes (default: 60)
   */
  static async getStorageCharts(rangeMinutes: number = 60): Promise<Record<string, {
    usage?: MetricPoint[];
    used?: MetricPoint[];
    total?: MetricPoint[];
    avail?: MetricPoint[];
  }>> {
    const url = `${this.baseUrl}/storage/charts?range=${rangeMinutes}`;
    return apiFetchJSON(url);
  }

  /**
   * Fetch persistent metrics history for a specific resource
   * This uses the SQLite-backed store with longer retention (up to 90 days)
@@ -146,11 +116,4 @@ export class ChartsAPI {
    return apiFetchJSON(url);
  }

  /**
   * Fetch statistics about the persistent metrics store
   */
  static async getMetricsStoreStats(): Promise<MetricsStoreStats> {
    const url = `${this.baseUrl}/metrics-store/stats`;
    return apiFetchJSON(url);
  }
}
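For reference, a hedged sketch of how a dashboard might have polled the removed ChartsAPI.getMetricsStoreStats and the MetricsStoreStats fields shown above; the import path and the logging are assumptions.

// Hypothetical consumer of the removed metrics-store stats helper (illustrative only).
import { ChartsAPI, type MetricsStoreStats } from '@/api/charts'; // import path assumed

async function logMetricsStoreHealth(): Promise<void> {
  const stats: MetricsStoreStats = await ChartsAPI.getMetricsStoreStats();
  if (!stats.enabled) {
    console.warn('Persistent metrics store is disabled', stats.error);
    return;
  }
  // All counters are optional on the interface, so default them before reporting.
  console.info('metrics store', {
    dbSize: stats.dbSize ?? 0,
    rawCount: stats.rawCount ?? 0,
    lastFlush: stats.lastFlush ?? 'never',
  });
}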
@@ -198,56 +198,4 @@ export class NotificationsAPI {
      body: JSON.stringify(webhook),
    });
  }

  // Queue and DLQ management
  static async getQueueStats(): Promise<Record<string, number>> {
    return apiFetchJSON(`${this.baseUrl}/queue/stats`);
  }

  static async getDLQ(limit = 100): Promise<unknown[]> {
    return apiFetchJSON(`${this.baseUrl}/dlq?limit=${limit}`);
  }

  static async retryDLQItem(id: string): Promise<{ success: boolean }> {
    return apiFetchJSON(`${this.baseUrl}/dlq/retry`, {
      method: 'POST',
      body: JSON.stringify({ id }),
    });
  }

  static async deleteDLQItem(id: string): Promise<{ success: boolean }> {
    return apiFetchJSON(`${this.baseUrl}/dlq/delete`, {
      method: 'POST',
      body: JSON.stringify({ id }),
    });
  }

  static async getNotificationHealth(): Promise<{
    queue?: {
      healthy: boolean;
      pending: number;
      sending: number;
      sent: number;
      failed: number;
      dlq: number;
    };
    email: {
      enabled: boolean;
      configured: boolean;
    };
    webhooks: {
      total: number;
      enabled: number;
    };
    encryption: {
      enabled: boolean;
    };
    healthy: boolean;
  }> {
    return apiFetchJSON(`${this.baseUrl}/health`);
  }

  static async getWebhookHistory(): Promise<unknown[]> {
    return apiFetchJSON(`${this.baseUrl}/webhook-history`);
  }
}
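Similarly, a hypothetical admin action built on the removed DLQ helpers above; only the method signatures come from the diff, while the item shape and ordering are assumptions.

// Hypothetical admin action using the removed DLQ helpers (illustrative only).
import { NotificationsAPI } from '@/api/notifications'; // import path assumed

async function retryOldestFailedNotification(): Promise<boolean> {
  const items = await NotificationsAPI.getDLQ(1);          // oldest-first ordering is an assumption
  const first = items[0] as { id?: string } | undefined;    // DLQ item shape is not defined in the diff
  if (!first?.id) return false;
  const { success } = await NotificationsAPI.retryDLQItem(first.id);
  return success;
}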
@@ -41,30 +41,6 @@ export interface UpdatePlan {
  downloadUrl?: string;
}

export interface UpdateHistoryEntry {
  event_id: string;
  timestamp: string;
  action: 'update' | 'rollback';
  channel: string;
  version_from: string;
  version_to: string;
  deployment_type: string;
  initiated_by: 'user' | 'auto' | 'api';
  initiated_via: 'ui' | 'cli' | 'script' | 'webhook';
  status: 'in_progress' | 'success' | 'failed' | 'rolled_back' | 'cancelled';
  duration_ms: number;
  backup_path?: string;
  log_path?: string;
  error?: {
    message: string;
    code?: string;
    details?: string;
  };
  download_bytes?: number;
  related_event_id?: string;
  notes?: string;
}

export class UpdatesAPI {
  static async checkForUpdates(channel?: string): Promise<UpdateInfo> {
    const url = channel ? `/api/updates/check?channel=${channel}` : '/api/updates/check';
@@ -92,19 +68,4 @@
      : `/api/updates/plan?version=${version}`;
    return apiFetchJSON(url);
  }

  static async getUpdateHistory(
    limit?: number,
    status?: string
  ): Promise<UpdateHistoryEntry[]> {
    const params = new URLSearchParams();
    if (limit) params.append('limit', limit.toString());
    if (status) params.append('status', status);
    const url = `/api/updates/history${params.toString() ? `?${params.toString()}` : ''}`;
    return apiFetchJSON(url);
  }

  static async getUpdateHistoryEntry(eventId: string): Promise<UpdateHistoryEntry> {
    return apiFetchJSON(`/api/updates/history/entry?id=${eventId}`);
  }
}
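Finally, a hedged sketch of a caller for the removed update-history helpers, using the getUpdateHistory signature and UpdateHistoryEntry fields shown above; the import path is an assumption.

// Hypothetical caller of the removed update-history helpers (illustrative only).
import { UpdatesAPI, type UpdateHistoryEntry } from '@/api/updates'; // import path assumed

async function summarizeRecentFailures(): Promise<string[]> {
  // 'failed' is one of the status values declared on the removed UpdateHistoryEntry type.
  const entries: UpdateHistoryEntry[] = await UpdatesAPI.getUpdateHistory(20, 'failed');
  return entries.map(
    (e) => `${e.timestamp}: ${e.version_from} -> ${e.version_to} (${e.error?.message ?? 'no error recorded'})`,
  );
}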