]> git.djapps.eu Git - pkg/ggml/sources/llama.cpp/commitdiff
webui: add OAI-Compat Harmony tool-call streaming visualization and persistence in...
authorPascal <redacted>
Sat, 15 Nov 2025 20:09:32 +0000 (21:09 +0100)
committerGitHub <redacted>
Sat, 15 Nov 2025 20:09:32 +0000 (21:09 +0100)
* webui: add OAI-Compat Harmony tool-call live streaming visualization and persistence in chat UI

- Purely visual and diagnostic change, no effect on model context, prompt
  construction, or inference behavior

- Captured assistant tool call payloads during streaming and non-streaming
  completions, and persisted them in chat state and storage for downstream use

- Exposed parsed tool call labels beneath the assistant's model info line
  with graceful fallback when parsing fails

- Added tool call badges beneath assistant responses that expose JSON tooltips
  and copy their payloads when clicked, matching the existing model badge styling

- Added a user-facing setting to the Developer settings section, directly under
  the model selector option, that toggles tool call visibility

* webui: remove scroll listener causing unnecessary layout updates (model selector)

* Update tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageAssistant.svelte

Co-authored-by: Aleksander Grygier <redacted>
* Update tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageAssistant.svelte

Co-authored-by: Aleksander Grygier <redacted>
* chore: npm run format & update webui build output

* chore: update webui build output

---------

Co-authored-by: Aleksander Grygier <redacted>
12 files changed:
tools/server/public/index.html.gz
tools/server/webui/src/lib/components/app/chat/ChatForm/ChatFormModelSelector.svelte
tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessage.svelte
tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageAssistant.svelte
tools/server/webui/src/lib/components/app/chat/ChatSettings/ChatSettingsDialog.svelte
tools/server/webui/src/lib/constants/settings-config.ts
tools/server/webui/src/lib/services/chat.ts
tools/server/webui/src/lib/stores/chat.svelte.ts
tools/server/webui/src/lib/stores/database.ts
tools/server/webui/src/lib/types/api.d.ts
tools/server/webui/src/lib/types/database.d.ts
tools/server/webui/src/lib/types/settings.d.ts

index 8b6b16d01bbb6c588d7b61812962c1cf3ecf5e3b..72180d95a3d2c788a53d434995584368ff3d0529 100644 (file)
Binary files a/tools/server/public/index.html.gz and b/tools/server/public/index.html.gz differ
index 689415f8df84bca093de5107dbba323085a78f2c..f07eb7a4395a48f561d9defe990e480e533016cc 100644 (file)
                }
        }
 
-       function handleScroll() {
-               if (isOpen) {
-                       updateMenuPosition();
-               }
-       }
-
        async function handleSelect(value: string | undefined) {
                if (!value) return;
 
        }
 </script>
 
-<svelte:window onresize={handleResize} onscroll={handleScroll} />
+<svelte:window onresize={handleResize} />
 
 <svelte:document onpointerdown={handlePointerDown} onkeydown={handleKeydown} />
 
index 7ade6bc61f3336451a7f5080412d3bbc6707ec47..e47a5a7dba9e6ee4282424bc05f1f73625ae184c 100644 (file)
@@ -2,6 +2,7 @@
        import { getDeletionInfo } from '$lib/stores/chat.svelte';
        import { copyToClipboard } from '$lib/utils/copy';
        import { isIMEComposing } from '$lib/utils/is-ime-composing';
+       import type { ApiChatCompletionToolCall } from '$lib/types/api';
        import ChatMessageAssistant from './ChatMessageAssistant.svelte';
        import ChatMessageUser from './ChatMessageUser.svelte';
 
                return null;
        });
 
+       let toolCallContent = $derived.by((): ApiChatCompletionToolCall[] | string | null => {
+               if (message.role === 'assistant') {
+                       const trimmedToolCalls = message.toolCalls?.trim();
+
+                       if (!trimmedToolCalls) {
+                               return null;
+                       }
+
+                       try {
+                               const parsed = JSON.parse(trimmedToolCalls);
+
+                               if (Array.isArray(parsed)) {
+                                       return parsed as ApiChatCompletionToolCall[];
+                               }
+                       } catch {
+                               // Harmony-only path: fall back to the raw string so issues surface visibly.
+                       }
+
+                       return trimmedToolCalls;
+               }
+               return null;
+       });
+
        function handleCancelEdit() {
                isEditing = false;
                editedContent = message.content;
                {showDeleteDialog}
                {siblingInfo}
                {thinkingContent}
+               {toolCallContent}
        />
 {/if}
index d8f5630fd14f7462cc5547ded0dd230793376fa0..26372426d29d5c1b5ba68a44d63e4276957ecc87 100644 (file)
@@ -11,7 +11,8 @@
                Gauge,
                Clock,
                WholeWord,
-               ChartNoAxesColumn
+               ChartNoAxesColumn,
+               Wrench
        } from '@lucide/svelte';
        import { Button } from '$lib/components/ui/button';
        import { Checkbox } from '$lib/components/ui/checkbox';
@@ -21,6 +22,7 @@
        import { config } from '$lib/stores/settings.svelte';
        import { modelName as serverModelName } from '$lib/stores/server.svelte';
        import { copyToClipboard } from '$lib/utils/copy';
+       import type { ApiChatCompletionToolCall } from '$lib/types/api';
 
        interface Props {
                class?: string;
@@ -51,6 +53,7 @@
                siblingInfo?: ChatMessageSiblingInfo | null;
                textareaElement?: HTMLTextAreaElement;
                thinkingContent: string | null;
+               toolCallContent: ApiChatCompletionToolCall[] | string | null;
        }
 
        let {
                shouldBranchAfterEdit = false,
                siblingInfo = null,
                textareaElement = $bindable(),
-               thinkingContent
+               thinkingContent,
+               toolCallContent = null
        }: Props = $props();
 
+       const toolCalls = $derived(
+               Array.isArray(toolCallContent) ? (toolCallContent as ApiChatCompletionToolCall[]) : null
+       );
+       const fallbackToolCalls = $derived(typeof toolCallContent === 'string' ? toolCallContent : null);
+
        const processingState = useProcessingState();
        let currentConfig = $derived(config());
        let serverModel = $derived(serverModelName());
 
                void copyToClipboard(model ?? '');
        }
+
+       function formatToolCallBadge(toolCall: ApiChatCompletionToolCall, index: number) {
+               const callNumber = index + 1;
+               const functionName = toolCall.function?.name?.trim();
+               const label = functionName || `Call #${callNumber}`;
+
+               const payload: Record<string, unknown> = {};
+
+               const id = toolCall.id?.trim();
+               if (id) {
+                       payload.id = id;
+               }
+
+               const type = toolCall.type?.trim();
+               if (type) {
+                       payload.type = type;
+               }
+
+               if (toolCall.function) {
+                       const fnPayload: Record<string, unknown> = {};
+
+                       const name = toolCall.function.name?.trim();
+                       if (name) {
+                               fnPayload.name = name;
+                       }
+
+                       const rawArguments = toolCall.function.arguments?.trim();
+                       if (rawArguments) {
+                               try {
+                                       fnPayload.arguments = JSON.parse(rawArguments);
+                               } catch {
+                                       fnPayload.arguments = rawArguments;
+                               }
+                       }
+
+                       if (Object.keys(fnPayload).length > 0) {
+                               payload.function = fnPayload;
+                       }
+               }
+
+               const formattedPayload = JSON.stringify(payload, null, 2);
+
+               return {
+                       label,
+                       tooltip: formattedPayload,
+                       copyValue: formattedPayload
+               };
+       }
+
+       function handleCopyToolCall(payload: string) {
+               void copyToClipboard(payload, 'Tool call copied to clipboard');
+       }
 </script>
 
 <div
                        </span>
                {/if}
 
+               {#if config().showToolCalls}
+                       {#if (toolCalls && toolCalls.length > 0) || fallbackToolCalls}
+                               <span class="inline-flex flex-wrap items-center gap-2 text-xs text-muted-foreground">
+                                       <span class="inline-flex items-center gap-1">
+                                               <Wrench class="h-3.5 w-3.5" />
+
+                                               <span>Tool calls:</span>
+                                       </span>
+
+                                       {#if toolCalls && toolCalls.length > 0}
+                                               {#each toolCalls as toolCall, index (toolCall.id ?? `${index}`)}
+                                                       {@const badge = formatToolCallBadge(toolCall, index)}
+                                                       <button
+                                                               type="button"
+                                                               class="tool-call-badge inline-flex cursor-pointer items-center gap-1 rounded-sm bg-muted-foreground/15 px-1.5 py-0.75"
+                                                               title={badge.tooltip}
+                                                               aria-label={`Copy tool call ${badge.label}`}
+                                                               onclick={() => handleCopyToolCall(badge.copyValue)}
+                                                       >
+                                                               {badge.label}
+
+                                                               <Copy class="ml-1 h-3 w-3" />
+                                                       </button>
+                                               {/each}
+                                       {:else if fallbackToolCalls}
+                                               <button
+                                                       type="button"
+                                                       class="tool-call-badge tool-call-badge--fallback inline-flex cursor-pointer items-center gap-1 rounded-sm bg-muted-foreground/15 px-1.5 py-0.75"
+                                                       title={fallbackToolCalls}
+                                                       aria-label="Copy tool call payload"
+                                                       onclick={() => handleCopyToolCall(fallbackToolCalls)}
+                                               >
+                                                       {fallbackToolCalls}
+
+                                                       <Copy class="ml-1 h-3 w-3" />
+                                               </button>
+                                       {/if}
+                               </span>
+                       {/if}
+               {/if}
+
                {#if currentConfig.showMessageStats && message.timings && message.timings.predicted_n && message.timings.predicted_ms}
                        {@const tokensPerSecond = (message.timings.predicted_n / message.timings.predicted_ms) * 1000}
                        <span class="inline-flex items-center gap-2 text-xs text-muted-foreground">
                white-space: pre-wrap;
                word-break: break-word;
        }
+
+       .tool-call-badge {
+               max-width: 12rem;
+               white-space: nowrap;
+               overflow: hidden;
+               text-overflow: ellipsis;
+       }
+
+       .tool-call-badge--fallback {
+               max-width: 20rem;
+               white-space: normal;
+               word-break: break-word;
+       }
 </style>
index e4672b787ee89f9cb9ef8a712667639c3ae9016e..d2a0a739c54d1beb6d1e1bdd0907d99aa0754017 100644 (file)
                                        label: 'Enable model selector',
                                        type: 'checkbox'
                                },
+                               {
+                                       key: 'showToolCalls',
+                                       label: 'Show tool call labels',
+                                       type: 'checkbox'
+                               },
                                {
                                        key: 'disableReasoningFormat',
                                        label: 'Show raw LLM output',
index c25f380846cf435308fe5504a79d9b6dedcbb2d6..7547832d95ae1bbbb0772f540e894085f4bbc217 100644 (file)
@@ -6,6 +6,7 @@ export const SETTING_CONFIG_DEFAULT: Record<string, string | number | boolean> =
        theme: 'system',
        showTokensPerSecond: false,
        showThoughtInProgress: false,
+       showToolCalls: false,
        disableReasoningFormat: false,
        keepStatsVisible: false,
        showMessageStats: true,
@@ -80,6 +81,8 @@ export const SETTING_CONFIG_INFO: Record<string, string> = {
        custom: 'Custom JSON parameters to send to the API. Must be valid JSON format.',
        showTokensPerSecond: 'Display generation speed in tokens per second during streaming.',
        showThoughtInProgress: 'Expand thought process by default when generating messages.',
+       showToolCalls:
+               'Display tool call labels and payloads from Harmony-compatible delta.tool_calls data below assistant messages.',
        disableReasoningFormat:
                'Show raw LLM output without backend parsing and frontend Markdown rendering to inspect streaming across different models.',
        keepStatsVisible: 'Keep processing statistics visible after generation finishes.',
index 414e060764d7ece8c2ce3aad55100d09a8825c70..1908d83909eab0e5a1a6300ab1cded0416c8dbc8 100644 (file)
@@ -1,6 +1,25 @@
 import { config } from '$lib/stores/settings.svelte';
 import { selectedModelName } from '$lib/stores/models.svelte';
 import { slotsService } from './slots';
+import type {
+       ApiChatCompletionRequest,
+       ApiChatCompletionResponse,
+       ApiChatCompletionStreamChunk,
+       ApiChatCompletionToolCall,
+       ApiChatCompletionToolCallDelta,
+       ApiChatMessageData
+} from '$lib/types/api';
+import type {
+       DatabaseMessage,
+       DatabaseMessageExtra,
+       DatabaseMessageExtraAudioFile,
+       DatabaseMessageExtraImageFile,
+       DatabaseMessageExtraLegacyContext,
+       DatabaseMessageExtraPdfFile,
+       DatabaseMessageExtraTextFile
+} from '$lib/types/database';
+import type { ChatMessagePromptProgress, ChatMessageTimings } from '$lib/types/chat';
+import type { SettingsChatServiceOptions } from '$lib/types/settings';
 /**
  * ChatService - Low-level API communication layer for llama.cpp server interactions
  *
@@ -53,6 +72,7 @@ export class ChatService {
                        onComplete,
                        onError,
                        onReasoningChunk,
+                       onToolCallChunk,
                        onModel,
                        onFirstValidChunk,
                        // Generation parameters
@@ -201,6 +221,7 @@ export class ChatService {
                                        onComplete,
                                        onError,
                                        onReasoningChunk,
+                                       onToolCallChunk,
                                        onModel,
                                        onFirstValidChunk,
                                        conversationId,
@@ -208,7 +229,13 @@ export class ChatService {
                                );
                                return;
                        } else {
-                               return this.handleNonStreamResponse(response, onComplete, onError, onModel);
+                               return this.handleNonStreamResponse(
+                                       response,
+                                       onComplete,
+                                       onError,
+                                       onToolCallChunk,
+                                       onModel
+                               );
                        }
                } catch (error) {
                        if (error instanceof Error && error.name === 'AbortError') {
@@ -264,10 +291,12 @@ export class ChatService {
                onComplete?: (
                        response: string,
                        reasoningContent?: string,
-                       timings?: ChatMessageTimings
+                       timings?: ChatMessageTimings,
+                       toolCalls?: string
                ) => void,
                onError?: (error: Error) => void,
                onReasoningChunk?: (chunk: string) => void,
+               onToolCallChunk?: (chunk: string) => void,
                onModel?: (model: string) => void,
                onFirstValidChunk?: () => void,
                conversationId?: string,
@@ -282,11 +311,53 @@ export class ChatService {
                const decoder = new TextDecoder();
                let aggregatedContent = '';
                let fullReasoningContent = '';
+               let aggregatedToolCalls: ApiChatCompletionToolCall[] = [];
                let hasReceivedData = false;
                let lastTimings: ChatMessageTimings | undefined;
                let streamFinished = false;
                let modelEmitted = false;
                let firstValidChunkEmitted = false;
+               let toolCallIndexOffset = 0;
+               let hasOpenToolCallBatch = false;
+
+               const finalizeOpenToolCallBatch = () => {
+                       if (!hasOpenToolCallBatch) {
+                               return;
+                       }
+
+                       toolCallIndexOffset = aggregatedToolCalls.length;
+                       hasOpenToolCallBatch = false;
+               };
+
+               const processToolCallDelta = (toolCalls?: ApiChatCompletionToolCallDelta[]) => {
+                       if (!toolCalls || toolCalls.length === 0) {
+                               return;
+                       }
+
+                       aggregatedToolCalls = this.mergeToolCallDeltas(
+                               aggregatedToolCalls,
+                               toolCalls,
+                               toolCallIndexOffset
+                       );
+
+                       if (aggregatedToolCalls.length === 0) {
+                               return;
+                       }
+
+                       hasOpenToolCallBatch = true;
+
+                       const serializedToolCalls = JSON.stringify(aggregatedToolCalls);
+
+                       if (!serializedToolCalls) {
+                               return;
+                       }
+
+                       hasReceivedData = true;
+
+                       if (!abortSignal?.aborted) {
+                               onToolCallChunk?.(serializedToolCalls);
+                       }
+               };
 
                try {
                        let chunk = '';
@@ -325,6 +396,7 @@ export class ChatService {
 
                                                        const content = parsed.choices[0]?.delta?.content;
                                                        const reasoningContent = parsed.choices[0]?.delta?.reasoning_content;
+                                                       const toolCalls = parsed.choices[0]?.delta?.tool_calls;
                                                        const timings = parsed.timings;
                                                        const promptProgress = parsed.prompt_progress;
 
@@ -342,6 +414,7 @@ export class ChatService {
                                                        }
 
                                                        if (content) {
+                                                               finalizeOpenToolCallBatch();
                                                                hasReceivedData = true;
                                                                aggregatedContent += content;
                                                                if (!abortSignal?.aborted) {
@@ -350,12 +423,15 @@ export class ChatService {
                                                        }
 
                                                        if (reasoningContent) {
+                                                               finalizeOpenToolCallBatch();
                                                                hasReceivedData = true;
                                                                fullReasoningContent += reasoningContent;
                                                                if (!abortSignal?.aborted) {
                                                                        onReasoningChunk?.(reasoningContent);
                                                                }
                                                        }
+
+                                                       processToolCallDelta(toolCalls);
                                                } catch (e) {
                                                        console.error('Error parsing JSON chunk:', e);
                                                }
@@ -368,12 +444,26 @@ export class ChatService {
                        if (abortSignal?.aborted) return;
 
                        if (streamFinished) {
-                               if (!hasReceivedData && aggregatedContent.length === 0) {
+                               finalizeOpenToolCallBatch();
+
+                               if (
+                                       !hasReceivedData &&
+                                       aggregatedContent.length === 0 &&
+                                       aggregatedToolCalls.length === 0
+                               ) {
                                        const noResponseError = new Error('No response received from server. Please try again.');
                                        throw noResponseError;
                                }
 
-                               onComplete?.(aggregatedContent, fullReasoningContent || undefined, lastTimings);
+                               const finalToolCalls =
+                                       aggregatedToolCalls.length > 0 ? JSON.stringify(aggregatedToolCalls) : undefined;
+
+                               onComplete?.(
+                                       aggregatedContent,
+                                       fullReasoningContent || undefined,
+                                       lastTimings,
+                                       finalToolCalls
+                               );
                        }
                } catch (error) {
                        const err = error instanceof Error ? error : new Error('Stream error');
@@ -386,6 +476,54 @@ export class ChatService {
                }
        }
 
+       private mergeToolCallDeltas(
+               existing: ApiChatCompletionToolCall[],
+               deltas: ApiChatCompletionToolCallDelta[],
+               indexOffset = 0
+       ): ApiChatCompletionToolCall[] {
+               const result = existing.map((call) => ({
+                       ...call,
+                       function: call.function ? { ...call.function } : undefined
+               }));
+
+               for (const delta of deltas) {
+                       const index =
+                               typeof delta.index === 'number' && delta.index >= 0
+                                       ? delta.index + indexOffset
+                                       : result.length;
+
+                       while (result.length <= index) {
+                               result.push({ function: undefined });
+                       }
+
+                       const target = result[index]!;
+
+                       if (delta.id) {
+                               target.id = delta.id;
+                       }
+
+                       if (delta.type) {
+                               target.type = delta.type;
+                       }
+
+                       if (delta.function) {
+                               const fn = target.function ? { ...target.function } : {};
+
+                               if (delta.function.name) {
+                                       fn.name = delta.function.name;
+                               }
+
+                               if (delta.function.arguments) {
+                                       fn.arguments = (fn.arguments ?? '') + delta.function.arguments;
+                               }
+
+                               target.function = fn;
+                       }
+               }
+
+               return result;
+       }
+
        /**
         * Handles non-streaming response from the chat completion API.
         * Parses the JSON response and extracts the generated content.
@@ -401,9 +539,11 @@ export class ChatService {
                onComplete?: (
                        response: string,
                        reasoningContent?: string,
-                       timings?: ChatMessageTimings
+                       timings?: ChatMessageTimings,
+                       toolCalls?: string
                ) => void,
                onError?: (error: Error) => void,
+               onToolCallChunk?: (chunk: string) => void,
                onModel?: (model: string) => void
        ): Promise<string> {
                try {
@@ -423,17 +563,31 @@ export class ChatService {
 
                        const content = data.choices[0]?.message?.content || '';
                        const reasoningContent = data.choices[0]?.message?.reasoning_content;
+                       const toolCalls = data.choices[0]?.message?.tool_calls;
 
                        if (reasoningContent) {
                                console.log('Full reasoning content:', reasoningContent);
                        }
 
-                       if (!content.trim()) {
+                       let serializedToolCalls: string | undefined;
+
+                       if (toolCalls && toolCalls.length > 0) {
+                               const mergedToolCalls = this.mergeToolCallDeltas([], toolCalls);
+
+                               if (mergedToolCalls.length > 0) {
+                                       serializedToolCalls = JSON.stringify(mergedToolCalls);
+                                       if (serializedToolCalls) {
+                                               onToolCallChunk?.(serializedToolCalls);
+                                       }
+                               }
+                       }
+
+                       if (!content.trim() && !serializedToolCalls) {
                                const noResponseError = new Error('No response received from server. Please try again.');
                                throw noResponseError;
                        }
 
-                       onComplete?.(content, reasoningContent);
+                       onComplete?.(content, reasoningContent, undefined, serializedToolCalls);
 
                        return content;
                } catch (error) {
index 3f97a89183d826c533bda8f0a0cccdafb1d4cc03..5b5a9d74a5bc678c9dee65a63a8b97034b6d401c 100644 (file)
@@ -205,6 +205,7 @@ class ChatStore {
                                        type,
                                        timestamp: Date.now(),
                                        thinking: '',
+                                       toolCalls: '',
                                        children: [],
                                        extra: extras
                                },
@@ -360,6 +361,7 @@ class ChatStore {
        ): Promise<void> {
                let streamedContent = '';
                let streamedReasoningContent = '';
+               let streamedToolCallContent = '';
 
                let resolvedModel: string | null = null;
                let modelPersisted = false;
@@ -468,6 +470,20 @@ class ChatStore {
                                        this.updateMessageAtIndex(messageIndex, { thinking: streamedReasoningContent });
                                },
 
+                               onToolCallChunk: (toolCallChunk: string) => {
+                                       const chunk = toolCallChunk.trim();
+
+                                       if (!chunk) {
+                                               return;
+                                       }
+
+                                       streamedToolCallContent = chunk;
+
+                                       const messageIndex = this.findMessageIndex(assistantMessage.id);
+
+                                       this.updateMessageAtIndex(messageIndex, { toolCalls: streamedToolCallContent });
+                               },
+
                                onModel: (modelName: string) => {
                                        recordModel(modelName);
                                },
@@ -475,18 +491,21 @@ class ChatStore {
                                onComplete: async (
                                        finalContent?: string,
                                        reasoningContent?: string,
-                                       timings?: ChatMessageTimings
+                                       timings?: ChatMessageTimings,
+                                       toolCallContent?: string
                                ) => {
                                        slotsService.stopStreaming();
 
                                        const updateData: {
                                                content: string;
                                                thinking: string;
+                                               toolCalls: string;
                                                timings?: ChatMessageTimings;
                                                model?: string;
                                        } = {
                                                content: finalContent || streamedContent,
                                                thinking: reasoningContent || streamedReasoningContent,
+                                               toolCalls: toolCallContent || streamedToolCallContent,
                                                timings: timings
                                        };
 
@@ -499,7 +518,11 @@ class ChatStore {
 
                                        const messageIndex = this.findMessageIndex(assistantMessage.id);
 
-                                       const localUpdateData: { timings?: ChatMessageTimings; model?: string } = {
+                                       const localUpdateData: {
+                                               timings?: ChatMessageTimings;
+                                               model?: string;
+                                               toolCalls?: string;
+                                       } = {
                                                timings: timings
                                        };
 
@@ -507,6 +530,10 @@ class ChatStore {
                                                localUpdateData.model = updateData.model;
                                        }
 
+                                       if (updateData.toolCalls !== undefined) {
+                                               localUpdateData.toolCalls = updateData.toolCalls;
+                                       }
+
                                        this.updateMessageAtIndex(messageIndex, localUpdateData);
 
                                        await DatabaseStore.updateCurrentNode(assistantMessage.convId, assistantMessage.id);
@@ -620,6 +647,7 @@ class ChatStore {
                                content: '',
                                timestamp: Date.now(),
                                thinking: '',
+                               toolCalls: '',
                                children: [],
                                model: null
                        },
@@ -1443,6 +1471,7 @@ class ChatStore {
                                                role: messageToEdit.role,
                                                content: newContent,
                                                thinking: messageToEdit.thinking || '',
+                                               toolCalls: messageToEdit.toolCalls || '',
                                                children: [],
                                                model: messageToEdit.model // Preserve original model info when branching
                                        },
@@ -1518,6 +1547,7 @@ class ChatStore {
                                        role: messageToEdit.role,
                                        content: newContent,
                                        thinking: messageToEdit.thinking || '',
+                                       toolCalls: messageToEdit.toolCalls || '',
                                        children: [],
                                        extra: messageToEdit.extra ? JSON.parse(JSON.stringify(messageToEdit.extra)) : undefined,
                                        model: messageToEdit.model // Preserve original model info when branching
@@ -1589,6 +1619,7 @@ class ChatStore {
                                        role: 'assistant',
                                        content: '',
                                        thinking: '',
+                                       toolCalls: '',
                                        children: [],
                                        model: null
                                },
@@ -1647,6 +1678,7 @@ class ChatStore {
                                        role: 'assistant',
                                        content: '',
                                        thinking: '',
+                                       toolCalls: '',
                                        children: [],
                                        model: null
                                },
index 6394c5b7eda74d90866a5f0e2679e8e67ef2a552..82edcc3227c278962dcebf1f3324a5992e035abb 100644 (file)
@@ -114,6 +114,7 @@ export class DatabaseStore {
                                ...message,
                                id: uuid(),
                                parent: parentId,
+                               toolCalls: message.toolCalls ?? '',
                                children: []
                        };
 
@@ -154,6 +155,7 @@ export class DatabaseStore {
                        content: '',
                        parent: null,
                        thinking: '',
+                       toolCalls: '',
                        children: []
                };
 
index 6ebc43db0e3ef72160c6c7c34894ece79a9cabdb..1a8bc649899574e17c65a40d4321200b8860c5f0 100644 (file)
@@ -183,6 +183,23 @@ export interface ApiChatCompletionRequest {
        samplers?: string[];
        // Custom parameters (JSON string)
        custom?: Record<string, unknown>;
+       timings_per_token?: boolean;
+}
+
+export interface ApiChatCompletionToolCallFunctionDelta {
+       name?: string;
+       arguments?: string;
+}
+
+export interface ApiChatCompletionToolCallDelta {
+       index?: number;
+       id?: string;
+       type?: string;
+       function?: ApiChatCompletionToolCallFunctionDelta;
+}
+
+export interface ApiChatCompletionToolCall extends ApiChatCompletionToolCallDelta {
+       function?: ApiChatCompletionToolCallFunctionDelta & { arguments?: string };
 }
 
 export interface ApiChatCompletionStreamChunk {
@@ -195,6 +212,7 @@ export interface ApiChatCompletionStreamChunk {
                        content?: string;
                        reasoning_content?: string;
                        model?: string;
+                       tool_calls?: ApiChatCompletionToolCallDelta[];
                };
        }>;
        timings?: {
@@ -216,6 +234,7 @@ export interface ApiChatCompletionResponse {
                        content: string;
                        reasoning_content?: string;
                        model?: string;
+                       tool_calls?: ApiChatCompletionToolCallDelta[];
                };
        }>;
 }
index b5318b73f41080cb72407262df6746748b30e75d..16debc6d67d58809986e6cb0e9249a56f92f1e66 100644 (file)
@@ -60,6 +60,7 @@ export interface DatabaseMessage {
        content: string;
        parent: string;
        thinking: string;
+       toolCalls?: string;
        children: string[];
        extra?: DatabaseMessageExtra[];
        timings?: ChatMessageTimings;
index 946ef015e92e111b3c809b77201bb3cb63d65870..b85b0597d0068e0c7757144bda3087c5972d2e86 100644 (file)
@@ -38,12 +38,19 @@ export interface SettingsChatServiceOptions {
        samplers?: string | string[];
        // Custom parameters
        custom?: string;
+       timings_per_token?: boolean;
        // Callbacks
        onChunk?: (chunk: string) => void;
        onReasoningChunk?: (chunk: string) => void;
+       onToolCallChunk?: (chunk: string) => void;
        onModel?: (model: string) => void;
        onFirstValidChunk?: () => void;
-       onComplete?: (response: string, reasoningContent?: string, timings?: ChatMessageTimings) => void;
+       onComplete?: (
+               response: string,
+               reasoningContent?: string,
+               timings?: ChatMessageTimings,
+               toolCalls?: string
+       ) => void;
        onError?: (error: Error) => void;
 }