webui: introduce OpenAI-compatible model selector in JSON payload (#16562)
author Pascal <redacted>
Wed, 22 Oct 2025 14:58:23 +0000 (16:58 +0200)
committer GitHub <redacted>
Wed, 22 Oct 2025 14:58:23 +0000 (16:58 +0200)
* webui: introduce OpenAI-compatible model selector in JSON payload

* webui: restore OpenAI-Compatible model source of truth and unify metadata capture

This change re-establishes a single, reliable source of truth for the active model,
fully aligned with the OpenAI-compatible API behavior.

It introduces a unified metadata flow that captures the model field from both
streaming and non-streaming responses, wiring a new onModel callback through ChatService.
The model name is now resolved directly from the API payload rather than relying on
the server /props endpoint or UI assumptions.

ChatStore records and persists the resolved model for each assistant message during
streaming, ensuring consistency across the UI and database.
Type definitions for API and settings were also extended to include model metadata
and the onModel callback, completing the alignment with OpenAI-compatible semantics.
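
A minimal sketch of the resulting callback flow, simplified from the chat.ts and
chat.svelte.ts hunks further down; emitModelOnce and the sendMessage call shown here
are illustrative names, not the committed API:

    // ChatService side: report the model carried by the API payload exactly once
    type OnModel = (model: string) => void;

    function emitModelOnce(
        payload: { model?: string },
        onModel: OnModel | undefined,
        state: { emitted: boolean }
    ): void {
        const model = payload.model?.trim();
        if (model && !state.emitted) {
            state.emitted = true;
            onModel?.(model);
        }
    }

    // ChatStore side: wire the callback when starting a stream so the resolved
    // model is recorded on the assistant message (method name illustrative):
    // chatService.sendMessage(messages, { onModel: (name) => recordModel(name) });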

* webui: address review feedback from allozaur

* webui: move model selector into ChatForm (idea by @allozaur)

* webui: make model selector more subtle and integrated into ChatForm

* webui: replaced the Flowbite selector with a native Svelte dropdown

* webui: add developer setting to toggle the chat model selector

* webui: address review feedback from allozaur

Normalized streamed model names during chat updates
by trimming input and removing directory components before saving
or persisting them, so the conversation UI shows only the filename

Forced model names within the chat form selector dropdown to render as
a single-line, truncated entry with a tooltip revealing the full name
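
The normalizeModelName utility itself is not shown in this excerpt; the following is a
minimal sketch consistent with the behavior described above (trim, strip directory
components, treat empty results as missing), not necessarily the committed implementation:

    export function normalizeModelName(value: string): string | null {
        const trimmed = value.trim();
        if (!trimmed) return null;

        // Keep only the filename: strip POSIX and Windows directory prefixes.
        const candidate = trimmed.split(/[\\/]/).pop()?.trim();

        return candidate || null;
    }

    // normalizeModelName('  /models/llama-3.1-8B-Instruct.gguf ') === 'llama-3.1-8B-Instruct.gguf'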

* webui: toggle displayed model source for legacy vs OpenAI-Compat modes

When the selector is disabled, the displayed model falls back to the active server model
name from /props.

When the model selector is enabled, the displayed model comes from the message metadata
(the model explicitly selected and sent in the request).
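
Expressed as a plain function for illustration (the committed version is a Svelte
$derived in the ChatMessageAssistant hunk below; the field names here are descriptive,
not the component's actual props):

    interface ModelDisplayInputs {
        showModelInfo: boolean;         // badge hidden entirely when false
        modelSelectorEnabled: boolean;  // OpenAI-compatible mode toggle
        messageModel: string | null;    // model recorded from the API response
        serverModel: string | null;     // legacy: active model reported by /props
    }

    function displayedModel(i: ModelDisplayInputs): string | null {
        if (!i.showModelInfo) return null;
        return i.modelSelectorEnabled ? i.messageModel : i.serverModel;
    }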

* Update tools/server/webui/src/lib/components/app/chat/ChatForm/ChatFormActions.svelte

Co-authored-by: Aleksander Grygier <redacted>
* Update tools/server/webui/src/lib/constants/localstorage-keys.ts

Co-authored-by: Aleksander Grygier <redacted>
* Update tools/server/webui/src/lib/components/app/chat/ChatForm/ChatFormModelSelector.svelte

Co-authored-by: Aleksander Grygier <redacted>
* Update tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageAssistant.svelte

Co-authored-by: Aleksander Grygier <redacted>
* Update tools/server/webui/src/lib/services/chat.ts

Co-authored-by: Aleksander Grygier <redacted>
* Update tools/server/webui/src/lib/services/chat.ts

Co-authored-by: Aleksander Grygier <redacted>
* webui: refactor model selector and persistence helpers

- Replace inline portal and event listeners with proper Svelte bindings
- Introduce 'persisted' store helper for localStorage sync without runes
- Extract 'normalizeModelName' utils + Vitest coverage
- Simplify ChatFormModelSelector structure and cleanup logic

Replaced the persisted store helper's use of '$state/$effect' runes with
a plain TS implementation to prevent orphaned effect runtime errors
outside component context
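
A short usage sketch of the plain-TS helper, assuming the localStorage key added in
localstorage-keys.ts below; the stored values are illustrative:

    import { persisted } from '$lib/stores/persisted.svelte';

    // Safe at module scope: no runes, so no component context is required.
    const selection = persisted<{ id: string; model: string } | null>(
        'LlamaCppWebui.selectedModel',
        null
    );

    selection.value = { id: 'llama-3.1-8b', model: 'llama-3.1-8B-Instruct.gguf' }; // written to localStorage
    selection.value = null; // removes the key again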

Co-authored-by: Aleksander Grygier <redacted>
* webui: document normalizeModelName usage with inline examples

* Update tools/server/webui/src/lib/components/app/chat/ChatForm/ChatFormModelSelector.svelte

Co-authored-by: Aleksander Grygier <redacted>
* Update tools/server/webui/src/lib/stores/models.svelte.ts

Co-authored-by: Aleksander Grygier <redacted>
* Update tools/server/webui/src/lib/stores/models.svelte.ts

Co-authored-by: Aleksander Grygier <redacted>
* webui: extract ModelOption type into dedicated models.d.ts

Co-authored-by: Aleksander Grygier <redacted>
* webui: refine ChatMessageAssistant displayedModel source logic

* webui: stabilize dropdown, simplify model extraction, and init assistant model field

* chore: update webui static build

* Update tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageAssistant.svelte

Co-authored-by: Aleksander Grygier <redacted>
* chore: npm format, update webui static build

* webui: align sidebar trigger position, remove z-index glitch

* chore: update webui build output

---------

Co-authored-by: Aleksander Grygier <redacted>
22 files changed:
tools/server/public/index.html.gz
tools/server/webui/src/lib/components/app/chat/ChatForm/ChatFormActions.svelte
tools/server/webui/src/lib/components/app/chat/ChatForm/ChatFormModelSelector.svelte [new file with mode: 0644]
tools/server/webui/src/lib/components/app/chat/ChatMessages/ChatMessageAssistant.svelte
tools/server/webui/src/lib/components/app/chat/ChatSettings/ChatSettingsDialog.svelte
tools/server/webui/src/lib/components/app/index.ts
tools/server/webui/src/lib/components/ui/select/select-trigger.svelte
tools/server/webui/src/lib/constants/localstorage-keys.ts
tools/server/webui/src/lib/constants/settings-config.ts
tools/server/webui/src/lib/services/chat.ts
tools/server/webui/src/lib/services/models.ts [new file with mode: 0644]
tools/server/webui/src/lib/stores/chat.svelte.ts
tools/server/webui/src/lib/stores/models.svelte.ts [new file with mode: 0644]
tools/server/webui/src/lib/stores/persisted.svelte.ts [new file with mode: 0644]
tools/server/webui/src/lib/stores/settings.svelte.ts
tools/server/webui/src/lib/types/api.d.ts
tools/server/webui/src/lib/types/models.d.ts [new file with mode: 0644]
tools/server/webui/src/lib/types/settings.d.ts
tools/server/webui/src/lib/utils/model-names.test.ts [new file with mode: 0644]
tools/server/webui/src/lib/utils/model-names.ts [new file with mode: 0644]
tools/server/webui/src/lib/utils/portal-to-body.ts [new file with mode: 0644]
tools/server/webui/src/routes/+layout.svelte

index 08450a93cb3f43e837577a80f9f1b048d76e56af..7b56d87e430ea9b96df1b22b2b1432f639333e09 100644 (file)
Binary files a/tools/server/public/index.html.gz and b/tools/server/public/index.html.gz differ
index a6f3c7320826f71f5e24d3a04946856af13a133d..ef03f73f8de8fb720e33402e85f1b7e9a7b3fda2 100644 (file)
@@ -3,6 +3,8 @@
        import { Button } from '$lib/components/ui/button';
        import ChatFormActionFileAttachments from './ChatFormActionFileAttachments.svelte';
        import ChatFormActionRecord from './ChatFormActionRecord.svelte';
+       import ChatFormModelSelector from './ChatFormModelSelector.svelte';
+       import { config } from '$lib/stores/settings.svelte';
        import type { FileTypeCategory } from '$lib/enums/files';
 
        interface Props {
                onMicClick,
                onStop
        }: Props = $props();
+
+       let currentConfig = $derived(config());
 </script>
 
-<div class="flex items-center justify-between gap-1 {className}">
-       <ChatFormActionFileAttachments {disabled} {onFileUpload} />
+<div class="flex w-full items-center gap-2 {className}">
+       <ChatFormActionFileAttachments class="mr-auto" {disabled} {onFileUpload} />
+
+       {#if currentConfig.modelSelectorEnabled}
+               <ChatFormModelSelector class="shrink-0" />
+       {/if}
 
-       <div class="flex gap-2">
-               {#if isLoading}
-                       <Button
-                               type="button"
-                               onclick={onStop}
-                               class="h-8 w-8 bg-transparent p-0 hover:bg-destructive/20"
-                       >
-                               <span class="sr-only">Stop</span>
-                               <Square class="h-8 w-8 fill-destructive stroke-destructive" />
-                       </Button>
-               {:else}
-                       <ChatFormActionRecord {disabled} {isLoading} {isRecording} {onMicClick} />
+       {#if isLoading}
+               <Button
+                       type="button"
+                       onclick={onStop}
+                       class="h-8 w-8 bg-transparent p-0 hover:bg-destructive/20"
+               >
+                       <span class="sr-only">Stop</span>
+                       <Square class="h-8 w-8 fill-destructive stroke-destructive" />
+               </Button>
+       {:else}
+               <ChatFormActionRecord {disabled} {isLoading} {isRecording} {onMicClick} />
 
-                       <Button
-                               type="submit"
-                               disabled={!canSend || disabled || isLoading}
-                               class="h-8 w-8 rounded-full p-0"
-                       >
-                               <span class="sr-only">Send</span>
-                               <ArrowUp class="h-12 w-12" />
-                       </Button>
-               {/if}
-       </div>
+               <Button
+                       type="submit"
+                       disabled={!canSend || disabled || isLoading}
+                       class="h-8 w-8 rounded-full p-0"
+               >
+                       <span class="sr-only">Send</span>
+                       <ArrowUp class="h-12 w-12" />
+               </Button>
+       {/if}
 </div>
diff --git a/tools/server/webui/src/lib/components/app/chat/ChatForm/ChatFormModelSelector.svelte b/tools/server/webui/src/lib/components/app/chat/ChatForm/ChatFormModelSelector.svelte
new file mode 100644 (file)
index 0000000..689415f
--- /dev/null
@@ -0,0 +1,358 @@
+<script lang="ts">
+       import { onMount, tick } from 'svelte';
+       import { ChevronDown, Loader2 } from '@lucide/svelte';
+       import { cn } from '$lib/components/ui/utils';
+       import { portalToBody } from '$lib/utils/portal-to-body';
+       import {
+               fetchModels,
+               modelOptions,
+               modelsError,
+               modelsLoading,
+               modelsUpdating,
+               selectModel,
+               selectedModelId
+       } from '$lib/stores/models.svelte';
+       import type { ModelOption } from '$lib/types/models';
+
+       interface Props {
+               class?: string;
+       }
+
+       let { class: className = '' }: Props = $props();
+
+       let options = $derived(modelOptions());
+       let loading = $derived(modelsLoading());
+       let updating = $derived(modelsUpdating());
+       let error = $derived(modelsError());
+       let activeId = $derived(selectedModelId());
+
+       let isMounted = $state(false);
+       let isOpen = $state(false);
+       let container: HTMLDivElement | null = null;
+       let triggerButton = $state<HTMLButtonElement | null>(null);
+       let menuRef = $state<HTMLDivElement | null>(null);
+       let menuPosition = $state<{
+               top: number;
+               left: number;
+               width: number;
+               placement: 'top' | 'bottom';
+               maxHeight: number;
+       } | null>(null);
+       let lockedWidth: number | null = null;
+
+       onMount(async () => {
+               try {
+                       await fetchModels();
+               } catch (error) {
+                       console.error('Unable to load models:', error);
+               } finally {
+                       isMounted = true;
+               }
+       });
+
+       function handlePointerDown(event: PointerEvent) {
+               if (!container) return;
+
+               const target = event.target as Node | null;
+
+               if (target && !container.contains(target) && !(menuRef && menuRef.contains(target))) {
+                       closeMenu();
+               }
+       }
+
+       function handleKeydown(event: KeyboardEvent) {
+               if (event.key === 'Escape') {
+                       closeMenu();
+               }
+       }
+
+       function handleResize() {
+               if (isOpen) {
+                       updateMenuPosition();
+               }
+       }
+
+       function handleScroll() {
+               if (isOpen) {
+                       updateMenuPosition();
+               }
+       }
+
+       async function handleSelect(value: string | undefined) {
+               if (!value) return;
+
+               const option = options.find((item) => item.id === value);
+               if (!option) {
+                       console.error('Model is no longer available');
+                       return;
+               }
+
+               try {
+                       await selectModel(option.id);
+               } catch (error) {
+                       console.error('Failed to switch model:', error);
+               }
+       }
+
+       const VIEWPORT_GUTTER = 8;
+       const MENU_OFFSET = 6;
+       const MENU_MAX_WIDTH = 320;
+
+       async function openMenu() {
+               if (loading || updating) return;
+
+               isOpen = true;
+               await tick();
+               updateMenuPosition();
+               requestAnimationFrame(() => updateMenuPosition());
+       }
+
+       function toggleOpen() {
+               if (loading || updating) return;
+
+               if (isOpen) {
+                       closeMenu();
+               } else {
+                       void openMenu();
+               }
+       }
+
+       function closeMenu() {
+               if (!isOpen) return;
+
+               isOpen = false;
+               menuPosition = null;
+               lockedWidth = null;
+       }
+
+       async function handleOptionSelect(optionId: string) {
+               try {
+                       await handleSelect(optionId);
+               } finally {
+                       closeMenu();
+               }
+       }
+
+       $effect(() => {
+               if (loading || updating) {
+                       closeMenu();
+               }
+       });
+
+       $effect(() => {
+               const optionCount = options.length;
+
+               if (!isOpen || optionCount <= 0) return;
+
+               queueMicrotask(() => updateMenuPosition());
+       });
+
+       function updateMenuPosition() {
+               if (!isOpen || !triggerButton || !menuRef) return;
+
+               const triggerRect = triggerButton.getBoundingClientRect();
+               const viewportWidth = window.innerWidth;
+               const viewportHeight = window.innerHeight;
+
+               if (viewportWidth === 0 || viewportHeight === 0) return;
+
+               const scrollWidth = menuRef.scrollWidth;
+               const scrollHeight = menuRef.scrollHeight;
+
+               const availableWidth = Math.max(0, viewportWidth - VIEWPORT_GUTTER * 2);
+               const constrainedMaxWidth = Math.min(MENU_MAX_WIDTH, availableWidth || MENU_MAX_WIDTH);
+               const safeMaxWidth =
+                       constrainedMaxWidth > 0 ? constrainedMaxWidth : Math.min(MENU_MAX_WIDTH, viewportWidth);
+               const desiredMinWidth = Math.min(160, safeMaxWidth || 160);
+
+               let width = lockedWidth;
+               if (width === null) {
+                       const naturalWidth = Math.min(scrollWidth, safeMaxWidth);
+                       const baseWidth = Math.max(triggerRect.width, naturalWidth, desiredMinWidth);
+                       width = Math.min(baseWidth, safeMaxWidth || baseWidth);
+                       lockedWidth = width;
+               } else {
+                       width = Math.min(Math.max(width, desiredMinWidth), safeMaxWidth || width);
+               }
+
+               if (width > 0) {
+                       menuRef.style.width = `${width}px`;
+               }
+
+               const availableBelow = Math.max(
+                       0,
+                       viewportHeight - VIEWPORT_GUTTER - triggerRect.bottom - MENU_OFFSET
+               );
+               const availableAbove = Math.max(0, triggerRect.top - VIEWPORT_GUTTER - MENU_OFFSET);
+               const viewportAllowance = Math.max(0, viewportHeight - VIEWPORT_GUTTER * 2);
+               const fallbackAllowance = Math.max(1, viewportAllowance > 0 ? viewportAllowance : scrollHeight);
+
+               function computePlacement(placement: 'top' | 'bottom') {
+                       const available = placement === 'bottom' ? availableBelow : availableAbove;
+                       const allowedHeight =
+                               available > 0 ? Math.min(available, fallbackAllowance) : fallbackAllowance;
+                       const maxHeight = Math.min(scrollHeight, allowedHeight);
+                       const height = Math.max(0, maxHeight);
+
+                       let top: number;
+                       if (placement === 'bottom') {
+                               const rawTop = triggerRect.bottom + MENU_OFFSET;
+                               const minTop = VIEWPORT_GUTTER;
+                               const maxTop = viewportHeight - VIEWPORT_GUTTER - height;
+                               if (maxTop < minTop) {
+                                       top = minTop;
+                               } else {
+                                       top = Math.min(Math.max(rawTop, minTop), maxTop);
+                               }
+                       } else {
+                               const rawTop = triggerRect.top - MENU_OFFSET - height;
+                               const minTop = VIEWPORT_GUTTER;
+                               const maxTop = viewportHeight - VIEWPORT_GUTTER - height;
+                               if (maxTop < minTop) {
+                                       top = minTop;
+                               } else {
+                                       top = Math.max(Math.min(rawTop, maxTop), minTop);
+                               }
+                       }
+
+                       return { placement, top, height, maxHeight };
+               }
+
+               const belowMetrics = computePlacement('bottom');
+               const aboveMetrics = computePlacement('top');
+
+               let metrics = belowMetrics;
+               if (scrollHeight > belowMetrics.maxHeight && aboveMetrics.maxHeight > belowMetrics.maxHeight) {
+                       metrics = aboveMetrics;
+               }
+
+               menuRef.style.maxHeight = metrics.maxHeight > 0 ? `${Math.round(metrics.maxHeight)}px` : '';
+
+               let left = triggerRect.right - width;
+               const maxLeft = viewportWidth - VIEWPORT_GUTTER - width;
+               if (maxLeft < VIEWPORT_GUTTER) {
+                       left = VIEWPORT_GUTTER;
+               } else {
+                       if (left > maxLeft) {
+                               left = maxLeft;
+                       }
+                       if (left < VIEWPORT_GUTTER) {
+                               left = VIEWPORT_GUTTER;
+                       }
+               }
+
+               menuPosition = {
+                       top: Math.round(metrics.top),
+                       left: Math.round(left),
+                       width: Math.round(width),
+                       placement: metrics.placement,
+                       maxHeight: Math.round(metrics.maxHeight)
+               };
+       }
+
+       function getDisplayOption(): ModelOption | undefined {
+               if (activeId) {
+                       return options.find((option) => option.id === activeId);
+               }
+
+               return options[0];
+       }
+</script>
+
+<svelte:window onresize={handleResize} onscroll={handleScroll} />
+
+<svelte:document onpointerdown={handlePointerDown} onkeydown={handleKeydown} />
+
+<div
+       class={cn('relative z-10 flex max-w-[200px] min-w-[120px] flex-col items-end gap-1', className)}
+       bind:this={container}
+>
+       {#if loading && options.length === 0 && !isMounted}
+               <div class="flex items-center gap-2 text-xs text-muted-foreground">
+                       <Loader2 class="h-4 w-4 animate-spin" />
+                       Loading models…
+               </div>
+       {:else if options.length === 0}
+               <p class="text-xs text-muted-foreground">No models available.</p>
+       {:else}
+               {@const selectedOption = getDisplayOption()}
+
+               <div class="relative w-full">
+                       <button
+                               type="button"
+                               class={cn(
+                                       'flex w-full items-center justify-end gap-2 rounded-md px-2 py-1 text-sm text-muted-foreground transition hover:text-foreground focus:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-60',
+                                       isOpen ? 'text-foreground' : ''
+                               )}
+                               aria-haspopup="listbox"
+                               aria-expanded={isOpen}
+                               onclick={toggleOpen}
+                               bind:this={triggerButton}
+                               disabled={loading || updating}
+                       >
+                               <span class="max-w-[160px] truncate text-right font-medium">
+                                       {selectedOption?.name || 'Select model'}
+                               </span>
+
+                               {#if updating}
+                                       <Loader2 class="h-3.5 w-3.5 animate-spin text-muted-foreground" />
+                               {:else}
+                                       <ChevronDown
+                                               class={cn(
+                                                       'h-4 w-4 text-muted-foreground transition-transform',
+                                                       isOpen ? 'rotate-180 text-foreground' : ''
+                                               )}
+                                       />
+                               {/if}
+                       </button>
+
+                       {#if isOpen}
+                               <div
+                                       bind:this={menuRef}
+                                       use:portalToBody
+                                       class={cn(
+                                               'fixed z-[1000] overflow-hidden rounded-md border bg-popover shadow-lg transition-opacity',
+                                               menuPosition ? 'opacity-100' : 'pointer-events-none opacity-0'
+                                       )}
+                                       role="listbox"
+                                       style:top={menuPosition ? `${menuPosition.top}px` : undefined}
+                                       style:left={menuPosition ? `${menuPosition.left}px` : undefined}
+                                       style:width={menuPosition ? `${menuPosition.width}px` : undefined}
+                                       data-placement={menuPosition?.placement ?? 'bottom'}
+                               >
+                                       <div
+                                               class="overflow-y-auto py-1"
+                                               style:max-height={menuPosition && menuPosition.maxHeight > 0
+                                                       ? `${menuPosition.maxHeight}px`
+                                                       : undefined}
+                                       >
+                                               {#each options as option (option.id)}
+                                                       <button
+                                                               type="button"
+                                                               class={cn(
+                                                                       'flex w-full flex-col items-start gap-0.5 px-3 py-2 text-left text-sm transition hover:bg-muted focus:bg-muted focus:outline-none',
+                                                                       option.id === selectedOption?.id ? 'bg-accent text-accent-foreground' : ''
+                                                               )}
+                                                               role="option"
+                                                               aria-selected={option.id === selectedOption?.id}
+                                                               onclick={() => handleOptionSelect(option.id)}
+                                                       >
+                                                               <span class="block w-full truncate font-medium" title={option.name}>
+                                                                       {option.name}
+                                                               </span>
+
+                                                               {#if option.description}
+                                                                       <span class="text-xs text-muted-foreground">{option.description}</span>
+                                                               {/if}
+                                                       </button>
+                                               {/each}
+                                       </div>
+                               </div>
+                       {/if}
+               </div>
+       {/if}
+
+       {#if error}
+               <p class="text-xs text-destructive">{error}</p>
+       {/if}
+</div>
index 5539ed9e21c349a1fd232c46ceda8cede797f3ce..e878e7bf8a21763e26858f171d998fddfb635fcb 100644 (file)
@@ -10,6 +10,7 @@
        import ChatMessageActions from './ChatMessageActions.svelte';
        import Label from '$lib/components/ui/label/label.svelte';
        import { config } from '$lib/stores/settings.svelte';
+       import { modelName as serverModelName } from '$lib/stores/server.svelte';
        import { copyToClipboard } from '$lib/utils/copy';
 
        interface Props {
        }: Props = $props();
 
        const processingState = useProcessingState();
+       let currentConfig = $derived(config());
+       let serverModel = $derived(serverModelName());
+       let displayedModel = $derived((): string | null => {
+               if (!currentConfig.showModelInfo) return null;
+
+               if (currentConfig.modelSelectorEnabled) {
+                       return message.model ?? null;
+               }
+
+               return serverModel;
+       });
+
+       function handleCopyModel() {
+               const model = displayedModel();
+
+               void copyToClipboard(model ?? '');
+       }
 </script>
 
 <div
                </div>
        {/if}
 
-       {#if config().showModelInfo && message.model}
+       {#if displayedModel()}
                <span class="mt-6 mb-4 inline-flex items-center gap-1 text-xs text-muted-foreground">
                        <Package class="h-3.5 w-3.5" />
 
 
                        <button
                                class="inline-flex cursor-pointer items-center gap-1 rounded-sm bg-muted-foreground/15 px-1.5 py-0.75"
-                               onclick={() => copyToClipboard(message.model)}
+                               onclick={handleCopyModel}
                        >
-                               {message.model}
+                               {displayedModel()}
 
                                <Copy class="ml-1 h-3 w-3 " />
                        </button>
index ad5d617b5ff641ded957b5515745a4f92c1d26a8..20e4d3b3324e8df59b0608323b3c9a20a21d507d 100644 (file)
                        title: 'Developer',
                        icon: Code,
                        fields: [
+                               {
+                                       key: 'modelSelectorEnabled',
+                                       label: 'Enable model selector',
+                                       type: 'checkbox'
+                               },
                                {
                                        key: 'disableReasoningFormat',
                                        label: 'Show raw LLM output',
index 7b85db93db3f5735291ee8c49c5f94bf2b9ea865..392132f442fd366b7d6d412a9a40da1c7487949d 100644 (file)
@@ -8,6 +8,7 @@ export { default as ChatFormTextarea } from './chat/ChatForm/ChatFormTextarea.sv
 export { default as ChatFormActions } from './chat/ChatForm/ChatFormActions.svelte';
 export { default as ChatFormActionFileAttachments } from './chat/ChatForm/ChatFormActionFileAttachments.svelte';
 export { default as ChatFormActionRecord } from './chat/ChatForm/ChatFormActionRecord.svelte';
+export { default as ChatFormModelSelector } from './chat/ChatForm/ChatFormModelSelector.svelte';
 export { default as ChatFormHelperText } from './chat/ChatForm/ChatFormHelperText.svelte';
 export { default as ChatFormFileInputInvisible } from './chat/ChatForm/ChatFormFileInputInvisible.svelte';
 
@@ -32,7 +33,6 @@ export { default as ParameterSourceIndicator } from './chat/ChatSettings/Paramet
 export { default as ChatSidebar } from './chat/ChatSidebar/ChatSidebar.svelte';
 export { default as ChatSidebarConversationItem } from './chat/ChatSidebar/ChatSidebarConversationItem.svelte';
 export { default as ChatSidebarSearch } from './chat/ChatSidebar/ChatSidebarSearch.svelte';
-
 export { default as ChatErrorDialog } from './dialogs/ChatErrorDialog.svelte';
 export { default as EmptyFileAlertDialog } from './dialogs/EmptyFileAlertDialog.svelte';
 
index a7839d1c4dcd71604a6046d36e2b84558230ed8e..5bc28eeb47bf3d72218b0d40e984db8b366f2fe8 100644 (file)
@@ -8,22 +8,33 @@
                class: className,
                children,
                size = 'default',
+               variant = 'default',
                ...restProps
        }: WithoutChild<SelectPrimitive.TriggerProps> & {
                size?: 'sm' | 'default';
+               variant?: 'default' | 'plain';
        } = $props();
+
+       const baseClasses = $derived(
+               variant === 'plain'
+                       ? "group inline-flex w-full items-center justify-end gap-2 whitespace-nowrap px-0 py-0 text-sm font-medium text-muted-foreground transition-colors focus-visible:outline-none focus-visible:ring-0 focus-visible:ring-offset-0 disabled:cursor-not-allowed disabled:opacity-50 data-[placeholder]:text-muted-foreground data-[size=default]:h-9 data-[size=sm]:h-8 [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-3 [&_svg:not([class*='text-'])]:text-muted-foreground"
+                       : "flex w-fit items-center justify-between gap-2 rounded-md border border-input bg-transparent px-3 py-2 text-sm whitespace-nowrap shadow-xs transition-[color,box-shadow] outline-none select-none focus-visible:border-ring focus-visible:ring-[3px] focus-visible:ring-ring/50 disabled:cursor-not-allowed disabled:opacity-50 aria-invalid:border-destructive aria-invalid:ring-destructive/20 data-[placeholder]:text-muted-foreground data-[size=default]:h-9 data-[size=sm]:h-8 *:data-[slot=select-value]:line-clamp-1 *:data-[slot=select-value]:flex *:data-[slot=select-value]:items-center *:data-[slot=select-value]:gap-2 dark:bg-input/30 dark:hover:bg-input/50 dark:aria-invalid:ring-destructive/40 [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4 [&_svg:not([class*='text-'])]:text-muted-foreground"
+       );
+
+       const chevronClasses = $derived(
+               variant === 'plain'
+                       ? 'size-3 opacity-60 transition-transform group-data-[state=open]:-rotate-180'
+                       : 'size-4 opacity-50'
+       );
 </script>
 
 <SelectPrimitive.Trigger
        bind:ref
        data-slot="select-trigger"
        data-size={size}
-       class={cn(
-               "flex w-fit items-center justify-between gap-2 rounded-md border border-input bg-transparent px-3 py-2 text-sm whitespace-nowrap shadow-xs transition-[color,box-shadow] outline-none select-none focus-visible:border-ring focus-visible:ring-[3px] focus-visible:ring-ring/50 disabled:cursor-not-allowed disabled:opacity-50 aria-invalid:border-destructive aria-invalid:ring-destructive/20 data-[placeholder]:text-muted-foreground data-[size=default]:h-9 data-[size=sm]:h-8 *:data-[slot=select-value]:line-clamp-1 *:data-[slot=select-value]:flex *:data-[slot=select-value]:items-center *:data-[slot=select-value]:gap-2 dark:bg-input/30 dark:hover:bg-input/50 dark:aria-invalid:ring-destructive/40 [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4 [&_svg:not([class*='text-'])]:text-muted-foreground",
-               className
-       )}
+       class={cn(baseClasses, className)}
        {...restProps}
 >
        {@render children?.()}
-       <ChevronDownIcon class="size-4 opacity-50" />
+       <ChevronDownIcon class={chevronClasses} />
 </SelectPrimitive.Trigger>
index 9fcc7bab93d1dbb6a44d41894a1f7b8a0692c7a0..8bdc5f33c38a922466de94775db349955e2bb0b0 100644 (file)
@@ -1 +1,2 @@
 export const SERVER_PROPS_LOCALSTORAGE_KEY = 'LlamaCppWebui.serverProps';
+export const SELECTED_MODEL_LOCALSTORAGE_KEY = 'LlamaCppWebui.selectedModel';
index 154ec888ce2dc7c9f2e2b517281c52c10ef497cc..512dcc96997e73393200fac35752a66f1479d183 100644 (file)
@@ -13,6 +13,7 @@ export const SETTING_CONFIG_DEFAULT: Record<string, string | number | boolean> =
        pdfAsImage: false,
        showModelInfo: false,
        renderUserContentAsMarkdown: false,
+       modelSelectorEnabled: false,
        // make sure these default values are in sync with `common.h`
        samplers: 'top_k;typ_p;top_p;min_p;temperature',
        temperature: 0.8,
@@ -86,6 +87,8 @@ export const SETTING_CONFIG_INFO: Record<string, string> = {
        pdfAsImage: 'Parse PDF as image instead of text (requires vision-capable model).',
        showModelInfo: 'Display the model name used to generate each message below the message content.',
        renderUserContentAsMarkdown: 'Render user messages using markdown formatting in the chat.',
+       modelSelectorEnabled:
+               'Enable the model selector in the chat input to choose the inference model. Sends the associated model field in API requests.',
        pyInterpreterEnabled:
                'Enable Python interpreter using Pyodide. Allows running Python code in markdown code blocks.'
 };
index 2c4e53a02b2da01aa55aaa7172dd61d02e99218e..df03b10251ac2ce9d72bdb6d4d27239d815901c1 100644 (file)
@@ -1,4 +1,5 @@
 import { config } from '$lib/stores/settings.svelte';
+import { selectedModelName } from '$lib/stores/models.svelte';
 import { slotsService } from './slots';
 /**
  * ChatService - Low-level API communication layer for llama.cpp server interactions
@@ -51,6 +52,8 @@ export class ChatService {
                        onChunk,
                        onComplete,
                        onError,
+                       onReasoningChunk,
+                       onModel,
                        // Generation parameters
                        temperature,
                        max_tokens,
@@ -118,6 +121,13 @@ export class ChatService {
                        stream
                };
 
+               const modelSelectorEnabled = Boolean(currentConfig.modelSelectorEnabled);
+               const activeModel = modelSelectorEnabled ? selectedModelName() : null;
+
+               if (modelSelectorEnabled && activeModel) {
+                       requestBody.model = activeModel;
+               }
+
                requestBody.reasoning_format = currentConfig.disableReasoningFormat ? 'none' : 'auto';
 
                if (temperature !== undefined) requestBody.temperature = temperature;
@@ -189,13 +199,14 @@ export class ChatService {
                                        onChunk,
                                        onComplete,
                                        onError,
-                                       options.onReasoningChunk,
+                                       onReasoningChunk,
+                                       onModel,
                                        conversationId,
                                        abortController.signal
                                );
                                return;
                        } else {
-                               return this.handleNonStreamResponse(response, onComplete, onError);
+                               return this.handleNonStreamResponse(response, onComplete, onError, onModel);
                        }
                } catch (error) {
                        if (error instanceof Error && error.name === 'AbortError') {
@@ -255,6 +266,7 @@ export class ChatService {
                ) => void,
                onError?: (error: Error) => void,
                onReasoningChunk?: (chunk: string) => void,
+               onModel?: (model: string) => void,
                conversationId?: string,
                abortSignal?: AbortSignal
        ): Promise<void> {
@@ -270,6 +282,7 @@ export class ChatService {
                let hasReceivedData = false;
                let lastTimings: ChatMessageTimings | undefined;
                let streamFinished = false;
+               let modelEmitted = false;
 
                try {
                        let chunk = '';
@@ -298,6 +311,12 @@ export class ChatService {
                                                try {
                                                        const parsed: ApiChatCompletionStreamChunk = JSON.parse(data);
 
+                                                       const chunkModel = this.extractModelName(parsed);
+                                                       if (chunkModel && !modelEmitted) {
+                                                               modelEmitted = true;
+                                                               onModel?.(chunkModel);
+                                                       }
+
                                                        const content = parsed.choices[0]?.delta?.content;
                                                        const reasoningContent = parsed.choices[0]?.delta?.reasoning_content;
                                                        const timings = parsed.timings;
@@ -372,7 +391,8 @@ export class ChatService {
                        reasoningContent?: string,
                        timings?: ChatMessageTimings
                ) => void,
-               onError?: (error: Error) => void
+               onError?: (error: Error) => void,
+               onModel?: (model: string) => void
        ): Promise<string> {
                try {
                        const responseText = await response.text();
@@ -383,6 +403,12 @@ export class ChatService {
                        }
 
                        const data: ApiChatCompletionResponse = JSON.parse(responseText);
+
+                       const responseModel = this.extractModelName(data);
+                       if (responseModel) {
+                               onModel?.(responseModel);
+                       }
+
                        const content = data.choices[0]?.message?.content || '';
                        const reasoningContent = data.choices[0]?.message?.reasoning_content;
 
@@ -625,6 +651,39 @@ export class ChatService {
                }
        }
 
+       private extractModelName(data: unknown): string | undefined {
+               const asRecord = (value: unknown): Record<string, unknown> | undefined => {
+                       return typeof value === 'object' && value !== null
+                               ? (value as Record<string, unknown>)
+                               : undefined;
+               };
+
+               const getTrimmedString = (value: unknown): string | undefined => {
+                       return typeof value === 'string' && value.trim() ? value.trim() : undefined;
+               };
+
+               const root = asRecord(data);
+               if (!root) return undefined;
+
+               // 1) root (some implementations provide `model` at the top level)
+               const rootModel = getTrimmedString(root.model);
+               if (rootModel) return rootModel;
+
+               // 2) streaming choice (delta) or final response (message)
+               const firstChoice = Array.isArray(root.choices) ? asRecord(root.choices[0]) : undefined;
+               if (!firstChoice) return undefined;
+
+               // priority: delta.model (first chunk) else message.model (final response)
+               const deltaModel = getTrimmedString(asRecord(firstChoice.delta)?.model);
+               if (deltaModel) return deltaModel;
+
+               const messageModel = getTrimmedString(asRecord(firstChoice.message)?.model);
+               if (messageModel) return messageModel;
+
+               // avoid guessing from non-standard locations (metadata, etc.)
+               return undefined;
+       }
+
        private updateProcessingState(
                timings?: ChatMessageTimings,
                promptProgress?: ChatMessagePromptProgress,
diff --git a/tools/server/webui/src/lib/services/models.ts b/tools/server/webui/src/lib/services/models.ts
new file mode 100644 (file)
index 0000000..1c7fa3b
--- /dev/null
@@ -0,0 +1,22 @@
+import { base } from '$app/paths';
+import { config } from '$lib/stores/settings.svelte';
+import type { ApiModelListResponse } from '$lib/types/api';
+
+export class ModelsService {
+       static async list(): Promise<ApiModelListResponse> {
+               const currentConfig = config();
+               const apiKey = currentConfig.apiKey?.toString().trim();
+
+               const response = await fetch(`${base}/v1/models`, {
+                       headers: {
+                               ...(apiKey ? { Authorization: `Bearer ${apiKey}` } : {})
+                       }
+               });
+
+               if (!response.ok) {
+                       throw new Error(`Failed to fetch model list (status ${response.status})`);
+               }
+
+               return response.json() as Promise<ApiModelListResponse>;
+       }
+}
index ccc67c7294263ccdeae3a48a1b3c385e20aa28ed..a2e74a2e10721604cd76a5dcfa14765265e67bb2 100644 (file)
@@ -1,7 +1,7 @@
 import { DatabaseStore } from '$lib/stores/database';
 import { chatService, slotsService } from '$lib/services';
-import { serverStore } from '$lib/stores/server.svelte';
 import { config } from '$lib/stores/settings.svelte';
+import { normalizeModelName } from '$lib/utils/model-names';
 import { filterByLeafNodeId, findLeafNode, findDescendantMessages } from '$lib/utils/branching';
 import { browser } from '$app/environment';
 import { goto } from '$app/navigation';
@@ -359,28 +359,33 @@ class ChatStore {
        ): Promise<void> {
                let streamedContent = '';
                let streamedReasoningContent = '';
-               let modelCaptured = false;
 
-               const captureModelIfNeeded = (updateDbImmediately = true): string | undefined => {
-                       if (!modelCaptured) {
-                               const currentModelName = serverStore.modelName;
+               let resolvedModel: string | null = null;
+               let modelPersisted = false;
 
-                               if (currentModelName) {
-                                       if (updateDbImmediately) {
-                                               DatabaseStore.updateMessage(assistantMessage.id, { model: currentModelName }).catch(
-                                                       console.error
-                                               );
-                                       }
+               const recordModel = (modelName: string, persistImmediately = true): void => {
+                       const normalizedModel = normalizeModelName(modelName);
 
-                                       const messageIndex = this.findMessageIndex(assistantMessage.id);
+                       if (!normalizedModel || normalizedModel === resolvedModel) {
+                               return;
+                       }
 
-                                       this.updateMessageAtIndex(messageIndex, { model: currentModelName });
-                                       modelCaptured = true;
+                       resolvedModel = normalizedModel;
 
-                                       return currentModelName;
-                               }
+                       const messageIndex = this.findMessageIndex(assistantMessage.id);
+
+                       this.updateMessageAtIndex(messageIndex, { model: normalizedModel });
+
+                       if (persistImmediately && !modelPersisted) {
+                               modelPersisted = true;
+                               DatabaseStore.updateMessage(assistantMessage.id, { model: normalizedModel }).catch(
+                                       (error) => {
+                                               console.error('Failed to persist model name:', error);
+                                               modelPersisted = false;
+                                               resolvedModel = null;
+                                       }
+                               );
                        }
-                       return undefined;
                };
 
                slotsService.startStreaming();
@@ -399,7 +404,6 @@ class ChatStore {
                                                assistantMessage.id
                                        );
 
-                                       captureModelIfNeeded();
                                        const messageIndex = this.findMessageIndex(assistantMessage.id);
                                        this.updateMessageAtIndex(messageIndex, {
                                                content: streamedContent
@@ -409,13 +413,15 @@ class ChatStore {
                                onReasoningChunk: (reasoningChunk: string) => {
                                        streamedReasoningContent += reasoningChunk;
 
-                                       captureModelIfNeeded();
-
                                        const messageIndex = this.findMessageIndex(assistantMessage.id);
 
                                        this.updateMessageAtIndex(messageIndex, { thinking: streamedReasoningContent });
                                },
 
+                               onModel: (modelName: string) => {
+                                       recordModel(modelName);
+                               },
+
                                onComplete: async (
                                        finalContent?: string,
                                        reasoningContent?: string,
@@ -434,10 +440,9 @@ class ChatStore {
                                                timings: timings
                                        };
 
-                                       const capturedModel = captureModelIfNeeded(false);
-
-                                       if (capturedModel) {
-                                               updateData.model = capturedModel;
+                                       if (resolvedModel && !modelPersisted) {
+                                               updateData.model = resolvedModel;
+                                               modelPersisted = true;
                                        }
 
                                        await DatabaseStore.updateMessage(assistantMessage.id, updateData);
@@ -565,7 +570,8 @@ class ChatStore {
                                content: '',
                                timestamp: Date.now(),
                                thinking: '',
-                               children: []
+                               children: [],
+                               model: null
                        },
                        parentId || null
                );
@@ -1533,7 +1539,8 @@ class ChatStore {
                                        role: 'assistant',
                                        content: '',
                                        thinking: '',
-                                       children: []
+                                       children: [],
+                                       model: null
                                },
                                parentMessage.id
                        );
@@ -1590,7 +1597,8 @@ class ChatStore {
                                        role: 'assistant',
                                        content: '',
                                        thinking: '',
-                                       children: []
+                                       children: [],
+                                       model: null
                                },
                                userMessageId
                        );
diff --git a/tools/server/webui/src/lib/stores/models.svelte.ts b/tools/server/webui/src/lib/stores/models.svelte.ts
new file mode 100644 (file)
index 0000000..bcb6882
--- /dev/null
@@ -0,0 +1,187 @@
+import { ModelsService } from '$lib/services/models';
+import { persisted } from '$lib/stores/persisted.svelte';
+import { SELECTED_MODEL_LOCALSTORAGE_KEY } from '$lib/constants/localstorage-keys';
+import type { ModelOption } from '$lib/types/models';
+
+type PersistedModelSelection = {
+       id: string;
+       model: string;
+};
+
+class ModelsStore {
+       private _models = $state<ModelOption[]>([]);
+       private _loading = $state(false);
+       private _updating = $state(false);
+       private _error = $state<string | null>(null);
+       private _selectedModelId = $state<string | null>(null);
+       private _selectedModelName = $state<string | null>(null);
+       private _persistedSelection = persisted<PersistedModelSelection | null>(
+               SELECTED_MODEL_LOCALSTORAGE_KEY,
+               null
+       );
+
+       constructor() {
+               const persisted = this._persistedSelection.value;
+               if (persisted) {
+                       this._selectedModelId = persisted.id;
+                       this._selectedModelName = persisted.model;
+               }
+       }
+
+       get models(): ModelOption[] {
+               return this._models;
+       }
+
+       get loading(): boolean {
+               return this._loading;
+       }
+
+       get updating(): boolean {
+               return this._updating;
+       }
+
+       get error(): string | null {
+               return this._error;
+       }
+
+       get selectedModelId(): string | null {
+               return this._selectedModelId;
+       }
+
+       get selectedModelName(): string | null {
+               return this._selectedModelName;
+       }
+
+       get selectedModel(): ModelOption | null {
+               if (!this._selectedModelId) {
+                       return null;
+               }
+
+               return this._models.find((model) => model.id === this._selectedModelId) ?? null;
+       }
+
+       async fetch(force = false): Promise<void> {
+               if (this._loading) return;
+               if (this._models.length > 0 && !force) return;
+
+               this._loading = true;
+               this._error = null;
+
+               try {
+                       const response = await ModelsService.list();
+
+                       const models: ModelOption[] = response.data.map((item, index) => {
+                               const details = response.models?.[index];
+                               const rawCapabilities = Array.isArray(details?.capabilities) ? details?.capabilities : [];
+                               const displayNameSource =
+                                       details?.name && details.name.trim().length > 0 ? details.name : item.id;
+                               const displayName = this.toDisplayName(displayNameSource);
+
+                               return {
+                                       id: item.id,
+                                       name: displayName,
+                                       model: details?.model || item.id,
+                                       description: details?.description,
+                                       capabilities: rawCapabilities.filter((value): value is string => Boolean(value)),
+                                       details: details?.details,
+                                       meta: item.meta ?? null
+                               } satisfies ModelOption;
+                       });
+
+                       this._models = models;
+
+                       const selection = this.determineInitialSelection(models);
+
+                       this._selectedModelId = selection.id;
+                       this._selectedModelName = selection.model;
+                       this._persistedSelection.value =
+                               selection.id && selection.model ? { id: selection.id, model: selection.model } : null;
+               } catch (error) {
+                       this._models = [];
+                       this._error = error instanceof Error ? error.message : 'Failed to load models';
+
+                       throw error;
+               } finally {
+                       this._loading = false;
+               }
+       }
+
+       async select(modelId: string): Promise<void> {
+               if (!modelId || this._updating) {
+                       return;
+               }
+
+               if (this._selectedModelId === modelId) {
+                       return;
+               }
+
+               const option = this._models.find((model) => model.id === modelId);
+               if (!option) {
+                       throw new Error('Selected model is not available');
+               }
+
+               this._updating = true;
+               this._error = null;
+
+               try {
+                       this._selectedModelId = option.id;
+                       this._selectedModelName = option.model;
+                       this._persistedSelection.value = { id: option.id, model: option.model };
+               } finally {
+                       this._updating = false;
+               }
+       }
+
+       private toDisplayName(id: string): string {
+               const segments = id.split(/\\|\//);
+               const candidate = segments.pop();
+
+               return candidate && candidate.trim().length > 0 ? candidate : id;
+       }
+
+       /**
+        * Determines which model should be selected after fetching the models list.
+        * Priority: current selection > persisted selection > first available model > none
+        */
+       private determineInitialSelection(models: ModelOption[]): {
+               id: string | null;
+               model: string | null;
+       } {
+               const persisted = this._persistedSelection.value;
+               let nextSelectionId = this._selectedModelId ?? persisted?.id ?? null;
+               let nextSelectionName = this._selectedModelName ?? persisted?.model ?? null;
+
+               if (nextSelectionId) {
+                       const match = models.find((m) => m.id === nextSelectionId);
+
+                       if (match) {
+                               nextSelectionId = match.id;
+                               nextSelectionName = match.model;
+                       } else if (models[0]) {
+                               nextSelectionId = models[0].id;
+                               nextSelectionName = models[0].model;
+                       } else {
+                               nextSelectionId = null;
+                               nextSelectionName = null;
+                       }
+               } else if (models[0]) {
+                       nextSelectionId = models[0].id;
+                       nextSelectionName = models[0].model;
+               }
+
+               return { id: nextSelectionId, model: nextSelectionName };
+       }
+}
+
+export const modelsStore = new ModelsStore();
+
+export const modelOptions = () => modelsStore.models;
+export const modelsLoading = () => modelsStore.loading;
+export const modelsUpdating = () => modelsStore.updating;
+export const modelsError = () => modelsStore.error;
+export const selectedModelId = () => modelsStore.selectedModelId;
+export const selectedModelName = () => modelsStore.selectedModelName;
+export const selectedModelOption = () => modelsStore.selectedModel;
+
+export const fetchModels = modelsStore.fetch.bind(modelsStore);
+export const selectModel = modelsStore.select.bind(modelsStore);
diff --git a/tools/server/webui/src/lib/stores/persisted.svelte.ts b/tools/server/webui/src/lib/stores/persisted.svelte.ts
new file mode 100644 (file)
index 0000000..1e07f80
--- /dev/null
@@ -0,0 +1,50 @@
+import { browser } from '$app/environment';
+
+type PersistedValue<T> = {
+       get value(): T;
+       set value(newValue: T);
+};
+
+export function persisted<T>(key: string, initialValue: T): PersistedValue<T> {
+       let value = initialValue;
+
+       if (browser) {
+               try {
+                       const stored = localStorage.getItem(key);
+
+                       if (stored !== null) {
+                               value = JSON.parse(stored) as T;
+                       }
+               } catch (error) {
+                       console.warn(`Failed to load ${key}:`, error);
+               }
+       }
+
+       const persist = (next: T) => {
+               if (!browser) {
+                       return;
+               }
+
+               try {
+                       if (next === null || next === undefined) {
+                               localStorage.removeItem(key);
+                               return;
+                       }
+
+                       localStorage.setItem(key, JSON.stringify(next));
+               } catch (error) {
+                       console.warn(`Failed to persist ${key}:`, error);
+               }
+       };
+
+       return {
+               get value() {
+                       return value;
+               },
+
+               set value(newValue: T) {
+                       value = newValue;
+                       persist(newValue);
+               }
+       };
+}
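A minimal usage sketch of the persisted helper defined above; the storage key, the value shape, and the import specifier are illustrative only.

    import { persisted } from '$lib/stores/persisted.svelte'; // specifier assumed

    // Illustrative key and shape, not taken from this commit.
    type StoredSelection = { id: string; model: string } | null;

    const lastSelection = persisted<StoredSelection>('example-last-model', null);

    // Reads return the in-memory value (hydrated from localStorage in the browser).
    console.log(lastSelection.value);

    // Writes update memory and localStorage; null or undefined removes the key.
    lastSelection.value = { id: 'model-a', model: 'model-a.gguf' };
    lastSelection.value = null;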
index b330cbb4bf42ef563c110b57a6dc06a4c28d1a1f..b10f0dd3a4189527f697664a6bf6898c959b3185 100644 (file)
@@ -80,7 +80,8 @@ class SettingsStore {
                if (!browser) return;
 
                try {
-                       const savedVal = JSON.parse(localStorage.getItem('config') || '{}');
+                       const storedConfigRaw = localStorage.getItem('config');
+                       const savedVal = JSON.parse(storedConfigRaw || '{}');
 
                        // Merge with defaults to prevent breaking changes
                        this.config = {
index d0e60a6c137067cee439c61c6ad3308a579e9fd0..6d76ab1f68e9dc3279791a1e0058d6c4f7f9e72c 100644 (file)
@@ -36,6 +36,41 @@ export interface ApiChatMessageData {
        timestamp?: number;
 }
 
+export interface ApiModelDataEntry {
+       id: string;
+       object: string;
+       created: number;
+       owned_by: string;
+       meta?: Record<string, unknown> | null;
+}
+
+export interface ApiModelDetails {
+       name: string;
+       model: string;
+       modified_at?: string;
+       size?: string | number;
+       digest?: string;
+       type?: string;
+       description?: string;
+       tags?: string[];
+       capabilities?: string[];
+       parameters?: string;
+       details?: {
+               parent_model?: string;
+               format?: string;
+               family?: string;
+               families?: string[];
+               parameter_size?: string;
+               quantization_level?: string;
+       };
+}
+
+export interface ApiModelListResponse {
+       object: string;
+       data: ApiModelDataEntry[];
+       models?: ApiModelDetails[];
+}
+
 export interface ApiLlamaCppServerProps {
        default_generation_settings: {
                id: number;
@@ -120,6 +155,7 @@ export interface ApiChatCompletionRequest {
                content: string | ApiChatMessageContentPart[];
        }>;
        stream?: boolean;
+       model?: string;
        // Reasoning parameters
        reasoning_format?: string;
        // Generation parameters
@@ -150,10 +186,14 @@ export interface ApiChatCompletionRequest {
 }
 
 export interface ApiChatCompletionStreamChunk {
+       model?: string;
        choices: Array<{
+               model?: string;
+               metadata?: { model?: string };
                delta: {
                        content?: string;
                        reasoning_content?: string;
+                       model?: string;
                };
        }>;
        timings?: {
@@ -167,10 +207,14 @@ export interface ApiChatCompletionStreamChunk {
 }
 
 export interface ApiChatCompletionResponse {
+       model?: string;
        choices: Array<{
+               model?: string;
+               metadata?: { model?: string };
                message: {
                        content: string;
                        reasoning_content?: string;
+                       model?: string;
                };
        }>;
 }
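Because backends may surface the model name at several levels of the payload, a consumer has to probe them in order. A hedged sketch of that fallback for stream chunks; the helper name is illustrative, only the types above are from this diff.

    import type { ApiChatCompletionStreamChunk } from '$lib/types/api';

    // Illustrative helper: return the first model field present in a chunk,
    // checking the top level, the choice, its metadata, and finally the delta.
    function resolveChunkModel(chunk: ApiChatCompletionStreamChunk): string | undefined {
        const choice = chunk.choices[0];

        return chunk.model ?? choice?.model ?? choice?.metadata?.model ?? choice?.delta.model;
    }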
diff --git a/tools/server/webui/src/lib/types/models.d.ts b/tools/server/webui/src/lib/types/models.d.ts
new file mode 100644 (file)
index 0000000..3b6bad5
--- /dev/null
@@ -0,0 +1,11 @@
+import type { ApiModelDataEntry, ApiModelDetails } from '$lib/types/api';
+
+export interface ModelOption {
+       id: string;
+       name: string;
+       model: string;
+       description?: string;
+       capabilities: string[];
+       details?: ApiModelDetails['details'];
+       meta?: ApiModelDataEntry['meta'];
+}
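For illustration, a sketch of folding a models list response into ModelOption entries. The matching heuristic and import specifiers are assumptions; the store's own mapping, added earlier in this diff, remains authoritative.

    import type { ApiModelListResponse } from '$lib/types/api';
    import type { ModelOption } from '$lib/types/models';

    // Illustrative mapping: pair each list entry with its optional details
    // record and fall back to the raw id where details are absent.
    function toModelOptions(response: ApiModelListResponse): ModelOption[] {
        return response.data.map((entry) => {
            const details = response.models?.find(
                (candidate) => candidate.name === entry.id || candidate.model === entry.id
            );

            return {
                id: entry.id,
                name: details?.name ?? entry.id,
                model: details?.model ?? entry.id,
                description: details?.description,
                capabilities: details?.capabilities ?? [],
                details: details?.details,
                meta: entry.meta
            };
        });
    }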
index 4311f779ad8419642d7550c48162ea5dffa8c1c3..659fb0c7d1cf5624266321f7bedd08ca3f341d8e 100644 (file)
@@ -41,6 +41,7 @@ export interface SettingsChatServiceOptions {
        // Callbacks
        onChunk?: (chunk: string) => void;
        onReasoningChunk?: (chunk: string) => void;
+       onModel?: (model: string) => void;
        onComplete?: (response: string, reasoningContent?: string, timings?: ChatMessageTimings) => void;
        onError?: (error: Error) => void;
 }
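A minimal sketch of an onModel handler a caller could pass through these options. The wiring into ChatService itself is outside this excerpt; only the callback signature above and the normalizeModelName helper added later in this diff are relied on.

    import { normalizeModelName } from '$lib/utils/model-names';

    // Illustrative handler matching the onModel signature above: keep only
    // the filename portion of whatever model string the API reports.
    const onModel = (model: string): void => {
        const displayName = normalizeModelName(model);

        if (displayName) {
            console.log('assistant response produced by:', displayName);
        }
    };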
diff --git a/tools/server/webui/src/lib/utils/model-names.test.ts b/tools/server/webui/src/lib/utils/model-names.test.ts
new file mode 100644 (file)
index 0000000..e19e92f
--- /dev/null
@@ -0,0 +1,44 @@
+import { describe, expect, it } from 'vitest';
+import { isValidModelName, normalizeModelName } from './model-names';
+
+describe('normalizeModelName', () => {
+       it('extracts filename from forward slash path', () => {
+               expect(normalizeModelName('models/model-name-1')).toBe('model-name-1');
+               expect(normalizeModelName('path/to/model/model-name-2')).toBe('model-name-2');
+       });
+
+       it('extracts filename from backslash path', () => {
+               expect(normalizeModelName('C\\Models\\model-name-1')).toBe('model-name-1');
+               expect(normalizeModelName('path\\to\\model\\model-name-2')).toBe('model-name-2');
+       });
+
+       it('handles mixed path separators', () => {
+               expect(normalizeModelName('path/to\\model/model-name-2')).toBe('model-name-2');
+       });
+
+       it('returns simple names as-is', () => {
+               expect(normalizeModelName('simple-model')).toBe('simple-model');
+               expect(normalizeModelName('model-name-2')).toBe('model-name-2');
+       });
+
+       it('trims whitespace', () => {
+               expect(normalizeModelName('  model-name  ')).toBe('model-name');
+       });
+
+       it('returns empty string for empty input', () => {
+               expect(normalizeModelName('')).toBe('');
+               expect(normalizeModelName('   ')).toBe('');
+       });
+});
+
+describe('isValidModelName', () => {
+       it('returns true for valid names', () => {
+               expect(isValidModelName('model')).toBe(true);
+               expect(isValidModelName('path/to/model.bin')).toBe(true);
+       });
+
+       it('returns false for empty values', () => {
+               expect(isValidModelName('')).toBe(false);
+               expect(isValidModelName('   ')).toBe(false);
+       });
+});
diff --git a/tools/server/webui/src/lib/utils/model-names.ts b/tools/server/webui/src/lib/utils/model-names.ts
new file mode 100644 (file)
index 0000000..b1ea9d9
--- /dev/null
@@ -0,0 +1,39 @@
+/**
+ * Normalizes a model name by extracting the filename from a path.
+ *
+ * Handles both forward slashes (/) and backslashes (\) as path separators.
+ * If the model name is just a filename (no path), returns it as-is.
+ *
+ * @param modelName - The model name or path to normalize
+ * @returns The normalized model name (filename only)
+ *
+ * @example
+ * normalizeModelName('models/llama-3.1-8b') // Returns: 'llama-3.1-8b'
+ * normalizeModelName('C:\\Models\\gpt-4') // Returns: 'gpt-4'
+ * normalizeModelName('simple-model') // Returns: 'simple-model'
+ * normalizeModelName('  spaced  ') // Returns: 'spaced'
+ * normalizeModelName('') // Returns: ''
+ */
+export function normalizeModelName(modelName: string): string {
+       const trimmed = modelName.trim();
+
+       if (!trimmed) {
+               return '';
+       }
+
+       const segments = trimmed.split(/[\\/]/);
+       const candidate = segments.pop();
+       const normalized = candidate?.trim();
+
+       return normalized && normalized.length > 0 ? normalized : trimmed;
+}
+
+/**
+ * Checks whether a model name is valid (non-empty after normalization).
+ *
+ * @param modelName - The model name to validate
+ * @returns true if valid, false otherwise
+ */
+export function isValidModelName(modelName: string): boolean {
+       return normalizeModelName(modelName).length > 0;
+}
diff --git a/tools/server/webui/src/lib/utils/portal-to-body.ts b/tools/server/webui/src/lib/utils/portal-to-body.ts
new file mode 100644 (file)
index 0000000..bffbe89
--- /dev/null
@@ -0,0 +1,20 @@
+export function portalToBody(node: HTMLElement) {
+       if (typeof document === 'undefined') {
+               return;
+       }
+
+       const target = document.body;
+       if (!target) {
+               return;
+       }
+
+       target.appendChild(node);
+
+       return {
+               destroy() {
+                       if (node.parentNode === target) {
+                               target.removeChild(node);
+                       }
+               }
+       };
+}
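For context, a programmatic sketch of the portalToBody action. In a component it would normally be attached with a Svelte use: directive; the calls below rely only on the function defined above.

    import { portalToBody } from '$lib/utils/portal-to-body';

    // Move an element under document.body (e.g. so a dropdown escapes any
    // overflow-hidden ancestor), then detach it when it is no longer needed.
    const node = document.createElement('div');
    node.textContent = 'dropdown content';

    const action = portalToBody(node);

    // Later, when the element should go away:
    action?.destroy();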
index 8912f642ceffce809a65045f84c1a3cc2645feb5..075bdd356bc99e774a85143222ea6cfb4cfc8a27 100644 (file)
                </Sidebar.Root>
 
                <Sidebar.Trigger
-                       class="transition-left absolute h-8 w-8 duration-200 ease-linear {sidebarOpen
+                       class="transition-left absolute left-0 z-[900] h-8 w-8 duration-200 ease-linear {sidebarOpen
                                ? 'md:left-[var(--sidebar-width)]'
-                               : 'left-0'}"
-                       style="translate: 1rem 1rem; z-index: 99999;"
+                               : ''}"
+                       style="translate: 1rem 1rem;"
                />
 
                <Sidebar.Inset class="flex flex-1 flex-col overflow-hidden">