Skip to content

Commit

Permalink
improve openai config
Browse files Browse the repository at this point in the history
  • Loading branch information
GregoMac1 committed Oct 9, 2024
1 parent 70f8760 commit 6903d30
Show file tree
Hide file tree
Showing 8 changed files with 54 additions and 24 deletions.
1 change: 1 addition & 0 deletions src/i18n/en/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ const en = {
clear: 'Clear',
confirmDeletion: 'Confirm deletion',
connected: 'Connected',
connecting: 'Connecting...',
content: 'Content',
controls: 'Controls',
controlsOnlyAvailableForOllama:
Expand Down
1 change: 1 addition & 0 deletions src/i18n/es/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ const es = {
currentVersion: 'Versión actual',
confirmDeletion: 'Confirmar eliminación',
connected: 'Conectado',
connecting: 'Conectando...',
content: 'Contenido',
controls: 'Controles',
copy: 'Copiar',
Expand Down
8 changes: 8 additions & 0 deletions src/i18n/i18n-types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,10 @@ type RootTranslation = {
* C​o​n​n​e​c​t​e​d
*/
connected: string
/**
* C​o​n​n​e​c​t​i​n​g​.​.​.
*/
connecting: string
/**
* C​o​n​t​e​n​t
*/
Expand Down Expand Up @@ -640,6 +644,10 @@ The completion in progress will stop
* Connected
*/
connected: () => LocalizedString
/**
* Connecting...
*/
connecting: () => LocalizedString
/**
* Content
*/
Expand Down
5 changes: 1 addition & 4 deletions src/lib/chat/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -43,10 +43,7 @@ export async function listModels(): Promise<Model[]> {
.getModels()
.then((models) => models.models)
.catch(() => []);
const openaiModels = await new OpenAIStrategy()
.getModels()
.then((models) => models.models)
.catch(() => []);
const openaiModels = await new OpenAIStrategy().getModels().catch(() => []);

return [...ollamaModels, ...openaiModels];
}
Expand Down
13 changes: 12 additions & 1 deletion src/lib/chat/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,14 @@ export class OpenAIStrategy implements ChatStrategy {
});
}

/**
 * Reconfigure the underlying OpenAI client with a new server URL and
 * API key. Replaces the client instance so every subsequent request
 * uses the updated credentials.
 *
 * @param params.server - base URL of the OpenAI-compatible server
 * @param params.apiKey - API key sent with each request
 */
config(params: { server: string; apiKey: string }): void {
	const clientOptions = {
		baseURL: params.server,
		apiKey: params.apiKey,
		// NOTE(review): required because this client runs in the browser;
		// the key is user-supplied at runtime, not an embedded secret.
		dangerouslyAllowBrowser: true
	};
	this.openai = new OpenAI(clientOptions);
}

async chat(
payload: any,

Check failure on line 29 in src/lib/chat/openai.ts

View workflow job for this annotation

GitHub Actions / Run linter

Unexpected any. Specify a different type
abortSignal: AbortSignal,
Expand All @@ -36,6 +44,9 @@ export class OpenAIStrategy implements ChatStrategy {

async getModels(): Promise<any> {

Check failure on line 45 in src/lib/chat/openai.ts

View workflow job for this annotation

GitHub Actions / Run linter

Unexpected any. Specify a different type
const response = await this.openai.models.list();
return response.data;
return response.data?.map((model) => ({
api: 'openai',
name: model.id
}));
}
}
13 changes: 10 additions & 3 deletions src/lib/components/FieldSelectModel.svelte
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
<script lang="ts">
import LL from '$i18n/i18n-svelte';
import { getLastUsedModels } from '$lib/chat';
import { getLastUsedModels, type Model } from '$lib/chat';
import { settingsStore } from '$lib/localStorage';
import FieldSelect from './FieldSelect.svelte';
Expand All @@ -19,16 +19,23 @@
let lastUsedModels: ModelOption[] = [];
let otherModels: ModelOption[] = [];
/**
 * Build the badge labels shown next to a model in the picker:
 * the parameter size (when known) followed by the API it comes from,
 * skipping any value that is missing.
 */
function getBadges(model: Model): string[] {
	const candidates = [model.details?.parameter_size, model.api];
	return candidates.filter((badge): badge is string => !!badge);
}
$: disabled = !$settingsStore.models?.length;
$: models = $settingsStore.models?.map((m) => ({
value: m.name,
label: m.name,
badge: [m.details.parameter_size, m.api]
badge: getBadges(m)
}));
$: lastUsedModels = getLastUsedModels().map((m) => ({
value: m.name,
label: m.name,
badge: [m.details.parameter_size, m.api]
badge: getBadges(m)
}));
$: otherModels = models?.filter((m) => !lastUsedModels.some((lm) => lm.value === m.value)) || [];
</script>
Expand Down
2 changes: 1 addition & 1 deletion src/lib/localStorage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ export interface Settings {

const defaultSettings: Settings = {
ollamaServer: 'http://localhost:11434',
openaiServer: 'http://localhost:11434/v1/',
openaiServer: 'https://api.openai.com/v1',
openaiApiKey: null,
models: [],
lastUpdateCheck: null,
Expand Down
35 changes: 20 additions & 15 deletions src/routes/settings/OpenAI.svelte
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
<script lang="ts">
import { onMount } from 'svelte';
import { MonitorUp } from 'lucide-svelte';
import LL from '$i18n/i18n-svelte';
import { OpenAIStrategy } from '$lib/chat/openai';
Expand All @@ -11,18 +11,17 @@
import P from '$lib/components/P.svelte';
import { settingsStore } from '$lib/localStorage';
let openaiURL: URL | null = null;
const DEFAULT_OPENAI_SERVER = 'https://api.openai.com/v1';
let openai = new OpenAIStrategy();
let openaiServer = $settingsStore.openaiServer || DEFAULT_OPENAI_SERVER;
let openaiApiKey = $settingsStore.openaiApiKey || '';
let openaiServerStatus: 'connected' | 'disconnected' = 'disconnected';
let openaiServerStatus: 'connected' | 'disconnected' | 'connecting' = 'disconnected';
$: settingsStore.update((settings) => ({ ...settings, openaiServer, openaiApiKey }));
async function getModelsList(): Promise<void> {
openaiServerStatus = 'connecting';
try {
await openai.getModels();
openaiServerStatus = 'connected';
Expand All @@ -31,17 +30,10 @@
}
}
onMount(async () => {
// Get the current URL and set the default server
openaiURL = new URL(window.location.href);
if (openaiURL.port) {
openaiURL = new URL(
`${openaiURL.protocol}//${openaiURL.hostname}${openaiURL.pathname}${openaiURL.search}${openaiURL.hash}`
);
}
async function updateOpenAIConfig() {
openai.config({ server: openaiServer, apiKey: openaiApiKey });
await getModelsList();
});
}
</script>

<Fieldset>
Expand All @@ -51,11 +43,12 @@
label={$LL.baseUrl()}
placeholder={DEFAULT_OPENAI_SERVER}
bind:value={openaiServer}
on:keyup={getModelsList}
>
<svelte:fragment slot="status">
{#if openaiServerStatus === 'disconnected'}
<Badge variant="warning">{$LL.disconnected()}</Badge>
{:else if openaiServerStatus === 'connecting'}
<Badge variant="warning">{$LL.connecting()}</Badge>
{:else}
<Badge variant="positive">{$LL.connected()}</Badge>
{/if}
Expand All @@ -69,6 +62,18 @@
placeholder={$LL.noApiKey()}
type="password"
>
<svelte:fragment slot="nav">
<Button
aria-label="Connect"
class="h-full text-muted"
isLoading={openaiServerStatus === 'connecting'}
disabled={openaiServerStatus === 'connecting' || !openaiServer || !openaiApiKey}
on:click={updateOpenAIConfig}
>
<MonitorUp class="base-icon" />
</Button>
</svelte:fragment>

<svelte:fragment slot="help">
{#if openaiApiKey === 'ollama' || openaiServerStatus === 'disconnected'}
<FieldHelp>
Expand Down

0 comments on commit 6903d30

Please sign in to comment.