Skip to content

Commit

Permalink
Bug fix for azure models (#4)
Browse files Browse the repository at this point in the history
* Setup support for Azure OpenAI Services

* Fixed issue with Azure models
  • Loading branch information
pureit-dev authored Dec 18, 2023
1 parent 440e4ad commit 9f3f494
Show file tree
Hide file tree
Showing 9 changed files with 89 additions and 49 deletions.
1 change: 1 addition & 0 deletions .env.local.example
Original file line number Diff line number Diff line change
Expand Up @@ -60,3 +60,4 @@ OPENAI_API_TYPE= # openai or azure
OPENAI_API_VERSION= # e.g. 2023-07-01-preview
AZURE_DEPLOYMENT_ID_EMBEDDINGS= # Your embeddings deployment name
AZURE_DEPLOYMENT_ID= # Your deployment name
AZURE_MODELS_PATH= # "models" or "deployments"
10 changes: 6 additions & 4 deletions components/Chat/Chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -216,10 +216,12 @@ export const Chat = memo(() => {
</div>
<div className="text-center text-gray-500 dark:text-gray-400">
<div className="mb-2">
OpenGPT allows you to safely use your OpenAI API Key and pay as you go.
OpenGPT allows you to safely use your OpenAI API Key and pay as
you go.
</div>
<div className="mb-2">
It is <span className="italic">only</span> used to communicate with their API.
It is <span className="italic">only</span> used to communicate
with their API.
</div>
<div className="mb-2">
{t(
Expand Down Expand Up @@ -259,7 +261,7 @@ export const Chat = memo(() => {
<Spinner size="16px" className="mx-auto" />
</div>
) : (
'Chatbot UI'
'OpenGPT'
)}
</div>

Expand Down Expand Up @@ -361,4 +363,4 @@ export const Chat = memo(() => {
</ChatContext.Provider>
);
});
Chat.displayName = 'Chat';
Chat.displayName = 'Chat';
65 changes: 43 additions & 22 deletions components/Chat/ModelSelect.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ import { useTranslation } from 'next-i18next';

import useConversations from '@/hooks/useConversations';

import { OPENAI_API_TYPE } from '@/utils/app/const';

import { OpenAIModel, OpenAIModelType } from '@/types/openai';

import HomeContext from '@/pages/api/home/home.context';
Expand All @@ -27,6 +29,22 @@ export const ModelSelect = () => {
});
};

// Builds the display label for a model option in the selector.
// On Azure, a deployment's display name and deployment ID can differ, and
// several deployments may share one display name — appending the ID makes
// them distinguishable. Otherwise, fall back to the ID when no name exists.
const getModelText = (model: OpenAIModel) => {
  if (
    OPENAI_API_TYPE === 'azure' &&
    model.name &&
    model.id &&
    // Strict comparison: both sides are already strings, no coercion wanted.
    model.name.toLowerCase() !== model.id.toLowerCase()
  ) {
    return `${model.name} [${model.id}]`;
  }
  return model.name || model.id;
};

return (
<div className="flex flex-col">
<label className="mb-2 text-left text-neutral-700 dark:text-neutral-400">
Expand All @@ -39,30 +57,33 @@ export const ModelSelect = () => {
value={selectedConversation?.model?.id || defaultModelId}
onChange={handleChange}
>
{models.filter(m => m.type === OpenAIModelType.CHAT).map((model) => (
<option
key={model.id}
value={model.id}
className="dark:bg-[#343541] dark:text-white"
>
{model.id === defaultModelId
? `Default (${model.name})`
: model.name}
</option>
))}
{models
.filter((m) => m.type === OpenAIModelType.CHAT)
.map((model) => (
<option
key={model.id}
value={model.id}
className="dark:bg-[#343541] dark:text-white"
>
{model.id === defaultModelId
? `Default (${getModelText(model)})`
: getModelText(model)}
</option>
))}
</select>
</div>
{!isAzureOpenAI && <div className="w-full mt-3 text-left text-neutral-700 dark:text-neutral-400 flex items-center">
<a
href="https://platform.openai.com/account/usage"
target="_blank"
className="flex items-center"
>
<IconExternalLink size={18} className={'inline mr-1'} />
{t('View Account Usage')}
</a>
</div>
}
{!isAzureOpenAI && (
<div className="w-full mt-3 text-left text-neutral-700 dark:text-neutral-400 flex items-center">
<a
href="https://platform.openai.com/account/usage"
target="_blank"
className="flex items-center"
>
<IconExternalLink size={18} className={'inline mr-1'} />
{t('View Account Usage')}
</a>
</div>
)}
</div>
);
};
2 changes: 1 addition & 1 deletion public/locales/zh_TW/chat.json
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
"click if using a .env.local file": "若使用 .env.local 檔案,請點選",
"Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "訊息字數限制為 {{maxLength}} 字元,您已輸入 {{valueLength}} 字元。",
"Please enter a message": "請輸入訊息",
"Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UI 是一個設計來模擬 OpenAI 聊天模型 ChatGPT 的介面和功能進階的聊天機器人。",
"Open GPT is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Open GPT 是一個設計來模擬 OpenAI 聊天模型 ChatGPT 的介面和功能進階的聊天機器人。",
"Are you sure you want to clear all messages?": "您確定要清除所有訊息嗎?",
"Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.": "較高的數值,如 0.8,將使輸出更為隨機,而較低的數值,如 0.2,則會使輸出更集中且具確定性。",
"View Account Usage": "檢視帳戶使用情況",
Expand Down
28 changes: 18 additions & 10 deletions server/routers/models.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import {
AZURE_DEPLOYMENT_ID,
AZURE_MODELS_PATH,
OPENAI_API_HOST,
OPENAI_API_TYPE,
OPENAI_API_VERSION,
Expand All @@ -24,7 +26,7 @@ export const models = router({

let url = `${OPENAI_API_HOST}/v1/models`;
if (OPENAI_API_TYPE === 'azure') {
url = `${OPENAI_API_HOST}/openai/models?api-version=${OPENAI_API_VERSION}`;
url = `${OPENAI_API_HOST}/openai/${AZURE_MODELS_PATH}?api-version=${OPENAI_API_VERSION}`;
}

const response = await fetch(url, {
Expand Down Expand Up @@ -59,13 +61,14 @@ export const models = router({

const json = await response.json();

const models: OpenAIModel[] = json.data
let models: OpenAIModel[] = json.data
.map((model: any) => {
const model_name =
OPENAI_API_TYPE === 'azure' ? model.model || model.id : model.id;
for (const [key, value] of Object.entries(OpenAIModelID)) {
const modelId = model.id;
if (value === modelId) {
if (value === model_name) {
const r: OpenAIModel = {
id: modelId,
id: model.id,
azureDeploymentId:
OPENAI_API_TYPE === 'azure' ? model.id : undefined,
name: OpenAIModels[value].name,
Expand All @@ -79,13 +82,18 @@ export const models = router({
}
})
.filter(Boolean);
if (OPENAI_API_TYPE === 'azure') {
return models.filter(
(modelId) =>
modelId.azureDeploymentId === process.env.AZURE_DEPLOYMENT_ID,

if (OPENAI_API_TYPE === 'azure' && AZURE_DEPLOYMENT_ID) {
// Attempt to only show 1 specific model for the user to select when using Azure. If AZURE_DEPLOYMENT_ID has no value then show all models.
const filteredModels = models.filter(
(model) => model.id === AZURE_DEPLOYMENT_ID,
);
}
if (filteredModels.length > 0) {
// Only provide 1 model

models = filteredModels;
}
}
return models;
}),
});
23 changes: 13 additions & 10 deletions types/openai.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import { AZURE_DEPLOYMENT_ID, OPENAI_API_TYPE } from '@/utils/app/const';

import * as z from 'zod';

export enum OpenAIModelID {
Expand All @@ -13,7 +15,7 @@ export enum OpenAIModelID {
export enum OpenAIModelType {
CHAT = 'chat',
COMPLETION = 'completion',
EMDEDDING = 'embedding'
EMDEDDING = 'embedding',
}

export const OpenAIModelSchema = z.object({
Expand All @@ -22,61 +24,62 @@ export const OpenAIModelSchema = z.object({
name: z.string(),
maxLength: z.number(), // max length of a message.
tokenLimit: z.number(),
type: z.nativeEnum(OpenAIModelType).default(OpenAIModelType.CHAT)
type: z.nativeEnum(OpenAIModelType).default(OpenAIModelType.CHAT),
});
export type OpenAIModel = z.infer<typeof OpenAIModelSchema>;

// in case the `DEFAULT_MODEL` environment variable is not set or set to an unsupported model
export const fallbackModelID = OpenAIModelID.GPT_3_5;
export const fallbackModelID =
OPENAI_API_TYPE === 'azure' ? AZURE_DEPLOYMENT_ID : OpenAIModelID.GPT_3_5;

export const OpenAIModels: Record<OpenAIModelID, OpenAIModel> = {
[OpenAIModelID.GPT_3_5]: {
id: OpenAIModelID.GPT_3_5,
name: 'GPT-3.5',
maxLength: 12000,
tokenLimit: 4000,
type: OpenAIModelType.CHAT
type: OpenAIModelType.CHAT,
},
[OpenAIModelID.GPT_3_5_16K]: {
id: OpenAIModelID.GPT_3_5_16K,
name: 'GPT-3.5-16K',
maxLength: 48000,
tokenLimit: 16000,
type: OpenAIModelType.CHAT
type: OpenAIModelType.CHAT,
},
[OpenAIModelID.GPT_3_5_AZ]: {
id: OpenAIModelID.GPT_3_5_AZ,
name: 'GPT-3.5',
maxLength: 12000,
tokenLimit: 4000,
type: OpenAIModelType.CHAT
type: OpenAIModelType.CHAT,
},
[OpenAIModelID.GPT_3_5_16K_AZ]: {
id: OpenAIModelID.GPT_3_5_16K_AZ,
name: 'GPT-3.5-16K',
maxLength: 48000,
tokenLimit: 16000,
type: OpenAIModelType.CHAT
type: OpenAIModelType.CHAT,
},
[OpenAIModelID.GPT_4]: {
id: OpenAIModelID.GPT_4,
name: 'GPT-4',
maxLength: 24000,
tokenLimit: 8000,
type: OpenAIModelType.CHAT
type: OpenAIModelType.CHAT,
},
[OpenAIModelID.GPT_4_32K]: {
id: OpenAIModelID.GPT_4_32K,
name: 'GPT-4-32K',
maxLength: 96000,
tokenLimit: 32000,
type: OpenAIModelType.CHAT
type: OpenAIModelType.CHAT,
},
[OpenAIModelID.TEXT_EMBEDDING_ADA_002]: {
id: OpenAIModelID.TEXT_EMBEDDING_ADA_002,
name: 'TEXT-EMBEDDING-ADA-002',
maxLength: 24000,
tokenLimit: 8000,
type: OpenAIModelType.EMDEDDING
type: OpenAIModelType.EMDEDDING,
},
};
2 changes: 2 additions & 0 deletions utils/app/const.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ export const SUPPORT_EMAIL = process.env.SUPPORT_EMAIL || '';
export const PROMPT_SHARING_ENABLED: boolean =
process.env.PROMPT_SHARING_ENABLED === 'true' || false;

export const AZURE_MODELS_PATH = process.env.AZURE_MODELS_PATH || 'deployments';

export const DEFAULT_USER_LIMIT_USD_MONTHLY: number =
process.env.DEFAULT_USER_LIMIT_USD_MONTHLY != undefined
? Number.parseFloat(process.env.DEFAULT_USER_LIMIT_USD_MONTHLY)
Expand Down
4 changes: 3 additions & 1 deletion utils/server/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,9 @@ export const OpenAIStream = async (
) => {
let url = `${OPENAI_API_HOST}/v1/chat/completions`;
if (OPENAI_API_TYPE === 'azure') {
url = `${OPENAI_API_HOST}/openai/deployments/${AZURE_DEPLOYMENT_ID}/chat/completions?api-version=${OPENAI_API_VERSION}`;
url = `${OPENAI_API_HOST}/openai/deployments/${
AZURE_DEPLOYMENT_ID || model.id
}/chat/completions?api-version=${OPENAI_API_VERSION}`;
}
const res = await fetch(url, {
headers: {
Expand Down
3 changes: 2 additions & 1 deletion utils/server/openai.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import {
AZURE_DEPLOYMENT_ID_EMBEDDINGS,
AZURE_MODELS_PATH,
OPENAI_API_HOST,
OPENAI_API_TYPE,
OPENAI_API_VERSION,
Expand All @@ -15,7 +16,7 @@ export const getOpenAIApi = (deploymentId?: string): OpenAI => {
openaiConfig = {
apiKey,
baseURL: new URL(
OPENAI_API_HOST + '/openai/models/' + deploymentId,
OPENAI_API_HOST + '/openai/deployments/' + deploymentId,
).toString(),
defaultQuery: { 'api-version': OPENAI_API_VERSION },
defaultHeaders: { 'api-key': apiKey },
Expand Down

0 comments on commit 9f3f494

Please sign in to comment.