Skip to content

Commit 156080e

Browse files
authored
Merge pull request #26 from mckaywrigley/main
Fork Sync: Update from parent repository
2 parents 91aefaf + 6ef83b0 commit 156080e

25 files changed

+244
-57
lines changed

.env.local.example

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1,2 @@
1-
OPENAI_API_KEY=YOUR_KEY
1+
OPENAI_API_KEY=YOUR_KEY
2+
DEFAULT_MODEL=gpt-3.5-turbo

README.md

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,10 +59,15 @@ Fork Chatbot UI on Replit [here](https://replit.com/@MckayWrigley/chatbot-ui-pro
5959

6060
**Docker**
6161

62+
Build locally:
6263
```shell
6364
docker build -t chatgpt-ui .
6465
docker run -e OPENAI_API_KEY=xxxxxxxx -p 3000:3000 chatgpt-ui
6566
```
67+
Pull the prebuilt image from the GitHub Container Registry (ghcr.io):
68+
```
69+
docker run -e OPENAI_API_KEY=xxxxxxxx -p 3000:3000 ghcr.io/mckaywrigley/chatbot-ui:main
70+
```
6671

6772
## Running Locally
6873

@@ -86,6 +91,8 @@ Create a .env.local file in the root of the repo with your OpenAI API Key:
8691
OPENAI_API_KEY=YOUR_KEY
8792
```
8893

94+
> You can set `OPENAI_API_HOST` when access to the official OpenAI host is restricted or unavailable, allowing you to configure an alternative API host.
95+
8996
**4. Run App**
9097

9198
```bash
@@ -96,6 +103,18 @@ npm run dev
96103

97104
You should be able to start chatting.
98105

106+
## Configuration
107+
108+
When deploying the application, the following environment variables can be set:
109+
110+
| Environment Variable | Default value | Description |
111+
|----------------------|------------------|---------------------------------------------------------|
112+
| OPENAI_API_KEY | | The default API key used for authentication with OpenAI |
113+
| DEFAULT_MODEL | `gpt-3.5-turbo` | The default model to use on new conversations |
114+
115+
If you do not set `OPENAI_API_KEY` to an OpenAI API key, users will have to provide their own key.
116+
If you don't have an OpenAI API key, you can get one [here](https://platform.openai.com/account/api-keys).
117+
99118
## Contact
100119

101120
If you have any questions, feel free to reach out to me on [Twitter](https://twitter.com/mckaywrigley).

components/Chat/Chat.tsx

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import { Conversation, Message } from '@/types/chat';
22
import { IconArrowDown } from '@tabler/icons-react';
33
import { KeyValuePair } from '@/types/data';
44
import { ErrorMessage } from '@/types/error';
5-
import { OpenAIModel } from '@/types/openai';
5+
import { OpenAIModel, OpenAIModelID } from '@/types/openai';
66
import { Prompt } from '@/types/prompt';
77
import { throttle } from '@/utils';
88
import { IconClearAll, IconKey, IconSettings } from '@tabler/icons-react';
@@ -29,6 +29,7 @@ interface Props {
2929
models: OpenAIModel[];
3030
apiKey: string;
3131
serverSideApiKeyIsSet: boolean;
32+
defaultModelId: OpenAIModelID;
3233
messageIsStreaming: boolean;
3334
modelError: ErrorMessage | null;
3435
loading: boolean;
@@ -48,6 +49,7 @@ export const Chat: FC<Props> = memo(
4849
models,
4950
apiKey,
5051
serverSideApiKeyIsSet,
52+
defaultModelId,
5153
messageIsStreaming,
5254
modelError,
5355
loading,
@@ -149,7 +151,7 @@ export const Chat: FC<Props> = memo(
149151
}, [messagesEndRef]);
150152

151153
return (
152-
<div className="overflow-none relative flex-1 bg-white dark:bg-[#343541]">
154+
<div className="overflow-hidden relative flex-1 bg-white dark:bg-[#343541]">
153155
{!(apiKey || serverSideApiKeyIsSet) ? (
154156
<div className="mx-auto flex h-full w-[300px] flex-col justify-center space-y-6 sm:w-[500px]">
155157
<div className="mx-auto mb-5 text-gray-800 dark:text-gray-100">
@@ -206,6 +208,7 @@ export const Chat: FC<Props> = memo(
206208
<ModelSelect
207209
model={conversation.model}
208210
models={models}
211+
defaultModelId={defaultModelId}
209212
onModelChange={(model) =>
210213
onUpdateConversation(conversation, {
211214
key: 'model',
@@ -236,12 +239,13 @@ export const Chat: FC<Props> = memo(
236239
className="ml-2 cursor-pointer hover:opacity-50"
237240
onClick={handleSettings}
238241
>
239-
<IconSettings size={18} />
242+
<IconSettings size={18} />
240243
</button>
241244
<button
242245
className="ml-2 cursor-pointer hover:opacity-50"
243-
onClick={onClearAll}>
244-
<IconClearAll size={18} />
246+
onClick={onClearAll}
247+
>
248+
<IconClearAll size={18} />
245249
</button>
246250
</div>
247251
{showSettings && (
@@ -250,6 +254,7 @@ export const Chat: FC<Props> = memo(
250254
<ModelSelect
251255
model={conversation.model}
252256
models={models}
257+
defaultModelId={defaultModelId}
253258
onModelChange={(model) =>
254259
onUpdateConversation(conversation, {
255260
key: 'model',
@@ -306,7 +311,7 @@ export const Chat: FC<Props> = memo(
306311
className="flex h-7 w-7 items-center justify-center rounded-full bg-white shadow-md hover:shadow-lg focus:outline-none focus:ring-2 focus:ring-blue-500 dark:bg-[#515152d7]"
307312
onClick={handleScrollDown}
308313
>
309-
<IconArrowDown className="h-4 w-4" />
314+
<IconArrowDown size={18} />
310315
</button>
311316
</div>
312317
)}

components/Chat/ChatInput.tsx

Lines changed: 14 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import { Message } from '@/types/chat';
2-
import { OpenAIModel, OpenAIModelID } from '@/types/openai';
2+
import { OpenAIModel } from '@/types/openai';
33
import { Prompt } from '@/types/prompt';
44
import { IconPlayerStop, IconRepeat, IconSend } from '@tabler/icons-react';
55
import { useTranslation } from 'next-i18next';
@@ -56,7 +56,7 @@ export const ChatInput: FC<Props> = ({
5656

5757
const handleChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
5858
const value = e.target.value;
59-
const maxLength = model.id === OpenAIModelID.GPT_3_5 ? 12000 : 24000;
59+
const maxLength = model.maxLength;
6060

6161
if (value.length > maxLength) {
6262
alert(
@@ -107,11 +107,16 @@ export const ChatInput: FC<Props> = ({
107107

108108
const handleInitModal = () => {
109109
const selectedPrompt = filteredPrompts[activePromptIndex];
110-
setContent((prevContent) => {
111-
const newContent = prevContent?.replace(/\/\w*$/, selectedPrompt.content);
112-
return newContent;
113-
});
114-
handlePromptSelect(selectedPrompt);
110+
if (selectedPrompt) {
111+
setContent((prevContent) => {
112+
const newContent = prevContent?.replace(
113+
/\/\w*$/,
114+
selectedPrompt.content,
115+
);
116+
return newContent;
117+
});
118+
handlePromptSelect(selectedPrompt);
119+
}
115120
setShowPromptList(false);
116121
};
117122

@@ -256,7 +261,7 @@ export const ChatInput: FC<Props> = ({
256261
<div className="relative mx-2 flex w-full flex-grow flex-col rounded-md border border-black/10 bg-white shadow-[0_0_10px_rgba(0,0,0,0.10)] dark:border-gray-900/50 dark:bg-[#40414F] dark:text-white dark:shadow-[0_0_15px_rgba(0,0,0,0.10)] sm:mx-4">
257262
<textarea
258263
ref={textareaRef}
259-
className="m-0 w-full resize-none border-0 bg-transparent p-0 pr-8 pl-2 text-black dark:bg-transparent dark:text-white py-2 md:py-3 md:pl-4"
264+
className="m-0 w-full resize-none border-0 bg-transparent p-0 py-2 pr-8 pl-2 text-black dark:bg-transparent dark:text-white md:py-3 md:pl-4"
260265
style={{
261266
resize: 'none',
262267
bottom: `${textareaRef?.current?.scrollHeight}px`,
@@ -278,7 +283,7 @@ export const ChatInput: FC<Props> = ({
278283
onKeyDown={handleKeyDown}
279284
/>
280285
<button
281-
className="absolute right-2 top-2 rounded-sm p-1 text-neutral-800 hover:bg-neutral-200 hover:text-neutral-900 dark:bg-opacity-50 dark:text-neutral-100 dark:hover:text-neutral-200 opacity-60"
286+
className="absolute right-2 top-2 rounded-sm p-1 text-neutral-800 opacity-60 hover:bg-neutral-200 hover:text-neutral-900 dark:bg-opacity-50 dark:text-neutral-100 dark:hover:text-neutral-200"
282287
onClick={handleSend}
283288
>
284289
<IconSend size={18} />

components/Chat/ModelSelect.tsx

Lines changed: 13 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,22 @@
1-
import { OpenAIModel } from '@/types/openai';
1+
import { OpenAIModel, OpenAIModelID } from '@/types/openai';
22
import { useTranslation } from 'next-i18next';
33
import { FC } from 'react';
44

55
interface Props {
66
model: OpenAIModel;
77
models: OpenAIModel[];
8+
defaultModelId: OpenAIModelID;
89
onModelChange: (model: OpenAIModel) => void;
910
}
1011

11-
export const ModelSelect: FC<Props> = ({ model, models, onModelChange }) => {
12+
export const ModelSelect: FC<Props> = ({
13+
model,
14+
models,
15+
defaultModelId,
16+
onModelChange,
17+
}) => {
1218
const { t } = useTranslation('chat');
19+
1320
return (
1421
<div className="flex flex-col">
1522
<label className="mb-2 text-left text-neutral-700 dark:text-neutral-400">
@@ -19,7 +26,7 @@ export const ModelSelect: FC<Props> = ({ model, models, onModelChange }) => {
1926
<select
2027
className="w-full bg-transparent p-2"
2128
placeholder={t('Select a model') || ''}
22-
value={model.id}
29+
value={model?.id || defaultModelId}
2330
onChange={(e) => {
2431
onModelChange(
2532
models.find(
@@ -34,7 +41,9 @@ export const ModelSelect: FC<Props> = ({ model, models, onModelChange }) => {
3441
value={model.id}
3542
className="dark:bg-[#343541] dark:text-white"
3643
>
37-
{model.name}
44+
{model.id === defaultModelId
45+
? `Default (${model.name})`
46+
: model.name}
3847
</option>
3948
))}
4049
</select>

components/Chat/SystemPrompt.tsx

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -43,8 +43,7 @@ export const SystemPrompt: FC<Props> = ({
4343

4444
const handleChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
4545
const value = e.target.value;
46-
const maxLength =
47-
conversation.model.id === OpenAIModelID.GPT_3_5 ? 12000 : 24000;
46+
const maxLength = conversation.model.maxLength;
4847

4948
if (value.length > maxLength) {
5049
alert(

components/Chatbar/Conversation.tsx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ export const ConversationComponent: FC<Props> = ({
6868
return (
6969
<div className="relative flex items-center">
7070
{isRenaming && selectedConversation.id === conversation.id ? (
71-
<div className="flex w-full items-center gap-3 bg-[#343541]/90 p-3">
71+
<div className="flex w-full items-center gap-3 bg-[#343541]/90 p-3 rounded-lg">
7272
<IconMessage size={18} />
7373
<input
7474
className="mr-12 flex-1 overflow-hidden overflow-ellipsis border-neutral-400 bg-transparent text-left text-[12.5px] leading-3 text-white outline-none focus:border-neutral-100"

components/Folders/Chat/ChatFolder.tsx

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,7 @@ export const ChatFolder: FC<Props> = ({
103103
<>
104104
<div className="relative flex items-center">
105105
{isRenaming ? (
106-
<div className="flex w-full items-center gap-3 bg-[#343541]/90 p-3">
106+
<div className="flex w-full items-center gap-3 bg-[#343541]/90 p-3 rounded-lg">
107107
{isOpen ? (
108108
<IconCaretDown size={18} />
109109
) : (
@@ -199,21 +199,21 @@ export const ChatFolder: FC<Props> = ({
199199

200200
{isOpen
201201
? conversations.map((conversation, index) => {
202-
if (conversation.folderId === currentFolder.id) {
203-
return (
204-
<div key={index} className="ml-5 gap-2 border-l pl-2">
205-
<ConversationComponent
206-
selectedConversation={selectedConversation}
207-
conversation={conversation}
208-
loading={loading}
209-
onSelectConversation={onSelectConversation}
210-
onDeleteConversation={onDeleteConversation}
211-
onUpdateConversation={onUpdateConversation}
212-
/>
213-
</div>
214-
);
215-
}
216-
})
202+
if (conversation.folderId === currentFolder.id) {
203+
return (
204+
<div key={index} className="ml-5 gap-2 border-l pl-2">
205+
<ConversationComponent
206+
selectedConversation={selectedConversation}
207+
conversation={conversation}
208+
loading={loading}
209+
onSelectConversation={onSelectConversation}
210+
onDeleteConversation={onDeleteConversation}
211+
onUpdateConversation={onUpdateConversation}
212+
/>
213+
</div>
214+
);
215+
}
216+
})
217217
: null}
218218
</>
219219
);

components/Promptbar/PromptModal.tsx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,7 @@ export const PromptModal: FC<Props> = ({ prompt, onClose, onUpdatePrompt }) => {
6565
<input
6666
ref={nameInputRef}
6767
className="mt-2 w-full rounded-lg border border-neutral-500 px-4 py-2 text-neutral-900 shadow focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-[#40414F] dark:text-neutral-100"
68-
placeholder="A name for your prompt."
68+
placeholder={t('A name for your prompt.') || ''}
6969
value={name}
7070
onChange={(e) => setName(e.target.value)}
7171
/>

next-i18next.config.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ module.exports = {
1717
"te",
1818
"vi",
1919
"zh",
20+
"ar",
2021
],
2122
},
2223
localePath:

pages/api/chat.ts

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
import { ChatBody, Message } from '@/types/chat';
2-
import { OpenAIModelID } from '@/types/openai';
32
import { DEFAULT_SYSTEM_PROMPT } from '@/utils/app/const';
43
import { OpenAIStream } from '@/utils/server';
54
import tiktokenModel from '@dqbd/tiktoken/encoders/cl100k_base.json';
@@ -22,8 +21,6 @@ const handler = async (req: Request): Promise<Response> => {
2221
tiktokenModel.pat_str,
2322
);
2423

25-
const tokenLimit = model.id === OpenAIModelID.GPT_4 ? 6000 : 3000;
26-
2724
let promptToSend = prompt;
2825
if (!promptToSend) {
2926
promptToSend = DEFAULT_SYSTEM_PROMPT;
@@ -38,7 +35,7 @@ const handler = async (req: Request): Promise<Response> => {
3835
const message = messages[i];
3936
const tokens = encoding.encode(message.content);
4037

41-
if (tokenCount + tokens.length > tokenLimit) {
38+
if (tokenCount + tokens.length > model.tokenLimit) {
4239
break;
4340
}
4441
tokenCount += tokens.length;

0 commit comments

Comments
 (0)