Skip to content

Commit 9bad4b1

Browse files
authored
Merge pull request miurla#9 from lizhe2004/main
support openai base url configuration
2 parents 5476e80 + 366b08a commit 9bad4b1

File tree

6 files changed

+33
-6
lines changed

6 files changed

+33
-6
lines changed

.env.local.example

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
1+
# Used to set the base URL path for OpenAI API requests. The default value is https://api.openai.com/v1
2+
OPENAI_API_BASE=
13
# OpenAI API key retrieved here: https://platform.openai.com/api-keys
24
OPENAI_API_KEY=
35

46
# Tavily API Key retrieved here: https://app.tavily.com/home
5-
TAVILY_API_KEY=
7+
TAVILY_API_KEY=
8+

README.md

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,8 @@ cp .env.local.example .env.local
4646
Your .env.local file should look like this:
4747

4848
```
49+
# Used to set the base URL path for OpenAI API requests. The default value is https://api.openai.com/v1
50+
OPENAI_API_BASE=
4951
# OpenAI API key retrieved here: https://platform.openai.com/api-keys
5052
OPENAI_API_KEY=[YOUR_OPENAI_API_KEY]
5153
@@ -65,4 +67,4 @@ You can now visit http://localhost:3000.
6567

6668
Host your own live version of Morphic with Vercel.
6769

68-
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fmiurla%2Fmorphic&env=OPENAI_API_KEY,TAVILY_API_KEY)
70+
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fmiurla%2Fmorphic&env=OPENAI_API_BASE,OPENAI_API_KEY,TAVILY_API_KEY)

lib/agents/inquire.tsx

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { openai } from 'ai/openai'
1+
import { OpenAI } from 'ai/openai'
22
import { Copilot } from '@/components/copilot'
33
import { createStreamableUI, createStreamableValue } from 'ai/rsc'
44
import { ExperimentalMessage, experimental_streamObject } from 'ai'
@@ -8,6 +8,11 @@ export async function inquire(
88
uiStream: ReturnType<typeof createStreamableUI>,
99
messages: ExperimentalMessage[]
1010
) {
11+
const openai = new OpenAI({
12+
baseUrl: process.env.OPENAI_API_BASE, // optional base URL for proxies etc.
13+
apiKey: process.env.OPENAI_API_KEY, // optional API key, default to env property OPENAI_API_KEY
14+
organization: '' // optional organization
15+
})
1116
const objectStream = createStreamableValue<PartialInquiry>()
1217
uiStream.update(<Copilot inquiry={objectStream.value} />)
1318

lib/agents/query-suggestor.tsx

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,12 +3,17 @@ import { ExperimentalMessage, experimental_streamObject } from 'ai'
33
import { PartialRelated, relatedSchema } from '@/lib/schema/related'
44
import { Section } from '@/components/section'
55
import SearchRelated from '@/components/search-related'
6-
import { openai } from 'ai/openai'
6+
import { OpenAI } from 'ai/openai'
77

88
export async function querySuggestor(
99
uiStream: ReturnType<typeof createStreamableUI>,
1010
messages: ExperimentalMessage[]
1111
) {
12+
const openai = new OpenAI({
13+
baseUrl: process.env.OPENAI_API_BASE, // optional base URL for proxies etc.
14+
apiKey: process.env.OPENAI_API_KEY, // optional API key, default to env property OPENAI_API_KEY
15+
organization: '' // optional organization
16+
})
1217
const objectStream = createStreamableValue<PartialRelated>()
1318
uiStream.append(
1419
<Section title="Related" separator={true}>

lib/agents/researcher.tsx

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ import {
77
} from 'ai'
88
import { searchSchema } from '@/lib/schema/search'
99
import { Section } from '@/components/section'
10-
import { openai } from 'ai/openai'
10+
import { OpenAI } from 'ai/openai'
1111
import { ToolBadge } from '@/components/tool-badge'
1212
import { SearchSkeleton } from '@/components/search-skeleton'
1313
import { SearchResults } from '@/components/search-results'
@@ -20,6 +20,12 @@ export async function researcher(
2020
streamText: ReturnType<typeof createStreamableValue<string>>,
2121
messages: ExperimentalMessage[]
2222
) {
23+
const openai = new OpenAI({
24+
baseUrl: process.env.OPENAI_API_BASE, // optional base URL for proxies etc.
25+
apiKey: process.env.OPENAI_API_KEY, // optional API key, default to env property OPENAI_API_KEY
26+
organization: '' // optional organization
27+
})
28+
2329
const searchAPI: 'tavily' | 'exa' = 'tavily'
2430

2531
let fullResponse = ''

lib/agents/task-manager.tsx

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,15 @@
11
import { ExperimentalMessage, experimental_generateObject } from 'ai'
2-
import { openai } from 'ai/openai'
2+
import { OpenAI } from 'ai/openai'
33
import { nextActionSchema } from '../schema/next-action'
44

55
// Decide whether inquiry is required for the user input
66
export async function taskManager(messages: ExperimentalMessage[]) {
7+
const openai = new OpenAI({
8+
baseUrl: process.env.OPENAI_API_BASE, // optional base URL for proxies etc.
9+
apiKey: process.env.OPENAI_API_KEY, // optional API key, default to env property OPENAI_API_KEY
10+
organization: '' // optional organization
11+
})
12+
713
const result = await experimental_generateObject({
814
model: openai.chat('gpt-3.5-turbo'),
915
system: `As a professional web researcher, your primary objective is to fully comprehend the user's query, conduct thorough web searches to gather the necessary information, and provide an appropriate response.

0 commit comments

Comments
 (0)