Skip to content

feat: include HttpsProxyAgent on all OpenAI instances #839

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 9 additions & 2 deletions codex-cli/src/components/chat/terminal-chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,12 @@ import { useTerminalSize } from "../../hooks/use-terminal-size.js";
import { AgentLoop } from "../../utils/agent/agent-loop.js";
import { ReviewDecision } from "../../utils/agent/review.js";
import { generateCompactSummary } from "../../utils/compact-summary.js";
import { getBaseUrl, getApiKey, saveConfig } from "../../utils/config.js";
import {
getBaseUrl,
getApiKey,
saveConfig,
PROXY_URL,
} from "../../utils/config.js";
import { extractAppliedPatches as _extractAppliedPatches } from "../../utils/extract-applied-patches.js";
import { getGitDiff } from "../../utils/get-diff.js";
import { createInputItem } from "../../utils/input-utils.js";
Expand All @@ -32,6 +37,7 @@ import HelpOverlay from "../help-overlay.js";
import HistoryOverlay from "../history-overlay.js";
import ModelOverlay from "../model-overlay.js";
import chalk from "chalk";
import { HttpsProxyAgent } from "https-proxy-agent";
import { Box, Text } from "ink";
import { spawn } from "node:child_process";
import OpenAI from "openai";
Expand All @@ -56,7 +62,7 @@ type Props = {
};

const colorsByPolicy: Record<ApprovalPolicy, ColorName | undefined> = {
"suggest": undefined,
suggest: undefined,
"auto-edit": "greenBright",
"full-auto": "green",
};
Expand All @@ -81,6 +87,7 @@ async function generateCommandExplanation(
const oai = new OpenAI({
apiKey: getApiKey(config.provider),
baseURL: getBaseUrl(config.provider),
httpAgent: PROXY_URL ? new HttpsProxyAgent(PROXY_URL) : undefined,
});

// Format the command for display
Expand Down
3 changes: 3 additions & 0 deletions codex-cli/src/components/singlepass-cli-app.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import {
OPENAI_PROJECT,
getBaseUrl,
getApiKey,
PROXY_URL,
} from "../utils/config";
import {
generateDiffSummary,
Expand All @@ -25,6 +26,7 @@ import {
import { EditedFilesSchema } from "../utils/singlepass/file_ops";
import * as fsSync from "fs";
import * as fsPromises from "fs/promises";
import { HttpsProxyAgent } from "https-proxy-agent";
import { Box, Text, useApp, useInput } from "ink";
import OpenAI from "openai";
import { zodResponseFormat } from "openai/helpers/zod";
Expand Down Expand Up @@ -412,6 +414,7 @@ export function SinglePassApp({
baseURL: getBaseUrl(config.provider),
timeout: OPENAI_TIMEOUT_MS,
defaultHeaders: headers,
httpAgent: PROXY_URL ? new HttpsProxyAgent(PROXY_URL) : undefined,
});
const chatResp = await openai.beta.chat.completions.parse({
model: config.model,
Expand Down
4 changes: 1 addition & 3 deletions codex-cli/src/utils/agent/agent-loop.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ import {
OPENAI_PROJECT,
getApiKey,
getBaseUrl,
PROXY_URL,
} from "../config.js";
import { log } from "../logger/log.js";
import { parseToolCallArguments } from "../parsers.js";
Expand All @@ -39,9 +40,6 @@ const RATE_LIMIT_RETRY_WAIT_MS = parseInt(
10,
);

// See https://github.com/openai/openai-node/tree/v4?tab=readme-ov-file#configuring-an-https-agent-eg-for-proxies
const PROXY_URL = process.env["HTTPS_PROXY"];

export type CommandConfirmation = {
review: ReviewDecision;
applyPatch?: ApplyPatchCommand | undefined;
Expand Down
4 changes: 3 additions & 1 deletion codex-cli/src/utils/compact-summary.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import type { AppConfig } from "./config.js";
import type { ResponseItem } from "openai/resources/responses/responses.mjs";

import { getBaseUrl, getApiKey } from "./config.js";
import { getBaseUrl, getApiKey, PROXY_URL } from "./config.js";
import { HttpsProxyAgent } from "https-proxy-agent";
import OpenAI from "openai";
/**
* Generate a condensed summary of the conversation items.
Expand All @@ -26,6 +27,7 @@ export async function generateCompactSummary(
const oai = new OpenAI({
apiKey: getApiKey(config.provider),
baseURL: getBaseUrl(config.provider),
httpAgent: PROXY_URL ? new HttpsProxyAgent(PROXY_URL) : undefined,
});

const conversationText = items
Expand Down
8 changes: 8 additions & 0 deletions codex-cli/src/utils/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,14 @@ export const CONFIG_YML_FILEPATH = join(CONFIG_DIR, "config.yml");
// work unchanged.
export const CONFIG_FILEPATH = CONFIG_JSON_FILEPATH;
export const INSTRUCTIONS_FILEPATH = join(CONFIG_DIR, "instructions.md");
// Proxy URL resolved from the environment, used to route OpenAI requests
// through an HTTPS proxy. Lowercase variable names are consulted before
// their uppercase counterparts; the first non-empty value wins. Unset or
// empty variables yield `undefined` (no proxy).
export const PROXY_URL = [
  "https_proxy",
  "http_proxy",
  "proxy",
  "HTTPS_PROXY",
  "HTTP_PROXY",
  "PROXY",
]
  .map((name) => process.env[name])
  .find((value) => Boolean(value));

export const OPENAI_TIMEOUT_MS =
parseInt(process.env["OPENAI_TIMEOUT_MS"] || "0", 10) || undefined;
Expand Down
4 changes: 2 additions & 2 deletions codex-cli/src/utils/model-info.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ export const openAiModelInfo = {
label: "o1 Pro (2025-03-19)",
maxContextLength: 200000,
},
"o3": {
o3: {
label: "o3",
maxContextLength: 200000,
},
Expand Down Expand Up @@ -135,7 +135,7 @@ export const openAiModelInfo = {
label: "GPT-4 (0613)",
maxContextLength: 8192,
},
"o1": {
o1: {
label: "o1",
maxContextLength: 128000,
},
Expand Down
3 changes: 3 additions & 0 deletions codex-cli/src/utils/model-utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,10 @@ import {
OPENAI_PROJECT,
getBaseUrl,
getApiKey,
PROXY_URL,
} from "./config";
import { type SupportedModelId, openAiModelInfo } from "./model-info.js";
import { HttpsProxyAgent } from "https-proxy-agent";
import OpenAI from "openai";

const MODEL_LIST_TIMEOUT_MS = 2_000; // 2 seconds
Expand Down Expand Up @@ -39,6 +41,7 @@ async function fetchModels(provider: string): Promise<Array<string>> {
apiKey: getApiKey(provider),
baseURL: getBaseUrl(provider),
defaultHeaders: headers,
httpAgent: PROXY_URL ? new HttpsProxyAgent(PROXY_URL) : undefined,
});
const list = await openai.models.list();
const models: Array<string> = [];
Expand Down
2 changes: 1 addition & 1 deletion codex-cli/src/utils/responses.ts
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,7 @@ function convertTools(
function: {
name: tool.name,
description: tool.description || undefined,
parameters: tool.parameters,
parameters: tool.parameters ?? undefined,
},
}));
}
Expand Down
Loading