diff --git a/.github/workflows/codex.yml b/.github/workflows/codex.yml
index cb31275534..367c53e947 100644
--- a/.github/workflows/codex.yml
+++ b/.github/workflows/codex.yml
@@ -39,7 +39,7 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.88
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           targets: x86_64-unknown-linux-gnu
           components: clippy
diff --git a/.github/workflows/rust-ci.yml b/.github/workflows/rust-ci.yml
index fca1a02288..fa9f1cd15f 100644
--- a/.github/workflows/rust-ci.yml
+++ b/.github/workflows/rust-ci.yml
@@ -57,7 +57,7 @@ jobs:
         working-directory: codex-rs
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.88
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          components: rustfmt
      - name: cargo fmt
@@ -112,7 +112,7 @@ jobs:
     steps:
      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.88
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          targets: ${{ matrix.target }}
          components: clippy
diff --git a/.github/workflows/rust-release.yml b/.github/workflows/rust-release.yml
index c00a7a5e03..0044b864c7 100644
--- a/.github/workflows/rust-release.yml
+++ b/.github/workflows/rust-release.yml
@@ -75,7 +75,7 @@ jobs:
     steps:
      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.88
+      - uses: dtolnay/rust-toolchain@1.89
        with:
          targets: ${{ matrix.target }}
diff --git a/codex-rs/apply-patch/src/lib.rs b/codex-rs/apply-patch/src/lib.rs
index 87df66dd23..1aae106682 100644
--- a/codex-rs/apply-patch/src/lib.rs
+++ b/codex-rs/apply-patch/src/lib.rs
@@ -415,12 +415,12 @@ fn apply_hunks_to_files(hunks: &[Hunk]) -> anyhow::Result {
     for hunk in hunks {
         match hunk {
             Hunk::AddFile { path, contents } => {
-                if let Some(parent) = path.parent() {
-                    if !parent.as_os_str().is_empty() {
-                        std::fs::create_dir_all(parent).with_context(|| {
-                            format!("Failed to create parent directories for {}", path.display())
-                        })?;
-                    }
+                if let Some(parent) = path.parent()
+                    && !parent.as_os_str().is_empty()
+                {
+                    std::fs::create_dir_all(parent).with_context(|| {
+                        format!("Failed to create parent directories for {}", path.display())
+                    })?;
                 }
                 std::fs::write(path, contents)
                     .with_context(|| format!("Failed to write file {}", path.display()))?;
@@ -439,15 +439,12 @@ fn apply_hunks_to_files(hunks: &[Hunk]) -> anyhow::Result {
                 let AppliedPatch { new_contents, .. } =
                     derive_new_contents_from_chunks(path, chunks)?;
                 if let Some(dest) = move_path {
-                    if let Some(parent) = dest.parent() {
-                        if !parent.as_os_str().is_empty() {
-                            std::fs::create_dir_all(parent).with_context(|| {
-                                format!(
-                                    "Failed to create parent directories for {}",
-                                    dest.display()
-                                )
-                            })?;
-                        }
+                    if let Some(parent) = dest.parent()
+                        && !parent.as_os_str().is_empty()
+                    {
+                        std::fs::create_dir_all(parent).with_context(|| {
+                            format!("Failed to create parent directories for {}", dest.display())
+                        })?;
                     }
                     std::fs::write(dest, new_contents)
                         .with_context(|| format!("Failed to write file {}", dest.display()))?;
@@ -529,9 +526,12 @@ fn compute_replacements(
         // If a chunk has a `change_context`, we use seek_sequence to find it, then
        // adjust our `line_index` to continue from there.
         if let Some(ctx_line) = &chunk.change_context {
-            if let Some(idx) =
-                seek_sequence::seek_sequence(original_lines, &[ctx_line.clone()], line_index, false)
-            {
+            if let Some(idx) = seek_sequence::seek_sequence(
+                original_lines,
+                std::slice::from_ref(ctx_line),
+                line_index,
+                false,
+            ) {
                 line_index = idx + 1;
             } else {
                 return Err(ApplyPatchError::ComputeReplacements(format!(
diff --git a/codex-rs/arg0/src/lib.rs b/codex-rs/arg0/src/lib.rs
index b7766fe70a..216a0437d1 100644
--- a/codex-rs/arg0/src/lib.rs
+++ b/codex-rs/arg0/src/lib.rs
@@ -89,10 +89,10 @@ const ILLEGAL_ENV_VAR_PREFIX: &str = "CODEX_";
 /// Security: Do not allow `.env` files to create or modify any variables
 /// with names starting with `CODEX_`.
 fn load_dotenv() {
-    if let Ok(codex_home) = codex_core::config::find_codex_home() {
-        if let Ok(iter) = dotenvy::from_path_iter(codex_home.join(".env")) {
-            set_filtered(iter);
-        }
+    if let Ok(codex_home) = codex_core::config::find_codex_home()
+        && let Ok(iter) = dotenvy::from_path_iter(codex_home.join(".env"))
+    {
+        set_filtered(iter);
     }
 
     if let Ok(iter) = dotenvy::dotenv_iter() {
diff --git a/codex-rs/cli/src/login.rs b/codex-rs/cli/src/login.rs
index 72eb7b4faf..959bf46f18 100644
--- a/codex-rs/cli/src/login.rs
+++ b/codex-rs/cli/src/login.rs
@@ -66,12 +66,12 @@ pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
         Ok(api_key) => {
             eprintln!("Logged in using an API key - {}", safe_format_key(&api_key));
-            if let Ok(env_api_key) = env::var(OPENAI_API_KEY_ENV_VAR) {
-                if env_api_key == api_key {
-                    eprintln!(
-                        "   API loaded from OPENAI_API_KEY environment variable or .env file"
-                    );
-                }
+            if let Ok(env_api_key) = env::var(OPENAI_API_KEY_ENV_VAR)
+                && env_api_key == api_key
+            {
+                eprintln!(
+                    "   API loaded from OPENAI_API_KEY environment variable or .env file"
+                );
             }
             std::process::exit(0);
         }
diff --git a/codex-rs/core/src/chat_completions.rs b/codex-rs/core/src/chat_completions.rs
index fc0ceca5ad..f3ed34c6dc 100644
--- a/codex-rs/core/src/chat_completions.rs
+++ b/codex-rs/core/src/chat_completions.rs
@@ -290,13 +290,12 @@ async fn process_chat_sse(
                 .get("delta")
                 .and_then(|d| d.get("content"))
                 .and_then(|c| c.as_str())
+                && !content.is_empty()
             {
-                if !content.is_empty() {
-                    assistant_text.push_str(content);
-                    let _ = tx_event
-                        .send(Ok(ResponseEvent::OutputTextDelta(content.to_string())))
-                        .await;
-                }
+                assistant_text.push_str(content);
+                let _ = tx_event
+                    .send(Ok(ResponseEvent::OutputTextDelta(content.to_string())))
+                    .await;
             }
 
             // Forward any reasoning/thinking deltas if present.
@@ -333,27 +332,25 @@ async fn process_chat_sse(
                 .get("delta")
                 .and_then(|d| d.get("tool_calls"))
                 .and_then(|tc| tc.as_array())
+                && let Some(tool_call) = tool_calls.first()
             {
-                if let Some(tool_call) = tool_calls.first() {
-                    // Mark that we have an active function call in progress.
-                    fn_call_state.active = true;
+                // Mark that we have an active function call in progress.
+                fn_call_state.active = true;
 
-                    // Extract call_id if present.
-                    if let Some(id) = tool_call.get("id").and_then(|v| v.as_str()) {
-                        fn_call_state.call_id.get_or_insert_with(|| id.to_string());
-                    }
+                // Extract call_id if present.
+                if let Some(id) = tool_call.get("id").and_then(|v| v.as_str()) {
+                    fn_call_state.call_id.get_or_insert_with(|| id.to_string());
+                }
 
-                    // Extract function details if present.
-                    if let Some(function) = tool_call.get("function") {
-                        if let Some(name) = function.get("name").and_then(|n| n.as_str()) {
-                            fn_call_state.name.get_or_insert_with(|| name.to_string());
-                        }
+                // Extract function details if present.
+                if let Some(function) = tool_call.get("function") {
+                    if let Some(name) = function.get("name").and_then(|n| n.as_str()) {
+                        fn_call_state.name.get_or_insert_with(|| name.to_string());
+                    }
 
-                        if let Some(args_fragment) =
-                            function.get("arguments").and_then(|a| a.as_str())
-                        {
-                            fn_call_state.arguments.push_str(args_fragment);
-                        }
+                    if let Some(args_fragment) = function.get("arguments").and_then(|a| a.as_str())
+                    {
+                        fn_call_state.arguments.push_str(args_fragment);
                     }
                 }
             }
@@ -491,15 +488,14 @@ where
                     // Only use the final assistant message if we have not
                     // seen any deltas; otherwise, deltas already built the
                     // cumulative text and this would duplicate it.
-                    if this.cumulative.is_empty() {
-                        if let crate::models::ResponseItem::Message { content, .. } = &item {
-                            if let Some(text) = content.iter().find_map(|c| match c {
-                                crate::models::ContentItem::OutputText { text } => Some(text),
-                                _ => None,
-                            }) {
-                                this.cumulative.push_str(text);
-                            }
-                        }
+                    if this.cumulative.is_empty()
+                        && let crate::models::ResponseItem::Message { content, .. } = &item
+                        && let Some(text) = content.iter().find_map(|c| match c {
+                            crate::models::ContentItem::OutputText { text } => Some(text),
+                            _ => None,
+                        })
+                    {
+                        this.cumulative.push_str(text);
                     }
 
                     // Swallow assistant message here; emit on Completed.
diff --git a/codex-rs/core/src/codex.rs b/codex-rs/core/src/codex.rs
index 6423fb44ef..57719f5269 100644
--- a/codex-rs/core/src/codex.rs
+++ b/codex-rs/core/src/codex.rs
@@ -544,10 +544,10 @@ impl Session {
     pub fn remove_task(&self, sub_id: &str) {
         let mut state = self.state.lock_unchecked();
-        if let Some(task) = &state.current_task {
-            if task.sub_id == sub_id {
-                state.current_task.take();
-            }
+        if let Some(task) = &state.current_task
+            && task.sub_id == sub_id
+        {
+            state.current_task.take();
         }
     }
 
@@ -1239,18 +1239,18 @@ async fn submission_loop(
                 // Gracefully flush and shutdown rollout recorder on session end so tests
                 // that inspect the rollout file do not race with the background writer.
                 let recorder_opt = sess.rollout.lock_unchecked().take();
-                if let Some(rec) = recorder_opt {
-                    if let Err(e) = rec.shutdown().await {
-                        warn!("failed to shutdown rollout recorder: {e}");
-                        let event = Event {
-                            id: sub.id.clone(),
-                            msg: EventMsg::Error(ErrorEvent {
-                                message: "Failed to shutdown rollout recorder".to_string(),
-                            }),
-                        };
-                        if let Err(e) = sess.tx_event.send(event).await {
-                            warn!("failed to send error message: {e:?}");
-                        }
+                if let Some(rec) = recorder_opt
+                    && let Err(e) = rec.shutdown().await
+                {
+                    warn!("failed to shutdown rollout recorder: {e}");
+                    let event = Event {
+                        id: sub.id.clone(),
+                        msg: EventMsg::Error(ErrorEvent {
+                            message: "Failed to shutdown rollout recorder".to_string(),
+                        }),
+                    };
+                    if let Err(e) = sess.tx_event.send(event).await {
+                        warn!("failed to send error message: {e:?}");
                     }
                 }
diff --git a/codex-rs/core/src/config.rs b/codex-rs/core/src/config.rs
index 760729d827..e0ff411f15 100644
--- a/codex-rs/core/src/config.rs
+++ b/codex-rs/core/src/config.rs
@@ -759,10 +759,10 @@ fn default_model() -> String {
 pub fn find_codex_home() -> std::io::Result<PathBuf> {
     // Honor the `CODEX_HOME` environment variable when it is set to allow users
    // (and tests) to override the default location.
-    if let Ok(val) = std::env::var("CODEX_HOME") {
-        if !val.is_empty() {
-            return PathBuf::from(val).canonicalize();
-        }
+    if let Ok(val) = std::env::var("CODEX_HOME")
+        && !val.is_empty()
+    {
+        return PathBuf::from(val).canonicalize();
     }
 
     let mut p = home_dir().ok_or_else(|| {
diff --git a/codex-rs/core/src/git_info.rs b/codex-rs/core/src/git_info.rs
index 09a59b7b8c..ccb43ae55a 100644
--- a/codex-rs/core/src/git_info.rs
+++ b/codex-rs/core/src/git_info.rs
@@ -51,33 +51,30 @@ pub async fn collect_git_info(cwd: &Path) -> Option<GitInfo> {
     };
 
     // Process commit hash
-    if let Some(output) = commit_result {
-        if output.status.success() {
-            if let Ok(hash) = String::from_utf8(output.stdout) {
-                git_info.commit_hash = Some(hash.trim().to_string());
-            }
-        }
+    if let Some(output) = commit_result
+        && output.status.success()
+        && let Ok(hash) = String::from_utf8(output.stdout)
+    {
+        git_info.commit_hash = Some(hash.trim().to_string());
     }
 
     // Process branch name
-    if let Some(output) = branch_result {
-        if output.status.success() {
-            if let Ok(branch) = String::from_utf8(output.stdout) {
-                let branch = branch.trim();
-                if branch != "HEAD" {
-                    git_info.branch = Some(branch.to_string());
-                }
-            }
-        }
+    if let Some(output) = branch_result
+        && output.status.success()
+        && let Ok(branch) = String::from_utf8(output.stdout)
+    {
+        let branch = branch.trim();
+        if branch != "HEAD" {
+            git_info.branch = Some(branch.to_string());
+        }
     }
 
     // Process repository URL
-    if let Some(output) = url_result {
-        if output.status.success() {
-            if let Ok(url) = String::from_utf8(output.stdout) {
-                git_info.repository_url = Some(url.trim().to_string());
-            }
-        }
+    if let Some(output) = url_result
+        && output.status.success()
+        && let Ok(url) = String::from_utf8(output.stdout)
+    {
+        git_info.repository_url = Some(url.trim().to_string());
     }
 
     Some(git_info)
diff --git a/codex-rs/core/src/is_safe_command.rs b/codex-rs/core/src/is_safe_command.rs
index 38dd9562f8..f54e247fd8 100644
--- a/codex-rs/core/src/is_safe_command.rs
+++ b/codex-rs/core/src/is_safe_command.rs
@@ -12,20 +12,17 @@ pub fn is_known_safe_command(command: &[String]) -> bool {
     // introduce side effects ( "&&", "||", ";", and "|" ). If every
     // individual command in the script is itself a known‑safe command, then
    // the composite expression is considered safe.
-    if let [bash, flag, script] = command {
-        if bash == "bash" && flag == "-lc" {
-            if let Some(tree) = try_parse_bash(script) {
-                if let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script) {
-                    if !all_commands.is_empty()
-                        && all_commands
-                            .iter()
-                            .all(|cmd| is_safe_to_call_with_exec(cmd))
-                    {
-                        return true;
-                    }
-                }
-            }
-        }
+    if let [bash, flag, script] = command
+        && bash == "bash"
+        && flag == "-lc"
+        && let Some(tree) = try_parse_bash(script)
+        && let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script)
+        && !all_commands.is_empty()
+        && all_commands
+            .iter()
+            .all(|cmd| is_safe_to_call_with_exec(cmd))
+    {
+        return true;
     }
 
     false
diff --git a/codex-rs/core/src/model_provider_info.rs b/codex-rs/core/src/model_provider_info.rs
index 00eabc539c..0102591d2c 100644
--- a/codex-rs/core/src/model_provider_info.rs
+++ b/codex-rs/core/src/model_provider_info.rs
@@ -167,10 +167,10 @@ impl ModelProviderInfo {
         if let Some(env_headers) = &self.env_http_headers {
             for (header, env_var) in env_headers {
-                if let Ok(val) = std::env::var(env_var) {
-                    if !val.trim().is_empty() {
-                        builder = builder.header(header, val);
-                    }
+                if let Ok(val) = std::env::var(env_var)
+                    && !val.trim().is_empty()
+                {
+                    builder = builder.header(header, val);
                 }
             }
         }
diff --git a/codex-rs/core/src/openai_tools.rs b/codex-rs/core/src/openai_tools.rs
index 32ead20e7d..dd5eb12516 100644
--- a/codex-rs/core/src/openai_tools.rs
+++ b/codex-rs/core/src/openai_tools.rs
@@ -420,11 +420,11 @@ fn sanitize_json_schema(value: &mut JsonValue) {
         }
         JsonValue::Object(map) => {
             // First, recursively sanitize known nested schema holders
-            if let Some(props) = map.get_mut("properties") {
-                if let Some(props_map) = props.as_object_mut() {
-                    for (_k, v) in props_map.iter_mut() {
-                        sanitize_json_schema(v);
-                    }
+            if let Some(props) = map.get_mut("properties")
+                && let Some(props_map) = props.as_object_mut()
+            {
+                for (_k, v) in props_map.iter_mut() {
+                    sanitize_json_schema(v);
                 }
             }
             if let Some(items) = map.get_mut("items") {
@@ -444,18 +444,18 @@ fn sanitize_json_schema(value: &mut JsonValue) {
                 .map(|s| s.to_string());
 
             // If type is an array (union), pick first supported; else leave to inference
-            if ty.is_none() {
-                if let Some(JsonValue::Array(types)) = map.get("type") {
-                    for t in types {
-                        if let Some(tt) = t.as_str() {
-                            if matches!(
-                                tt,
-                                "object" | "array" | "string" | "number" | "integer" | "boolean"
-                            ) {
-                                ty = Some(tt.to_string());
-                                break;
-                            }
-                        }
+            if ty.is_none()
+                && let Some(JsonValue::Array(types)) = map.get("type")
+            {
+                for t in types {
+                    if let Some(tt) = t.as_str()
+                        && matches!(
+                            tt,
+                            "object" | "array" | "string" | "number" | "integer" | "boolean"
+                        )
+                    {
+                        ty = Some(tt.to_string());
+                        break;
                     }
                 }
             }
diff --git a/codex-rs/core/src/parse_command.rs b/codex-rs/core/src/parse_command.rs
index 6ea0626814..7348af43ac 100644
--- a/codex-rs/core/src/parse_command.rs
+++ b/codex-rs/core/src/parse_command.rs
@@ -1196,10 +1196,10 @@ fn simplify_once(commands: &[ParsedCommand]) -> Option<Vec<ParsedCommand>> {
     }
 
     // echo ... && ...rest => ...rest
-    if let ParsedCommand::Unknown { cmd } = &commands[0] {
-        if shlex_split(cmd).is_some_and(|t| t.first().map(|s| s.as_str()) == Some("echo")) {
-            return Some(commands[1..].to_vec());
-        }
+    if let ParsedCommand::Unknown { cmd } = &commands[0]
+        && shlex_split(cmd).is_some_and(|t| t.first().map(|s| s.as_str()) == Some("echo"))
+    {
+        return Some(commands[1..].to_vec());
     }
 
     // cd foo && [any Test command] => [any Test command]
@@ -1208,17 +1208,15 @@ fn simplify_once(commands: &[ParsedCommand]) -> Option<Vec<ParsedCommand>> {
             shlex_split(cmd).is_some_and(|t| t.first().map(|s| s.as_str()) == Some("cd"))
         }
         _ => false,
-    }) {
-        if commands
-            .iter()
-            .skip(idx + 1)
-            .any(|pc| matches!(pc, ParsedCommand::Test { .. }))
-        {
-            let mut out = Vec::with_capacity(commands.len() - 1);
-            out.extend_from_slice(&commands[..idx]);
-            out.extend_from_slice(&commands[idx + 1..]);
-            return Some(out);
-        }
+    }) && commands
+        .iter()
+        .skip(idx + 1)
+        .any(|pc| matches!(pc, ParsedCommand::Test { .. }))
+    {
+        let mut out = Vec::with_capacity(commands.len() - 1);
+        out.extend_from_slice(&commands[..idx]);
+        out.extend_from_slice(&commands[idx + 1..]);
+        return Some(out);
     }
 
     // cmd || true => cmd
@@ -1564,127 +1562,124 @@ fn parse_bash_lc_commands(original: &[String]) -> Option<Vec<ParsedCommand>> {
     if bash != "bash" || flag != "-lc" {
         return None;
     }
-    if let Some(tree) = try_parse_bash(script) {
-        if let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script) {
-            if !all_commands.is_empty() {
-                let script_tokens = shlex_split(script)
-                    .unwrap_or_else(|| vec!["bash".to_string(), flag.clone(), script.clone()]);
-                // Strip small formatting helpers (e.g., head/tail/awk/wc/etc) so we
-                // bias toward the primary command when pipelines are present.
-                // First, drop obvious small formatting helpers (e.g., wc/awk/etc).
-                let had_multiple_commands = all_commands.len() > 1;
-                // The bash AST walker yields commands in right-to-left order for
-                // connector/pipeline sequences. Reverse to reflect actual execution order.
-                let mut filtered_commands = drop_small_formatting_commands(all_commands);
-                filtered_commands.reverse();
-                if filtered_commands.is_empty() {
-                    return Some(vec![ParsedCommand::Unknown {
-                        cmd: script.clone(),
-                    }]);
-                }
-                let mut commands: Vec<ParsedCommand> = filtered_commands
-                    .into_iter()
-                    .map(|tokens| summarize_main_tokens(&tokens))
-                    .collect();
-                if commands.len() > 1 {
-                    commands.retain(|pc| !matches!(pc, ParsedCommand::Noop { .. }));
-                }
-                if commands.len() == 1 {
-                    // If we reduced to a single command, attribute the full original script
-                    // for clearer UX in file-reading and listing scenarios, or when there were
-                    // no connectors in the original script. For search commands that came from
-                    // a pipeline (e.g. `rg --files | sed -n`), keep only the primary command.
-                    let had_connectors = had_multiple_commands
-                        || script_tokens
-                            .iter()
-                            .any(|t| t == "|" || t == "&&" || t == "||" || t == ";");
-                    commands = commands
-                        .into_iter()
-                        .map(|pc| match pc {
-                            ParsedCommand::Read { name, cmd, .. } => {
-                                if had_connectors {
-                                    let has_pipe = script_tokens.iter().any(|t| t == "|");
-                                    let has_sed_n = script_tokens.windows(2).any(|w| {
-                                        w.first().map(|s| s.as_str()) == Some("sed")
-                                            && w.get(1).map(|s| s.as_str()) == Some("-n")
-                                    });
-                                    if has_pipe && has_sed_n {
-                                        ParsedCommand::Read {
-                                            cmd: script.clone(),
-                                            name,
-                                        }
-                                    } else {
-                                        ParsedCommand::Read {
-                                            cmd: cmd.clone(),
-                                            name,
-                                        }
-                                    }
-                                } else {
-                                    ParsedCommand::Read {
-                                        cmd: shlex_join(&script_tokens),
-                                        name,
-                                    }
+    if let Some(tree) = try_parse_bash(script)
+        && let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script)
+        && !all_commands.is_empty()
+    {
+        let script_tokens = shlex_split(script)
+            .unwrap_or_else(|| vec!["bash".to_string(), flag.clone(), script.clone()]);
+        // Strip small formatting helpers (e.g., head/tail/awk/wc/etc) so we
+        // bias toward the primary command when pipelines are present.
+        // First, drop obvious small formatting helpers (e.g., wc/awk/etc).
+        let had_multiple_commands = all_commands.len() > 1;
+        // The bash AST walker yields commands in right-to-left order for
+        // connector/pipeline sequences. Reverse to reflect actual execution order.
+        let mut filtered_commands = drop_small_formatting_commands(all_commands);
+        filtered_commands.reverse();
+        if filtered_commands.is_empty() {
+            return Some(vec![ParsedCommand::Unknown {
+                cmd: script.clone(),
+            }]);
+        }
+        let mut commands: Vec<ParsedCommand> = filtered_commands
+            .into_iter()
+            .map(|tokens| summarize_main_tokens(&tokens))
+            .collect();
+        if commands.len() > 1 {
+            commands.retain(|pc| !matches!(pc, ParsedCommand::Noop { .. }));
+        }
+        if commands.len() == 1 {
+            // If we reduced to a single command, attribute the full original script
+            // for clearer UX in file-reading and listing scenarios, or when there were
+            // no connectors in the original script. For search commands that came from
+            // a pipeline (e.g. `rg --files | sed -n`), keep only the primary command.
+            let had_connectors = had_multiple_commands
+                || script_tokens
+                    .iter()
+                    .any(|t| t == "|" || t == "&&" || t == "||" || t == ";");
+            commands = commands
+                .into_iter()
+                .map(|pc| match pc {
+                    ParsedCommand::Read { name, cmd, .. } => {
+                        if had_connectors {
+                            let has_pipe = script_tokens.iter().any(|t| t == "|");
+                            let has_sed_n = script_tokens.windows(2).any(|w| {
+                                w.first().map(|s| s.as_str()) == Some("sed")
+                                    && w.get(1).map(|s| s.as_str()) == Some("-n")
+                            });
+                            if has_pipe && has_sed_n {
+                                ParsedCommand::Read {
+                                    cmd: script.clone(),
+                                    name,
                                 }
-                            }
-                            ParsedCommand::ListFiles { path, cmd, .. } => {
-                                if had_connectors {
-                                    ParsedCommand::ListFiles {
-                                        cmd: cmd.clone(),
-                                        path,
-                                    }
-                                } else {
-                                    ParsedCommand::ListFiles {
-                                        cmd: shlex_join(&script_tokens),
-                                        path,
-                                    }
+                            } else {
+                                ParsedCommand::Read {
+                                    cmd: cmd.clone(),
+                                    name,
                                 }
                             }
-                            ParsedCommand::Search {
-                                query, path, cmd, ..
-                            } => {
-                                if had_connectors {
-                                    ParsedCommand::Search {
-                                        cmd: cmd.clone(),
-                                        query,
-                                        path,
-                                    }
-                                } else {
-                                    ParsedCommand::Search {
-                                        cmd: shlex_join(&script_tokens),
-                                        query,
-                                        path,
-                                    }
-                                }
+                        } else {
+                            ParsedCommand::Read {
+                                cmd: shlex_join(&script_tokens),
+                                name,
                             }
-                            ParsedCommand::Format {
-                                tool, targets, cmd, ..
-                            } => ParsedCommand::Format {
+                        }
+                    }
+                    ParsedCommand::ListFiles { path, cmd, .. } => {
+                        if had_connectors {
+                            ParsedCommand::ListFiles {
                                 cmd: cmd.clone(),
-                                tool,
-                                targets,
-                            },
-                            ParsedCommand::Test { cmd, .. } => {
-                                ParsedCommand::Test { cmd: cmd.clone() }
+                                path,
                             }
-                            ParsedCommand::Lint {
-                                tool, targets, cmd, ..
-                            } => ParsedCommand::Lint {
+                        } else {
+                            ParsedCommand::ListFiles {
+                                cmd: shlex_join(&script_tokens),
+                                path,
+                            }
+                        }
+                    }
+                    ParsedCommand::Search {
+                        query, path, cmd, ..
+                    } => {
+                        if had_connectors {
+                            ParsedCommand::Search {
                                 cmd: cmd.clone(),
-                                tool,
-                                targets,
-                            },
-                            ParsedCommand::Unknown { .. } => ParsedCommand::Unknown {
-                                cmd: script.clone(),
-                            },
-                            ParsedCommand::Noop { .. } => ParsedCommand::Noop {
-                                cmd: script.clone(),
-                            },
-                        })
-                        .collect();
-                }
-                return Some(commands);
-            }
+                                query,
+                                path,
+                            }
+                        } else {
+                            ParsedCommand::Search {
+                                cmd: shlex_join(&script_tokens),
+                                query,
+                                path,
+                            }
+                        }
+                    }
+                    ParsedCommand::Format {
+                        tool, targets, cmd, ..
+                    } => ParsedCommand::Format {
+                        cmd: cmd.clone(),
+                        tool,
+                        targets,
+                    },
+                    ParsedCommand::Test { cmd, .. } => ParsedCommand::Test { cmd: cmd.clone() },
+                    ParsedCommand::Lint {
+                        tool, targets, cmd, ..
+                    } => ParsedCommand::Lint {
+                        cmd: cmd.clone(),
+                        tool,
+                        targets,
+                    },
+                    ParsedCommand::Unknown { .. } => ParsedCommand::Unknown {
+                        cmd: script.clone(),
+                    },
+                    ParsedCommand::Noop { .. } => ParsedCommand::Noop {
+                        cmd: script.clone(),
+                    },
+                })
+                .collect();
         }
+        return Some(commands);
     }
     Some(vec![ParsedCommand::Unknown {
         cmd: script.clone(),
diff --git a/codex-rs/core/src/safety.rs b/codex-rs/core/src/safety.rs
index c878a71110..37cb8f4e4c 100644
--- a/codex-rs/core/src/safety.rs
+++ b/codex-rs/core/src/safety.rs
@@ -231,10 +231,10 @@ fn is_write_patch_constrained_to_writable_paths(
                 if !is_path_writable(path) {
                     return false;
                 }
-                if let Some(dest) = move_path {
-                    if !is_path_writable(dest) {
-                        return false;
-                    }
+                if let Some(dest) = move_path
+                    && !is_path_writable(dest)
+                {
+                    return false;
                 }
             }
         }
diff --git a/codex-rs/core/src/shell.rs b/codex-rs/core/src/shell.rs
index 0cde3b4a78..c269b87ef2 100644
--- a/codex-rs/core/src/shell.rs
+++ b/codex-rs/core/src/shell.rs
@@ -70,13 +70,13 @@ pub async fn default_user_shell() -> Shell {
     }
     let stdout = String::from_utf8_lossy(&o.stdout);
     for line in stdout.lines() {
-        if let Some(shell_path) = line.strip_prefix("UserShell: ") {
-            if shell_path.ends_with("/zsh") {
-                return Shell::Zsh(ZshShell {
-                    shell_path: shell_path.to_string(),
-                    zshrc_path: format!("{home}/.zshrc"),
-                });
-            }
+        if let Some(shell_path) = line.strip_prefix("UserShell: ")
+            && shell_path.ends_with("/zsh")
+        {
+            return Shell::Zsh(ZshShell {
+                shell_path: shell_path.to_string(),
+                zshrc_path: format!("{home}/.zshrc"),
+            });
         }
     }
 
diff --git a/codex-rs/core/tests/cli_stream.rs b/codex-rs/core/tests/cli_stream.rs
index 219e99787d..dd53a8d302 100644
--- a/codex-rs/core/tests/cli_stream.rs
+++ b/codex-rs/core/tests/cli_stream.rs
@@ -297,13 +297,12 @@ async fn integration_creates_and_checks_session_file() {
                 Ok(v) => v,
                 Err(_) => continue,
             };
-            if item.get("type").and_then(|t| t.as_str()) == Some("message") {
-                if let Some(c) = item.get("content") {
-                    if c.to_string().contains(&marker) {
-                        matching_path = Some(path.to_path_buf());
-                        break;
-                    }
-                }
+            if item.get("type").and_then(|t| t.as_str()) == Some("message")
+                && let Some(c) = item.get("content")
+                && c.to_string().contains(&marker)
+            {
+                matching_path = Some(path.to_path_buf());
+                break;
             }
         }
     }
@@ -376,13 +375,12 @@ async fn integration_creates_and_checks_session_file() {
         let Ok(item) = serde_json::from_str::(line) else {
             continue;
        };
-        if item.get("type").and_then(|t| t.as_str()) == Some("message") {
-            if let Some(c) = item.get("content") {
-                if c.to_string().contains(&marker) {
-                    found_message = true;
-                    break;
-                }
-            }
+        if item.get("type").and_then(|t| t.as_str()) == Some("message")
+            && let Some(c) = item.get("content")
+            && c.to_string().contains(&marker)
+        {
+            found_message = true;
+            break;
         }
     }
     assert!(
diff --git a/codex-rs/exec/src/event_processor.rs b/codex-rs/exec/src/event_processor.rs
index b7b3c27dc5..1ba10c34e2 100644
--- a/codex-rs/exec/src/event_processor.rs
+++ b/codex-rs/exec/src/event_processor.rs
@@ -29,9 +29,9 @@ pub(crate) fn handle_last_message(last_agent_message: Option<&str>, output_file:
 }
 
 fn write_last_message_file(contents: &str, last_message_path: Option<&Path>) {
-    if let Some(path) = last_message_path {
-        if let Err(e) = std::fs::write(path, contents) {
-            eprintln!("Failed to write last message file {path:?}: {e}");
-        }
+    if let Some(path) = last_message_path
+        && let Err(e) = std::fs::write(path, contents)
+    {
+        eprintln!("Failed to write last message file {path:?}: {e}");
     }
 }
diff --git a/codex-rs/execpolicy/src/execv_checker.rs b/codex-rs/execpolicy/src/execv_checker.rs
index 242ea6d177..fcd80b2b5f 100644
--- a/codex-rs/execpolicy/src/execv_checker.rs
+++ b/codex-rs/execpolicy/src/execv_checker.rs
@@ -214,7 +214,12 @@ system_path=[{fake_cp:?}]
         // Only readable folders specified.
         assert_eq!(
-            checker.check(valid_exec.clone(), &cwd, &[root_path.clone()], &[]),
+            checker.check(
+                valid_exec.clone(),
+                &cwd,
+                std::slice::from_ref(&root_path),
+                &[]
+            ),
             Err(WriteablePathNotInWriteableFolders {
                 file: dest_path.clone(),
                 folders: vec![]
@@ -226,8 +231,8 @@ system_path=[{fake_cp:?}]
             checker.check(
                 valid_exec.clone(),
                 &cwd,
-                &[root_path.clone()],
-                &[root_path.clone()]
+                std::slice::from_ref(&root_path),
+                std::slice::from_ref(&root_path)
             ),
             Ok(cp.clone()),
         );
@@ -246,8 +251,8 @@ system_path=[{fake_cp:?}]
             checker.check(
                 valid_exec_call_folders_as_args,
                 &cwd,
-                &[root_path.clone()],
-                &[root_path.clone()]
+                std::slice::from_ref(&root_path),
+                std::slice::from_ref(&root_path)
             ),
             Ok(cp.clone()),
        );
@@ -269,8 +274,8 @@ system_path=[{fake_cp:?}]
             checker.check(
                 exec_with_parent_of_readable_folder,
                 &cwd,
-                &[root_path.clone()],
-                &[dest_path.clone()]
+                std::slice::from_ref(&root_path),
+                std::slice::from_ref(&dest_path)
             ),
             Err(ReadablePathNotInReadableFolders {
                 file: root_path.parent().unwrap().to_path_buf(),
diff --git a/codex-rs/execpolicy/src/policy.rs b/codex-rs/execpolicy/src/policy.rs
index 7ce148b7c2..825d6164a5 100644
--- a/codex-rs/execpolicy/src/policy.rs
+++ b/codex-rs/execpolicy/src/policy.rs
@@ -56,16 +56,16 @@ impl Policy {
         }
 
         for arg in args {
-            if let Some(regex) = &self.forbidden_substrings_pattern {
-                if regex.is_match(arg) {
-                    return Ok(MatchedExec::Forbidden {
-                        cause: Forbidden::Arg {
-                            arg: arg.clone(),
-                            exec_call: exec_call.clone(),
-                        },
-                        reason: format!("arg `{arg}` contains forbidden substring"),
-                    });
-                }
+            if let Some(regex) = &self.forbidden_substrings_pattern
+                && regex.is_match(arg)
+            {
+                return Ok(MatchedExec::Forbidden {
+                    cause: Forbidden::Arg {
+                        arg: arg.clone(),
+                        exec_call: exec_call.clone(),
+                    },
+                    reason: format!("arg `{arg}` contains forbidden substring"),
+                });
             }
         }
 
diff --git a/codex-rs/execpolicy/src/sed_command.rs b/codex-rs/execpolicy/src/sed_command.rs
index 64494ddf00..cc96aa98e7 100644
--- a/codex-rs/execpolicy/src/sed_command.rs
+++ b/codex-rs/execpolicy/src/sed_command.rs
@@ -3,12 +3,12 @@ use crate::error::Result;
 
 pub fn parse_sed_command(sed_command: &str) -> Result<()> {
    // For now, we parse only commands like `122,202p`.
-    if let Some(stripped) = sed_command.strip_suffix("p") {
-        if let Some((first, rest)) = stripped.split_once(",") {
-            if first.parse::().is_ok() && rest.parse::().is_ok() {
-                return Ok(());
-            }
-        }
+    if let Some(stripped) = sed_command.strip_suffix("p")
+        && let Some((first, rest)) = stripped.split_once(",")
+        && first.parse::().is_ok()
+        && rest.parse::().is_ok()
+    {
+        return Ok(());
     }
 
     Err(Error::SedCommandNotProvablySafe {
diff --git a/codex-rs/file-search/src/lib.rs b/codex-rs/file-search/src/lib.rs
index e0eb87656b..bfcfe922ef 100644
--- a/codex-rs/file-search/src/lib.rs
+++ b/codex-rs/file-search/src/lib.rs
@@ -228,11 +228,11 @@ pub fn run(
         for &Reverse((score, ref line)) in best_list.binary_heap.iter() {
             if global_heap.len() < limit.get() {
                 global_heap.push(Reverse((score, line.clone())));
-            } else if let Some(min_element) = global_heap.peek() {
-                if score > min_element.0.0 {
-                    global_heap.pop();
-                    global_heap.push(Reverse((score, line.clone())));
-                }
+            } else if let Some(min_element) = global_heap.peek()
+                && score > min_element.0.0
+            {
+                global_heap.pop();
+                global_heap.push(Reverse((score, line.clone())));
             }
         }
     }
@@ -320,11 +320,11 @@ impl BestMatchesList {
         if self.binary_heap.len() < self.max_count {
             self.binary_heap.push(Reverse((score, line.to_string())));
-        } else if let Some(min_element) = self.binary_heap.peek() {
-            if score > min_element.0.0 {
-                self.binary_heap.pop();
-                self.binary_heap.push(Reverse((score, line.to_string())));
-            }
+        } else if let Some(min_element) = self.binary_heap.peek()
+            && score > min_element.0.0
+        {
+            self.binary_heap.pop();
+            self.binary_heap.push(Reverse((score, line.to_string())));
         }
     }
 }
diff --git a/codex-rs/login/src/server.rs b/codex-rs/login/src/server.rs
index 32229484ff..6ffc86219d 100644
--- a/codex-rs/login/src/server.rs
+++ b/codex-rs/login/src/server.rs
@@ -364,10 +364,10 @@ async fn persist_tokens_async(
     let codex_home = codex_home.to_path_buf();
     tokio::task::spawn_blocking(move || {
         let auth_file = get_auth_file(&codex_home);
-        if let Some(parent) = auth_file.parent() {
-            if !parent.exists() {
-                std::fs::create_dir_all(parent).map_err(io::Error::other)?;
-            }
+        if let Some(parent) = auth_file.parent()
+            && !parent.exists()
+        {
+            std::fs::create_dir_all(parent).map_err(io::Error::other)?;
         }
 
         let mut auth = read_or_default(&auth_file);
diff --git a/codex-rs/ollama/src/client.rs b/codex-rs/ollama/src/client.rs
index 55c86e31f8..45e0486156 100644
--- a/codex-rs/ollama/src/client.rs
+++ b/codex-rs/ollama/src/client.rs
@@ -166,9 +166,8 @@ impl OllamaClient {
                             yield PullEvent::Error(err_msg.to_string());
                             return;
                         }
-                        if let Some(status) = value.get("status").and_then(|s| s.as_str()) {
-                            if status == "success" { yield PullEvent::Success; return; }
-                        }
+                        if let Some(status) = value.get("status").and_then(|s| s.as_str())
+                            && status == "success" { yield PullEvent::Success; return; }
                     }
                 }
             }
diff --git a/codex-rs/protocol-ts/src/lib.rs b/codex-rs/protocol-ts/src/lib.rs
index a37130b83b..c2b196dada 100644
--- a/codex-rs/protocol-ts/src/lib.rs
+++ b/codex-rs/protocol-ts/src/lib.rs
@@ -48,18 +48,16 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
     }
 
     // Format with Prettier by passing individual files (no shell globbing)
-    if let Some(prettier_bin) = prettier {
-        if !ts_files.is_empty() {
-            let status = Command::new(prettier_bin)
-                .arg("--write")
-                .args(ts_files.iter().map(|p| p.as_os_str()))
-                .status()
-                .with_context(|| {
-                    format!("Failed to invoke Prettier at {}", prettier_bin.display())
-                })?;
-            if !status.success() {
-                return Err(anyhow!("Prettier failed with status {}", status));
-            }
+    if let Some(prettier_bin) = prettier
+        && !ts_files.is_empty()
+    {
+        let status = Command::new(prettier_bin)
+            .arg("--write")
+            .args(ts_files.iter().map(|p| p.as_os_str()))
+            .status()
+            .with_context(|| format!("Failed to invoke Prettier at {}", prettier_bin.display()))?;
+        if !status.success() {
+            return Err(anyhow!("Prettier failed with status {}", status));
        }
     }
 
diff --git a/codex-rs/rust-toolchain.toml b/codex-rs/rust-toolchain.toml
index 72bafdf4b6..0a9f6e8169 100644
--- a/codex-rs/rust-toolchain.toml
+++ b/codex-rs/rust-toolchain.toml
@@ -1,3 +1,3 @@
 [toolchain]
-channel = "1.88.0"
+channel = "1.89.0"
 components = [ "clippy", "rustfmt", "rust-src"]
diff --git a/codex-rs/tui/src/bottom_pane/approval_modal_view.rs b/codex-rs/tui/src/bottom_pane/approval_modal_view.rs
index 0cfc24d780..b7e6e5e69a 100644
--- a/codex-rs/tui/src/bottom_pane/approval_modal_view.rs
+++ b/codex-rs/tui/src/bottom_pane/approval_modal_view.rs
@@ -33,10 +33,10 @@ impl ApprovalModalView<'_> {
     /// Advance to next request if the current one is finished.
     fn maybe_advance(&mut self) {
-        if self.current.is_complete() {
-            if let Some(req) = self.queue.pop() {
-                self.current = UserApprovalWidget::new(req, self.app_event_tx.clone());
-            }
+        if self.current.is_complete()
+            && let Some(req) = self.queue.pop()
+        {
+            self.current = UserApprovalWidget::new(req, self.app_event_tx.clone());
         }
     }
 }
diff --git a/codex-rs/tui/src/bottom_pane/textarea.rs b/codex-rs/tui/src/bottom_pane/textarea.rs
index d2a3916e55..8379deb9f8 100644
--- a/codex-rs/tui/src/bottom_pane/textarea.rs
+++ b/codex-rs/tui/src/bottom_pane/textarea.rs
@@ -1575,53 +1575,53 @@ mod tests {
             }
             14 => {
                 // Try inserting inside an existing element (should clamp to boundary)
-                if let Some(payload) = elem_texts.choose(&mut rng).cloned() {
-                    if let Some(start) = ta.text().find(&payload) {
-                        let end = start + payload.len();
-                        if end - start > 2 {
-                            let pos = rng.random_range(start + 1..end - 1);
-                            let ins = rand_grapheme(&mut rng);
-                            ta.insert_str_at(pos, &ins);
-                        }
+                if let Some(payload) = elem_texts.choose(&mut rng).cloned()
+                    && let Some(start) = ta.text().find(&payload)
+                {
+                    let end = start + payload.len();
+                    if end - start > 2 {
+                        let pos = rng.random_range(start + 1..end - 1);
+                        let ins = rand_grapheme(&mut rng);
+                        ta.insert_str_at(pos, &ins);
                     }
                 }
             }
             15 => {
                 // Replace a range that intersects an element -> whole element should be replaced
-                if let Some(payload) = elem_texts.choose(&mut rng).cloned() {
-                    if let Some(start) = ta.text().find(&payload) {
-                        let end = start + payload.len();
-                        // Create an intersecting range [start-δ, end-δ2)
-                        let mut s = start.saturating_sub(rng.random_range(0..=2));
-                        let mut e = (end + rng.random_range(0..=2)).min(ta.text().len());
-                        // Align to char boundaries to satisfy String::replace_range contract
-                        let txt = ta.text();
-                        while s > 0 && !txt.is_char_boundary(s) {
-                            s -= 1;
-                        }
-                        while e < txt.len() && !txt.is_char_boundary(e) {
-                            e += 1;
-                        }
-                        if s < e {
-                            // Small replacement text
-                            let mut srep = String::new();
-                            for _ in 0..rng.random_range(0..=2) {
-                                srep.push_str(&rand_grapheme(&mut rng));
-                            }
-                            ta.replace_range(s..e, &srep);
+                if let Some(payload) = elem_texts.choose(&mut rng).cloned()
+                    && let Some(start) = ta.text().find(&payload)
+                {
+                    let end = start + payload.len();
+                    // Create an intersecting range [start-δ, end-δ2)
+                    let mut s = start.saturating_sub(rng.random_range(0..=2));
+                    let mut e = (end + rng.random_range(0..=2)).min(ta.text().len());
+                    // Align to char boundaries to satisfy String::replace_range contract
+                    let txt = ta.text();
+                    while s > 0 && !txt.is_char_boundary(s) {
+                        s -= 1;
+                    }
+                    while e < txt.len() && !txt.is_char_boundary(e) {
+                        e += 1;
+                    }
+                    if s < e {
+                        // Small replacement text
+                        let mut srep = String::new();
+                        for _ in 0..rng.random_range(0..=2) {
+                            srep.push_str(&rand_grapheme(&mut rng));
                         }
+                        ta.replace_range(s..e, &srep);
                     }
                 }
             }
             16 => {
                 // Try setting the cursor to a position inside an element; it should clamp out
-                if let Some(payload) = elem_texts.choose(&mut rng).cloned() {
-                    if let Some(start) = ta.text().find(&payload) {
-                        let end = start + payload.len();
-                        if end - start > 2 {
-                            let pos = rng.random_range(start + 1..end - 1);
-                            ta.set_cursor(pos);
-                        }
+                if let Some(payload) = elem_texts.choose(&mut rng).cloned()
+                    && let Some(start) = ta.text().find(&payload)
+                {
+                    let end = start + payload.len();
+                    if end - start > 2 {
+                        let pos = rng.random_range(start + 1..end - 1);
+                        ta.set_cursor(pos);
                     }
                 }
             }
diff --git a/codex-rs/tui/src/chatwidget/tests.rs b/codex-rs/tui/src/chatwidget/tests.rs
index 192ab3c6ae..c7e9ad5b03 100644
--- a/codex-rs/tui/src/chatwidget/tests.rs
+++ b/codex-rs/tui/src/chatwidget/tests.rs
@@ -339,18 +339,18 @@ async fn binary_size_transcript_matches_ideal_fixture() {
                 }
             }
             "app_event" => {
-                if let Some(variant) = v.get("variant").and_then(|s| s.as_str()) {
-                    if variant == "CommitTick" {
-                        chat.on_commit_tick();
-                        while let Ok(app_ev) = rx.try_recv() {
-                            if let AppEvent::InsertHistory(lines) = app_ev {
-                                transcript.push_str(&lines_to_single_string(&lines));
-                                crate::insert_history::insert_history_lines_to_writer(
-                                    &mut terminal,
-                                    &mut ansi,
-                                    lines,
-                                );
-                            }
+                if let Some(variant) = v.get("variant").and_then(|s| s.as_str())
+                    && variant == "CommitTick"
+                {
+                    chat.on_commit_tick();
+                    while let Ok(app_ev) = rx.try_recv() {
+                        if let AppEvent::InsertHistory(lines) = app_ev {
+                            transcript.push_str(&lines_to_single_string(&lines));
+                            crate::insert_history::insert_history_lines_to_writer(
+                                &mut terminal,
+                                &mut ansi,
+                                lines,
+                            );
                         }
                     }
                 }
diff --git a/codex-rs/tui/src/custom_terminal.rs b/codex-rs/tui/src/custom_terminal.rs
index 1ada679fc1..91153f92ed 100644
--- a/codex-rs/tui/src/custom_terminal.rs
+++ b/codex-rs/tui/src/custom_terminal.rs
@@ -264,10 +264,10 @@ where
     #[allow(clippy::print_stderr)]
     fn drop(&mut self) {
         // Attempt to restore the cursor state
-        if self.hidden_cursor {
-            if let Err(err) = self.show_cursor() {
-                eprintln!("Failed to show the cursor: {err}");
-            }
+        if self.hidden_cursor
+            && let Err(err) = self.show_cursor()
+        {
+            eprintln!("Failed to show the cursor: {err}");
         }
     }
 }
@@ -309,7 +309,7 @@ where
     }
 
    /// Get a Frame object which provides a consistent view into the terminal state for rendering.
-    pub fn get_frame(&mut self) -> Frame {
+    pub fn get_frame(&mut self) -> Frame<'_> {
         let count = self.frame_count;
         Frame {
             cursor_position: None,
diff --git a/codex-rs/tui/src/exec_command.rs b/codex-rs/tui/src/exec_command.rs
index 35c59b22a5..db3bf2fef4 100644
--- a/codex-rs/tui/src/exec_command.rs
+++ b/codex-rs/tui/src/exec_command.rs
@@ -33,10 +33,10 @@ where
         return None;
     }
 
-    if let Some(home_dir) = std::env::var_os("HOME").map(PathBuf::from) {
-        if let Ok(rel) = path.strip_prefix(&home_dir) {
-            return Some(rel.to_path_buf());
-        }
+    if let Some(home_dir) = std::env::var_os("HOME").map(PathBuf::from)
+        && let Ok(rel) = path.strip_prefix(&home_dir)
+    {
+        return Some(rel.to_path_buf());
     }
 
     None
diff --git a/codex-rs/tui/src/file_search.rs b/codex-rs/tui/src/file_search.rs
index 8a05b2c165..7f15e46abb 100644
--- a/codex-rs/tui/src/file_search.rs
+++ b/codex-rs/tui/src/file_search.rs
@@ -94,13 +94,13 @@ impl FileSearchManager {
 
         // If there is an in-flight search that is definitely obsolete,
         // cancel it now.
-        if let Some(active_search) = &st.active_search {
-            if !query.starts_with(&active_search.query) {
-                active_search
-                    .cancellation_token
-                    .store(true, Ordering::Relaxed);
-                st.active_search = None;
-            }
+        if let Some(active_search) = &st.active_search
+            && !query.starts_with(&active_search.query)
+        {
+            active_search
+                .cancellation_token
+                .store(true, Ordering::Relaxed);
+            st.active_search = None;
         }
 
         // Schedule a search to run after debounce.
@@ -187,10 +187,10 @@ impl FileSearchManager {
             {
                 #[expect(clippy::unwrap_used)]
                 let mut st = search_state.lock().unwrap();
-                if let Some(active_search) = &st.active_search {
-                    if Arc::ptr_eq(&active_search.cancellation_token, &cancellation_token) {
-                        st.active_search = None;
-                    }
+                if let Some(active_search) = &st.active_search
+                    && Arc::ptr_eq(&active_search.cancellation_token, &cancellation_token)
+                {
+                    st.active_search = None;
                 }
             }
         });
diff --git a/codex-rs/tui/src/history_cell.rs b/codex-rs/tui/src/history_cell.rs
index 58677c724a..021c09e337 100644
--- a/codex-rs/tui/src/history_cell.rs
+++ b/codex-rs/tui/src/history_cell.rs
@@ -541,33 +541,33 @@ pub(crate) fn new_status_output(
 
     // 👤 Account (only if ChatGPT tokens exist), shown under the first block
     let auth_file = get_auth_file(&config.codex_home);
-    if let Ok(auth) = try_read_auth_json(&auth_file) {
-        if let Some(tokens) = auth.tokens.clone() {
-            lines.push(Line::from(vec!["👤 ".into(), "Account".bold()]));
-            lines.push(Line::from("  • Signed in with ChatGPT"));
-
-            let info = tokens.id_token;
-            if let Some(email) = &info.email {
-                lines.push(Line::from(vec!["  • Login: ".into(), email.clone().into()]));
-            }
+    if let Ok(auth) = try_read_auth_json(&auth_file)
+        && let Some(tokens) = auth.tokens.clone()
+    {
+        lines.push(Line::from(vec!["👤 ".into(), "Account".bold()]));
+        lines.push(Line::from("  • Signed in with ChatGPT"));
 
-            match auth.openai_api_key.as_deref() {
-                Some(key) if !key.is_empty() => {
-                    lines.push(Line::from(
-                        "  • Using API key. Run codex login to use ChatGPT plan",
-                    ));
-                }
-                _ => {
-                    let plan_text = info
-                        .get_chatgpt_plan_type()
-                        .map(|s| title_case(&s))
-                        .unwrap_or_else(|| "Unknown".to_string());
-                    lines.push(Line::from(vec!["  • Plan: ".into(), plan_text.into()]));
-                }
-            }
+        let info = tokens.id_token;
+        if let Some(email) = &info.email {
+            lines.push(Line::from(vec!["  • Login: ".into(), email.clone().into()]));
+        }
 
-            lines.push(Line::from(""));
+        match auth.openai_api_key.as_deref() {
+            Some(key) if !key.is_empty() => {
+                lines.push(Line::from(
+                    "  • Using API key. Run codex login to use ChatGPT plan",
+                ));
+            }
+            _ => {
+                let plan_text = info
+                    .get_chatgpt_plan_type()
+                    .map(|s| title_case(&s))
+                    .unwrap_or_else(|| "Unknown".to_string());
+                lines.push(Line::from(vec!["  • Plan: ".into(), plan_text.into()]));
+            }
         }
+
+        lines.push(Line::from(""));
     }
 
     // 🧠 Model
@@ -612,10 +612,10 @@ pub(crate) fn new_status_output(
         "  • Input: ".into(),
         usage.non_cached_input().to_string().into(),
     ];
-    if let Some(cached) = usage.cached_input_tokens {
-        if cached > 0 {
-            input_line_spans.push(format!(" (+ {cached} cached)").into());
-        }
+    if let Some(cached) = usage.cached_input_tokens
+        && cached > 0
+    {
+        input_line_spans.push(format!(" (+ {cached} cached)").into());
     }
     lines.push(Line::from(input_line_spans));
     // Output:
@@ -688,16 +688,15 @@ pub(crate) fn new_mcp_tools_output(
             ]));
         }
 
-        if let Some(env) = cfg.env.as_ref() {
-            if !env.is_empty() {
-                let mut env_pairs: Vec<String> =
-                    env.iter().map(|(k, v)| format!("{k}={v}")).collect();
-                env_pairs.sort();
-                lines.push(Line::from(vec![
-                    "    • Env: ".into(),
-                    env_pairs.join(" ").into(),
-                ]));
-            }
+        if let Some(env) = cfg.env.as_ref()
+            && !env.is_empty()
+        {
+            let mut env_pairs: Vec<String> = env.iter().map(|(k, v)| format!("{k}={v}")).collect();
+            env_pairs.sort();
+            lines.push(Line::from(vec![
+                "    • Env: ".into(),
+                env_pairs.join(" ").into(),
+            ]));
         }
 
         if names.is_empty() {
diff --git a/codex-rs/tui/src/onboarding/auth.rs b/codex-rs/tui/src/onboarding/auth.rs
index 2396b4d5fa..2ef70ca275 100644
--- a/codex-rs/tui/src/onboarding/auth.rs
+++ b/codex-rs/tui/src/onboarding/auth.rs
@@ -123,20 +123,20 @@ impl AuthModeWidget {
 
         // If the user is already authenticated but the method differs from their
         // preferred auth method, show a brief explanation.
-        if let LoginStatus::AuthMode(current) = self.login_status {
-            if current != self.preferred_auth_method {
-                let to_label = |mode: AuthMode| match mode {
-                    AuthMode::ApiKey => "API key",
-                    AuthMode::ChatGPT => "ChatGPT",
-                };
-                let msg = format!(
-                    " You’re currently using {} while your preferred method is {}.",
-                    to_label(current),
-                    to_label(self.preferred_auth_method)
-                );
-                lines.push(Line::from(msg).style(Style::default()));
-                lines.push(Line::from(""));
-            }
+        if let LoginStatus::AuthMode(current) = self.login_status
+            && current != self.preferred_auth_method
+        {
+            let to_label = |mode: AuthMode| match mode {
+                AuthMode::ApiKey => "API key",
+                AuthMode::ChatGPT => "ChatGPT",
+            };
+            let msg = format!(
+                " You’re currently using {} while your preferred method is {}.",
+                to_label(current),
+                to_label(self.preferred_auth_method)
+            );
+            lines.push(Line::from(msg).style(Style::default()));
+            lines.push(Line::from(""));
         }
 
         let create_mode_item = |idx: usize,
@@ -222,15 +222,15 @@ impl AuthModeWidget {
         spans.extend(shimmer_spans("Finish signing in via your browser"));
         let mut lines = vec![Line::from(spans), Line::from("")];
 
-        if let SignInState::ChatGptContinueInBrowser(state) = &self.sign_in_state {
-            if !state.auth_url.is_empty() {
-                lines.push(Line::from("  If the link doesn't open automatically, open the following link to authenticate:"));
-                lines.push(Line::from(vec![
-                    Span::raw("  "),
-                    state.auth_url.as_str().cyan().underlined(),
-                ]));
-                lines.push(Line::from(""));
-            }
+        if let SignInState::ChatGptContinueInBrowser(state) = &self.sign_in_state
+            && !state.auth_url.is_empty()
+        {
+            lines.push(Line::from("  If the link doesn't open automatically, open the following link to authenticate:"));
+            lines.push(Line::from(vec![
+                Span::raw("  "),
+                state.auth_url.as_str().cyan().underlined(),
+            ]));
+            lines.push(Line::from(""));
         }
 
         lines.push(
diff --git a/codex-rs/tui/src/streaming/controller.rs b/codex-rs/tui/src/streaming/controller.rs
index 161a111173..5eae2a91f6 100644
--- a/codex-rs/tui/src/streaming/controller.rs
+++ b/codex-rs/tui/src/streaming/controller.rs
@@ -96,26 +96,26 @@ impl StreamController {
     /// Begin a stream, flushing previously completed lines from any other
     /// active stream to maintain ordering.
     pub(crate) fn begin(&mut self, kind: StreamKind, sink: &impl HistorySink) {
-        if let Some(current) = self.current_stream {
-            if current != kind {
-                // Synchronously flush completed lines from previous stream.
-                let cfg = self.config.clone();
-                let prev_state = self.state_mut(current);
-                let newly_completed = prev_state.collector.commit_complete_lines(&cfg);
-                if !newly_completed.is_empty() {
-                    prev_state.enqueue(newly_completed);
-                }
-                let step = prev_state.drain_all();
-                if !step.history.is_empty() {
-                    let mut lines: Lines = Vec::new();
-                    self.emit_header_if_needed(current, &mut lines);
-                    lines.extend(step.history);
-                    // Ensure at most one trailing blank after the flushed block.
-                    Self::ensure_single_trailing_blank(&mut lines);
-                    sink.insert_history(lines);
-                }
-                self.current_stream = None;
+        if let Some(current) = self.current_stream
+            && current != kind
+        {
+            // Synchronously flush completed lines from previous stream.
+            let cfg = self.config.clone();
+            let prev_state = self.state_mut(current);
+            let newly_completed = prev_state.collector.commit_complete_lines(&cfg);
+            if !newly_completed.is_empty() {
+                prev_state.enqueue(newly_completed);
+            }
+            let step = prev_state.drain_all();
+            if !step.history.is_empty() {
+                let mut lines: Lines = Vec::new();
+                self.emit_header_if_needed(current, &mut lines);
+                lines.extend(step.history);
+                // Ensure at most one trailing blank after the flushed block.
+                Self::ensure_single_trailing_blank(&mut lines);
+                sink.insert_history(lines);
             }
+            self.current_stream = None;
         }
 
         if self.current_stream != Some(kind) {
diff --git a/codex-rs/tui/src/text_formatting.rs b/codex-rs/tui/src/text_formatting.rs
index b77c6e5baa..3c4cfc31bb 100644
--- a/codex-rs/tui/src/text_formatting.rs
+++ b/codex-rs/tui/src/text_formatting.rs
@@ -53,12 +53,12 @@ pub(crate) fn format_json_compact(text: &str) -> Option<String> {
             }
             ' ' | '\t' if !in_string => {
                 // Add a space after : and , but only when not in a string
-                if let Some(&next_ch) = chars.peek() {
-                    if let Some(last_ch) = result.chars().last() {
-                        if (last_ch == ':' || last_ch == ',') && !matches!(next_ch, '}' | ']') {
-                            result.push(' ');
-                        }
-                    }
+                if let Some(&next_ch) = chars.peek()
+                    && let Some(last_ch) = result.chars().last()
+                    && (last_ch == ':' || last_ch == ',')
+                    && !matches!(next_ch, '}' | ']')
+                {
+                    result.push(' ');
                 }
             }
             _ => {
diff --git a/codex-rs/tui/tests/vt100_history.rs b/codex-rs/tui/tests/vt100_history.rs
index 402e847b47..1f01a6d3ba 100644
--- a/codex-rs/tui/tests/vt100_history.rs
+++ b/codex-rs/tui/tests/vt100_history.rs
@@ -112,12 +112,11 @@ fn long_token_wraps() {
     let mut count_a = 0usize;
     for row in 0..6 {
         for col in 0..20 {
-            if let Some(cell) = screen.cell(row, col) {
-                if let Some(ch) = cell.contents().chars().next() {
-                    if ch == 'A' {
-                        count_a += 1;
-                    }
-                }
+            if let Some(cell) = screen.cell(row, col)
+                && let Some(ch) = cell.contents().chars().next()
+                && ch == 'A'
+            {
+                count_a += 1;
             }
         }
     }
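Note: every Rust hunk above applies the same mechanical rewrite that the toolchain bump to 1.89 makes available across the workspace: nested `if let` / `if` guards are collapsed into a single `let`-chain (`if let ... && ...`), which requires the Rust 2024 edition and a recent stable toolchain. A minimal, self-contained sketch of the pattern, not taken from the diff (the `home_override` helper is made up for illustration):

    // Requires edition = "2024" in Cargo.toml (let-chains are edition-gated).
    fn home_override() -> Option<String> {
        std::env::var("CODEX_HOME").ok()
    }

    fn main() {
        // Before: nested conditionals, two levels of indentation.
        if let Some(val) = home_override() {
            if !val.is_empty() {
                println!("nested: {val}");
            }
        }

        // After: one flat let-chain, as in the hunks above.
        if let Some(val) = home_override()
            && !val.is_empty()
        {
            println!("chained: {val}");
        }
    }

Both forms are equivalent; the chained form simply removes one brace level per condition, which is what most of the hunks in this diff do.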