Skip to content

Commit beededb

Browse files
echobt and factorydroid authored
feat(cli): implement CLI improvements from open PRs (#181)
This commit implements improvements from multiple CLI-related PRs:

1. PR #137: Use consistent provider name casing in models output
   - Removed .to_uppercase() call to keep provider names in lowercase
2. PR #135: Add --log-level flag for granular log verbosity control
   - Added LogLevel enum with error, warn, info, debug, trace levels
   - Support for FABRIC_LOG_LEVEL environment variable
   - Deprecated --debug flag (still works for backward compatibility)
3. PR #147: Display timestamps with timezone suffix
   - Sessions list now shows timestamps in "YYYY-MM-DD HH:MM:SS UTC" format
   - Makes timezone unambiguous for team collaboration
4. PR #153: Emit valid JSONL with full event data in streaming mode
   - Serialize full event using serde with proper type tags
   - Include complete event payload data (text deltas, tool call info)
5. PR #138: Add model alias shortcuts for common models
   - Added ModelAlias struct and MODEL_ALIASES constant
   - resolve_model_alias() function for short name resolution
   - Supported aliases: sonnet, opus, haiku, gpt4, gpt, o1, o3, codex, gemini, deepseek, r1, llama
   - Case-insensitive matching
6. PR #139: Pass CLI temperature override to session
   - Added temperature field to Config and ConfigOverrides
   - Modified session.rs to use config.temperature instead of hardcoded 0.7
   - Temperature can be set via cortex run --temperature flag

Also includes formatting improvements from cargo fmt.

Co-authored-by: Droid Agent <droid@factory.ai>
1 parent 4dabd9e commit beededb

File tree

12 files changed

+296
-55
lines changed

12 files changed

+296
-55
lines changed

cortex-cli/src/login.rs

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -148,7 +148,9 @@ pub async fn run_logout(config_overrides: CliConfigOverrides) -> ! {
148148
Ok(Some(_)) => {
149149
// User is logged in, ask for confirmation if terminal is interactive
150150
if std::io::stdin().is_terminal() {
151-
eprint!("Are you sure you want to log out? This will remove your stored credentials. [y/N]: ");
151+
eprint!(
152+
"Are you sure you want to log out? This will remove your stored credentials. [y/N]: "
153+
);
152154
let _ = std::io::Write::flush(&mut std::io::stderr());
153155

154156
let mut input = String::new();

cortex-cli/src/main.rs

Lines changed: 80 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -9,7 +9,7 @@
99
//! - Debug sandbox commands
1010
//! - Shell completions
1111
12-
use anyhow::{Context, Result};
12+
use anyhow::{Context, Result, bail};
1313
use clap::{Args, CommandFactory, Parser, Subcommand};
1414
use clap_complete::{Shell, generate};
1515
use std::io;
@@ -36,6 +36,50 @@ use cortex_cli::upgrade_cmd::UpgradeCli;
3636
use cortex_cli::{LandlockCommand, SeatbeltCommand, WindowsCommand};
3737
use cortex_common::CliConfigOverrides;
3838

39+
/// Log verbosity level for CLI output.
40+
///
41+
/// Controls the amount of logging detail shown. Can also be set via
42+
/// the FABRIC_LOG_LEVEL environment variable.
43+
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, clap::ValueEnum)]
44+
pub enum LogLevel {
45+
/// Only show errors
46+
Error,
47+
/// Show warnings and errors
48+
Warn,
49+
/// Show informational messages, warnings, and errors (default)
50+
#[default]
51+
Info,
52+
/// Show debug messages and above
53+
Debug,
54+
/// Show all messages including trace-level details
55+
Trace,
56+
}
57+
58+
impl LogLevel {
59+
/// Convert to tracing filter string
60+
fn as_filter_str(&self) -> &'static str {
61+
match self {
62+
LogLevel::Error => "error",
63+
LogLevel::Warn => "warn",
64+
LogLevel::Info => "info",
65+
LogLevel::Debug => "debug",
66+
LogLevel::Trace => "trace",
67+
}
68+
}
69+
70+
/// Parse from string (case-insensitive)
71+
fn from_str_loose(s: &str) -> Option<LogLevel> {
72+
match s.to_lowercase().as_str() {
73+
"error" => Some(LogLevel::Error),
74+
"warn" | "warning" => Some(LogLevel::Warn),
75+
"info" => Some(LogLevel::Info),
76+
"debug" => Some(LogLevel::Debug),
77+
"trace" => Some(LogLevel::Trace),
78+
_ => None,
79+
}
80+
}
81+
}
82+
3983
/// Cortex CLI - AI Coding Agent
4084
///
4185
/// If no subcommand is specified, starts the interactive TUI.
@@ -131,8 +175,13 @@ struct InteractiveArgs {
131175
#[arg(long = "image", short = 'i', value_delimiter = ',', num_args = 1..)]
132176
images: Vec<PathBuf>,
133177

134-
/// Working directory
135-
#[arg(long)]
178+
/// Set log verbosity level (error, warn, info, debug, trace)
179+
/// Can also be set via FABRIC_LOG_LEVEL environment variable
180+
#[arg(long = "log-level", short = 'L', value_enum, default_value = "info")]
181+
log_level: LogLevel,
182+
183+
/// Enable debug logging (deprecated: use --log-level debug instead)
184+
#[arg(long, hide = true)]
136185
debug: bool,
137186

138187
/// Initial prompt (if no subcommand)
@@ -449,12 +498,19 @@ async fn main() -> Result<()> {
449498
// Initialize logging only for non-TUI commands
450499
// TUI mode has its own file-based logging to avoid stdout pollution
451500
if cli.command.is_some() {
501+
// Determine log level with precedence: --debug flag > --log-level > FABRIC_LOG_LEVEL env > default (info)
452502
let log_level = if cli.interactive.debug {
453-
"debug"
503+
// Deprecated --debug flag takes precedence for backward compatibility
504+
LogLevel::Debug
505+
} else if let Ok(env_level) = std::env::var("FABRIC_LOG_LEVEL") {
506+
// Check environment variable
507+
LogLevel::from_str_loose(&env_level).unwrap_or(cli.interactive.log_level)
454508
} else {
455-
"info"
509+
cli.interactive.log_level
456510
};
457-
tracing_subscriber::fmt().with_env_filter(log_level).init();
511+
tracing_subscriber::fmt()
512+
.with_env_filter(log_level.as_filter_str())
513+
.init();
458514
}
459515

460516
// Background update check (non-blocking, runs in parallel with command execution)
@@ -514,7 +570,9 @@ async fn main() -> Result<()> {
514570
Some(Commands::Mcp(mcp_cli)) => mcp_cli.run().await,
515571
Some(Commands::Agent(agent_cli)) => agent_cli.run().await,
516572
Some(Commands::McpServer) => {
517-
bail!("MCP server mode is not yet implemented. Use 'cortex mcp' for MCP server management.");
573+
bail!(
574+
"MCP server mode is not yet implemented. Use 'cortex mcp' for MCP server management."
575+
);
518576
}
519577
Some(Commands::Completion(completion_cli)) => {
520578
generate_completions(completion_cli.shell);
@@ -562,8 +620,15 @@ async fn main() -> Result<()> {
562620
}
563621

564622
async fn run_tui(initial_prompt: Option<String>, args: &InteractiveArgs) -> Result<()> {
623+
use cortex_common::resolve_model_alias;
624+
565625
let mut config = cortex_engine::Config::default();
566626

627+
// Apply model override if specified, resolving alias (e.g., "sonnet" -> "anthropic/claude-sonnet-4-20250514")
628+
if let Some(ref model) = args.model {
629+
config.model = resolve_model_alias(model).to_string();
630+
}
631+
567632
// Apply working directory override if specified
568633
if let Some(ref cwd) = args.cwd {
569634
let cwd_path = if cwd.is_absolute() {
@@ -982,9 +1047,15 @@ async fn list_sessions(
9821047

9831048
for session in display_sessions.iter().take(15) {
9841049
let model = session.model.as_deref().unwrap_or("unknown");
985-
let date = if session.timestamp.len() >= 19 {
986-
session.timestamp[..19].replace('T', " ")
1050+
// Display timestamp in ISO8601 format with UTC timezone suffix for clarity
1051+
let date = if let Ok(parsed) = chrono::DateTime::parse_from_rfc3339(&session.timestamp) {
1052+
// Format as "YYYY-MM-DD HH:MM:SS UTC" for unambiguous timezone
1053+
parsed
1054+
.with_timezone(&chrono::Utc)
1055+
.format("%Y-%m-%d %H:%M:%S UTC")
1056+
.to_string()
9871057
} else {
1058+
// Fallback: use raw timestamp (already includes timezone info)
9881059
session.timestamp.clone()
9891060
};
9901061

cortex-cli/src/models_cmd.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -361,7 +361,7 @@ async fn run_list(provider_filter: Option<String>, json: bool) -> Result<()> {
361361
println!("{}", "=".repeat(80));
362362

363363
for (provider, models) in by_provider {
364-
println!("\n{} ({} models)", provider.to_uppercase(), models.len());
364+
println!("\n{} ({} models)", provider, models.len());
365365
println!("{}", "-".repeat(40));
366366
println!(
367367
"{:<35} {:^6} {:^6} {:^8} {:^6}",

cortex-cli/src/run_cmd.rs

Lines changed: 17 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -30,6 +30,7 @@ use std::time::Duration;
3030
/// Maximum file size for attachments (10MB)
3131
const MAX_FILE_SIZE: u64 = 10 * 1024 * 1024;
3232

33+
use cortex_common::resolve_model_alias;
3334
use cortex_engine::rollout::get_rollout_path;
3435
use cortex_engine::{Session, list_sessions};
3536
use cortex_protocol::{ConversationId, EventMsg, Op, Submission, UserInput};
@@ -476,6 +477,16 @@ impl RunCli {
476477
config.current_agent = Some(agent_name.clone());
477478
}
478479

480+
// Resolve model alias if provided (e.g., "sonnet" -> "anthropic/claude-sonnet-4-20250514")
481+
if let Some(ref model) = self.model {
482+
config.model = resolve_model_alias(model).to_string();
483+
}
484+
485+
// Apply temperature override if provided
486+
if let Some(temp) = self.temperature {
487+
config.temperature = Some(temp);
488+
}
489+
479490
// Initialize custom command registry if not already initialized
480491
let project_root = self.cwd.clone().or_else(|| std::env::current_dir().ok());
481492
let _custom_registry = cortex_engine::init_custom_command_registry(
@@ -622,24 +633,19 @@ impl RunCli {
622633

623634
event_count += 1;
624635

625-
// Output JSON event if in JSON mode
636+
// Output JSON event if in JSON mode (JSONL format with full event data)
626637
if is_json {
638+
// Serialize the full event using serde, which properly includes all data
639+
// and uses the correct type tags from the EventMsg enum's serde attributes
627640
let event_json = serde_json::json!({
628-
"type": format!("{:?}", event.msg).split('(').next().unwrap_or("Unknown"),
629641
"event_id": event_count,
630642
"session_id": session_id,
631643
"timestamp": chrono::Utc::now().to_rfc3339(),
644+
"event": event.msg,
632645
});
633646

634-
match self.format {
635-
OutputFormat::Json => {
636-
eprintln!("{}", serde_json::to_string(&event_json)?);
637-
}
638-
OutputFormat::Jsonl => {
639-
eprintln!("{}", serde_json::to_string(&event_json)?);
640-
}
641-
_ => {}
642-
}
647+
// Output as a single JSON line (JSONL format)
648+
println!("{}", serde_json::to_string(&event_json)?);
643649
}
644650

645651
match &event.msg {

cortex-common/src/model_presets.rs

Lines changed: 144 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1048,3 +1048,147 @@ pub fn get_models_for_provider(provider: &str) -> Vec<&'static ModelPreset> {
10481048
.filter(|m| m.provider == provider)
10491049
.collect()
10501050
}
1051+
1052+
/// Model alias entry mapping a short name to a full model identifier.
#[derive(Debug, Clone, Copy)]
pub struct ModelAlias {
    /// Short alias (e.g., "sonnet").
    pub alias: &'static str,
    /// Full model identifier (e.g., "anthropic/claude-sonnet-4-20250514").
    pub model: &'static str,
}

/// Compact constructor used to keep the alias table readable.
const fn model_alias(alias: &'static str, model: &'static str) -> ModelAlias {
    ModelAlias { alias, model }
}

/// Built-in model aliases for common shortcuts.
pub const MODEL_ALIASES: &[ModelAlias] = &[
    // Claude models
    model_alias("claude", "anthropic/claude-opus-4.5"),
    model_alias("opus", "anthropic/claude-opus-4.5"),
    model_alias("sonnet", "anthropic/claude-sonnet-4-20250514"),
    model_alias("haiku", "anthropic/claude-haiku-4.5"),
    // OpenAI models
    model_alias("gpt4", "openai/gpt-4o"),
    model_alias("gpt", "openai/gpt-4o"),
    model_alias("o1", "openai/o1"),
    model_alias("o3", "openai/o3"),
    model_alias("codex", "openai/gpt-5.2-codex"),
    // Google models
    model_alias("gemini", "google/gemini-2.5-pro-preview-06-05"),
    // DeepSeek models
    model_alias("deepseek", "deepseek/deepseek-chat"),
    model_alias("r1", "deepseek/deepseek-r1"),
    // Meta models
    model_alias("llama", "meta-llama/llama-3.3-70b-instruct"),
];

/// Resolves a model alias to its full model identifier.
///
/// Known aliases are matched case-insensitively and expanded to the
/// corresponding full model name; any other input is returned unchanged.
///
/// # Examples
///
/// ```
/// use cortex_common::resolve_model_alias;
///
/// assert_eq!(resolve_model_alias("sonnet"), "anthropic/claude-sonnet-4-20250514");
/// assert_eq!(resolve_model_alias("gpt4"), "openai/gpt-4o");
/// assert_eq!(resolve_model_alias("unknown-model"), "unknown-model");
/// ```
pub fn resolve_model_alias(model: &str) -> &str {
    for entry in MODEL_ALIASES {
        if entry.alias.eq_ignore_ascii_case(model) {
            return entry.model;
        }
    }
    // Not a known alias: pass the name through untouched.
    model
}

/// Returns a list of all available model aliases.
pub fn list_model_aliases() -> &'static [ModelAlias] {
    MODEL_ALIASES
}
1148+
1149+
#[cfg(test)]
mod alias_tests {
    use super::*;

    /// Known aliases resolve to their full model identifiers.
    #[test]
    fn test_resolve_model_alias_known() {
        let expected = [
            ("sonnet", "anthropic/claude-sonnet-4-20250514"),
            ("opus", "anthropic/claude-opus-4.5"),
            ("gpt4", "openai/gpt-4o"),
            ("haiku", "anthropic/claude-haiku-4.5"),
            ("codex", "openai/gpt-5.2-codex"),
            ("r1", "deepseek/deepseek-r1"),
        ];
        for (short, full) in expected {
            assert_eq!(resolve_model_alias(short), full);
        }
    }

    /// Alias matching ignores ASCII case.
    #[test]
    fn test_resolve_model_alias_case_insensitive() {
        for spelling in ["SONNET", "Sonnet"] {
            assert_eq!(
                resolve_model_alias(spelling),
                "anthropic/claude-sonnet-4-20250514"
            );
        }
        assert_eq!(resolve_model_alias("GPT4"), "openai/gpt-4o");
    }

    /// Unrecognized names (including full identifiers) pass through untouched.
    #[test]
    fn test_resolve_model_alias_unknown() {
        assert_eq!(resolve_model_alias("unknown-model"), "unknown-model");
        assert_eq!(
            resolve_model_alias("anthropic/claude-3-opus"),
            "anthropic/claude-3-opus"
        );
    }

    /// The alias listing is non-empty and contains known entries.
    #[test]
    fn test_list_model_aliases() {
        let aliases = list_model_aliases();
        assert!(!aliases.is_empty());
        assert!(aliases.iter().any(|a| a.alias == "sonnet"));
    }
}

0 commit comments

Comments (0)