Skip to content

Added Microsoft Entra ID Support for Keyless Auth #1778

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion codex-rs/chatgpt/src/chatgpt_token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ pub fn set_chatgpt_token_data(value: TokenData) {

/// Initialize the ChatGPT token from auth.json file
pub async fn init_chatgpt_token_from_auth(codex_home: &Path) -> std::io::Result<()> {
let auth = codex_login::load_auth(codex_home, true)?;
let auth = codex_login::load_auth(codex_home, "", &None, true)?;
if let Some(auth) = auth {
let token_data = auth.get_token_data().await?;
set_chatgpt_token_data(token_data);
Expand Down
19 changes: 17 additions & 2 deletions codex-rs/cli/src/login.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,13 +46,24 @@ pub async fn run_login_with_api_key(
pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
let config = load_config_or_exit(cli_config_overrides);

match load_auth(&config.codex_home, true) {
match load_auth(
&config.codex_home,
&config.model_provider.name,
&config.model_provider.env_key,
true,
) {
Ok(Some(auth)) => match auth.mode {
AuthMode::ApiKey => {
if let Some(api_key) = auth.api_key.as_deref() {
eprintln!("Logged in using an API key - {}", safe_format_key(api_key));

if let Ok(env_api_key) = env::var(OPENAI_API_KEY_ENV_VAR) {
if let Ok(env_api_key) = env::var(
&config
.model_provider
.env_key
.as_deref()
.unwrap_or(OPENAI_API_KEY_ENV_VAR),
) {
if env_api_key == api_key {
eprintln!(
" API loaded from OPENAI_API_KEY environment variable or .env file"
Expand All @@ -64,6 +75,10 @@ pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
}
std::process::exit(0);
}
AuthMode::MicrosoftEntraID => {
eprintln!("Logged in using Azure CLI");
std::process::exit(0);
}
AuthMode::ChatGPT => {
eprintln!("Logged in using ChatGPT");
std::process::exit(0);
Expand Down
7 changes: 6 additions & 1 deletion codex-rs/cli/src/proto.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,12 @@ pub async fn run_main(opts: ProtoCli) -> anyhow::Result<()> {
.map_err(anyhow::Error::msg)?;

let config = Config::load_with_cli_overrides(overrides_vec, ConfigOverrides::default())?;
let auth = load_auth(&config.codex_home, true)?;
let auth = load_auth(
&config.codex_home,
&config.model_provider.name,
&config.model_provider.env_key,
true,
)?;
let ctrl_c = notify_on_sigint();
let CodexSpawnOk { codex, .. } = Codex::spawn(config, auth, ctrl_c.clone()).await?;
let codex = Arc::new(codex);
Expand Down
31 changes: 31 additions & 0 deletions codex-rs/config.md
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,8 @@ base_url = "https://api.mistral.ai/v1"
env_key = "MISTRAL_API_KEY"
```

### Azure OpenAI

Note that Azure requires `api-version` to be passed as a query parameter, so be sure to specify it as part of `query_params` when defining the Azure provider:

```toml
Expand All @@ -76,6 +78,35 @@ env_key = "AZURE_OPENAI_API_KEY" # Or "OPENAI_API_KEY", whichever you use.
query_params = { api-version = "2025-04-01-preview" }
```

**Support for Microsoft Entra ID for Keyless Authentication**

If you prefer to use Microsoft Entra ID for keyless authentication, you can follow these steps to get it working:

1. Install the Azure CLI [here](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli?view=azure-cli-latest).
2. Log in to the Azure CLI using the command `az login`.
3. Deploy a model by going to the "Models + endpoints" section of your [Azure AI Foundry](https://ai.azure.com/) project (it takes around 5 minutes for a newly deployed model to become available).
4. Edit your codex config (located in `~/.codex/config.toml`) to something like this:

```toml
profile = "codex"
disable_response_storage = true

[model_providers.azurecodex]
name = "Azure" # IMPORTANT: keep as "Azure" or "azure"
# Make sure you set the appropriate subdomain for this URL.
base_url = "https://YOUR_PROJECT_NAME/openai" # To find the value for YOUR_PROJECT_NAME, visit your Azure AI Foundry project and navigate to Overview > Azure OpenAI.
env_key = "AZURE_OPENAI_API_KEY" # placeholder for backwards compatibility, not actually used
query_params = { api-version = "2025-04-01-preview" }
wire_api = "responses" # Microsoft Entra ID is only implemented for responses wire_api

[profiles.codex]
model = "codex-mini"
model_provider = "azurecodex"
```



It is also possible to configure a provider to include extra HTTP headers with a request. These can be hardcoded values (`http_headers`) or values read from environment variables (`env_http_headers`):

```toml
Expand Down
37 changes: 30 additions & 7 deletions codex-rs/core/src/client.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,4 @@
use std::io::BufRead;
use std::path::Path;
use std::time::Duration;

use crate::config::find_codex_home;
use bytes::Bytes;
use codex_login::AuthMode;
use codex_login::CodexAuth;
Expand All @@ -11,6 +8,9 @@ use reqwest::StatusCode;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value;
use std::io::BufRead;
use std::path::Path;
use std::time::Duration;
use tokio::sync::mpsc;
use tokio::time::timeout;
use tokio_util::io::ReaderStream;
Expand Down Expand Up @@ -134,12 +134,35 @@ impl ModelClient {
let base_url = match self.provider.base_url.clone() {
Some(url) => url,
None => match auth.mode {
AuthMode::ChatGPT => "https://chatgpt.com/backend-api/codex".to_string(),
AuthMode::ApiKey => "https://api.openai.com/v1".to_string(),
_ => "https://chatgpt.com/backend-api/codex".to_string(),
},
};
let query_string = self
.provider
.query_params
.as_ref()
.map_or_else(String::new, |params| {
let full_params = params
.iter()
.map(|(k, v)| format!("{k}={v}"))
.collect::<Vec<_>>()
.join("&");
format!("?{full_params}")
});

let codex_home = find_codex_home();
let Ok(codex_home) = codex_home else {
return Err(CodexErr::EnvVar(EnvVarError {
var: "CODEX_HOME".to_string(),
instructions: Some(
"Set the CODEX_HOME environment variable to your Codex home directory."
.to_string(),
),
}));
};

let token = auth.get_token().await?;
let token = auth.get_token(&codex_home).await?;

let full_instructions = prompt.get_full_instructions(&self.config.model);
let tools_json = create_tools_json_for_responses_api(
Expand Down Expand Up @@ -194,7 +217,7 @@ impl ModelClient {

let mut req_builder = self
.client
.post(format!("{base_url}/responses"))
.post(format!("{base_url}/responses{query_string}"))
.header("OpenAI-Beta", "responses=experimental")
.header("session_id", self.session_id.to_string())
.bearer_auth(&token)
Expand Down
7 changes: 6 additions & 1 deletion codex-rs/core/src/codex_wrapper.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,12 @@ pub struct CodexConversation {
/// that callers can surface the information to the UI.
pub async fn init_codex(config: Config) -> anyhow::Result<CodexConversation> {
let ctrl_c = notify_on_sigint();
let auth = load_auth(&config.codex_home, true)?;
let auth = load_auth(
&config.codex_home,
&config.model_provider.name,
&config.model_provider.env_key,
true,
)?;
let CodexSpawnOk {
codex,
init_id,
Expand Down
Loading