Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 28 additions & 2 deletions lib/accounts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ import {

const log = createLogger("accounts");
type StoredAccount = AccountStorageV3["accounts"][number];
const DISK_PREFERRED_MERGE_KEYS = new Set(["refreshToken", "accessToken", "expiresAt"]);
const CREDENTIAL_MERGE_KEYS = new Set(["refreshToken", "accessToken", "expiresAt"]);

function initFamilyState(defaultValue: number): Record<ModelFamily, number> {
return Object.fromEntries(
Expand Down Expand Up @@ -854,13 +854,18 @@ export class AccountManager {
private mergeStoredAccountRecords(current: StoredAccount, incoming: StoredAccount): StoredAccount {
const next: StoredAccount = { ...current };
const nextRecord = next as unknown as Record<string, unknown>;
const preferredCredentialSource = this.selectCredentialMergeSource(current, incoming);
for (const [rawKey, rawValue] of Object.entries(incoming)) {
const value = rawValue as unknown;
if (value === undefined) {
continue;
}
const currentValue = nextRecord[rawKey];
if (DISK_PREFERRED_MERGE_KEYS.has(rawKey) && currentValue !== undefined) {
if (
CREDENTIAL_MERGE_KEYS.has(rawKey) &&
currentValue !== undefined &&
preferredCredentialSource === "current"
) {
continue;
}
if (
Expand Down Expand Up @@ -899,6 +904,27 @@ export class AccountManager {
return next;
}

private selectCredentialMergeSource(
current: StoredAccount,
incoming: StoredAccount,
): "current" | "incoming" {
const currentExpiresAt =
typeof current.expiresAt === "number" ? current.expiresAt : undefined;
const incomingExpiresAt =
typeof incoming.expiresAt === "number" ? incoming.expiresAt : undefined;

if (incomingExpiresAt === undefined && currentExpiresAt === undefined) {
return "incoming";
}
Comment on lines +916 to +918
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

when both credentials lack expiresAt, the code returns "incoming" (disk token), which contradicts the PR goal to "prefer the freshest credential set."

the PR description says fresh tokens are preferred during conflict recovery, but when both tokens lack expiry timestamps, preferring incoming (the on-disk snapshot) can silently regress non-expiring api-key style credentials to stale disk tokens regardless of recency.

current is the in-memory merged value (fresher), while incoming is the on-disk snapshot (potentially stale). when timestamps are absent on both, preferring current is safer and aligns with the stated goal.

suggest:

Suggested change
if (incomingExpiresAt === undefined && currentExpiresAt === undefined) {
return "incoming";
}
if (incomingExpiresAt === undefined && currentExpiresAt === undefined) {
return "current";
}

also missing: no vitest coverage exists for this tie-breaking path, which changed behavior and carries token safety risk.

Prompt To Fix With AI
This is a comment left during a code review.
Path: lib/accounts.ts
Line: 916-918

Comment:
when both credentials lack `expiresAt`, the code returns `"incoming"` (disk token), which contradicts the PR goal to "prefer the freshest credential set."

the PR description says fresh tokens are preferred during conflict recovery, but when both tokens lack expiry timestamps, preferring `incoming` (the on-disk snapshot) can silently regress non-expiring api-key style credentials to stale disk tokens regardless of recency.

`current` is the in-memory merged value (fresher), while `incoming` is the on-disk snapshot (potentially stale). when timestamps are absent on both, preferring `current` is safer and aligns with the stated goal.

suggest:

```suggestion
		if (incomingExpiresAt === undefined && currentExpiresAt === undefined) {
			return "current";
		}
```

also missing: no vitest coverage exists for this tie-breaking path, which changed behavior and carries token safety risk.

How can I resolve this? If you propose a fix, please make it concise.

Fix in Codex

if (incomingExpiresAt === undefined) {
return "current";
}
if (currentExpiresAt === undefined) {
return "incoming";
}
return incomingExpiresAt > currentExpiresAt ? "incoming" : "current";
}

private applyPersistedStorageSnapshot(storage: AccountStorageV3): void {
const previousByRefreshToken = new Map(
this.accounts.map((account) => [account.refreshToken, account] as const),
Expand Down
61 changes: 35 additions & 26 deletions lib/storage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2092,7 +2092,7 @@ function cloneFlaggedStorageForPersist(storage: FlaggedAccountStorageV1): Flagge
};
}

export async function loadFlaggedAccounts(): Promise<FlaggedAccountStorageV1> {
async function loadFlaggedAccountsUnlocked(): Promise<FlaggedAccountStorageV1> {
const path = getFlaggedAccountsPath();
const empty: FlaggedAccountStorageV1 = { version: 1, accounts: [] };

Expand Down Expand Up @@ -2121,7 +2121,7 @@ export async function loadFlaggedAccounts(): Promise<FlaggedAccountStorageV1> {
const legacyData = JSON.parse(legacyContent) as unknown;
const migrated = normalizeFlaggedStorage(legacyData);
if (migrated.accounts.length > 0) {
await saveFlaggedAccounts(migrated);
await saveFlaggedAccountsUnlocked(migrated);
}
try {
await fs.unlink(legacyPath);
Expand All @@ -2148,28 +2148,34 @@ export async function loadFlaggedAccounts(): Promise<FlaggedAccountStorageV1> {
}
}

export async function saveFlaggedAccounts(storage: FlaggedAccountStorageV1): Promise<void> {
return withStorageLock(async () => {
const path = getFlaggedAccountsPath();
const uniqueSuffix = `${Date.now()}.${Math.random().toString(36).slice(2, 8)}`;
const tempPath = `${path}.${uniqueSuffix}.tmp`;
async function saveFlaggedAccountsUnlocked(storage: FlaggedAccountStorageV1): Promise<void> {
const path = getFlaggedAccountsPath();
const uniqueSuffix = `${Date.now()}.${Math.random().toString(36).slice(2, 8)}`;
const tempPath = `${path}.${uniqueSuffix}.tmp`;

try {
await fs.mkdir(dirname(path), { recursive: true });
const normalized = normalizeFlaggedStorage(storage);
const content = JSON.stringify(cloneFlaggedStorageForPersist(normalized), null, 2);
await fs.writeFile(tempPath, content, { encoding: "utf-8", mode: 0o600 });
await fs.rename(tempPath, path);
} catch (error) {
Comment on lines +2156 to +2162
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

use retrying rename for flagged saves on windows locks.

lib/storage.ts:2161 uses a single fs.rename. transient EPERM/EBUSY can fail flagged writes during rotation and concurrent updates. reuse the existing retry helper at lib/storage.ts:454.

proposed patch
-    await fs.rename(tempPath, path);
+    await renameFileWithRetry(tempPath, path);

As per coding guidelines lib/**: focus on auth rotation, windows filesystem IO, and concurrency. verify every change cites affected tests (vitest) and that new queues handle EBUSY/429 scenarios. check for logging that leaks tokens or emails.

📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
try {
await fs.mkdir(dirname(path), { recursive: true });
const normalized = normalizeFlaggedStorage(storage);
const content = JSON.stringify(cloneFlaggedStorageForPersist(normalized), null, 2);
await fs.writeFile(tempPath, content, { encoding: "utf-8", mode: 0o600 });
await fs.rename(tempPath, path);
} catch (error) {
try {
await fs.mkdir(dirname(path), { recursive: true });
const normalized = normalizeFlaggedStorage(storage);
const content = JSON.stringify(cloneFlaggedStorageForPersist(normalized), null, 2);
await fs.writeFile(tempPath, content, { encoding: "utf-8", mode: 0o600 });
await renameFileWithRetry(tempPath, path);
} catch (error) {
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@lib/storage.ts` around lines 2156 - 2162, The current save block uses a
single fs.rename which can fail on Windows with transient EPERM/EBUSY; replace
the direct await fs.rename(tempPath, path) in the flagged-save block that uses
normalizeFlaggedStorage and cloneFlaggedStorageForPersist with the project’s
existing retry helper (the retry function defined near lib/storage.ts:454) to
attempt the rename multiple times with backoff and only surface the error after
retries exhaust; ensure the retry only targets transient filesystem errors
(EPERM/EBUSY) and preserves the same tempPath/path semantics, add/adjust tests
(vitest) to cover Windows-like EBUSY/429 scenarios for rotation/concurrent
updates, and avoid logging any sensitive data (tokens/emails) during retries or
errors.

try {
await fs.mkdir(dirname(path), { recursive: true });
const normalized = normalizeFlaggedStorage(storage);
const content = JSON.stringify(cloneFlaggedStorageForPersist(normalized), null, 2);
await fs.writeFile(tempPath, content, { encoding: "utf-8", mode: 0o600 });
await fs.rename(tempPath, path);
} catch (error) {
try {
await fs.unlink(tempPath);
} catch {
// Ignore cleanup failures.
}
log.error("Failed to save flagged account storage", { path, error: String(error) });
throw error;
await fs.unlink(tempPath);
} catch {
// Ignore cleanup failures.
}
});
log.error("Failed to save flagged account storage", { path, error: String(error) });
throw error;
}
}

/**
 * Loads the flagged-account storage file.
 *
 * NOTE(review): this read takes no storage lock (it delegates straight to the
 * unlocked variant). Callers performing a read-modify-write cycle
 * (load → mutate → saveFlaggedAccounts) must hold `withStorageLock` around the
 * whole cycle themselves — see `rotateStoredSecretEncryption` for the pattern —
 * otherwise a concurrent save can land between the load and the save and be
 * silently overwritten.
 */
export async function loadFlaggedAccounts(): Promise<FlaggedAccountStorageV1> {
return loadFlaggedAccountsUnlocked();
}
Comment on lines +2173 to +2175
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

loadFlaggedAccounts exports the unlocked variant without guarding — any future caller doing a read-modify-write cycle risks a TOCTOU race.

The rotateStoredSecretEncryption fix is solid (both read and write under withStorageLock), but loadFlaggedAccounts() is now just a passthrough to the unguarded loadFlaggedAccountsUnlocked(). if external code does:

const accounts = await loadFlaggedAccounts();   // no lock held
// concurrent saveFlaggedAccounts can fire here
await saveFlaggedAccounts(mergeOrModify(accounts)); // writes stale snapshot

...the concurrent write is silently lost. on windows this is especially risky because antivirus i/o can extend the TOCTOU window substantially.

consider adding explicit jsdoc to loadFlaggedAccounts documenting that callers needing a read-modify-write cycle must hold withStorageLock externally, or expose a locked variant as the public API to prevent future misuse.

Prompt To Fix With AI
This is a comment left during a code review.
Path: lib/storage.ts
Line: 2173-2175

Comment:
`loadFlaggedAccounts` exports the unlocked variant without guarding — any future caller doing a read-modify-write cycle risks a TOCTOU race.

The `rotateStoredSecretEncryption` fix is solid (both read and write under `withStorageLock`), but `loadFlaggedAccounts()` is now just a passthrough to the unguarded `loadFlaggedAccountsUnlocked()`. if external code does:

```ts
const accounts = await loadFlaggedAccounts();   // no lock held
// concurrent saveFlaggedAccounts can fire here
await saveFlaggedAccounts(mergeOrModify(accounts)); // writes stale snapshot
```

...the concurrent write is silently lost. on windows this is especially risky because antivirus i/o can extend the TOCTOU window substantially.

consider adding explicit jsdoc to `loadFlaggedAccounts` documenting that callers needing a read-modify-write cycle must hold `withStorageLock` externally, or expose a locked variant as the public API to prevent future misuse.

How can I resolve this? If you propose a fix, please make it concise.

Fix in Codex


/**
 * Persists flagged-account storage, serialised behind the process-wide
 * storage lock so concurrent saves cannot interleave with each other or with
 * other lock-holding storage operations. The actual write happens in
 * `saveFlaggedAccountsUnlocked` (temp file + rename).
 */
export async function saveFlaggedAccounts(storage: FlaggedAccountStorageV1): Promise<void> {
return withStorageLock(() => saveFlaggedAccountsUnlocked(storage));
}

export async function clearFlaggedAccounts(): Promise<void> {
Expand Down Expand Up @@ -2292,11 +2298,14 @@ export async function rotateStoredSecretEncryption(): Promise<{
return current.accounts.length;
});

const flagged = await loadFlaggedAccounts();
const flaggedCount = flagged.accounts.length;
if (flaggedCount > 0) {
await saveFlaggedAccounts(flagged);
}
const flaggedCount = await withStorageLock(async () => {
const flagged = await loadFlaggedAccountsUnlocked();
const count = flagged.accounts.length;
if (count > 0) {
await saveFlaggedAccountsUnlocked(flagged);
}
return count;
});

return {
accounts: accountCount,
Expand Down
46 changes: 30 additions & 16 deletions lib/telemetry.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,4 @@
import {
existsSync,
promises as fs,
readdirSync,
renameSync,
statSync,
unlinkSync,
} from "node:fs";
import { existsSync, promises as fs, readdirSync } from "node:fs";
import { join } from "node:path";
import { getCorrelationId, maskEmail } from "./logger.js";
import { getCodexLogDir } from "./runtime-paths.js";
Expand Down Expand Up @@ -144,21 +137,42 @@ function parseArchiveSuffix(fileName: string): number | null {
return Number.isFinite(parsed) ? parsed : null;
}

function rotateLogsIfNeeded(): void {
// True when the given error is a Node.js "file not found" (ENOENT) failure;
// false for every other value, including non-errors and undefined.
function isMissingFsError(error: unknown): boolean {
	const errno = error as NodeJS.ErrnoException | undefined;
	return errno?.code === "ENOENT";
}

async function rotateLogsIfNeeded(): Promise<void> {
const logPath = getTelemetryPath();
if (!existsSync(logPath)) return;
let size: number;
try {
size = (await fs.stat(logPath)).size;
} catch (error) {
if (isMissingFsError(error)) {
return;
}
throw error;
}

const size = statSync(logPath).size;
if (size < telemetryConfig.maxFileSizeBytes) return;

for (let i = telemetryConfig.maxFiles - 1; i >= 1; i -= 1) {
const target = `${logPath}.${i}`;
const source = i === 1 ? logPath : `${logPath}.${i - 1}`;
if (i === telemetryConfig.maxFiles - 1 && existsSync(target)) {
unlinkSync(target);
if (i === telemetryConfig.maxFiles - 1) {
try {
await fs.unlink(target);
} catch (error) {
if (!isMissingFsError(error)) {
throw error;
}
}
}
if (existsSync(source)) {
renameSync(source, target);
try {
await fs.rename(source, target);
} catch (error) {
if (!isMissingFsError(error)) {
throw error;
}
}
Comment on lines +161 to 176
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

add retry/backoff for windows busy-file rotation failures.

fs.unlink and fs.rename in lib/telemetry.ts:163 and lib/telemetry.ts:171 fail hard on transient EBUSY/EPERM/EAGAIN. that bubbles into recordTelemetryEvent and gets swallowed at lib/telemetry.ts:274, dropping events.

proposed patch
+function isRetryableFsBusyError(error: unknown): boolean {
+  const code = (error as NodeJS.ErrnoException | undefined)?.code;
+  return code === "EBUSY" || code === "EPERM" || code === "EAGAIN";
+}
+
+async function withFsRetry(task: () => Promise<void>): Promise<void> {
+  for (let attempt = 0; attempt < 5; attempt += 1) {
+    try {
+      await task();
+      return;
+    } catch (error) {
+      if (!isRetryableFsBusyError(error) || attempt === 4) {
+        throw error;
+      }
+      await new Promise((resolve) => setTimeout(resolve, 20 * 2 ** attempt));
+    }
+  }
+}
+
 async function rotateLogsIfNeeded(): Promise<void> {
@@
-      try {
-        await fs.unlink(target);
+      try {
+        await withFsRetry(() => fs.unlink(target));
       } catch (error) {
@@
-    try {
-      await fs.rename(source, target);
+    try {
+      await withFsRetry(() => fs.rename(source, target));
     } catch (error) {

As per coding guidelines lib/**: focus on auth rotation, windows filesystem IO, and concurrency. verify every change cites affected tests (vitest) and that new queues handle EBUSY/429 scenarios. check for logging that leaks tokens or emails.

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@lib/telemetry.ts` around lines 161 - 176, The unlink/rename calls in the
telemetry rotation code (around the block using fs.unlink(target) and
fs.rename(source, target)) must retry on transient Windows filesystem errors
(EBUSY/EPERM/EAGAIN) instead of failing immediately; implement a small
retry-with-exponential-backoff-and-jitter helper (e.g., retryFsOp or
useTransientRetry) and use it to wrap the existing unlink and rename calls,
consult and reuse isMissingFsError for non-transient-not-found handling, and
ensure non-transient errors are still rethrown; update
recordTelemetryEvent-related flows (where failures were previously swallowed)
and add/adjust Vitest unit tests to cover EBUSY/EPERM/EAGAIN transient failures
and successful retry, making sure no logs leak tokens/emails.

Comment on lines +170 to 176
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

isMissingFsError only catches ENOENT when renaming intermediate archive slots. on windows/ntfs, if a target file is held open by antivirus or another process, fs.rename throws EPERM instead, which propagates out uncaught.

since rotateLogsIfNeeded() is called from within recordTelemetryEvent's outer catch { } (line 274-276), the error gets swallowed silently — the telemetry event for that call is lost permanently because the appendFile line never executes.

the oldest archive (line 161-168) is explicitly unlinked first, mitigating this for that slot, but intermediate renames (i < maxFiles - 1) are vulnerable. on busy windows systems with aggressive antivirus, this can cause systematic telemetry loss under log rotation.

consider extending isMissingFsError to include EPERM and EACCES:

function isMissingFsError(error: unknown): boolean {
  const code = (error as NodeJS.ErrnoException | undefined)?.code;
  return code === "ENOENT" || code === "EPERM" || code === "EACCES";
}

also consider adding a regression test that mocks intermediate rename to reject with EPERM to verify graceful handling.

Prompt To Fix With AI
This is a comment left during a code review.
Path: lib/telemetry.ts
Line: 170-176

Comment:
`isMissingFsError` only catches `ENOENT` when renaming intermediate archive slots. on windows/ntfs, if a target file is held open by antivirus or another process, `fs.rename` throws `EPERM` instead, which propagates out uncaught.

since `rotateLogsIfNeeded()` is called from within `recordTelemetryEvent`'s outer `catch { }` (line 274-276), the error gets swallowed silently — the telemetry event for that call is lost permanently because the `appendFile` line never executes.

the oldest archive (line 161-168) is explicitly unlinked first, mitigating this for that slot, but intermediate renames (i < maxFiles - 1) are vulnerable. on busy windows systems with aggressive antivirus, this can cause systematic telemetry loss under log rotation.

consider extending `isMissingFsError` to include `EPERM` and `EACCES`:

```ts
function isMissingFsError(error: unknown): boolean {
  const code = (error as NodeJS.ErrnoException | undefined)?.code;
  return code === "ENOENT" || code === "EPERM" || code === "EACCES";
}
```

also consider adding a regression test that mocks intermediate rename to reject with `EPERM` to verify graceful handling.

How can I resolve this? If you propose a fix, please make it concise.

Fix in Codex

}
}
Expand Down Expand Up @@ -253,7 +267,7 @@ export async function recordTelemetryEvent(input: TelemetryEventInput): Promise<
try {
await queueAppend(async () => {
await ensureLogDir();
rotateLogsIfNeeded();
await rotateLogsIfNeeded();
const line = `${JSON.stringify(entry)}\n`;
await fs.appendFile(getTelemetryPath(), line, "utf8");
});
Expand Down
52 changes: 52 additions & 0 deletions test/accounts-edge.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -564,6 +564,58 @@ describe("accounts edge branches", () => {
);
});

it("prefers fresher local credentials over stale disk tokens during conflict merge", async () => {
	// In-memory credentials expire later than the on-disk snapshot, so the
	// conflict-recovery merge must keep the in-memory ("fresh") token set.
	const staleExpiresAt = Date.now() + 1_000;
	const freshExpiresAt = Date.now() + 120_000;
	const stored = buildStored([
		buildStoredAccount({
			refreshToken: "refresh-fresh",
			accessToken: "access-fresh",
			expiresAt: freshExpiresAt,
			email: "identity@example.com",
			accountId: "account-identity-1",
		}),
	]);

	// On-disk snapshot for the same identity, carrying older credentials.
	const latestDisk = buildStored([
		buildStoredAccount({
			refreshToken: "refresh-stale",
			accessToken: "access-stale",
			expiresAt: staleExpiresAt,
			email: "identity@example.com",
			accountId: "account-identity-1",
		}),
	]);

	// First save hits a conflict, forcing the manager to reload from disk,
	// merge, and retry; the retry succeeds.
	const conflictError = Object.assign(new Error("conflict"), {
		code: "ECONFLICT",
	});
	mockSaveAccounts
		.mockRejectedValueOnce(conflictError)
		.mockResolvedValueOnce(undefined);
	mockLoadAccounts.mockResolvedValueOnce(latestDisk);

	const { AccountManager } = await importAccountsModule();
	const manager = new AccountManager(undefined, stored as never);

	await manager.saveToDisk();

	// Second call (index 1) is the post-merge retry payload.
	const retriedPayload = mockSaveAccounts.mock.calls[1]?.[0] as {
		accounts: Array<{
			accountId?: string;
			refreshToken: string;
			accessToken?: string;
			expiresAt?: number;
		}>;
	};
	const mergedAccount = retriedPayload.accounts.find(
		(account) => account.accountId === "account-identity-1",
	);
	// Assert identity stability explicitly so an identity-merge regression
	// (merged account missing or re-keyed) fails fast, not just the
	// credential checks below.
	expect(mergedAccount?.accountId).toBe("account-identity-1");
	expect(mergedAccount?.refreshToken).toBe("refresh-fresh");
	expect(mergedAccount?.accessToken).toBe("access-fresh");
	expect(mergedAccount?.expiresAt).toBe(freshExpiresAt);
});
Comment on lines +611 to +617
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

assert account id stability in this regression.

the new case verifies credential fields but not identity stability. add an explicit accountId assertion so identity merge regressions fail fast. see test/accounts-edge.test.ts:611 and lib/accounts.ts:810.

proposed patch
     const mergedAccount = retriedPayload.accounts.find(
       (account) => account.accountId === "account-identity-1",
     );
+    expect(mergedAccount?.accountId).toBe("account-identity-1");
     expect(mergedAccount?.refreshToken).toBe("refresh-fresh");
     expect(mergedAccount?.accessToken).toBe("access-fresh");
     expect(mergedAccount?.expiresAt).toBe(freshExpiresAt);

As per coding guidelines test/**: tests must stay deterministic and use vitest. demand regression cases that reproduce concurrency bugs, token refresh races, and windows filesystem behavior. reject changes that mock real secrets or skip assertions.

📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
const mergedAccount = retriedPayload.accounts.find(
(account) => account.accountId === "account-identity-1",
);
expect(mergedAccount?.refreshToken).toBe("refresh-fresh");
expect(mergedAccount?.accessToken).toBe("access-fresh");
expect(mergedAccount?.expiresAt).toBe(freshExpiresAt);
});
const mergedAccount = retriedPayload.accounts.find(
(account) => account.accountId === "account-identity-1",
);
expect(mergedAccount?.accountId).toBe("account-identity-1");
expect(mergedAccount?.refreshToken).toBe("refresh-fresh");
expect(mergedAccount?.accessToken).toBe("access-fresh");
expect(mergedAccount?.expiresAt).toBe(freshExpiresAt);
});
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@test/accounts-edge.test.ts` around lines 611 - 617, The test currently checks
credential fields for the merged account but omits an explicit identity
assertion; update the test so after finding mergedAccount from
retriedPayload.accounts (the variable `mergedAccount` in accounts-edge.test.ts)
you assert mergedAccount.accountId === "account-identity-1" to ensure identity
stability and catch merge regressions (this ties to the merge behavior in
lib/accounts.ts around the merge logic at ~810); keep the assertion
deterministic using the vitest expect API already in the file and do not mock
secrets or remove existing credential assertions.


it("prefers fresher timestamp and rate-limit reset values during conflict merge", async () => {
const localNow = Date.now();
const localAccount = buildStoredAccount({
Expand Down
79 changes: 79 additions & 0 deletions test/storage-flagged.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import {
getFlaggedAccountsPath,
getStoragePath,
loadFlaggedAccounts,
rotateStoredSecretEncryption,
saveFlaggedAccounts,
setStoragePathDirect,
} from "../lib/storage.js";
Expand Down Expand Up @@ -239,4 +240,82 @@ describe("flagged account storage", () => {

renameSpy.mockRestore();
});

it("preserves concurrent flagged-account updates during secret rotation", async () => {
const previousEncryptionKey = process.env.CODEX_AUTH_ENCRYPTION_KEY;
const previousPreviousKey = process.env.CODEX_AUTH_PREVIOUS_ENCRYPTION_KEY;
process.env.CODEX_AUTH_ENCRYPTION_KEY = "0123456789abcdef0123456789abcdef";
delete process.env.CODEX_AUTH_PREVIOUS_ENCRYPTION_KEY;

await saveFlaggedAccounts({
version: 1,
accounts: [
{
refreshToken: "flagged-alpha",
flaggedAt: 1,
addedAt: 1,
lastUsed: 1,
},
],
});
Comment on lines +245 to +260
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

move env mutation inside the try/finally restoration scope.

process.env is changed before the try starts in test/storage-flagged.test.ts:247-260. if setup throws there, keys are not restored and later tests can flake.

proposed patch
-    process.env.CODEX_AUTH_ENCRYPTION_KEY = "0123456789abcdef0123456789abcdef";
-    delete process.env.CODEX_AUTH_PREVIOUS_ENCRYPTION_KEY;
-
-    await saveFlaggedAccounts({
-      version: 1,
-      accounts: [
-        {
-          refreshToken: "flagged-alpha",
-          flaggedAt: 1,
-          addedAt: 1,
-          lastUsed: 1,
-        },
-      ],
-    });
+    try {
+      process.env.CODEX_AUTH_ENCRYPTION_KEY = "0123456789abcdef0123456789abcdef";
+      delete process.env.CODEX_AUTH_PREVIOUS_ENCRYPTION_KEY;
+
+      await saveFlaggedAccounts({
+        version: 1,
+        accounts: [
+          {
+            refreshToken: "flagged-alpha",
+            flaggedAt: 1,
+            addedAt: 1,
+            lastUsed: 1,
+          },
+        ],
+      });
 
-    let concurrentSavePromise: Promise<void> | null = null;
+      let concurrentSavePromise: Promise<void> | null = null;
       // ... existing test body ...
-    try {
-      const result = await rotateStoredSecretEncryption();
+      const result = await rotateStoredSecretEncryption();
       // ...
-    } finally {
+    } finally {
       readFileSpy.mockRestore();
       // env restore stays here
     }

As per coding guidelines test/**: tests must stay deterministic and use vitest. demand regression cases that reproduce concurrency bugs, token refresh races, and windows filesystem behavior. reject changes that mock real secrets or skip assertions.

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@test/storage-flagged.test.ts` around lines 245 - 260, Move the process.env
mutations for CODEX_AUTH_ENCRYPTION_KEY and CODEX_AUTH_PREVIOUS_ENCRYPTION_KEY
into the try/finally scope so the original values saved in previousEncryptionKey
and previousPreviousKey are always restored; specifically, in the test
surrounding saveFlaggedAccounts(...) wrap the env setup (setting
process.env.CODEX_AUTH_ENCRYPTION_KEY and deleting
process.env.CODEX_AUTH_PREVIOUS_ENCRYPTION_KEY) right after entering try and
perform the restoration of previousEncryptionKey/previousPreviousKey in the
finally block to guarantee cleanup even if setup or saveFlaggedAccounts throws.


let concurrentSavePromise: Promise<void> | null = null;
const flaggedPath = getFlaggedAccountsPath();
const originalReadFile = fs.readFile.bind(fs);
const readFileSpy = vi.spyOn(fs, "readFile").mockImplementation(async (path, options) => {
if (
concurrentSavePromise === null &&
typeof path === "string" &&
path === flaggedPath &&
options === "utf-8"
) {
queueMicrotask(() => {
concurrentSavePromise = saveFlaggedAccounts({
version: 1,
accounts: [
{
refreshToken: "flagged-alpha",
flaggedAt: 1,
addedAt: 1,
lastUsed: 1,
},
{
refreshToken: "flagged-beta",
flaggedAt: 2,
addedAt: 2,
lastUsed: 2,
},
],
});
});
}
return originalReadFile(path, options);
});

try {
const result = await rotateStoredSecretEncryption();
if (concurrentSavePromise) {
await concurrentSavePromise;
}

const flagged = await loadFlaggedAccounts();
expect(result.flaggedAccounts).toBe(1);
expect(flagged.accounts.map((account) => account.refreshToken).sort()).toEqual([
"flagged-alpha",
"flagged-beta",
]);
} finally {
readFileSpy.mockRestore();
if (previousEncryptionKey === undefined) {
delete process.env.CODEX_AUTH_ENCRYPTION_KEY;
} else {
process.env.CODEX_AUTH_ENCRYPTION_KEY = previousEncryptionKey;
}
if (previousPreviousKey === undefined) {
delete process.env.CODEX_AUTH_PREVIOUS_ENCRYPTION_KEY;
} else {
process.env.CODEX_AUTH_PREVIOUS_ENCRYPTION_KEY = previousPreviousKey;
}
}
});
});
Loading