Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 36 additions & 8 deletions bin/gstack-global-discover.ts
Original file line number Diff line number Diff line change
Expand Up @@ -297,6 +297,39 @@ function extractCwdFromJsonl(filePath: string): string | null {
return null;
}


/**
 * Reads a file's first line without loading the whole file into memory.
 *
 * Reads in fixed-size slices from the start of the file until the first
 * `\n` is seen or `maxBytes` bytes have been consumed.
 *
 * @param filePath - Path of the file to read.
 * @param maxBytes - Upper bound on bytes examined (default 1 MiB).
 * @returns The first line without its trailing newline; if no newline
 *          appears within the bound, the prefix read so far; `null` for
 *          an empty file.
 */
function readFirstLineBounded(filePath: string, maxBytes = 1024 * 1024): string | null {
  const NEWLINE = 0x0a; // '\n'
  const READ_SIZE = 65536;
  const fd = openSync(filePath, "r");
  try {
    const pieces: Buffer[] = [];
    let offset = 0; // bytes consumed so far; also the next read position

    for (;;) {
      const budget = maxBytes - offset;
      if (budget <= 0) break; // bound exhausted without finding a newline

      const scratch = Buffer.alloc(budget < READ_SIZE ? budget : READ_SIZE);
      const got = readSync(fd, scratch, 0, scratch.length, offset);
      if (got === 0) break; // EOF

      const data = scratch.subarray(0, got);
      const nl = data.indexOf(NEWLINE);
      if (nl >= 0) {
        // Newline found: keep only the bytes before it and assemble the line.
        pieces.push(data.subarray(0, nl));
        offset += nl; // offset now equals the total length of all pieces
        return Buffer.concat(pieces, offset).toString("utf-8");
      }

      pieces.push(data);
      offset += got;
    }

    // No newline within the bound (or before EOF): return what we have.
    return pieces.length > 0 ? Buffer.concat(pieces, offset).toString("utf-8") : null;
  } finally {
    closeSync(fd);
  }
}

function scanCodex(since: Date): Session[] {
const sessionsDir = process.env.CODEX_SESSIONS_DIR || join(homedir(), ".codex", "sessions");
if (!existsSync(sessionsDir)) return [];
Expand Down Expand Up @@ -334,15 +367,10 @@ function scanCodex(since: Date): Session[] {
}

// Codex session_meta lines embed the full system prompt in
// base_instructions (~15KB as of CLI v0.117+). A 4KB buffer
// truncates the line and JSON.parse fails. 128KB covers current
// sizes with room for growth.
// base_instructions. Read until the first newline instead of
// relying on a fixed-size prefix so larger prompts still parse.
try {
const fd = openSync(filePath, "r");
const buf = Buffer.alloc(131072);
const bytesRead = readSync(fd, buf, 0, 131072, 0);
closeSync(fd);
const firstLine = buf.toString("utf-8", 0, bytesRead).split("\n")[0];
const firstLine = readFirstLineBounded(filePath);
if (!firstLine) continue;
const meta = JSON.parse(firstLine);
if (meta.type === "session_meta" && meta.payload?.cwd) {
Expand Down
58 changes: 26 additions & 32 deletions test/global-discover.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -251,42 +251,36 @@ describe("gstack-global-discover", () => {
expect(meta.payload.cwd).toBe("/tmp/test-repo");
});

test("regression: session_meta beyond 128KB still needs streaming parse", () => {
// This test documents the current limitation: 128KB buffer is a heuristic.
// If Codex ever embeds >128KB in session_meta, this test will fail,
// signaling that the buffer needs to increase or be replaced with streaming.
const padding = "x".repeat(140000); // ~140KB payload
const sessionMeta = JSON.stringify({
timestamp: new Date().toISOString(),
type: "session_meta",
payload: {
id: "test-large",
timestamp: new Date().toISOString(),
cwd: "/tmp/large-test",
originator: "codex_exec",
cli_version: "0.200.0",
source: "exec",
model_provider: "openai",
base_instructions: { text: padding },
},
test("discovers codex sessions with session_meta beyond 128KB", () => {
const repoDir = join(tmpDir, "large-repo");
mkdirSync(repoDir);
spawnSync("git", ["init"], { cwd: repoDir, stdio: "pipe" });
spawnSync("git", ["commit", "--allow-empty", "-m", "init"], {
cwd: repoDir,
stdio: "pipe",
});

expect(sessionMeta.length).toBeGreaterThan(131072);
const filePath = writeCodexSession(codexDir, repoDir, 140000);
const firstLine = require("fs").readFileSync(filePath, "utf-8").split("\n")[0];
expect(firstLine.length).toBeGreaterThan(131072);

const filePath = join(codexDir, "large-test.jsonl");
writeFileSync(filePath, sessionMeta + "\n");
const result = spawnSync(
"bun",
["run", scriptPath, "--since", "1h", "--format", "json"],
{
encoding: "utf-8",
timeout: 30000,
env: {
...process.env,
CODEX_SESSIONS_DIR: join(tmpDir, "codex-home", "sessions"),
},
}
);

// 128KB buffer: JSON.parse FAILS for >128KB lines (current limitation)
const { openSync, readSync, closeSync } = require("fs");
const fd = openSync(filePath, "r");
const buf = Buffer.alloc(131072);
readSync(fd, buf, 0, 131072, 0);
closeSync(fd);
expect(() =>
JSON.parse(buf.toString("utf-8").split("\n")[0])
).toThrow();
// When this test starts passing (e.g., after implementing streaming parse),
// update it to verify correct parsing instead of documenting the limitation.
expect(result.status).toBe(0);
const json = JSON.parse(result.stdout);
expect(json.tools.codex.total_sessions).toBeGreaterThanOrEqual(1);
expect(json.repos.some((repo: any) => repo.paths.includes(repoDir))).toBe(true);
});
});

Expand Down