Skip to content

Commit 8fc212d

Browse files
committed
Improve setup and settings provider selection UX
1 parent a6bffb7 commit 8fc212d

4 files changed

Lines changed: 269 additions & 3 deletions

File tree

src/config.ts

Lines changed: 65 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ import {
1818
normalizeOpenAiResponsesReasoningEffort
1919
} from "./integrations/openai/modelCatalog.js";
2020
import { ensureDir, fileExists, writeJsonFile } from "./utils/fs.js";
21-
import { askLine, askRequiredLine, PromptReader } from "./utils/prompt.js";
21+
import { askChoice, askLine, askRequiredLine, PromptReader } from "./utils/prompt.js";
2222

2323
export interface AppPaths {
2424
cwd: string;
@@ -134,11 +134,13 @@ export async function runSetupWizard(
134134
"Default objective metric",
135135
"state-of-the-art reproducibility"
136136
);
137+
writePrimaryLlmTradeoffGuidance();
137138
const llmMode = await askPrimaryLlmMode(promptReader);
138139
const openAiModel =
139140
llmMode === "openai_api"
140141
? await askOpenAiResponsesModel(promptReader)
141142
: DEFAULT_OPENAI_RESPONSES_MODEL;
143+
writePdfAnalysisTradeoffGuidance();
142144
const pdfAnalysisMode = await askPdfAnalysisMode(promptReader);
143145
const responsesPdfModel =
144146
pdfAnalysisMode === "responses_api_pdf"
@@ -356,9 +358,51 @@ function normalizePrimaryLlmMode(value: unknown): "codex_chatgpt_only" | "openai
356358
return value === "openai_api" ? value : "codex_chatgpt_only";
357359
}
358360

361+
function writePrimaryLlmTradeoffGuidance(): void {
362+
output.write(
363+
[
364+
"Primary LLM provider trade-off:",
365+
"- codex: uses Sign in with ChatGPT, no OpenAI API key needed, best fit for interactive coding and implement_experiments.",
366+
"- api: uses OpenAI API models, requires OPENAI_API_KEY, easier to control model choice and structured API behavior, but API usage is billed separately.",
367+
""
368+
].join("\n")
369+
);
370+
}
371+
372+
function writePdfAnalysisTradeoffGuidance(): void {
373+
output.write(
374+
[
375+
"PDF analysis trade-off:",
376+
"- codex: downloads PDFs locally and extracts text with local tools; cheaper to operate inside the current Codex flow, but extraction quality depends on local tooling.",
377+
"- api: sends PDFs to the OpenAI Responses API; usually better document understanding, but slower and requires OPENAI_API_KEY.",
378+
""
379+
].join("\n")
380+
);
381+
}
382+
359383
async function askPrimaryLlmMode(
360384
promptReader: PromptReader = askLine
361385
): Promise<"codex_chatgpt_only" | "openai_api"> {
386+
if (promptReader === askLine) {
387+
const answer = await askChoice(
388+
"Primary LLM provider",
389+
[
390+
{
391+
label: "codex",
392+
value: "codex_chatgpt_only",
393+
description: "(ChatGPT sign-in, best for interactive coding)"
394+
},
395+
{
396+
label: "api",
397+
value: "openai_api",
398+
description: "(OPENAI_API_KEY required, direct API control)"
399+
}
400+
],
401+
"codex_chatgpt_only"
402+
);
403+
return answer === "openai_api" ? "openai_api" : "codex_chatgpt_only";
404+
}
405+
362406
while (true) {
363407
const answer = (await promptReader("Primary LLM provider (codex/api)", "codex")).trim().toLowerCase();
364408
if (!answer || answer === "codex" || answer === "chatgpt" || answer === "codex_chatgpt_only") {
@@ -374,6 +418,26 @@ async function askPrimaryLlmMode(
374418
async function askPdfAnalysisMode(
375419
promptReader: PromptReader = askLine
376420
): Promise<"codex_text_extract" | "responses_api_pdf"> {
421+
if (promptReader === askLine) {
422+
const answer = await askChoice(
423+
"PDF analysis mode",
424+
[
425+
{
426+
label: "codex",
427+
value: "codex_text_extract",
428+
description: "(local PDF download + text extraction)"
429+
},
430+
{
431+
label: "api",
432+
value: "responses_api_pdf",
433+
description: "(Responses API PDF input, richer but slower)"
434+
}
435+
],
436+
"codex_text_extract"
437+
);
438+
return answer === "responses_api_pdf" ? "responses_api_pdf" : "codex_text_extract";
439+
}
440+
377441
while (true) {
378442
const answer = (await promptReader("PDF analysis mode (codex/api)", "codex")).trim().toLowerCase();
379443
if (!answer || answer === "codex") {

src/utils/prompt.ts

Lines changed: 98 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,16 @@
11
import { stdin as input, stdout as output } from "node:process";
2-
import readline from "node:readline/promises";
2+
import readline from "node:readline";
3+
import readlinePromises from "node:readline/promises";
34

45
export type PromptReader = (question: string, defaultValue?: string) => Promise<string>;
6+
/** One selectable option presented by the askChoice menu. */
export type PromptChoice = {
  /** Short text displayed in the menu (also accepted as a typed answer). */
  label: string;
  /** Canonical value returned when this choice is selected. */
  value: string;
  /** Optional hint rendered after the label, e.g. "(requires API key)". */
  description?: string;
};
511

612
export async function askLine(question: string, defaultValue = ""): Promise<string> {
7-
const rl = readline.createInterface({ input, output });
13+
const rl = readlinePromises.createInterface({ input, output });
814
try {
915
const suffix = defaultValue ? ` (${defaultValue})` : "";
1016
const answer = (await rl.question(`${question}${suffix}: `)).trim();
@@ -17,6 +23,96 @@ export async function askLine(question: string, defaultValue = ""): Promise<stri
1723
}
1824
}
1925

26+
export async function askChoice(question: string, choices: PromptChoice[], defaultValue?: string): Promise<string> {
27+
if (!input.isTTY || !output.isTTY || typeof input.setRawMode !== "function") {
28+
const fallbackDefault = defaultValue ?? choices[0]?.value ?? "";
29+
const answer = (await askLine(question, fallbackDefault)).trim();
30+
const normalized = answer.toLowerCase();
31+
const matched = choices.find((choice) => {
32+
const label = choice.label.toLowerCase();
33+
const value = choice.value.toLowerCase();
34+
return normalized === label || normalized === value;
35+
});
36+
return matched?.value ?? fallbackDefault;
37+
}
38+
39+
const defaultIndex = Math.max(
40+
0,
41+
choices.findIndex((choice) => choice.value === defaultValue || choice.label === defaultValue)
42+
);
43+
let selectedIndex = defaultIndex >= 0 ? defaultIndex : 0;
44+
let settled = false;
45+
46+
return await new Promise<string>((resolve, reject) => {
47+
const redraw = () => {
48+
const lines = [
49+
`${question}:`,
50+
...choices.map((choice, index) => {
51+
const pointer = index === selectedIndex ? ">" : " ";
52+
const suffix = choice.description ? ` ${choice.description}` : "";
53+
return `${pointer} ${choice.label}${suffix}`;
54+
})
55+
];
56+
output.write("\x1b[2K\r");
57+
output.write(lines.join("\n"));
58+
output.write(`\x1b[${lines.length - 1}A\r`);
59+
};
60+
61+
const cleanup = () => {
62+
input.off("keypress", onKeypress);
63+
if (input.isTTY && typeof input.setRawMode === "function") {
64+
input.setRawMode(false);
65+
}
66+
output.write(`\x1b[${choices.length}B\r`);
67+
output.write("\x1b[2K\r");
68+
};
69+
70+
const finish = (value: string) => {
71+
if (settled) {
72+
return;
73+
}
74+
settled = true;
75+
cleanup();
76+
output.write(`${question}: ${choices.find((choice) => choice.value === value)?.label ?? value}\n`);
77+
resolve(value);
78+
};
79+
80+
const fail = (error: Error) => {
81+
if (settled) {
82+
return;
83+
}
84+
settled = true;
85+
cleanup();
86+
reject(error);
87+
};
88+
89+
const onKeypress = (_str: string, key: readline.Key) => {
90+
if (key.name === "up") {
91+
selectedIndex = (selectedIndex - 1 + choices.length) % choices.length;
92+
redraw();
93+
return;
94+
}
95+
if (key.name === "down") {
96+
selectedIndex = (selectedIndex + 1) % choices.length;
97+
redraw();
98+
return;
99+
}
100+
if (key.name === "return" || key.name === "enter") {
101+
finish(choices[selectedIndex]?.value ?? choices[0].value);
102+
return;
103+
}
104+
if (key.ctrl && key.name === "c") {
105+
fail(new Error("setup aborted"));
106+
}
107+
};
108+
109+
readline.emitKeypressEvents(input);
110+
input.setRawMode(true);
111+
input.on("keypress", onKeypress);
112+
redraw();
113+
});
114+
}
115+
20116
export async function askRequiredLine(
21117
question: string,
22118
reader: PromptReader = askLine

tests/configEnv.test.ts

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -82,6 +82,23 @@ async function createWorkspace(): Promise<{ cwd: string; paths: ReturnType<typeo
8282
return { cwd, paths };
8383
}
8484

85+
async function captureStdout<T>(fn: () => Promise<T>): Promise<{ result: T; output: string }> {
86+
const chunks: string[] = [];
87+
const originalWrite = process.stdout.write.bind(process.stdout);
88+
89+
process.stdout.write = ((chunk: string | Uint8Array) => {
90+
chunks.push(typeof chunk === "string" ? chunk : Buffer.from(chunk).toString("utf8"));
91+
return true;
92+
}) as typeof process.stdout.write;
93+
94+
try {
95+
const result = await fn();
96+
return { result, output: chunks.join("") };
97+
} finally {
98+
process.stdout.write = originalWrite;
99+
}
100+
}
101+
85102
afterEach(() => {
86103
if (ORIGINAL_SEMANTIC_SCHOLAR_API_KEY === undefined) {
87104
delete process.env.SEMANTIC_SCHOLAR_API_KEY;
@@ -158,6 +175,35 @@ describe("config .env overrides", () => {
158175
await expect(fs.readFile(paths.configFile, "utf8")).resolves.toContain("project_name: project");
159176
});
160177

178+
// Regression test: a first-run setup must print both trade-off explainers
// (primary LLM provider and PDF analysis mode) before prompting.
it("prints codex/api trade-off guidance during first-run setup", async () => {
  // Clear the key so the wizard follows the no-API-key prompt sequence.
  delete process.env.OPENAI_API_KEY;
  const cwd = await fs.mkdtemp(path.join(os.tmpdir(), "autoresearch-setup-tradeoffs-"));
  const paths = resolveAppPaths(cwd);
  // Canned answers consumed in prompt order: project name, topic,
  // constraints, metric, provider choice, PDF-mode choice, Semantic
  // Scholar key.
  const answers = [
    "project",
    "Multi-agent collaboration",
    "recent papers,last 5 years",
    "reproducibility",
    "codex",
    "codex",
    "semantic-key"
  ];

  // Capture stdout while driving the wizard with a scripted PromptReader;
  // unanswered prompts fall back to their defaults.
  const { output } = await captureStdout(() =>
    runSetupWizard(paths, async (_question, defaultValue = "") => {
      const answer = answers.shift();
      return answer !== undefined ? answer : defaultValue;
    })
  );

  // Both guidance blocks and their key bullet lines must appear verbatim.
  expect(output).toContain("Primary LLM provider trade-off:");
  expect(output).toContain("codex: uses Sign in with ChatGPT");
  expect(output).toContain("api: uses OpenAI API models");
  expect(output).toContain("PDF analysis trade-off:");
  expect(output).toContain("codex: downloads PDFs locally");
  expect(output).toContain("api: sends PDFs to the OpenAI Responses API");
});
206+
161207
it("uses OPENAI_API_KEY from .env when Responses PDF mode is enabled", async () => {
162208
delete process.env.OPENAI_API_KEY;
163209
const { cwd } = await createWorkspace();

tests/terminalAppPlanExecution.test.ts

Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,66 @@ function makeRun(id = "run-1"): any {
5555
}
5656

5757
describe("TerminalApp pending natural plan execution", () => {
58+
// Verifies that the settings flow presents arrow-key selection menus (not
// free-text prompts) for the provider and PDF-mode questions, passing the
// current config values as defaults, and saves the config exactly once.
it("uses selection menus for provider and PDF mode in settings", async () => {
  const saveConfig = vi.fn().mockResolvedValue(undefined);
  // Minimal TerminalApp wiring: only the config matters here; every other
  // dependency is a stub that handleSettings should not touch.
  const app = new TerminalApp({
    config: {
      papers: { max_results: 100 },
      providers: {
        llm_mode: "codex_chatgpt_only",
        codex: { model: "gpt-5.3-codex", reasoning_effort: "xhigh", fast_mode: false },
        openai: { model: "gpt-5.4", reasoning_effort: "medium" }
      },
      analysis: {
        pdf_mode: "codex_text_extract",
        responses_model: "gpt-5.4"
      },
      research: {
        default_topic: "Multi-agent collaboration",
        default_constraints: ["recent papers", "last 5 years"],
        default_objective_metric: "state-of-the-art reproducibility"
      }
    } as any,
    runStore: {} as any,
    titleGenerator: {} as any,
    codex: {} as any,
    eventStream: { subscribe: () => () => {} } as any,
    orchestrator: {} as any,
    semanticScholarApiKeyConfigured: false,
    onQuit: () => {},
    saveConfig
  }) as any;

  // Neutralize rendering/input side effects so the test stays headless.
  app.render = () => {};
  app.updateSuggestions = () => {};
  app.drainQueuedInputs = async () => {};
  // Scripted free-text answers: topic, constraints, objective metric.
  app.askWithinTui = vi
    .fn()
    .mockResolvedValueOnce("Pilot topic")
    .mockResolvedValueOnce("recent papers,last 5 years")
    .mockResolvedValueOnce("reproducibility");
  // Scripted menu selections: provider first, then PDF mode.
  app.openSelectionMenu = vi
    .fn()
    .mockResolvedValueOnce("codex_chatgpt_only")
    .mockResolvedValueOnce("codex_text_extract");

  await app.handleSettings();

  // The menus must be opened in order with the config values as defaults.
  expect(app.openSelectionMenu).toHaveBeenNthCalledWith(
    1,
    "Select primary LLM provider",
    expect.any(Array),
    "codex_chatgpt_only"
  );
  expect(app.openSelectionMenu).toHaveBeenNthCalledWith(
    2,
    "Select PDF analysis mode",
    expect.any(Array),
    "codex_text_extract"
  );
  expect(saveConfig).toHaveBeenCalledTimes(1);
});
117+
58118
it("answers collected-paper count questions directly instead of arming a collect command", async () => {
59119
const app = makeApp();
60120
const run = makeRun("run-count");

0 commit comments

Comments
 (0)