Skip to content

Commit 7198c8f

Browse files
committed
test(httpapi): cover sdk prompt and stream parity
1 parent b5ec807 commit 7198c8f

1 file changed

Lines changed: 200 additions & 5 deletions

File tree

packages/opencode/test/server/httpapi-sdk.test.ts

Lines changed: 200 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -8,22 +8,33 @@ import { MessageID, PartID, SessionID } from "../../src/session/schema"
88
import { MessageV2 } from "../../src/session/message-v2"
99
import { ModelID, ProviderID } from "../../src/provider/schema"
1010
import { Session as SessionNs } from "@/session/session"
11+
import { TestLLMServer } from "../lib/llm-server"
1112
import path from "path"
1213
import { resetDatabase } from "../fixture/db"
1314
import { tmpdir } from "../fixture/fixture"
1415

// Snapshot of the mutable Flag values that app() overwrites below;
// afterEach restores these so flag state never leaks between tests.
const original = {
  OPENCODE_EXPERIMENTAL_HTTPAPI: Flag.OPENCODE_EXPERIMENTAL_HTTPAPI,
  OPENCODE_SERVER_PASSWORD: Flag.OPENCODE_SERVER_PASSWORD,
  OPENCODE_SERVER_USERNAME: Flag.OPENCODE_SERVER_USERNAME,
}
// Which server implementation a test run targets.
type Backend = "legacy" | "httpapi"
// Generated SDK client, typed from the factory's return value.
type Sdk = ReturnType<typeof createOpencodeClient>
// Normalized capture of one SDK call: raw response plus parsed data or error.
type SdkResult = { response: Response; data?: unknown; error?: unknown }

20-
function app(backend: Backend) {
25+
function app(backend: Backend, input?: { password?: string; username?: string }) {
2126
Flag.OPENCODE_EXPERIMENTAL_HTTPAPI = backend === "httpapi"
27+
Flag.OPENCODE_SERVER_PASSWORD = input?.password
28+
Flag.OPENCODE_SERVER_USERNAME = input?.username
2229
return backend === "httpapi" ? Server.Default().app : Server.Legacy().app
2330
}
2431

25-
function client(backend: Backend, directory?: string) {
26-
const serverApp = app(backend)
32+
function client(
33+
backend: Backend,
34+
directory?: string,
35+
input?: { password?: string; username?: string; headers?: Record<string, string> },
36+
) {
37+
const serverApp = app(backend, input)
2738
const fetch = Object.assign(
2839
async (request: RequestInfo | URL, init?: RequestInit) =>
2940
await serverApp.fetch(request instanceof Request ? request : new Request(request, init)),
@@ -32,10 +43,48 @@ function client(backend: Backend, directory?: string) {
3243
return createOpencodeClient({
3344
baseUrl: "http://localhost",
3445
directory,
46+
headers: input?.headers,
3547
fetch,
3648
})
3749
}
3850

51+
function authorization(username: string, password: string) {
52+
return `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`
53+
}
54+
55+
function providerConfig(url: string) {
56+
return {
57+
formatter: false,
58+
lsp: false,
59+
provider: {
60+
test: {
61+
name: "Test",
62+
id: "test",
63+
env: [],
64+
npm: "@ai-sdk/openai-compatible",
65+
models: {
66+
"test-model": {
67+
id: "test-model",
68+
name: "Test Model",
69+
attachment: false,
70+
reasoning: false,
71+
temperature: false,
72+
tool_call: true,
73+
release_date: "2025-01-01",
74+
limit: { context: 100000, output: 10000 },
75+
cost: { input: 0, output: 0 },
76+
options: {},
77+
},
78+
},
79+
options: {
80+
apiKey: "test-key",
81+
baseURL: url,
82+
},
83+
},
84+
},
85+
}
86+
}
87+
3988
/** Await an SDK call and assert its HTTP status code. */
async function expectStatus(result: Promise<{ response: Response }>, status: number) {
  const { response } = await result
  expect(response.status).toBe(status)
}
@@ -128,8 +177,26 @@ async function withTmp<T>(backend: Backend, fn: (input: { sdk: Sdk; directory: s
128177
return fn({ sdk: client(backend, tmp.path), directory: tmp.path })
129178
}
130179

180+
async function withFakeLlm<T>(
181+
backend: Backend,
182+
fn: (input: { sdk: Sdk; directory: string; llm: TestLLMServer["Service"] }) => Promise<T>,
183+
) {
184+
return Effect.runPromise(
185+
Effect.gen(function* () {
186+
const llm = yield* TestLLMServer
187+
const tmp = yield* Effect.acquireRelease(
188+
Effect.promise(() => tmpdir({ git: true, config: providerConfig(llm.url) })),
189+
(tmp) => Effect.promise(() => tmp[Symbol.asyncDispose]()),
190+
)
191+
return yield* Effect.promise(() => fn({ sdk: client(backend, tmp.path), directory: tmp.path, llm }))
192+
}).pipe(Effect.scoped, Effect.provide(TestLLMServer.layer)),
193+
)
194+
}
195+
131196
afterEach(async () => {
132-
Flag.OPENCODE_EXPERIMENTAL_HTTPAPI = original
197+
Flag.OPENCODE_EXPERIMENTAL_HTTPAPI = original.OPENCODE_EXPERIMENTAL_HTTPAPI
198+
Flag.OPENCODE_SERVER_PASSWORD = original.OPENCODE_SERVER_PASSWORD
199+
Flag.OPENCODE_SERVER_USERNAME = original.OPENCODE_SERVER_USERNAME
133200
await Instance.disposeAll()
134201
await resetDatabase()
135202
})
@@ -199,6 +266,63 @@ describe("HttpApi SDK", () => {
199266
})
200267
})
201268

269+
test("matches generated SDK global event stream across backends", async () => {
270+
await compareBackends(async (backend) => {
271+
const events = await client(backend).global.event({ signal: AbortSignal.timeout(1_000) })
272+
try {
273+
const first = await events.stream.next()
274+
return {
275+
type: record(record(first.value).payload).type,
276+
}
277+
} finally {
278+
await events.stream.return(undefined)
279+
}
280+
})
281+
})
282+
283+
test("matches generated SDK instance event stream across backends", async () => {
284+
await compareBackends((backend) =>
285+
withTmp(backend, async ({ sdk }) => {
286+
const events = await sdk.event.subscribe(undefined, { signal: AbortSignal.timeout(1_000) })
287+
try {
288+
const first = await events.stream.next()
289+
return {
290+
type: record(record(first.value).payload).type,
291+
}
292+
} finally {
293+
await events.stream.return(undefined)
294+
}
295+
}),
296+
)
297+
})
298+
299+
test("matches generated SDK basic auth behavior across backends", async () => {
300+
await compareBackends((backend) =>
301+
withTmp(backend, async ({ directory }) => {
302+
const missing = await capture(
303+
client(backend, directory, { password: "secret" }).file.read({ path: "hello.txt" }),
304+
)
305+
const bad = await capture(
306+
client(backend, directory, {
307+
password: "secret",
308+
headers: { authorization: authorization("opencode", "wrong") },
309+
}).file.read({ path: "hello.txt" }),
310+
)
311+
const good = await capture(
312+
client(backend, directory, {
313+
password: "secret",
314+
headers: { authorization: authorization("opencode", "secret") },
315+
}).file.read({ path: "hello.txt" }),
316+
)
317+
318+
return {
319+
statuses: statuses({ missing, bad, good }),
320+
content: record(good.data).content,
321+
}
322+
}),
323+
)
324+
})
325+
202326
test("matches generated SDK instance read routes across backends", async () => {
203327
await compareBackends((backend) =>
204328
withTmp(backend, async ({ sdk, directory }) => {
@@ -356,6 +480,77 @@ describe("HttpApi SDK", () => {
356480
)
357481
})
358482

483+
test("matches generated SDK prompt no-reply routes across backends", async () => {
484+
await compareBackends((backend) =>
485+
withTmp(backend, async ({ sdk }) => {
486+
const session = await capture(sdk.session.create({ title: "prompt" }))
487+
const sessionID = String(record(session.data).id)
488+
const prompt = await capture(
489+
sdk.session.prompt({
490+
sessionID,
491+
agent: "build",
492+
noReply: true,
493+
parts: [{ type: "text", text: "hello" }],
494+
}),
495+
)
496+
const asyncPrompt = await capture(
497+
sdk.session.promptAsync({
498+
sessionID,
499+
agent: "build",
500+
noReply: true,
501+
parts: [{ type: "text", text: "async hello" }],
502+
}),
503+
)
504+
const messages = await capture(sdk.session.messages({ sessionID }))
505+
506+
return {
507+
statuses: statuses({ session, prompt, asyncPrompt, messages }),
508+
promptRole: record(record(prompt.data).info).role,
509+
messageCount: array(messages.data).length,
510+
messageTexts: array(messages.data)
511+
.flatMap((item) => array(record(item).parts))
512+
.map((part) => record(part).text)
513+
.filter((text): text is string => typeof text === "string")
514+
.sort(),
515+
}
516+
}),
517+
)
518+
})
519+
520+
test("matches generated SDK prompt streaming through fake LLM across backends", async () => {
521+
await compareBackends((backend) =>
522+
withFakeLlm(backend, async ({ sdk, llm }) => {
523+
await Effect.runPromise(llm.text("fake world", { usage: { input: 11, output: 7 } }))
524+
const session = await capture(
525+
sdk.session.create({
526+
title: "llm prompt",
527+
permission: [{ permission: "*", pattern: "*", action: "allow" }],
528+
}),
529+
)
530+
const sessionID = String(record(session.data).id)
531+
const prompt = await capture(
532+
sdk.session.prompt({
533+
sessionID,
534+
agent: "build",
535+
model: { providerID: "test", modelID: "test-model" },
536+
parts: [{ type: "text", text: "hello llm" }],
537+
}),
538+
)
539+
const messages = await capture(sdk.session.messages({ sessionID }))
540+
const inputs = await Effect.runPromise(llm.inputs)
541+
542+
return {
543+
statuses: statuses({ session, prompt, messages }),
544+
calls: inputs.length,
545+
requestedModel: inputs[0]?.model,
546+
responseText: JSON.stringify(prompt.data).includes("fake world"),
547+
persistedText: JSON.stringify(messages.data).includes("fake world"),
548+
userText: JSON.stringify(messages.data).includes("hello llm"),
549+
}
550+
}),
551+
)
552+
})
553+
359554
test("matches generated SDK TUI validation and command routes across backends", async () => {
360555
await compareBackends((backend) =>
361556
withTmp(backend, async ({ sdk }) => {

0 commit comments

Comments (0)