import { afterEach, beforeEach, describe, expect, it, jest } from "@jest/globals";
import { flushPromises, mount } from "@vue/test-utils";
import { ReadableStream } from "node:stream/web";
import { TextDecoder, TextEncoder } from "node:util";
import AiView from "../../../frontend/src/views/AiView.vue";

// Statuses that must not carry a response body per the Fetch spec; the
// Response constructor throws a TypeError for these when body is non-null.
const NULL_BODY_STATUSES = new Set([101, 204, 205, 304]);

/**
 * Build a JSON `Response` for a mocked fetch handler.
 *
 * @param {unknown} body - Value serialized as the JSON payload. Ignored for
 *   null-body statuses (e.g. 204), where the body must be null.
 * @param {number} [status=200] - HTTP status code.
 * @param {string} [statusText="OK"] - HTTP status text.
 * @returns {Response} Response with a `content-type: application/json` header.
 */
function jsonResponse(body, status = 200, statusText = "OK") {
  // FIX: the original always stringified `body`, but `new Response(body, ...)`
  // rejects a non-null body for 204/205/304/101, so `jsonResponse({}, 204, ...)`
  // threw a TypeError instead of producing the intended empty 204 response.
  const payload = NULL_BODY_STATUSES.has(status) ? null : JSON.stringify(body);
  return new Response(payload, {
    status,
    statusText,
    headers: { "content-type": "application/json" },
  });
}

/**
 * Build a streamed plain-text `Response` whose body arrives in the given
 * chunks, mimicking a chunked model reply.
 *
 * @param {string[]} chunks - Text fragments enqueued in order, then closed.
 * @returns {Response} 200 response with a `text/plain` streaming body.
 */
function streamResponse(chunks) {
  const encoder = new TextEncoder();
  const body = new ReadableStream({
    start(controller) {
      for (const chunk of chunks) controller.enqueue(encoder.encode(chunk));
      controller.close();
    },
  });
  return new Response(body, {
    status: 200,
    headers: { "content-type": "text/plain" },
  });
}

/**
 * Replace `global.fetch` with a jest mock that delegates to `handler`.
 *
 * @param {(url: string, options: RequestInit) => Response | Promise<Response>} handler
 */
function installFetch(handler) {
  global.fetch = jest.fn(async (url, options = {}) => handler(String(url), options));
}

describe("AI chat view", () => {
  beforeEach(() => {
    localStorage.clear();
    // Deterministic UUIDs so persisted conversation ids can be asserted.
    Object.defineProperty(globalThis, "crypto", {
      configurable: true,
      value: { randomUUID: () => "uuid-1" },
    });
    Object.defineProperty(globalThis, "TextDecoder", {
      configurable: true,
      value: TextDecoder,
    });
    Object.defineProperty(navigator, "clipboard", {
      configurable: true,
      value: { writeText: jest.fn(async () => {}) },
    });
    // First timing pair yields a 45 ms delta; every later call reads 200.
    Object.defineProperty(performance, "now", {
      configurable: true,
      value: jest
        .fn()
        .mockReturnValueOnce(100)
        .mockReturnValueOnce(145)
        .mockReturnValue(200),
    });
    // Default fetch stub: profile metadata for /api/ai/info, a canned chat
    // reply for everything else. Individual tests override via installFetch.
    installFetch((url) => {
      if (url.includes("/api/ai/info")) {
        return jsonResponse({
          model: url.includes("atlas-smart") ? "smart-model" : "quick-model",
          gpu: "titan-24",
          node: "titan-24",
          endpoint: "https://ai.example.dev/chat",
        });
      }
      return jsonResponse({ reply: "typed assistant response", latency_ms: 42 });
    });
  });

  afterEach(() => {
    jest.restoreAllMocks();
    localStorage.clear();
    Reflect.deleteProperty(global, "fetch");
    Reflect.deleteProperty(globalThis, "__ATLAS_IMPORT_META_ENV__");
  });

  it("loads profile metadata, switches profiles, and copies curl examples", async () => {
    const wrapper = mount(AiView);
    await flushPromises();
    expect(wrapper.text()).toContain("quick-model");
    expect(wrapper.text()).toContain("titan-24");

    // Copying the endpoint builds a curl example and confirms with "copied".
    await wrapper.find(".endpoint-copy").trigger("click");
    expect(navigator.clipboard.writeText).toHaveBeenCalledWith(
      expect.stringContaining("curl -X POST https://ai.example.dev/chat"),
    );
    expect(wrapper.text()).toContain("copied");

    // Switching tabs re-fetches info for the other profile.
    await wrapper
      .findAll(".profile-tab")
      .find((button) => button.text() === "Atlas Smart")
      .trigger("click");
    await flushPromises();
    expect(wrapper.text()).toContain("smart-model");

    // A rejected clipboard write must not surface the "copied" confirmation.
    Object.defineProperty(navigator, "clipboard", {
      configurable: true,
      value: {
        writeText: jest.fn(async () => {
          throw new Error("blocked");
        }),
      },
    });
    await wrapper.find(".endpoint-copy").trigger("click");
    expect(wrapper.text()).not.toContain("copied");
    wrapper.unmount();
  });

  it("sends JSON chat requests and reveals typed responses", async () => {
    const bodies = [];
    installFetch((url, options) => {
      if (url.includes("/api/ai/info")) return jsonResponse({ model: "quick-model" });
      bodies.push(JSON.parse(options.body));
      return jsonResponse({ reply: "typed assistant response", latency_ms: 42 });
    });
    const wrapper = mount(AiView);
    await flushPromises();

    await wrapper.find("textarea").setValue("  hello atlas  ");
    await wrapper.find("form").trigger("submit.prevent");
    // Give the typewriter-style reveal time to finish before asserting.
    await new Promise((resolve) => setTimeout(resolve, 80));
    await flushPromises();
    expect(wrapper.text()).toContain("hello atlas");
    expect(wrapper.text()).toContain("typed assistant response");
    expect(wrapper.text()).toContain("42 ms");
    // The outbound payload carries the trimmed message and conversation id.
    expect(bodies[0]).toMatchObject({
      message: "hello atlas",
      profile: "atlas-quick",
      conversation_id: expect.stringContaining("atlas-quick"),
    });
    expect(localStorage.getItem("atlas-ai-conversation:atlas-quick")).toContain("uuid-1");

    // A follow-up message includes the assistant turn in its history.
    await wrapper.find("textarea").setValue("second");
    await wrapper.find("form").trigger("submit.prevent");
    await new Promise((resolve) => setTimeout(resolve, 80));
    expect(bodies[1].history.map((item) => item.role)).toContain("assistant");
    wrapper.unmount();
  });

  it("handles streaming responses and keyboard submission", async () => {
    const seen = [];
    installFetch((url, options) => {
      if (url.includes("/api/ai/info")) return jsonResponse({}, 204, "No Content");
      seen.push(JSON.parse(options.body));
      return streamResponse(["stream ", "reply"]);
    });
    const wrapper = mount(AiView);
    await flushPromises();

    // Plain Enter submits and must preventDefault on the keydown event.
    const preventDefault = jest.fn();
    await wrapper.find("textarea").setValue("stream please");
    await wrapper
      .find("textarea")
      .trigger("keydown", { key: "Enter", shiftKey: false, preventDefault });
    await flushPromises();
    expect(preventDefault).toHaveBeenCalled();
    expect(wrapper.text()).toContain("stream reply");
    expect(seen).toHaveLength(1);

    // Shift+Enter inserts a newline and must not send another request.
    await wrapper.find("textarea").setValue("line one");
    await wrapper
      .find("textarea")
      .trigger("keydown", { key: "Enter", shiftKey: true, preventDefault: jest.fn() });
    await flushPromises();
    expect(seen).toHaveLength(1);
    wrapper.unmount();
  });

  it("shows request failures without losing the conversation", async () => {
    installFetch((url) => {
      if (url.includes("/api/ai/info")) throw new Error("info offline");
      return jsonResponse({ error: "model offline" }, 503, "Service Unavailable");
    });
    const wrapper = mount(AiView);
    await flushPromises();

    await wrapper.find("textarea").setValue("break");
    await wrapper.find("form").trigger("submit.prevent");
    await flushPromises();
    expect(wrapper.text()).toContain("model offline");
    expect(wrapper.text()).toContain("(no response)");

    // A whitespace-only draft must not trigger another request: only the
    // info call and the first chat call should have happened.
    await wrapper.find("textarea").setValue("   ");
    await wrapper.find("form").trigger("submit.prevent");
    await flushPromises();
    expect(fetch).toHaveBeenCalledTimes(2);
    wrapper.unmount();
  });
});