diff --git a/apps/server/src/index.ts b/apps/server/src/index.ts
index 9b0652b..f24529c 100644
--- a/apps/server/src/index.ts
+++ b/apps/server/src/index.ts
@@ -128,6 +128,64 @@ app.post("/ai/ocr", async (c) => {
   return c.json({ text });
 });
+// Text generation via OpenRouter (same model as OCR, text-only)
+app.post("/ai/generate", async (c) => {
+  const { OPENROUTER_API_KEY, OPENROUTER_SITE_URL, OPENROUTER_SITE_NAME } =
+    env<{
+      OPENROUTER_API_KEY?: string;
+      OPENROUTER_SITE_URL?: string;
+      OPENROUTER_SITE_NAME?: string;
+    }>(c);
+  if (!OPENROUTER_API_KEY) {
+    return c.json({ error: "Missing OPENROUTER_API_KEY" }, 500);
+  }
+
+  type Req = { prompt?: string; system?: string; temperature?: number };
+  let bodyJson: Req | null = null;
+  try {
+    bodyJson = await c.req.json();
+  } catch {
+    // ignore and treat as empty
+  }
+  const prompt = bodyJson?.prompt ?? "";
+  const baseSystem =
+    "You are a concise assistant. Respond with a brief, plain text answer. Do not use markdown, lists, headings, or code fences. Keep it under ~150 words and avoid extra commentary.";
+  const system = bodyJson?.system
+    ? `${bodyJson.system}\n\nRules: Respond concisely in plain text only. No markdown, lists, headings, or code fences. Keep it under ~150 words.`
+    : baseSystem;
+  const temperature = Number.isFinite(bodyJson?.temperature)
+    ? (bodyJson?.temperature as number)
+    : 0.7;
+
+  if (!prompt.trim()) {
+    return c.json({ error: "Missing prompt" }, 400);
+  }
+
+  const body = {
+    model: "openrouter/sonoma-sky-alpha",
+    temperature,
+    messages: [
+      { role: "system", content: system },
+      { role: "user", content: prompt },
+    ],
+  };
+
+  const res = await openRouterChat(OPENROUTER_API_KEY as string, body, {
+    referer: OPENROUTER_SITE_URL,
+    title: OPENROUTER_SITE_NAME,
+  });
+  if (!res.ok) {
+    const err = await res.text();
+    return c.json(
+      { error: `OpenRouter error ${res.status}`, details: err },
+      502
+    );
+  }
+  const json = (await res.json()) as any;
+  const text: string = json?.choices?.[0]?.message?.content ?? "";
+  return c.json({ text });
+});
+
 
 app.get("/", (c) => {
   return c.text("OK");
 });
diff --git a/apps/web/src/lib/tldraw/ai-shapes.tsx b/apps/web/src/lib/tldraw/ai-shapes.tsx
index 1a0ad4c..d9c32ee 100644
--- a/apps/web/src/lib/tldraw/ai-shapes.tsx
+++ b/apps/web/src/lib/tldraw/ai-shapes.tsx
@@ -23,12 +23,166 @@ export type AIImageShape = TLBaseShape<
     h: number;
   }
 >;
+// New: AI Prompt shape
+export type AIPromptShape = TLBaseShape<
+  "ai-prompt",
+  {
+    prompt: string;
+    status: "idle" | "processing" | "completed" | "error";
+    createdDate: number;
+    w: number;
+    h: number;
+    temperature: number;
+  }
+>;
+
+export class AIPromptShapeUtil extends ShapeUtil<AIPromptShape> {
+  static override type = "ai-prompt" as const;
+
+  static override props: RecordProps<AIPromptShape> = {
+    prompt: T.string,
+    status: T.literalEnum("idle", "processing", "completed", "error"),
+    createdDate: T.number,
+    w: T.number,
+    h: T.number,
+    temperature: T.number,
+  };
+
+  getDefaultProps(): AIPromptShape["props"] {
+    return {
+      prompt: "",
+      status: "idle",
+      createdDate: Date.now(),
+      w: 320,
+      h: 180,
+      temperature: 0.7,
+    };
+  }
+
+  override canEdit() {
+    return true;
+  }
+  override canResize() {
+    return true;
+  }
+  override onResize(shape: AIPromptShape, info: TLResizeInfo<AIPromptShape>) {
+    return resizeBox(shape, info);
+  }
+
+  getGeometry(shape: 
AIPromptShape) { + return new Rectangle2d({ width: shape.props.w, height: shape.props.h, isFilled: true }); + } + + component(shape: AIPromptShape) { + const isEditing = this.editor.getEditingShapeId() === shape.id; + const [temp, setTemp] = useState(shape.props.temperature); + useEffect(() => setTemp(shape.props.temperature), [shape.props.temperature]); + + const triggerGenerate = () => { + try { + (window as any).__aiGenerate?.(shape.id); + } catch {} + }; + + return ( + +
+
+ ✨ AI Prompt + + {shape.props.status} + +
+