diff --git a/.changeset/ripe-peaches-crash.md b/.changeset/ripe-peaches-crash.md
new file mode 100644
index 0000000..f17d53e
--- /dev/null
+++ b/.changeset/ripe-peaches-crash.md
@@ -0,0 +1,5 @@
+---
+"create-better-t-stack": minor
+---
+
+Upgrade to AI SDK v5
diff --git a/apps/cli/src/constants.ts b/apps/cli/src/constants.ts
index 79db83e..a9968db 100644
--- a/apps/cli/src/constants.ts
+++ b/apps/cli/src/constants.ts
@@ -87,11 +87,11 @@ export const dependencyVersionMap = {
   turbo: "^2.5.4",
-  ai: "^4.3.16",
-  "@ai-sdk/google": "^1.2.3",
-  "@ai-sdk/vue": "^1.2.8",
-  "@ai-sdk/svelte": "^2.1.9",
-  "@ai-sdk/react": "^1.2.12",
+  ai: "^5.0.9",
+  "@ai-sdk/google": "^2.0.3",
+  "@ai-sdk/vue": "^2.0.9",
+  "@ai-sdk/svelte": "^3.0.9",
+  "@ai-sdk/react": "^2.0.9",
   "@orpc/server": "^1.5.0",
   "@orpc/client": "^1.5.0",
diff --git a/apps/cli/src/helpers/setup/examples-setup.ts b/apps/cli/src/helpers/setup/examples-setup.ts
index 2f7dde5..8d1e45a 100644
--- a/apps/cli/src/helpers/setup/examples-setup.ts
+++ b/apps/cli/src/helpers/setup/examples-setup.ts
@@ -17,33 +17,44 @@ export async function setupExamples(config: ProjectConfig) {
   }
   if (examples.includes("ai")) {
-    const clientDir = path.join(projectDir, "apps/web");
+    const webClientDir = path.join(projectDir, "apps/web");
+    const nativeClientDir = path.join(projectDir, "apps/native");
     const serverDir = path.join(projectDir, "apps/server");
-    const clientDirExists = await fs.pathExists(clientDir);
+
+    const webClientDirExists = await fs.pathExists(webClientDir);
+    const nativeClientDirExists = await fs.pathExists(nativeClientDir);
     const serverDirExists = await fs.pathExists(serverDir);
     const hasNuxt = frontend.includes("nuxt");
     const hasSvelte = frontend.includes("svelte");
-    const hasReact =
+    const hasReactWeb =
       frontend.includes("react-router") ||
       frontend.includes("tanstack-router") ||
       frontend.includes("next") ||
-      frontend.includes("tanstack-start") ||
+      frontend.includes("tanstack-start");
+    const hasReactNative =
       frontend.includes("native-nativewind") ||
      frontend.includes("native-unistyles");
-    if (clientDirExists) {
+    if (webClientDirExists) {
       const dependencies: AvailableDependencies[] = ["ai"];
       if (hasNuxt) {
         dependencies.push("@ai-sdk/vue");
       } else if (hasSvelte) {
         dependencies.push("@ai-sdk/svelte");
-      } else if (hasReact) {
+      } else if (hasReactWeb) {
         dependencies.push("@ai-sdk/react");
       }
       await addPackageDependency({
         dependencies,
-        projectDir: clientDir,
+        projectDir: webClientDir,
+      });
+    }
+
+    if (nativeClientDirExists && hasReactNative) {
+      await addPackageDependency({
+        dependencies: ["ai", "@ai-sdk/react"],
+        projectDir: nativeClientDir,
       });
     }
diff --git a/apps/cli/templates/backend/server/express/src/index.ts.hbs b/apps/cli/templates/backend/server/express/src/index.ts.hbs
index da6fd12..1e3127c 100644
--- a/apps/cli/templates/backend/server/express/src/index.ts.hbs
+++ b/apps/cli/templates/backend/server/express/src/index.ts.hbs
@@ -14,7 +14,7 @@ import { createContext } from "./lib/context";
 import cors from "cors";
 import express from "express";
 {{#if (includes examples "ai")}}
-import { streamText } from "ai";
+import { streamText, type UIMessage, convertToModelMessages } from "ai";
 import { google } from "@ai-sdk/google";
 {{/if}}
 {{#if auth}}
@@ -44,16 +44,16 @@ app.use(
   "/trpc",
   createExpressMiddleware({
     router: appRouter,
-    createContext
+    createContext,
   })
 );
 {{/if}}
 {{#if (eq api "orpc")}}
 const handler = new RPCHandler(appRouter);
-app.use('/rpc{*path}', async (req, res, next) => {
+app.use("/rpc{*path}", async (req, res, next) => {
   const { matched } = await handler.handle(req, res, {
-    prefix: '/rpc',
+    prefix: "/rpc",
     {{#if auth}}
     context: await createContext({ req }),
     {{else}}
@@ -65,16 +65,16 @@ app.use('/rpc{*path}', async (req, res, next) => {
 });
 {{/if}}
-app.use(express.json())
+app.use(express.json());
 {{#if (includes examples "ai")}}
 app.post("/ai", async (req, res) => {
-  const { messages = [] } = req.body || {};
+  const { messages = [] } = (req.body || {}) as { messages: UIMessage[] };
   const result = streamText({
     model: google("gemini-1.5-flash"),
-    messages,
+    messages: convertToModelMessages(messages),
   });
-  result.pipeDataStreamToResponse(res);
+  result.pipeUIMessageStreamToResponse(res);
 });
 {{/if}}
@@ -85,4 +85,4 @@ app.get("/", (_req, res) => {
 const port = process.env.PORT || 3000;
 app.listen(port, () => {
   console.log(`Server is running on port ${port}`);
-});
+});
\ No newline at end of file
diff --git a/apps/cli/templates/backend/server/fastify/src/index.ts.hbs b/apps/cli/templates/backend/server/fastify/src/index.ts.hbs
index 6b0d2f7..b59b50e 100644
--- a/apps/cli/templates/backend/server/fastify/src/index.ts.hbs
+++ b/apps/cli/templates/backend/server/fastify/src/index.ts.hbs
@@ -19,8 +19,7 @@ import { createContext } from "./lib/context";
 {{/if}}
 {{#if (includes examples "ai")}}
-import type { FastifyRequest, FastifyReply } from "fastify";
-import { streamText, type Message } from "ai";
+import { streamText, type UIMessage, convertToModelMessages } from "ai";
 import { google } from "@ai-sdk/google";
 {{/if}}
@@ -99,7 +98,7 @@ fastify.route({
     response.headers.forEach((value, key) => reply.header(key, value));
     reply.send(response.body ? await response.text() : null);
   } catch (error) {
-    fastify.log.error("Authentication Error:", error);
+    fastify.log.error({ err: error }, "Authentication Error:");
     reply.status(500).send({
       error: "Internal authentication error",
       code: "AUTH_FAILURE"
@@ -125,26 +124,24 @@ fastify.register(fastifyTRPCPlugin, {
 {{#if (includes examples "ai")}}
 interface AiRequestBody {
   id?: string;
-  messages: Message[];
+  messages: UIMessage[];
 }
 fastify.post('/ai', async function (request, reply) {
+  // there are some issues with the ai sdk and fastify, docs: https://ai-sdk.dev/cookbook/api-servers/fastify
   const { messages } = request.body as AiRequestBody;
   const result = streamText({
     model: google('gemini-1.5-flash'),
-    messages,
+    messages: convertToModelMessages(messages),
   });
-  reply.header('X-Vercel-AI-Data-Stream', 'v1');
-  reply.header('Content-Type', 'text/plain; charset=utf-8');
-
-  return reply.send(result.toDataStream());
+  return result.pipeUIMessageStreamToResponse(reply.raw);
 });
 {{/if}}
 fastify.get('/', async () => {
-  return 'OK'
-})
+  return 'OK';
+});
 fastify.listen({ port: 3000 }, (err) => {
   if (err) {
@@ -152,4 +149,4 @@ fastify.listen({ port: 3000 }, (err) => {
     process.exit(1);
   }
   console.log("Server running on port 3000");
-});
+});
\ No newline at end of file
diff --git a/apps/cli/templates/backend/server/hono/src/index.ts.hbs b/apps/cli/templates/backend/server/hono/src/index.ts.hbs
index 0fc09ae..86d09fa 100644
--- a/apps/cli/templates/backend/server/hono/src/index.ts.hbs
+++ b/apps/cli/templates/backend/server/hono/src/index.ts.hbs
@@ -21,32 +21,33 @@ import { Hono } from "hono";
 import { cors } from "hono/cors";
 import { logger } from "hono/logger";
 {{#if (and (includes examples "ai") (or (eq runtime "bun") (eq runtime "node")))}}
-import { streamText } from "ai";
+import { streamText, convertToModelMessages } from "ai";
 import { google } from "@ai-sdk/google";
"@ai-sdk/google"; -import { stream } from "hono/streaming"; {{/if}} {{#if (and (includes examples "ai") (eq runtime "workers"))}} -import { streamText } from "ai"; -import { stream } from "hono/streaming"; +import { streamText, convertToModelMessages } from "ai"; import { createGoogleGenerativeAI } from "@ai-sdk/google"; {{/if}} const app = new Hono(); app.use(logger()); -app.use("/*", cors({ - {{#if (or (eq runtime "bun") (eq runtime "node"))}} - origin: process.env.CORS_ORIGIN || "", - {{/if}} - {{#if (eq runtime "workers")}} - origin: env.CORS_ORIGIN || "", - {{/if}} - allowMethods: ["GET", "POST", "OPTIONS"], - {{#if auth}} - allowHeaders: ["Content-Type", "Authorization"], - credentials: true, - {{/if}} -})); +app.use( + "/*", + cors({ + {{#if (or (eq runtime "bun") (eq runtime "node"))}} + origin: process.env.CORS_ORIGIN || "", + {{/if}} + {{#if (eq runtime "workers")}} + origin: env.CORS_ORIGIN || "", + {{/if}} + allowMethods: ["GET", "POST", "OPTIONS"], + {{#if auth}} + allowHeaders: ["Content-Type", "Authorization"], + credentials: true, + {{/if}} + }) +); {{#if auth}} app.on(["POST", "GET"], "/api/auth/**", (c) => auth.handler(c.req.raw)); @@ -69,44 +70,43 @@ app.use("/rpc/*", async (c, next) => { {{/if}} {{#if (eq api "trpc")}} -app.use("/trpc/*", trpcServer({ - router: appRouter, - createContext: (_opts, context) => { - return createContext({ context }); - }, -})); +app.use( + "/trpc/*", + trpcServer({ + router: appRouter, + createContext: (_opts, context) => { + return createContext({ context }); + }, + }) +); {{/if}} {{#if (and (includes examples "ai") (or (eq runtime "bun") (eq runtime "node")))}} app.post("/ai", async (c) => { const body = await c.req.json(); - const messages = body.messages || []; + const uiMessages = body.messages || []; const result = streamText({ model: google("gemini-1.5-flash"), - messages, + messages: convertToModelMessages(uiMessages), }); - c.header("X-Vercel-AI-Data-Stream", "v1"); - c.header("Content-Type", "text/plain; charset=utf-8"); - return stream(c, (stream) => stream.pipe(result.toDataStream())); + return result.toUIMessageStreamResponse(); }); {{/if}} {{#if (and (includes examples "ai") (eq runtime "workers"))}} app.post("/ai", async (c) => { const body = await c.req.json(); - const messages = body.messages || []; + const uiMessages = body.messages || []; const google = createGoogleGenerativeAI({ apiKey: env.GOOGLE_GENERATIVE_AI_API_KEY, }); const result = streamText({ model: google("gemini-1.5-flash"), - messages, + messages: convertToModelMessages(uiMessages), }); - c.header("X-Vercel-AI-Data-Stream", "v1"); - c.header("Content-Type", "text/plain; charset=utf-8"); - return stream(c, (stream) => stream.pipe(result.toDataStream())); + return result.toUIMessageStreamResponse(); }); {{/if}} @@ -117,17 +117,20 @@ app.get("/", (c) => { {{#if (eq runtime "node")}} import { serve } from "@hono/node-server"; -serve({ - fetch: app.fetch, - port: 3000, -}, (info) => { - console.log(`Server is running on http://localhost:${info.port}`); -}); +serve( + { + fetch: app.fetch, + port: 3000, + }, + (info) => { + console.log(`Server is running on http://localhost:${info.port}`); + } +); {{else}} - {{#if (eq runtime "bun")}} +{{#if (eq runtime "bun")}} export default app; - {{/if}} - {{#if (eq runtime "workers")}} -export default app; - {{/if}} {{/if}} +{{#if (eq runtime "workers")}} +export default app; +{{/if}} +{{/if}} \ No newline at end of file diff --git a/apps/cli/templates/examples/ai/native/nativewind/app/(drawer)/ai.tsx.hbs 
index 08d84df..e68089c 100644
--- a/apps/cli/templates/examples/ai/native/nativewind/app/(drawer)/ai.tsx.hbs
+++ b/apps/cli/templates/examples/ai/native/nativewind/app/(drawer)/ai.tsx.hbs
@@ -1,4 +1,4 @@
-import { useRef, useEffect } from "react";
+import { useRef, useEffect, useState } from "react";
 import {
   View,
   Text,
@@ -9,11 +9,11 @@ import {
   Platform,
 } from "react-native";
 import { useChat } from "@ai-sdk/react";
+import { DefaultChatTransport } from "ai";
 import { fetch as expoFetch } from "expo/fetch";
 import { Ionicons } from "@expo/vector-icons";
 import { Container } from "@/components/container";
-// Utility function to generate API URLs
 const generateAPIUrl = (relativePath: string) => {
   const serverUrl = process.env.EXPO_PUBLIC_SERVER_URL;
   if (!serverUrl) {
@@ -25,11 +25,13 @@ const generateAPIUrl = (relativePath: string) => {
 };
 export default function AIScreen() {
-  const { messages, input, handleInputChange, handleSubmit, error } = useChat({
-    fetch: expoFetch as unknown as typeof globalThis.fetch,
-    api: generateAPIUrl('/ai'),
+  const [input, setInput] = useState("");
+  const { messages, error, sendMessage } = useChat({
+    transport: new DefaultChatTransport({
+      fetch: expoFetch as unknown as typeof globalThis.fetch,
+      api: generateAPIUrl('/ai'),
+    }),
     onError: error => console.error(error, 'AI Chat Error'),
-    maxSteps: 5,
   });
   const scrollViewRef = useRef(null);
@@ -39,8 +41,10 @@ export default function AIScreen() {
   }, [messages]);
   const onSubmit = () => {
-    if (input.trim()) {
-      handleSubmit();
+    const value = input.trim();
+    if (value) {
+      sendMessage({ text: value });
+      setInput("");
     }
   };
@@ -100,9 +104,28 @@ export default function AIScreen() {
                 {message.role === "user" ? "You" : "AI Assistant"}
-
-                {message.content}
-
+
+                {message.parts.map((part, i) => {
+                  if (part.type === 'text') {
+                    return (
+
+                        {part.text}
+
+                    );
+                  }
+                  return (
+
+                      {JSON.stringify(part)}
+
+                  );
+                })}
+
             ))}
@@ -113,21 +136,13 @@ export default function AIScreen() {
-                handleInputChange({
-                  ...e,
-                  target: {
-                    ...e.target,
-                    value: e.nativeEvent.text,
-                  },
-                } as unknown as React.ChangeEvent)
-              }
+              onChangeText={setInput}
               placeholder="Type your message..."
               placeholderTextColor="#6b7280"
               className="flex-1 border border-border rounded-md px-3 py-2 text-foreground bg-background min-h-[40px] max-h-[120px]"
               onSubmitEditing={(e) => {
-                handleSubmit(e);
                 e.preventDefault();
+                onSubmit();
               }}
               autoFocus={true}
             />
diff --git a/apps/cli/templates/examples/ai/native/unistyles/app/(drawer)/ai.tsx.hbs b/apps/cli/templates/examples/ai/native/unistyles/app/(drawer)/ai.tsx.hbs
index c59e844..5d9a48d 100644
--- a/apps/cli/templates/examples/ai/native/unistyles/app/(drawer)/ai.tsx.hbs
+++ b/apps/cli/templates/examples/ai/native/unistyles/app/(drawer)/ai.tsx.hbs
@@ -1,4 +1,4 @@
-import { useRef, useEffect } from "react";
+import React, { useRef, useEffect, useState } from "react";
 import {
   View,
   Text,
@@ -9,6 +9,7 @@ import {
   Platform,
 } from "react-native";
 import { useChat } from "@ai-sdk/react";
+import { DefaultChatTransport } from "ai";
 import { fetch as expoFetch } from "expo/fetch";
 import { Ionicons } from "@expo/vector-icons";
 import { StyleSheet, useUnistyles } from "react-native-unistyles";
@@ -18,21 +19,22 @@ const generateAPIUrl = (relativePath: string) => {
   const serverUrl = process.env.EXPO_PUBLIC_SERVER_URL;
   if (!serverUrl) {
     throw new Error(
-      "EXPO_PUBLIC_SERVER_URL environment variable is not defined",
+      "EXPO_PUBLIC_SERVER_URL environment variable is not defined"
     );
   }
-
   const path = relativePath.startsWith("/") ? relativePath : `/${relativePath}`;
   return serverUrl.concat(path);
 };
 export default function AIScreen() {
   const { theme } = useUnistyles();
-  const { messages, input, handleInputChange, handleSubmit, error } = useChat({
-    fetch: expoFetch as unknown as typeof globalThis.fetch,
-    api: generateAPIUrl("/ai"),
+  const [input, setInput] = useState("");
+  const { messages, error, sendMessage } = useChat({
+    transport: new DefaultChatTransport({
+      fetch: expoFetch as unknown as typeof globalThis.fetch,
+      api: generateAPIUrl("/ai"),
+    }),
     onError: (error) => console.error(error, "AI Chat Error"),
-    maxSteps: 5,
   });
   const scrollViewRef = useRef(null);
@@ -42,8 +44,10 @@ export default function AIScreen() {
   }, [messages]);
   const onSubmit = () => {
-    if (input.trim()) {
-      handleSubmit();
+    const value = input.trim();
+    if (value) {
+      sendMessage({ text: value });
+      setInput("");
     }
   };
@@ -100,7 +104,28 @@ export default function AIScreen() {
                 {message.role === "user" ? "You" : "AI Assistant"}
-                {message.content}
+
+                {message.parts.map((part, i) => {
+                  if (part.type === "text") {
+                    return (
+
+                        {part.text}
+
+                    );
+                  }
+                  return (
+
+                      {JSON.stringify(part)}
+
+                  );
+                })}
+
             ))}
@@ -111,21 +136,13 @@ export default function AIScreen() {
-                handleInputChange({
-                  ...e,
-                  target: {
-                    ...e.target,
-                    value: e.nativeEvent.text,
-                  },
-                } as unknown as React.ChangeEvent)
-              }
+              onChangeText={setInput}
               placeholder="Type your message..."
               placeholderTextColor={theme.colors.border}
               style={styles.textInput}
               onSubmitEditing={(e) => {
-                handleSubmit(e);
                 e.preventDefault();
+                onSubmit();
               }}
               autoFocus={true}
             />
@@ -141,7 +158,9 @@ export default function AIScreen() {
                 name="send"
                 size={20}
                 color={
-                  input.trim() ? theme.colors.background : theme.colors.border
-
+                  input.trim()
+                    ? theme.colors.background
+                    : theme.colors.border
+                }
               />
@@ -230,6 +249,9 @@ const styles = StyleSheet.create((theme) => ({
     marginBottom: theme.spacing.sm,
     color: theme.colors.typography,
   },
+  messageContentWrapper: {
+    gap: theme.spacing.xs,
+  },
   messageContent: {
     color: theme.colors.typography,
     lineHeight: 20,
@@ -276,4 +298,4 @@ const styles = StyleSheet.create((theme) => ({
   sendButtonDisabled: {
     backgroundColor: theme.colors.border,
   },
-}));
+}));
\ No newline at end of file
diff --git a/apps/cli/templates/examples/ai/server/next/src/app/ai/route.ts b/apps/cli/templates/examples/ai/server/next/src/app/ai/route.ts
deleted file mode 100644
index 74482ed..0000000
--- a/apps/cli/templates/examples/ai/server/next/src/app/ai/route.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-import { google } from '@ai-sdk/google';
-import { streamText } from 'ai';
-
-export const maxDuration = 30;
-
-export async function POST(req: Request) {
-  const { messages } = await req.json();
-
-  const result = streamText({
-    model: google('gemini-2.0-flash'),
-    messages,
-  });
-
-  return result.toDataStreamResponse();
-}
diff --git a/apps/cli/templates/examples/ai/server/next/src/app/ai/route.ts.hbs b/apps/cli/templates/examples/ai/server/next/src/app/ai/route.ts.hbs
new file mode 100644
index 0000000..4705c9b
--- /dev/null
+++ b/apps/cli/templates/examples/ai/server/next/src/app/ai/route.ts.hbs
@@ -0,0 +1,15 @@
+import { google } from '@ai-sdk/google';
+import { streamText, type UIMessage, convertToModelMessages } from 'ai';
+
+export const maxDuration = 30;
+
+export async function POST(req: Request) {
+  const { messages }: { messages: UIMessage[] } = await req.json();
+
+  const result = streamText({
+    model: google('gemini-2.0-flash'),
+    messages: convertToModelMessages(messages),
+  });
+
+  return result.toUIMessageStreamResponse();
+}
\ No newline at end of file
diff --git a/apps/cli/templates/examples/ai/web/nuxt/app/pages/ai.vue b/apps/cli/templates/examples/ai/web/nuxt/app/pages/ai.vue.hbs
similarity index 56%
rename from apps/cli/templates/examples/ai/web/nuxt/app/pages/ai.vue
rename to apps/cli/templates/examples/ai/web/nuxt/app/pages/ai.vue.hbs
index 39b1bcf..7fde26b 100644
--- a/apps/cli/templates/examples/ai/web/nuxt/app/pages/ai.vue
+++ b/apps/cli/templates/examples/ai/web/nuxt/app/pages/ai.vue.hbs
@@ -1,20 +1,35 @@
-
-
-
-    {#if chat.messages.length === 0}
-      Ask the AI anything to get started!
-    {/if}
-
-    {#each chat.messages as message (message.id)}
-
-
-        {message.role === 'user' ? 'You' : 'AI Assistant'}
-
-
-        {#each message.parts as part, partIndex (partIndex)}
-          {#if part.type === 'text'}
-            {part.text}
-          {:else if part.type === 'tool-invocation'}
-            {JSON.stringify(part.toolInvocation, null, 2)}
-          {/if}
-        {/each}
-
-
-    {/each}
-
-
-
-      {
-        if (e.key === 'Enter' && !e.shiftKey) {
-          e.preventDefault();
-          chat.handleSubmit(e);
-        }
-      }}
-    />
-
-
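Only fragments of the web templates in this part of the diff are legible, so purely for orientation: with AI SDK v5 the Vue client moves to the same transport-plus-sendMessage pattern the native templates above use. A minimal sketch of that wiring for the Nuxt page — not the template's actual code — assuming @ai-sdk/vue@2 exposes the v5 Chat class; the server-URL lookup is an illustrative placeholder:

  <script setup lang="ts">
  // Sketch only: verify the Chat class against the @ai-sdk/vue v2 docs.
  import { Chat } from "@ai-sdk/vue";
  import { DefaultChatTransport } from "ai";
  import { ref } from "vue";

  // Illustrative: use whatever public server-URL config the project exposes.
  const serverUrl = import.meta.env.VITE_SERVER_URL as string;

  const input = ref("");
  const chat = new Chat({
    transport: new DefaultChatTransport({ api: `${serverUrl}/ai` }),
  });

  function handleSubmit(e: Event) {
    e.preventDefault();
    const text = input.value.trim();
    if (!text) return;
    // v5 replaces input/handleInputChange/handleSubmit with sendMessage({ text }).
    chat.sendMessage({ text });
    input.value = "";
  }
  </script>

The page's template then iterates chat.messages and renders each message's "text" parts, mirroring the Svelte markup below.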
diff --git a/apps/cli/templates/examples/ai/web/svelte/src/routes/ai/+page.svelte.hbs b/apps/cli/templates/examples/ai/web/svelte/src/routes/ai/+page.svelte.hbs
new file mode 100644
index 0000000..b322bcf
--- /dev/null
+++ b/apps/cli/templates/examples/ai/web/svelte/src/routes/ai/+page.svelte.hbs
@@ -0,0 +1,107 @@
+
+
+
+
+    {#if chat.messages.length === 0}
+      Ask me anything to get started!
+    {/if}
+
+    {#each chat.messages as message (message.id)}
+
+
+        {message.role === "user" ? "You" : "AI Assistant"}
+
+
+        {#each message.parts as part, partIndex (partIndex)}
+          {#if part.type === "text"}
+            {part.text}
+          {/if}
+        {/each}
+
+
+    {/each}
+
+
+
+      {
+        if (e.key === "Enter" && !e.shiftKey) {
+          e.preventDefault();
+          handleSubmit(e);
+        }
+      }}
+    />
+
+
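The script section of this new Svelte page is not legible above; a minimal sketch of the state the surviving markup implies (chat, input, handleSubmit), assuming @ai-sdk/svelte@3's Chat class and an illustrative env-var name — not the template's exact code:

  <script lang="ts">
    // Sketch of the wiring the markup relies on.
    import { Chat } from "@ai-sdk/svelte";
    import { DefaultChatTransport } from "ai";
    import { PUBLIC_SERVER_URL } from "$env/static/public"; // illustrative name

    let input = $state("");

    const chat = new Chat({
      transport: new DefaultChatTransport({ api: `${PUBLIC_SERVER_URL}/ai` }),
    });

    function handleSubmit(e?: Event) {
      e?.preventDefault();
      const text = input.trim();
      if (!text) return;
      // v5: push a user message; chat.messages updates reactively.
      chat.sendMessage({ text });
      input = "";
    }
  </script>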
diff --git a/apps/cli/templates/extras/bunfig.toml.hbs b/apps/cli/templates/extras/bunfig.toml.hbs
index dacfe0a..3c52875 100644
--- a/apps/cli/templates/extras/bunfig.toml.hbs
+++ b/apps/cli/templates/extras/bunfig.toml.hbs
@@ -1,7 +1,7 @@
-{{#if (or (includes frontend "nuxt") (includes frontend "native-nativewind"))}}
-# [install]
+[install]
+{{#if (or (or (includes frontend "nuxt") (includes frontend "native-nativewind")) (includes frontend
+"native-unistyles"))}}
 # linker = "isolated"
 {{else}}
-[install]
 linker = "isolated"
-{{/if}}
+{{/if}}
\ No newline at end of file
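A detail running through the client-side changes above: v5 UI messages no longer carry a single content string, they carry an array of typed parts, which is why every template now maps over message.parts. A small standalone TypeScript illustration (not part of the diff):

  import type { UIMessage } from "ai";

  // Collect the plain text of a v5 UIMessage, ignoring non-text parts
  // (tool calls, files, reasoning, ...) that a client may render separately.
  function messageText(message: UIMessage): string {
    return message.parts
      .flatMap((part) => (part.type === "text" ? [part.text] : []))
      .join("");
  }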