Mirror of https://github.com/FranP-code/create-better-t-stack.git (synced 2025-10-12 23:52:15 +00:00)
feat(cli): add alchemy and improve cli tooling and structure (#520)
@@ -1,8 +0,0 @@
# Changesets

Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works
with multi-package repos, or single-package repos to help you version and publish your code. You can
find the full documentation for it [in our repository](https://github.com/changesets/changesets)

We have a quick list of common questions to get you started engaging with this project in
[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)
@@ -1,11 +0,0 @@
{
  "$schema": "https://unpkg.com/@changesets/config@3.0.5/schema.json",
  "changelog": "@changesets/cli/changelog",
  "commit": false,
  "fixed": [],
  "linked": [],
  "access": "public",
  "baseBranch": "main",
  "updateInternalDependencies": "patch",
  "ignore": ["@better-t-stack/backend", "web"]
}
674 .cursor/rules/convex_rules.mdc Normal file
@@ -0,0 +1,674 @@
---
alwaysApply: true
---

# Convex guidelines

## Function guidelines

### New function syntax

- ALWAYS use the new function syntax for Convex functions. For example:

```typescript
import { query } from "./_generated/server";
import { v } from "convex/values";

export const f = query({
  args: {},
  returns: v.null(),
  handler: async (ctx, args) => {
    // Function body
  },
});
```

### HTTP endpoint syntax

- HTTP endpoints are defined in `convex/http.ts` and require an `httpAction` decorator. For example:

```typescript
import { httpRouter } from "convex/server";
import { httpAction } from "./_generated/server";

const http = httpRouter();

http.route({
  path: "/echo",
  method: "POST",
  handler: httpAction(async (ctx, req) => {
    const body = await req.bytes();
    return new Response(body, { status: 200 });
  }),
});
```

- HTTP endpoints are always registered at the exact path you specify in the `path` field. For example, if you specify `/api/someRoute`, the endpoint will be registered at `/api/someRoute`.

### Validators

- Below is an example of an array validator:

```typescript
import { mutation } from "./_generated/server";
import { v } from "convex/values";

export default mutation({
  args: {
    simpleArray: v.array(v.union(v.string(), v.number())),
  },
  handler: async (ctx, args) => {
    //...
  },
});
```

- Below is an example of a schema with validators that codify a discriminated union type:

```typescript
import { defineSchema, defineTable } from "convex/server";
import { v } from "convex/values";

export default defineSchema({
  results: defineTable(
    v.union(
      v.object({
        kind: v.literal("error"),
        errorMessage: v.string(),
      }),
      v.object({
        kind: v.literal("success"),
        value: v.number(),
      }),
    ),
  ),
});
```

- Always use the `v.null()` validator when returning a null value. Below is an example query that returns a null value:

```typescript
import { query } from "./_generated/server";
import { v } from "convex/values";

export const exampleQuery = query({
  args: {},
  returns: v.null(),
  handler: async (ctx, args) => {
    console.log("This query returns a null value");
    return null;
  },
});
```

- Here are the valid Convex types along with their respective validators:

| Convex Type | TS/JS type | Example Usage | Validator for argument validation and schemas | Notes |
| ----------- | ---------- | ------------- | --------------------------------------------- | ----- |
| Id | string | `doc._id` | `v.id(tableName)` | |
| Null | null | `null` | `v.null()` | JavaScript's `undefined` is not a valid Convex value. Functions that return `undefined` or do not return will return `null` when called from a client. Use `null` instead. |
| Int64 | bigint | `3n` | `v.int64()` | Int64s only support BigInts between -2^63 and 2^63-1. Convex supports `bigint`s in most modern browsers. |
| Float64 | number | `3.1` | `v.number()` | Convex supports all IEEE-754 double-precision floating point numbers (such as NaNs). Inf and NaN are JSON serialized as strings. |
| Boolean | boolean | `true` | `v.boolean()` | |
| String | string | `"abc"` | `v.string()` | Strings are stored as UTF-8 and must be valid Unicode sequences. Strings must be smaller than the 1MB total size limit when encoded as UTF-8. |
| Bytes | ArrayBuffer | `new ArrayBuffer(8)` | `v.bytes()` | Convex supports first class bytestrings, passed in as `ArrayBuffer`s. Bytestrings must be smaller than the 1MB total size limit for Convex types. |
| Array | Array | `[1, 3.2, "abc"]` | `v.array(values)` | Arrays can have at most 8192 values. |
| Object | Object | `{a: "abc"}` | `v.object({property: value})` | Convex only supports "plain old JavaScript objects" (objects that do not have a custom prototype). Objects can have at most 1024 entries. Field names must be nonempty and not start with "$" or "_". |
| Record | Record | `{"a": "1", "b": "2"}` | `v.record(keys, values)` | Records are objects at runtime, but can have dynamic keys. Keys must be only ASCII characters, nonempty, and not start with "$" or "_". |

### Function registration

- Use `internalQuery`, `internalMutation`, and `internalAction` to register internal functions. These functions are private and aren't part of an app's API. They can only be called by other Convex functions. These functions are always imported from `./_generated/server`.
- Use `query`, `mutation`, and `action` to register public functions. These functions are part of the public API and are exposed to the public Internet. Do NOT use `query`, `mutation`, or `action` to register sensitive internal functions that should be kept private.
- You CANNOT register a function through the `api` or `internal` objects.
- ALWAYS include argument and return validators for all Convex functions. This includes all of `query`, `internalQuery`, `mutation`, `internalMutation`, `action`, and `internalAction`. If a function doesn't return anything, include `returns: v.null()` as its output validator.
- If the JavaScript implementation of a Convex function doesn't have a return value, it implicitly returns `null`.
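To make the public/internal split concrete, here is a minimal sketch (not part of the official guidelines) assuming a hypothetical `convex/counters.ts` and a `counters` table with a `by_name` index; the public query would be referenced as `api.counters.getCount` and the internal mutation as `internal.counters.increment`:

```typescript
import { query, internalMutation } from "./_generated/server";
import { v } from "convex/values";

// Public: exposed to clients as `api.counters.getCount`.
export const getCount = query({
  args: { name: v.string() },
  returns: v.number(),
  handler: async (ctx, args) => {
    const counter = await ctx.db
      .query("counters")
      .withIndex("by_name", (q) => q.eq("name", args.name))
      .unique();
    return counter?.value ?? 0;
  },
});

// Internal: only callable from other Convex functions as `internal.counters.increment`.
export const increment = internalMutation({
  args: { name: v.string() },
  returns: v.null(),
  handler: async (ctx, args) => {
    const counter = await ctx.db
      .query("counters")
      .withIndex("by_name", (q) => q.eq("name", args.name))
      .unique();
    if (counter) {
      await ctx.db.patch(counter._id, { value: counter.value + 1 });
    } else {
      await ctx.db.insert("counters", { name: args.name, value: 1 });
    }
    return null;
  },
});
```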
### Function calling

- Use `ctx.runQuery` to call a query from a query, mutation, or action.
- Use `ctx.runMutation` to call a mutation from a mutation or action.
- Use `ctx.runAction` to call an action from an action.
- ONLY call an action from another action if you need to cross runtimes (e.g. from V8 to Node). Otherwise, pull out the shared code into a helper async function and call that directly instead.
- Try to use as few calls from actions to queries and mutations as possible. Queries and mutations are transactions, so splitting logic up into multiple calls introduces the risk of race conditions.
- All of these calls take in a `FunctionReference`. Do NOT try to pass the callee function directly into one of these calls.
- When using `ctx.runQuery`, `ctx.runMutation`, or `ctx.runAction` to call a function in the same file, specify a type annotation on the return value to work around TypeScript circularity limitations. For example,

```
export const f = query({
  args: { name: v.string() },
  returns: v.string(),
  handler: async (ctx, args) => {
    return "Hello " + args.name;
  },
});

export const g = query({
  args: {},
  returns: v.null(),
  handler: async (ctx, args) => {
    const result: string = await ctx.runQuery(api.example.f, { name: "Bob" });
    return null;
  },
});
```

### Function references

- Function references are pointers to registered Convex functions.
- Use the `api` object defined by the framework in `convex/_generated/api.ts` to call public functions registered with `query`, `mutation`, or `action`.
- Use the `internal` object defined by the framework in `convex/_generated/api.ts` to call internal (or private) functions registered with `internalQuery`, `internalMutation`, or `internalAction`.
- Convex uses file-based routing, so a public function defined in `convex/example.ts` named `f` has a function reference of `api.example.f`.
- A private function defined in `convex/example.ts` named `g` has a function reference of `internal.example.g`.
- Functions can also be registered within directories nested within the `convex/` folder. For example, a public function `h` defined in `convex/messages/access.ts` has a function reference of `api.messages.access.h`.

### API design

- Convex uses file-based routing, so thoughtfully organize files with public query, mutation, or action functions within the `convex/` directory.
- Use `query`, `mutation`, and `action` to define public functions.
- Use `internalQuery`, `internalMutation`, and `internalAction` to define private, internal functions.

### Pagination

- Paginated queries are queries that return a list of results in incremental pages.
- You can define pagination using the following syntax:

```ts
import { v } from "convex/values";
import { query, mutation } from "./_generated/server";
import { paginationOptsValidator } from "convex/server";

export const listWithExtraArg = query({
  args: { paginationOpts: paginationOptsValidator, author: v.string() },
  handler: async (ctx, args) => {
    return await ctx.db
      .query("messages")
      .filter((q) => q.eq(q.field("author"), args.author))
      .order("desc")
      .paginate(args.paginationOpts);
  },
});
```

Note: `paginationOpts` is an object with the following properties:

- `numItems`: the maximum number of documents to return (the validator is `v.number()`)
- `cursor`: the cursor to use to fetch the next page of documents (the validator is `v.union(v.string(), v.null())`)

A query that ends in `.paginate()` returns an object that has the following properties:

- `page` (contains an array of the documents that you fetched)
- `isDone` (a boolean that represents whether or not this is the last page of documents)
- `continueCursor` (a string that represents the cursor to use to fetch the next page of documents)
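As a sketch of consuming that return shape (assuming the pagination example above lives in a hypothetical `convex/messages.ts`; not part of the official guidelines), an action can walk every page by feeding `continueCursor` back in as the next `cursor` until `isDone` is true:

```typescript
import { internalAction } from "./_generated/server";
import { v } from "convex/values";
import { api } from "./_generated/api";

export const countAllByAuthor = internalAction({
  args: { author: v.string() },
  returns: v.number(),
  handler: async (ctx, args) => {
    let cursor: string | null = null;
    let total = 0;
    while (true) {
      // Each call fetches one page of up to 100 documents.
      const result = await ctx.runQuery(api.messages.listWithExtraArg, {
        author: args.author,
        paginationOpts: { numItems: 100, cursor },
      });
      total += result.page.length;
      if (result.isDone) break;
      cursor = result.continueCursor;
    }
    return total;
  },
});
```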
## Validator guidelines

- `v.bigint()` is deprecated for representing signed 64-bit integers. Use `v.int64()` instead.
- Use `v.record()` for defining a record type. `v.map()` and `v.set()` are not supported.

## Schema guidelines

- Always define your schema in `convex/schema.ts`.
- Always import the schema definition functions from `convex/server`.
- System fields are automatically added to all documents and are prefixed with an underscore. The two system fields that are automatically added to all documents are `_creationTime`, which has the validator `v.number()`, and `_id`, which has the validator `v.id(tableName)`.
- Always include all index fields in the index name. For example, if an index is defined as `["field1", "field2"]`, the index name should be "by_field1_and_field2".
- Index fields must be queried in the same order they are defined. If you want to be able to query by "field1" then "field2" and by "field2" then "field1", you must create separate indexes.
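A minimal sketch of these index rules, reusing the placeholder field names from the bullets above (the `events` table is hypothetical):

```typescript
import { defineSchema, defineTable } from "convex/server";
import { v } from "convex/values";

export default defineSchema({
  // The index name lists every indexed field, in order.
  events: defineTable({
    field1: v.string(),
    field2: v.number(),
  }).index("by_field1_and_field2", ["field1", "field2"]),
});
```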
## TypeScript guidelines

- You can use the helper TypeScript type `Id` imported from `./_generated/dataModel` to get the type of the id for a given table. For example, if there is a table called 'users' you can use `Id<'users'>` to get the type of the id for that table.
- If you need to define a `Record`, make sure that you correctly provide the type of the key and value in the type. For example, a validator `v.record(v.id('users'), v.string())` would have the type `Record<Id<'users'>, string>`. Below is an example of using `Record` with an `Id` type in a query:

```ts
import { query } from "./_generated/server";
import { v } from "convex/values";
import { Doc, Id } from "./_generated/dataModel";

export const exampleQuery = query({
  args: { userIds: v.array(v.id("users")) },
  returns: v.record(v.id("users"), v.string()),
  handler: async (ctx, args) => {
    const idToUsername: Record<Id<"users">, string> = {};
    for (const userId of args.userIds) {
      const user = await ctx.db.get(userId);
      if (user) {
        idToUsername[user._id] = user.username;
      }
    }

    return idToUsername;
  },
});
```

- Be strict with types, particularly around ids of documents. For example, if a function takes in an id for a document in the 'users' table, take in `Id<'users'>` rather than `string`.
- Always use `as const` for string literals in discriminated union types.
- When using the `Array` type, make sure to always define your arrays as `const array: Array<T> = [...];`
- When using the `Record` type, make sure to always define your records as `const record: Record<KeyType, ValueType> = {...};`
- Always add `@types/node` to your `package.json` when using any Node.js built-in modules.

## Full text search guidelines

- A query for "10 messages in channel '#general' that best match the query 'hello hi' in their body" would look like:

```ts
const messages = await ctx.db
  .query("messages")
  .withSearchIndex("search_body", (q) =>
    q.search("body", "hello hi").eq("channel", "#general"),
  )
  .take(10);
```

## Query guidelines

- Do NOT use `filter` in queries. Instead, define an index in the schema and use `withIndex` instead.
- Convex queries do NOT support `.delete()`. Instead, `.collect()` the results, iterate over them, and call `ctx.db.delete(row._id)` on each result.
- Use `.unique()` to get a single document from a query. This method will throw an error if there are multiple documents that match the query.
- When using async iteration, don't use `.collect()` or `.take(n)` on the result of a query. Instead, use the `for await (const row of query)` syntax.

### Ordering

- By default Convex always returns documents in ascending `_creationTime` order.
- You can use `.order('asc')` or `.order('desc')` to pick whether a query is in ascending or descending order. If the order isn't specified, it defaults to ascending.
- Document queries that use indexes will be ordered based on the columns in the index and can avoid slow table scans.
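A short sketch combining these rules (`withIndex` instead of `filter`, plus delete-by-iteration), assuming the `messages` table and `by_channel` index used in the chat-app example later in this file:

```typescript
import { internalMutation } from "./_generated/server";
import { v } from "convex/values";

export const deleteChannelMessages = internalMutation({
  args: { channelId: v.id("channels") },
  returns: v.null(),
  handler: async (ctx, args) => {
    // Indexed lookup instead of `.filter(...)`.
    const messages = await ctx.db
      .query("messages")
      .withIndex("by_channel", (q) => q.eq("channelId", args.channelId))
      .collect();
    // Queries have no `.delete()`, so delete each document individually.
    for (const message of messages) {
      await ctx.db.delete(message._id);
    }
    return null;
  },
});
```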
## Mutation guidelines

- Use `ctx.db.replace` to fully replace an existing document. This method will throw an error if the document does not exist.
- Use `ctx.db.patch` to shallow merge updates into an existing document. This method will throw an error if the document does not exist.
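A minimal sketch of the shallow-merge behaviour, assuming the `users` table from the chat-app example later in this file:

```typescript
import { mutation } from "./_generated/server";
import { v } from "convex/values";

export const renameUser = mutation({
  args: { userId: v.id("users"), name: v.string() },
  returns: v.null(),
  handler: async (ctx, args) => {
    // Only `name` is updated; other fields on the document are preserved.
    // `ctx.db.replace(args.userId, { name: args.name })` would instead
    // overwrite the whole document with just this field.
    await ctx.db.patch(args.userId, { name: args.name });
    return null;
  },
});
```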
## Action guidelines

- Always add `"use node";` to the top of files containing actions that use Node.js built-in modules.
- Never use `ctx.db` inside of an action. Actions don't have access to the database.
- Below is an example of the syntax for an action:

```ts
import { action } from "./_generated/server";
import { v } from "convex/values";

export const exampleAction = action({
  args: {},
  returns: v.null(),
  handler: async (ctx, args) => {
    console.log("This action does not return anything");
    return null;
  },
});
```

## Scheduling guidelines

### Cron guidelines

- Only use the `crons.interval` or `crons.cron` methods to schedule cron jobs. Do NOT use the `crons.hourly`, `crons.daily`, or `crons.weekly` helpers.
- Both cron methods take in a FunctionReference. Do NOT try to pass the function directly into one of these methods.
- Define crons by declaring the top-level `crons` object, calling some methods on it, and then exporting it as default. For example,

```ts
import { cronJobs } from "convex/server";
import { v } from "convex/values";
import { internal } from "./_generated/api";
import { internalAction } from "./_generated/server";

export const empty = internalAction({
  args: {},
  returns: v.null(),
  handler: async (ctx, args) => {
    console.log("empty");
  },
});

const crons = cronJobs();

// Run `internal.crons.empty` every two hours.
crons.interval("delete inactive users", { hours: 2 }, internal.crons.empty, {});

export default crons;
```

- You can register Convex functions within `crons.ts` just like any other file.
- If a cron calls an internal function, always import the `internal` object from `./_generated/api`, even if the internal function is registered in the same file.
## File storage guidelines

- Convex includes file storage for large files like images, videos, and PDFs.
- The `ctx.storage.getUrl()` method returns a signed URL for a given file. It returns `null` if the file doesn't exist.
- Do NOT use the deprecated `ctx.storage.getMetadata` call for loading a file's metadata.

Instead, query the `_storage` system table. For example, you can use `ctx.db.system.get` to fetch the metadata for a given `Id<"_storage">`:

```
import { query } from "./_generated/server";
import { v } from "convex/values";
import { Id } from "./_generated/dataModel";

type FileMetadata = {
  _id: Id<"_storage">;
  _creationTime: number;
  contentType?: string;
  sha256: string;
  size: number;
};

export const exampleQuery = query({
  args: { fileId: v.id("_storage") },
  returns: v.null(),
  handler: async (ctx, args) => {
    const metadata: FileMetadata | null = await ctx.db.system.get(args.fileId);
    console.log(metadata);
    return null;
  },
});
```

- Convex storage stores items as `Blob` objects. You must convert all items to/from a `Blob` when using Convex storage.
# Examples:

## Example: chat-app

### Task

```
Create a real-time chat application backend with AI responses. The app should:
- Allow creating users with names
- Support multiple chat channels
- Enable users to send messages to channels
- Automatically generate AI responses to user messages
- Show recent message history

The backend should provide APIs for:
1. User management (creation)
2. Channel management (creation)
3. Message operations (sending, listing)
4. AI response generation using OpenAI's GPT-4

Messages should be stored with their channel, author, and content. The system should maintain message order
and limit history display to the 10 most recent messages per channel.
```

### Analysis

1. Task Requirements Summary:
   - Build a real-time chat backend with AI integration
   - Support user creation
   - Enable channel-based conversations
   - Store and retrieve messages with proper ordering
   - Generate AI responses automatically

2. Main Components Needed:
   - Database tables: users, channels, messages
   - Public APIs for user/channel management
   - Message handling functions
   - Internal AI response generation system
   - Context loading for AI responses

3. Public API and Internal Functions Design:

   Public Mutations:
   - createUser:
     - file path: convex/index.ts
     - arguments: {name: v.string()}
     - returns: v.object({userId: v.id("users")})
     - purpose: Create a new user with a given name
   - createChannel:
     - file path: convex/index.ts
     - arguments: {name: v.string()}
     - returns: v.object({channelId: v.id("channels")})
     - purpose: Create a new channel with a given name
   - sendMessage:
     - file path: convex/index.ts
     - arguments: {channelId: v.id("channels"), authorId: v.id("users"), content: v.string()}
     - returns: v.null()
     - purpose: Send a message to a channel and schedule a response from the AI

   Public Queries:
   - listMessages:
     - file path: convex/index.ts
     - arguments: {channelId: v.id("channels")}
     - returns: v.array(v.object({
         _id: v.id("messages"),
         _creationTime: v.number(),
         channelId: v.id("channels"),
         authorId: v.optional(v.id("users")),
         content: v.string(),
       }))
     - purpose: List the 10 most recent messages from a channel in descending creation order

   Internal Functions:
   - generateResponse:
     - file path: convex/index.ts
     - arguments: {channelId: v.id("channels")}
     - returns: v.null()
     - purpose: Generate a response from the AI for a given channel
   - loadContext:
     - file path: convex/index.ts
     - arguments: {channelId: v.id("channels")}
     - returns: v.array(v.object({
         _id: v.id("messages"),
         _creationTime: v.number(),
         channelId: v.id("channels"),
         authorId: v.optional(v.id("users")),
         content: v.string(),
       }))
   - writeAgentResponse:
     - file path: convex/index.ts
     - arguments: {channelId: v.id("channels"), content: v.string()}
     - returns: v.null()
     - purpose: Write an AI response to a given channel

4. Schema Design:
   - users
     - validator: { name: v.string() }
     - indexes: <none>
   - channels
     - validator: { name: v.string() }
     - indexes: <none>
   - messages
     - validator: { channelId: v.id("channels"), authorId: v.optional(v.id("users")), content: v.string() }
     - indexes
       - by_channel: ["channelId"]

5. Background Processing:
   - AI response generation runs asynchronously after each user message
   - Uses OpenAI's GPT-4 to generate contextual responses
   - Maintains conversation context using recent message history
### Implementation

#### package.json

```json
{
  "name": "chat-app",
  "description": "This example shows how to build a chat app without authentication.",
  "version": "1.0.0",
  "dependencies": {
    "convex": "^1.17.4",
    "openai": "^4.79.0"
  },
  "devDependencies": {
    "typescript": "^5.7.3"
  }
}
```

#### tsconfig.json

```json
{
  "compilerOptions": {
    "target": "ESNext",
    "lib": ["DOM", "DOM.Iterable", "ESNext"],
    "skipLibCheck": true,
    "allowSyntheticDefaultImports": true,
    "strict": true,
    "forceConsistentCasingInFileNames": true,
    "module": "ESNext",
    "moduleResolution": "Bundler",
    "resolveJsonModule": true,
    "isolatedModules": true,
    "allowImportingTsExtensions": true,
    "noEmit": true,
    "jsx": "react-jsx"
  },
  "exclude": ["convex"],
  "include": ["**/src/**/*.tsx", "**/src/**/*.ts", "vite.config.ts"]
}
```

#### convex/index.ts

```typescript
import {
  query,
  mutation,
  internalQuery,
  internalMutation,
  internalAction,
} from "./_generated/server";
import { v } from "convex/values";
import OpenAI from "openai";
import { internal } from "./_generated/api";

/**
 * Create a user with a given name.
 */
export const createUser = mutation({
  args: {
    name: v.string(),
  },
  returns: v.id("users"),
  handler: async (ctx, args) => {
    return await ctx.db.insert("users", { name: args.name });
  },
});

/**
 * Create a channel with a given name.
 */
export const createChannel = mutation({
  args: {
    name: v.string(),
  },
  returns: v.id("channels"),
  handler: async (ctx, args) => {
    return await ctx.db.insert("channels", { name: args.name });
  },
});

/**
 * List the 10 most recent messages from a channel in descending creation order.
 */
export const listMessages = query({
  args: {
    channelId: v.id("channels"),
  },
  returns: v.array(
    v.object({
      _id: v.id("messages"),
      _creationTime: v.number(),
      channelId: v.id("channels"),
      authorId: v.optional(v.id("users")),
      content: v.string(),
    }),
  ),
  handler: async (ctx, args) => {
    const messages = await ctx.db
      .query("messages")
      .withIndex("by_channel", (q) => q.eq("channelId", args.channelId))
      .order("desc")
      .take(10);
    return messages;
  },
});

/**
 * Send a message to a channel and schedule a response from the AI.
 */
export const sendMessage = mutation({
  args: {
    channelId: v.id("channels"),
    authorId: v.id("users"),
    content: v.string(),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    const channel = await ctx.db.get(args.channelId);
    if (!channel) {
      throw new Error("Channel not found");
    }
    const user = await ctx.db.get(args.authorId);
    if (!user) {
      throw new Error("User not found");
    }
    await ctx.db.insert("messages", {
      channelId: args.channelId,
      authorId: args.authorId,
      content: args.content,
    });
    await ctx.scheduler.runAfter(0, internal.index.generateResponse, {
      channelId: args.channelId,
    });
    return null;
  },
});

const openai = new OpenAI();

export const generateResponse = internalAction({
  args: {
    channelId: v.id("channels"),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    const context = await ctx.runQuery(internal.index.loadContext, {
      channelId: args.channelId,
    });
    const response = await openai.chat.completions.create({
      model: "gpt-4o",
      messages: context,
    });
    const content = response.choices[0].message.content;
    if (!content) {
      throw new Error("No content in response");
    }
    await ctx.runMutation(internal.index.writeAgentResponse, {
      channelId: args.channelId,
      content,
    });
    return null;
  },
});

export const loadContext = internalQuery({
  args: {
    channelId: v.id("channels"),
  },
  returns: v.array(
    v.object({
      role: v.union(v.literal("user"), v.literal("assistant")),
      content: v.string(),
    }),
  ),
  handler: async (ctx, args) => {
    const channel = await ctx.db.get(args.channelId);
    if (!channel) {
      throw new Error("Channel not found");
    }
    const messages = await ctx.db
      .query("messages")
      .withIndex("by_channel", (q) => q.eq("channelId", args.channelId))
      .order("desc")
      .take(10);

    const result = [];
    for (const message of messages) {
      if (message.authorId) {
        const user = await ctx.db.get(message.authorId);
        if (!user) {
          throw new Error("User not found");
        }
        result.push({
          role: "user" as const,
          content: `${user.name}: ${message.content}`,
        });
      } else {
        result.push({ role: "assistant" as const, content: message.content });
      }
    }
    return result;
  },
});

export const writeAgentResponse = internalMutation({
  args: {
    channelId: v.id("channels"),
    content: v.string(),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    await ctx.db.insert("messages", {
      channelId: args.channelId,
      content: args.content,
    });
    return null;
  },
});
```

#### convex/schema.ts

```typescript
import { defineSchema, defineTable } from "convex/server";
import { v } from "convex/values";

export default defineSchema({
  channels: defineTable({
    name: v.string(),
  }),

  users: defineTable({
    name: v.string(),
  }),

  messages: defineTable({
    channelId: v.id("channels"),
    authorId: v.optional(v.id("users")),
    content: v.string(),
  }).index("by_channel", ["channelId"]),
});
```

#### src/App.tsx

```tsx
export default function App() {
  return <div>Hello World</div>;
}
```
108 .cursor/rules/use-bun-instead-of-node-vite-npm-pnpm.mdc Normal file
@@ -0,0 +1,108 @@
---
alwaysApply: true
---

Default to using Bun instead of Node.js.

- Use `bun <file>` instead of `node <file>` or `ts-node <file>`
- Use `bun test` instead of `jest` or `vitest`
- Use `bun build <file.html|file.ts|file.css>` instead of `webpack` or `esbuild`
- Use `bun install` instead of `npm install` or `yarn install` or `pnpm install`
- Use `bun run <script>` instead of `npm run <script>` or `yarn run <script>` or `pnpm run <script>`
- Bun automatically loads .env, so don't use dotenv.

## APIs

- `Bun.serve()` supports WebSockets, HTTPS, and routes. Don't use `express`.
- `bun:sqlite` for SQLite. Don't use `better-sqlite3`.
- `Bun.redis` for Redis. Don't use `ioredis`.
- `Bun.sql` for Postgres. Don't use `pg` or `postgres.js`.
- `WebSocket` is built-in. Don't use `ws`.
- Prefer `Bun.file` over `node:fs`'s readFile/writeFile
- Bun.$`ls` instead of execa (see the sketch after this list).
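A small sketch tying a few of these together (the file names and table are illustrative, not part of this rule set):

```ts
import { Database } from "bun:sqlite";

// bun:sqlite instead of better-sqlite3.
const db = new Database("app.sqlite");
db.run("CREATE TABLE IF NOT EXISTS notes (id INTEGER PRIMARY KEY, body TEXT)");
const row = db.query("SELECT count(*) AS n FROM notes").get();

// Bun.file instead of node:fs readFile/writeFile.
const pkg = await Bun.file("package.json").json();

// Bun.$ instead of execa.
const ls = await Bun.$`ls`.quiet();

console.log(pkg.name, row, ls.stdout.toString());
```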
## Testing

Use `bun test` to run tests.

```ts#index.test.ts
import { test, expect } from "bun:test";

test("hello world", () => {
  expect(1).toBe(1);
});
```

## Frontend

Use HTML imports with `Bun.serve()`. Don't use `vite`. HTML imports fully support React, CSS, Tailwind.

Server:

```ts#index.ts
import index from "./index.html"

Bun.serve({
  routes: {
    "/": index,
    "/api/users/:id": {
      GET: (req) => {
        return new Response(JSON.stringify({ id: req.params.id }));
      },
    },
  },
  // optional websocket support
  websocket: {
    open: (ws) => {
      ws.send("Hello, world!");
    },
    message: (ws, message) => {
      ws.send(message);
    },
    close: (ws) => {
      // handle close
    }
  },
  development: {
    hmr: true,
    console: true,
  }
})
```

HTML files can import .tsx, .jsx or .js files directly and Bun's bundler will transpile & bundle automatically. `<link>` tags can point to stylesheets and Bun's CSS bundler will bundle.

```html#index.html
<html>
  <body>
    <h1>Hello, world!</h1>
    <script type="module" src="./frontend.tsx"></script>
  </body>
</html>
```

With the following `frontend.tsx`:

```tsx#frontend.tsx
import React from "react";

// import .css files directly and it works
import './index.css';

import { createRoot } from "react-dom/client";

const root = createRoot(document.body);

export default function Frontend() {
  return <h1>Hello, world!</h1>;
}

root.render(<Frontend />);
```

Then, run index.ts

```sh
bun --hot ./index.ts
```

For more information, read the Bun API docs in `node_modules/bun-types/docs/**.md`.
30 .github/workflows/preview.yaml vendored
@@ -1,30 +0,0 @@
name: Preview

on:
  pull_request:
    paths:
      - "apps/cli/**"
      - "package.json"
      - "bun.lock"

concurrency: ${{ github.workflow }}-${{ github.ref }}

jobs:
  preview:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup Bun
        uses: oven-sh/setup-bun@v2
        with:
          bun-version: latest

      - name: Install Dependencies
        run: bun install --frozen-lockfile

      - name: Publish Preview Package
        run: bunx pkg-pr-new publish './apps/cli' --bin
34 .github/workflows/release.yaml vendored
@@ -1,17 +1,12 @@
name: Release

permissions:
  contents: write

on:
  push:
    branches:
      - main
      - beta
    paths:
      - "apps/cli/**"
      - ".changeset/**"
      - "package.json"
      - "bun.lock"

concurrency: ${{ github.workflow }}-${{ github.ref }}
    tags:
      - 'v*'

jobs:
  release:
@@ -30,14 +25,19 @@ jobs:
      - name: Install Dependencies
        run: bun install --frozen-lockfile

      - name: Create Release Pull Request or Publish
        id: changesets
        uses: changesets/action@v1
        with:
          publish: bun run publish-packages
      - name: Build CLI
        run: bun run build:cli
        env:
          BTS_TELEMETRY: 1
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
          POSTHOG_API_KEY: ${{ secrets.POSTHOG_API_KEY }}
          POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}

      - name: Generate Changelog
        run: bun run release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Publish CLI to NPM
        run: cd apps/cli && bun publish --access public
        env:
          NPM_CONFIG_TOKEN: ${{ secrets.NPM_TOKEN }}
@@ -66,7 +66,8 @@ Options:
  --install                Install dependencies
  --no-install             Skip installing dependencies
  --db-setup <setup>       Database setup (turso, d1, neon, supabase, prisma-postgres, mongodb-atlas, docker, none)
  --web-deploy <setup>     Web deployment (workers, none)
  --web-deploy <setup>     Web deployment (workers, alchemy, none)
  --server-deploy <setup>  Server deployment (workers, alchemy, none)
  --backend <framework>    Backend framework (hono, express, elysia, next, convex, fastify, none)
  --runtime <runtime>      Runtime (bun, node, workers, none)
  --api <type>             API type (trpc, orpc, none)
@@ -52,7 +52,8 @@
    "dev": "tsdown --watch",
    "check-types": "tsc --noEmit",
    "check": "biome check --write .",
    "test": "bun run build && vitest --ui",
    "test": "bun run build && vitest run",
    "test:ui": "bun run build && vitest --ui",
    "test:with-build": "bun run build && WITH_BUILD=1 vitest --ui",
    "prepublishOnly": "npm run build"
  },
@@ -63,22 +64,24 @@
    }
  },
  "dependencies": {
    "@biomejs/js-api": "^3.0.0",
    "@biomejs/wasm-nodejs": "^2.2.0",
    "@clack/prompts": "^0.11.0",
    "consola": "^3.4.2",
    "execa": "^9.6.0",
    "fs-extra": "^11.3.1",
    "globby": "^14.1.0",
    "gradient-string": "^3.0.0",
    "handlebars": "^4.7.8",
    "jsonc-parser": "^3.3.1",
    "picocolors": "^1.1.1",
    "tinyglobby": "^0.2.14",
    "trpc-cli": "^0.10.2",
    "ts-morph": "^26.0.0",
    "zod": "^4.0.17"
  },
  "devDependencies": {
    "@types/fs-extra": "^11.0.4",
    "@types/node": "^24.2.1",
    "@types/node": "^24.3.0",
    "@vitest/ui": "^3.2.4",
    "tsdown": "^0.14.1",
    "typescript": "^5.9.2",
@@ -7,25 +7,37 @@ const __filename = fileURLToPath(import.meta.url);
const distPath = path.dirname(__filename);
export const PKG_ROOT = path.join(distPath, "../");

export const DEFAULT_CONFIG: ProjectConfig = {
export const DEFAULT_CONFIG_BASE = {
  projectName: "my-better-t-app",
  projectDir: path.resolve(process.cwd(), "my-better-t-app"),
  relativePath: "my-better-t-app",
  frontend: ["tanstack-router"],
  database: "sqlite",
  orm: "drizzle",
  frontend: ["tanstack-router"] as const,
  database: "sqlite" as const,
  orm: "drizzle" as const,
  auth: true,
  addons: ["turborepo"],
  examples: [],
  addons: ["turborepo"] as const,
  examples: [] as const,
  git: true,
  packageManager: getUserPkgManager(),
  install: true,
  dbSetup: "none",
  backend: "hono",
  runtime: "bun",
  api: "trpc",
  webDeploy: "none",
  dbSetup: "none" as const,
  backend: "hono" as const,
  runtime: "bun" as const,
  api: "trpc" as const,
  webDeploy: "none" as const,
  serverDeploy: "none" as const,
} as const;

export function getDefaultConfig(): ProjectConfig {
  return {
    ...DEFAULT_CONFIG_BASE,
    projectDir: path.resolve(process.cwd(), DEFAULT_CONFIG_BASE.projectName),
    packageManager: getUserPkgManager(),
    frontend: [...DEFAULT_CONFIG_BASE.frontend],
    addons: [...DEFAULT_CONFIG_BASE.addons],
    examples: [...DEFAULT_CONFIG_BASE.examples],
  };
}

export const DEFAULT_CONFIG = getDefaultConfig();

export const dependencyVersionMap = {
  "better-auth": "^1.3.4",
@@ -106,7 +118,8 @@ export const dependencyVersionMap = {
  "convex-nuxt": "0.1.5",
  "convex-vue": "^0.1.5",

  "@tanstack/svelte-query": "^5.74.4",
  "@tanstack/svelte-query": "^5.85.3",
  "@tanstack/svelte-query-devtools": "^5.85.3",

  "@tanstack/vue-query-devtools": "^5.83.0",
  "@tanstack/vue-query": "^5.83.0",
@@ -116,19 +129,27 @@ export const dependencyVersionMap = {

  "@tanstack/solid-query": "^5.75.0",
  "@tanstack/solid-query-devtools": "^5.75.0",
  "@tanstack/solid-router-devtools": "^1.131.25",

  wrangler: "^4.23.0",
  "@cloudflare/vite-plugin": "^1.9.0",
  "@opennextjs/cloudflare": "^1.3.0",
  "nitro-cloudflare-dev": "^0.2.2",
  "@sveltejs/adapter-cloudflare": "^7.0.4",
  "@sveltejs/adapter-cloudflare": "^7.2.1",
  "@cloudflare/workers-types": "^4.20250813.0",

  alchemy: "^0.62.1",
  // temporary workaround for alchemy + tanstack start
  nitropack: "^2.12.4",

  dotenv: "^17.2.1",
} as const;

export type AvailableDependencies = keyof typeof dependencyVersionMap;

export const ADDON_COMPATIBILITY: Record<Addons, readonly Frontend[]> = {
  pwa: ["tanstack-router", "react-router", "solid", "next"],
  tauri: ["tanstack-router", "react-router", "nuxt", "svelte", "solid"],
  tauri: ["tanstack-router", "react-router", "nuxt", "svelte", "solid", "next"],
  biome: [],
  husky: [],
  turborepo: [],
@@ -7,7 +7,7 @@ import { PKG_ROOT } from "../../constants";
import type { ProjectConfig } from "../../types";
import { exitCancelled } from "../../utils/errors";
import { getPackageExecutionCommand } from "../../utils/package-runner";
import { processAndCopyFiles } from "../project-generation/template-manager";
import { processAndCopyFiles } from "../core/template-manager";

export async function setupVibeRules(config: ProjectConfig) {
  const { packageManager, projectDir } = config;
@@ -69,8 +69,8 @@ export async function setupTauri(config: ProjectConfig) {
    `--window-title=${path.basename(projectDir)}`,
    `--frontend-dist=${frontendDist}`,
    `--dev-url=${devUrl}`,
    `--before-dev-command=\"${packageManager} run dev\"`,
    `--before-build-command=\"${packageManager} run build\"`,
    `--before-dev-command="${packageManager} run dev"`,
    `--before-build-command="${packageManager} run build"`,
  ];
  const tauriArgsString = tauriArgs.join(" ");
@@ -5,7 +5,7 @@ import type { AddInput, Addons, ProjectConfig } from "../../types";
import { validateAddonCompatibility } from "../../utils/addon-compatibility";
import { updateBtsConfig } from "../../utils/bts-config";
import { exitWithError } from "../../utils/errors";
import { setupAddons } from "../setup/addons-setup";
import { setupAddons } from "../addons/addons-setup";
import {
  detectProjectConfig,
  isBetterTStackProject,
@@ -52,6 +52,7 @@ export async function addAddonsToProject(
    dbSetup: detectedConfig.dbSetup || "none",
    api: detectedConfig.api || "none",
    webDeploy: detectedConfig.webDeploy || "none",
    serverDeploy: detectedConfig.serverDeploy || "none",
  };

  for (const addon of input.addons) {
@@ -1,10 +1,16 @@
import path from "node:path";
import { log } from "@clack/prompts";
import pc from "picocolors";
import type { AddInput, ProjectConfig, WebDeploy } from "../../types";
import type {
  AddInput,
  ProjectConfig,
  ServerDeploy,
  WebDeploy,
} from "../../types";
import { updateBtsConfig } from "../../utils/bts-config";
import { exitWithError } from "../../utils/errors";
import { setupWebDeploy } from "../setup/web-deploy-setup";
import { setupServerDeploy } from "../deployment/server-deploy-setup";
import { setupWebDeploy } from "../deployment/web-deploy-setup";
import {
  detectProjectConfig,
  isBetterTStackProject,
@@ -13,7 +19,11 @@ import { installDependencies } from "./install-dependencies";
import { setupDeploymentTemplates } from "./template-manager";

export async function addDeploymentToProject(
  input: AddInput & { webDeploy: WebDeploy; suppressInstallMessage?: boolean },
  input: AddInput & {
    webDeploy?: WebDeploy;
    serverDeploy?: ServerDeploy;
    suppressInstallMessage?: boolean;
  },
) {
  try {
    const projectDir = input.projectDir || process.cwd();
@@ -32,9 +42,18 @@ export async function addDeploymentToProject(
      );
    }

    if (detectedConfig.webDeploy === input.webDeploy) {
    if (input.webDeploy && detectedConfig.webDeploy === input.webDeploy) {
      exitWithError(
        `${input.webDeploy} deployment is already configured for this project.`,
        `${input.webDeploy} web deployment is already configured for this project.`,
      );
    }

    if (
      input.serverDeploy &&
      detectedConfig.serverDeploy === input.serverDeploy
    ) {
      exitWithError(
        `${input.serverDeploy} server deployment is already configured for this project.`,
      );
    }

@@ -56,19 +75,30 @@ export async function addDeploymentToProject(
      install: input.install || false,
      dbSetup: detectedConfig.dbSetup || "none",
      api: detectedConfig.api || "none",
      webDeploy: input.webDeploy,
      webDeploy: input.webDeploy || detectedConfig.webDeploy || "none",
      serverDeploy: input.serverDeploy || detectedConfig.serverDeploy || "none",
    };

    if (input.webDeploy && input.webDeploy !== "none") {
      log.info(
        pc.green(
          `Adding ${input.webDeploy} deployment to ${config.frontend.join("/")}`,
          `Adding ${input.webDeploy} web deployment to ${config.frontend.join("/")}`,
        ),
      );
    }

    if (input.serverDeploy && input.serverDeploy !== "none") {
      log.info(pc.green(`Adding ${input.serverDeploy} server deployment`));
    }

    await setupDeploymentTemplates(projectDir, config);
    await setupWebDeploy(config);
    await setupServerDeploy(config);

    await updateBtsConfig(projectDir, { webDeploy: input.webDeploy });
    await updateBtsConfig(projectDir, {
      webDeploy: input.webDeploy || config.webDeploy,
      serverDeploy: input.serverDeploy || config.serverDeploy,
    });

    if (config.install) {
      await installDependencies({
303 apps/cli/src/helpers/core/api-setup.ts Normal file
@@ -0,0 +1,303 @@
import path from "node:path";
import fs from "fs-extra";
import type { AvailableDependencies } from "../../constants";
import type { Frontend, ProjectConfig } from "../../types";
import { addPackageDependency } from "../../utils/add-package-deps";

async function addBackendWorkspaceDependency(
  projectDir: string,
  backendPackageName: string,
  workspaceVersion: string,
) {
  const pkgJsonPath = path.join(projectDir, "package.json");
  try {
    const pkgJson = await fs.readJson(pkgJsonPath);
    if (!pkgJson.dependencies) {
      pkgJson.dependencies = {};
    }
    pkgJson.dependencies[backendPackageName] = workspaceVersion;
    await fs.writeJson(pkgJsonPath, pkgJson, { spaces: 2 });
  } catch (_error) {}
}

function getFrontendType(frontend: Frontend[]): {
  hasReactWeb: boolean;
  hasNuxtWeb: boolean;
  hasSvelteWeb: boolean;
  hasSolidWeb: boolean;
  hasNative: boolean;
} {
  const reactBasedFrontends = [
    "tanstack-router",
    "react-router",
    "tanstack-start",
    "next",
  ];
  const nativeFrontends = ["native-nativewind", "native-unistyles"];

  return {
    hasReactWeb: frontend.some((f) => reactBasedFrontends.includes(f)),
    hasNuxtWeb: frontend.includes("nuxt"),
    hasSvelteWeb: frontend.includes("svelte"),
    hasSolidWeb: frontend.includes("solid"),
    hasNative: frontend.some((f) => nativeFrontends.includes(f)),
  };
}

function getApiDependencies(
  api: string,
  frontendType: ReturnType<typeof getFrontendType>,
) {
  const deps: Record<
    string,
    { dependencies: string[]; devDependencies?: string[] }
  > = {};

  if (api === "orpc") {
    deps.server = { dependencies: ["@orpc/server", "@orpc/client"] };
  } else if (api === "trpc") {
    deps.server = { dependencies: ["@trpc/server", "@trpc/client"] };
  }

  if (frontendType.hasReactWeb) {
    if (api === "orpc") {
      deps.web = { dependencies: ["@orpc/tanstack-query", "@orpc/client"] };
    } else if (api === "trpc") {
      deps.web = {
        dependencies: [
          "@trpc/tanstack-react-query",
          "@trpc/client",
          "@trpc/server",
        ],
      };
    }
  } else if (frontendType.hasNuxtWeb && api === "orpc") {
    deps.web = {
      dependencies: [
        "@tanstack/vue-query",
        "@orpc/tanstack-query",
        "@orpc/client",
      ],
      devDependencies: ["@tanstack/vue-query-devtools"],
    };
  } else if (frontendType.hasSvelteWeb && api === "orpc") {
    deps.web = {
      dependencies: [
        "@orpc/tanstack-query",
        "@orpc/client",
        "@tanstack/svelte-query",
      ],
      devDependencies: ["@tanstack/svelte-query-devtools"],
    };
  } else if (frontendType.hasSolidWeb && api === "orpc") {
    deps.web = {
      dependencies: [
        "@orpc/tanstack-query",
        "@orpc/client",
        "@tanstack/solid-query",
      ],
      devDependencies: [
        "@tanstack/solid-query-devtools",
        "@tanstack/solid-router-devtools",
      ],
    };
  }

  if (api === "trpc") {
    deps.native = {
      dependencies: [
        "@trpc/tanstack-react-query",
        "@trpc/client",
        "@trpc/server",
      ],
    };
  } else if (api === "orpc") {
    deps.native = { dependencies: ["@orpc/tanstack-query", "@orpc/client"] };
  }

  return deps;
}

function getQueryDependencies(frontend: Frontend[]) {
  const reactBasedFrontends: Frontend[] = [
    "react-router",
    "tanstack-router",
    "tanstack-start",
    "next",
    "native-nativewind",
    "native-unistyles",
  ];

  const deps: Record<
    string,
    { dependencies: string[]; devDependencies?: string[] }
  > = {};

  const needsReactQuery = frontend.some((f) => reactBasedFrontends.includes(f));
  if (needsReactQuery) {
    const hasReactWeb = frontend.some(
      (f) =>
        f !== "native-nativewind" &&
        f !== "native-unistyles" &&
        reactBasedFrontends.includes(f),
    );
    const hasNative =
      frontend.includes("native-nativewind") ||
      frontend.includes("native-unistyles");

    if (hasReactWeb) {
      deps.web = {
        dependencies: ["@tanstack/react-query"],
        devDependencies: ["@tanstack/react-query-devtools"],
      };
    }
    if (hasNative) {
      deps.native = { dependencies: ["@tanstack/react-query"] };
    }
  }

  if (frontend.includes("solid")) {
    deps.web = {
      dependencies: ["@tanstack/solid-query"],
      devDependencies: [
        "@tanstack/solid-query-devtools",
        "@tanstack/solid-router-devtools",
      ],
    };
  }

  return deps;
}

function getConvexDependencies(frontend: Frontend[]) {
  const deps: Record<string, { dependencies: string[] }> = {
    web: { dependencies: ["convex"] },
    native: { dependencies: ["convex"] },
  };

  if (frontend.includes("tanstack-start")) {
    deps.web.dependencies.push("@convex-dev/react-query");
  }
  if (frontend.includes("svelte")) {
    deps.web.dependencies.push("convex-svelte");
  }
  if (frontend.includes("nuxt")) {
    deps.web.dependencies.push("convex-nuxt", "convex-vue");
  }

  return deps;
}

export async function setupApi(config: ProjectConfig) {
  const { api, projectName, frontend, backend, packageManager, projectDir } =
    config;
  const isConvex = backend === "convex";

  const webDir = path.join(projectDir, "apps/web");
  const nativeDir = path.join(projectDir, "apps/native");
  const serverDir = path.join(projectDir, "apps/server");

  const webDirExists = await fs.pathExists(webDir);
  const nativeDirExists = await fs.pathExists(nativeDir);
  const serverDirExists = await fs.pathExists(serverDir);

  const frontendType = getFrontendType(frontend);

  if (!isConvex && api !== "none") {
    const apiDeps = getApiDependencies(api, frontendType);

    if (serverDirExists && apiDeps.server) {
      await addPackageDependency({
        dependencies: apiDeps.server.dependencies as AvailableDependencies[],
        projectDir: serverDir,
      });

      if (api === "trpc") {
        if (backend === "hono") {
          await addPackageDependency({
            dependencies: ["@hono/trpc-server"],
            projectDir: serverDir,
          });
        } else if (backend === "elysia") {
          await addPackageDependency({
            dependencies: ["@elysiajs/trpc"],
            projectDir: serverDir,
          });
        }
      }
    }

    if (webDirExists && apiDeps.web) {
      await addPackageDependency({
        dependencies: apiDeps.web.dependencies as AvailableDependencies[],
        devDependencies: apiDeps.web.devDependencies as AvailableDependencies[],
        projectDir: webDir,
      });
    }

    if (nativeDirExists && apiDeps.native) {
      await addPackageDependency({
        dependencies: apiDeps.native.dependencies as AvailableDependencies[],
        projectDir: nativeDir,
      });
    }
  }

  if (!isConvex) {
    const queryDeps = getQueryDependencies(frontend);

    if (webDirExists && queryDeps.web) {
      await addPackageDependency({
        dependencies: queryDeps.web.dependencies as AvailableDependencies[],
        devDependencies: queryDeps.web
          .devDependencies as AvailableDependencies[],
        projectDir: webDir,
      });
    }

    if (nativeDirExists && queryDeps.native) {
      await addPackageDependency({
        dependencies: queryDeps.native.dependencies as AvailableDependencies[],
        projectDir: nativeDir,
      });
    }
  }

  if (isConvex) {
    const convexDeps = getConvexDependencies(frontend);

    if (webDirExists) {
      await addPackageDependency({
        dependencies: convexDeps.web.dependencies as AvailableDependencies[],
        projectDir: webDir,
      });
    }

    if (nativeDirExists) {
      await addPackageDependency({
        dependencies: convexDeps.native.dependencies as AvailableDependencies[],
        projectDir: nativeDir,
      });
    }

    const backendPackageName = `@${projectName}/backend`;
    const backendWorkspaceVersion =
      packageManager === "npm" ? "*" : "workspace:*";

    if (webDirExists) {
      await addBackendWorkspaceDependency(
        webDir,
        backendPackageName,
        backendWorkspaceVersion,
      );
    }

    if (nativeDirExists) {
      await addBackendWorkspaceDependency(
        nativeDir,
        backendPackageName,
        backendWorkspaceVersion,
      );
    }
  }
}
@@ -3,10 +3,11 @@ import { intro, log, outro } from "@clack/prompts";
import consola from "consola";
import fs from "fs-extra";
import pc from "picocolors";
import { DEFAULT_CONFIG } from "../../constants";
import { getDefaultConfig } from "../../constants";
import { getAddonsToAdd } from "../../prompts/addons";
import { gatherConfig } from "../../prompts/config-prompts";
import { getProjectName } from "../../prompts/project-name";
import { getServerDeploymentToAdd } from "../../prompts/server-deploy";
import { getDeploymentToAdd } from "../../prompts/web-deploy";
import type {
  AddInput,
@@ -16,6 +17,7 @@ import type {
  ProjectConfig,
} from "../../types";
import { trackProjectCreation } from "../../utils/analytics";
import { coerceBackendPresets } from "../../utils/compatibility-rules";
import { displayConfig } from "../../utils/display-config";
import { exitWithError, handleError } from "../../utils/errors";
import { generateReproducibleCommand } from "../../utils/generate-reproducible-command";
@@ -24,7 +26,12 @@ import {
  setupProjectDirectory,
} from "../../utils/project-directory";
import { renderTitle } from "../../utils/render-title";
import { getProvidedFlags, processAndValidateFlags } from "../../validation";
import {
  getProvidedFlags,
  processAndValidateFlags,
  processProvidedFlagsWithoutValidation,
  validateConfigCompatibility,
} from "../../validation";
import { addAddonsToProject } from "./add-addons";
import { addDeploymentToProject } from "./add-deployment";
import { createProject } from "./create-project";
@@ -50,13 +57,14 @@ export async function createProjectHandler(
  if (input.yes && input.projectName) {
    currentPathInput = input.projectName;
  } else if (input.yes) {
    let defaultName = DEFAULT_CONFIG.relativePath;
    const defaultConfig = getDefaultConfig();
    let defaultName = defaultConfig.relativePath;
    let counter = 1;
    while (
      fs.pathExistsSync(path.resolve(process.cwd(), defaultName)) &&
      fs.readdirSync(path.resolve(process.cwd(), defaultName)).length > 0
      (await fs.pathExists(path.resolve(process.cwd(), defaultName))) &&
      (await fs.readdir(path.resolve(process.cwd(), defaultName))).length > 0
    ) {
      defaultName = `${DEFAULT_CONFIG.projectName}-${counter}`;
      defaultName = `${defaultConfig.projectName}-${counter}`;
      counter++;
    }
    currentPathInput = defaultName;
@@ -102,6 +110,7 @@ export async function createProjectHandler(
        dbSetup: "none",
        api: "none",
        webDeploy: "none",
        serverDeploy: "none",
      } satisfies ProjectConfig,
      reproducibleCommand: "",
      timeScaffolded,
@@ -124,29 +133,25 @@ export async function createProjectHandler(

  const providedFlags = getProvidedFlags(cliInput);

  const flagConfig = processAndValidateFlags(
    cliInput,
    providedFlags,
    finalBaseName,
  );
  const { projectName: _projectNameFromFlags, ...otherFlags } = flagConfig;

  if (!input.yes && Object.keys(otherFlags).length > 0) {
    log.info(pc.yellow("Using these pre-selected options:"));
    log.message(displayConfig(otherFlags));
    log.message("");
  }

  let config: ProjectConfig;
  if (input.yes) {
    const flagConfig = processProvidedFlagsWithoutValidation(
      cliInput,
      finalBaseName,
    );

    config = {
      ...DEFAULT_CONFIG,
      ...getDefaultConfig(),
      ...flagConfig,
      projectName: finalBaseName,
      projectDir: finalResolvedPath,
      relativePath: finalPathInput,
    };

    coerceBackendPresets(config);

    validateConfigCompatibility(config, providedFlags, cliInput);

    if (config.backend === "convex") {
      log.info(
        "Due to '--backend convex' flag, the following options have been automatically set: auth=false, database=none, orm=none, api=none, runtime=none, dbSetup=none, examples=todo",
@@ -161,6 +166,19 @@ export async function createProjectHandler(
    log.message(displayConfig(config));
    log.message("");
  } else {
    const flagConfig = processAndValidateFlags(
      cliInput,
      providedFlags,
      finalBaseName,
    );
    const { projectName: _projectNameFromFlags, ...otherFlags } = flagConfig;

    if (Object.keys(otherFlags).length > 0) {
      log.info(pc.yellow("Using these pre-selected options:"));
      log.message(displayConfig(otherFlags));
      log.message("");
    }

    config = await gatherConfig(
      flagConfig,
      finalBaseName,
@@ -207,11 +225,11 @@ async function handleDirectoryConflictProgrammatically(
): Promise<{ finalPathInput: string; shouldClearDirectory: boolean }> {
  const currentPath = path.resolve(process.cwd(), currentPathInput);

  if (!fs.pathExistsSync(currentPath)) {
  if (!(await fs.pathExists(currentPath))) {
    return { finalPathInput: currentPathInput, shouldClearDirectory: false };
  }

  const dirContents = fs.readdirSync(currentPath);
  const dirContents = await fs.readdir(currentPath);
|
||||
const isNotEmpty = dirContents.length > 0;
|
||||
|
||||
if (!isNotEmpty) {
|
||||
@@ -231,8 +249,9 @@ async function handleDirectoryConflictProgrammatically(
|
||||
let finalPathInput = `${baseName}-${counter}`;
|
||||
|
||||
while (
|
||||
fs.pathExistsSync(path.resolve(process.cwd(), finalPathInput)) &&
|
||||
fs.readdirSync(path.resolve(process.cwd(), finalPathInput)).length > 0
|
||||
(await fs.pathExists(path.resolve(process.cwd(), finalPathInput))) &&
|
||||
(await fs.readdir(path.resolve(process.cwd(), finalPathInput))).length >
|
||||
0
|
||||
) {
|
||||
counter++;
|
||||
finalPathInput = `${baseName}-${counter}`;
|
||||
@@ -284,6 +303,17 @@ export async function addAddonsHandler(input: AddInput) {
|
||||
}
|
||||
}
|
||||
|
||||
if (!input.serverDeploy) {
|
||||
const serverDeploymentPrompt = await getServerDeploymentToAdd(
|
||||
detectedConfig.runtime,
|
||||
detectedConfig.serverDeploy,
|
||||
);
|
||||
|
||||
if (serverDeploymentPrompt !== "none") {
|
||||
input.serverDeploy = serverDeploymentPrompt;
|
||||
}
|
||||
}
|
||||
|
||||
const packageManager =
|
||||
input.packageManager || detectedConfig.packageManager || "npm";
|
||||
|
||||
@@ -309,6 +339,16 @@ export async function addAddonsHandler(input: AddInput) {
|
||||
somethingAdded = true;
|
||||
}
|
||||
|
||||
if (input.serverDeploy && input.serverDeploy !== "none") {
|
||||
await addDeploymentToProject({
|
||||
...input,
|
||||
install: false,
|
||||
suppressInstallMessage: true,
|
||||
serverDeploy: input.serverDeploy,
|
||||
});
|
||||
somethingAdded = true;
|
||||
}
|
||||
|
||||
if (!somethingAdded) {
|
||||
outro(pc.yellow("No addons or deployment configurations to add."));
|
||||
return;
|
||||
13
apps/cli/src/helpers/core/convex-codegen.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import path from "node:path";
|
||||
import { execa } from "execa";
|
||||
import type { PackageManager } from "../../types";
|
||||
import { getPackageExecutionCommand } from "../../utils/package-runner";
|
||||
|
||||
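// Runs `convex codegen` in the packages/backend workspace using the project's package manager.
|
||||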
export async function runConvexCodegen(
|
||||
projectDir: string,
|
||||
packageManager: PackageManager | null | undefined,
|
||||
) {
|
||||
const backendDir = path.join(projectDir, "packages/backend");
|
||||
const cmd = getPackageExecutionCommand(packageManager, "convex codegen");
|
||||
await execa(cmd, { cwd: backendDir, shell: true });
|
||||
}
|
||||
@@ -3,17 +3,17 @@ import fs from "fs-extra";
|
||||
import type { ProjectConfig } from "../../types";
|
||||
import { writeBtsConfig } from "../../utils/bts-config";
|
||||
import { exitWithError } from "../../utils/errors";
|
||||
import { setupAddons } from "../setup/addons-setup";
|
||||
import { setupApi } from "../setup/api-setup";
|
||||
import { setupAuth } from "../setup/auth-setup";
|
||||
import { setupBackendDependencies } from "../setup/backend-setup";
|
||||
import { setupDatabase } from "../setup/db-setup";
|
||||
import { setupExamples } from "../setup/examples-setup";
|
||||
import {
|
||||
generateCloudflareWorkerTypes,
|
||||
setupRuntime,
|
||||
} from "../setup/runtime-setup";
|
||||
import { setupWebDeploy } from "../setup/web-deploy-setup";
|
||||
import { formatProjectWithBiome } from "../../utils/format-with-biome";
|
||||
import { setupAddons } from "../addons/addons-setup";
|
||||
import { setupAuth } from "../addons/auth-setup";
|
||||
import { setupExamples } from "../addons/examples-setup";
|
||||
import { setupApi } from "../core/api-setup";
|
||||
import { setupBackendDependencies } from "../core/backend-setup";
|
||||
import { setupDatabase } from "../core/db-setup";
|
||||
import { setupRuntime } from "../core/runtime-setup";
|
||||
import { setupServerDeploy } from "../deployment/server-deploy-setup";
|
||||
import { setupWebDeploy } from "../deployment/web-deploy-setup";
|
||||
import { runConvexCodegen } from "./convex-codegen";
|
||||
import { createReadme } from "./create-readme";
|
||||
import { setupEnvironmentVariables } from "./env-setup";
|
||||
import { initializeGit } from "./git";
|
||||
@@ -77,6 +77,7 @@ export async function createProject(options: ProjectConfig) {
|
||||
await handleExtras(projectDir, options);
|
||||
|
||||
await setupWebDeploy(options);
|
||||
await setupServerDeploy(options);
|
||||
|
||||
await setupEnvironmentVariables(options);
|
||||
await updatePackageConfigurations(projectDir, options);
|
||||
@@ -84,6 +85,12 @@ export async function createProject(options: ProjectConfig) {
|
||||
|
||||
await writeBtsConfig(options);
|
||||
|
||||
await formatProjectWithBiome(projectDir);
|
||||
|
||||
if (isConvex) {
|
||||
await runConvexCodegen(projectDir, options.packageManager);
|
||||
}
|
||||
|
||||
log.success("Project template successfully scaffolded!");
|
||||
|
||||
if (options.install) {
|
||||
@@ -91,7 +98,6 @@ export async function createProject(options: ProjectConfig) {
|
||||
projectDir,
|
||||
packageManager: options.packageManager,
|
||||
});
|
||||
await generateCloudflareWorkerTypes(options);
|
||||
}
|
||||
|
||||
await initializeGit(projectDir, options.git);
|
||||
@@ -24,6 +24,7 @@ export async function detectProjectConfig(
|
||||
dbSetup: btsConfig.dbSetup,
|
||||
api: btsConfig.api,
|
||||
webDeploy: btsConfig.webDeploy,
|
||||
serverDeploy: btsConfig.serverDeploy,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import path from "node:path";
|
||||
import fs from "fs-extra";
|
||||
import type { ProjectConfig } from "../../types";
|
||||
import { generateAuthSecret } from "../setup/auth-setup";
|
||||
import { generateAuthSecret } from "../addons/auth-setup";
|
||||
|
||||
export interface EnvVariable {
|
||||
key: string;
|
||||
@@ -85,8 +85,17 @@ export async function addEnvVariablesToFile(
|
||||
}
|
||||
|
||||
export async function setupEnvironmentVariables(config: ProjectConfig) {
|
||||
const { backend, frontend, database, auth, examples, dbSetup, projectDir } =
|
||||
config;
|
||||
const {
|
||||
backend,
|
||||
frontend,
|
||||
database,
|
||||
auth,
|
||||
examples,
|
||||
dbSetup,
|
||||
projectDir,
|
||||
webDeploy,
|
||||
serverDeploy,
|
||||
} = config;
|
||||
|
||||
const hasReactRouter = frontend.includes("react-router");
|
||||
const hasTanStackRouter = frontend.includes("tanstack-router");
|
||||
@@ -239,10 +248,51 @@ export async function setupEnvironmentVariables(config: ProjectConfig) {
|
||||
|
||||
await addEnvVariablesToFile(envPath, serverVars);
|
||||
|
||||
if (config.runtime === "workers") {
|
||||
const devVarsPath = path.join(serverDir, ".dev.vars");
|
||||
try {
|
||||
await fs.copy(envPath, devVarsPath);
|
||||
} catch (_err) {}
|
||||
const isUnifiedAlchemy =
|
||||
webDeploy === "alchemy" && serverDeploy === "alchemy";
|
||||
const isIndividualAlchemy =
|
||||
webDeploy === "alchemy" || serverDeploy === "alchemy";
|
||||
|
||||
if (isUnifiedAlchemy) {
|
||||
const rootEnvPath = path.join(projectDir, ".env");
|
||||
const rootAlchemyVars: EnvVariable[] = [
|
||||
{
|
||||
key: "ALCHEMY_PASSWORD",
|
||||
value: "please-change-this",
|
||||
condition: true,
|
||||
},
|
||||
];
|
||||
await addEnvVariablesToFile(rootEnvPath, rootAlchemyVars);
|
||||
} else if (isIndividualAlchemy) {
|
||||
if (webDeploy === "alchemy") {
|
||||
const webDir = path.join(projectDir, "apps/web");
|
||||
if (await fs.pathExists(webDir)) {
|
||||
const webAlchemyVars: EnvVariable[] = [
|
||||
{
|
||||
key: "ALCHEMY_PASSWORD",
|
||||
value: "please-change-this",
|
||||
condition: true,
|
||||
},
|
||||
];
|
||||
await addEnvVariablesToFile(path.join(webDir, ".env"), webAlchemyVars);
|
||||
}
|
||||
}
|
||||
|
||||
if (serverDeploy === "alchemy") {
|
||||
const serverDir = path.join(projectDir, "apps/server");
|
||||
if (await fs.pathExists(serverDir)) {
|
||||
const serverAlchemyVars: EnvVariable[] = [
|
||||
{
|
||||
key: "ALCHEMY_PASSWORD",
|
||||
value: "please-change-this",
|
||||
condition: true,
|
||||
},
|
||||
];
|
||||
await addEnvVariablesToFile(
|
||||
path.join(serverDir, ".env"),
|
||||
serverAlchemyVars,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -25,6 +25,7 @@ export async function displayPostInstallInstructions(
|
||||
backend,
|
||||
dbSetup,
|
||||
webDeploy,
|
||||
serverDeploy,
|
||||
} = config;
|
||||
|
||||
const isConvex = backend === "convex";
|
||||
@@ -35,7 +36,14 @@ export async function displayPostInstallInstructions(
|
||||
|
||||
const databaseInstructions =
|
||||
!isConvex && database !== "none"
|
||||
? await getDatabaseInstructions(database, orm, runCmd, runtime, dbSetup)
|
||||
? await getDatabaseInstructions(
|
||||
database,
|
||||
orm,
|
||||
runCmd,
|
||||
runtime,
|
||||
dbSetup,
|
||||
serverDeploy,
|
||||
)
|
||||
: "";
|
||||
|
||||
const tauriInstructions = addons?.includes("tauri")
|
||||
@@ -56,8 +64,16 @@ export async function displayPostInstallInstructions(
|
||||
const starlightInstructions = addons?.includes("starlight")
|
||||
? getStarlightInstructions(runCmd)
|
||||
: "";
|
||||
const workersDeployInstructions =
|
||||
webDeploy === "workers" ? getWorkersDeployInstructions(runCmd) : "";
|
||||
const wranglerDeployInstructions = getWranglerDeployInstructions(
|
||||
runCmd,
|
||||
webDeploy,
|
||||
serverDeploy,
|
||||
);
|
||||
const alchemyDeployInstructions = getAlchemyDeployInstructions(
|
||||
runCmd,
|
||||
webDeploy,
|
||||
serverDeploy,
|
||||
);
|
||||
|
||||
const hasWeb = frontend?.some((f) =>
|
||||
[
|
||||
@@ -116,11 +132,9 @@ export async function displayPostInstallInstructions(
|
||||
)} Complete D1 database setup first\n (see Database commands below)\n`;
|
||||
}
|
||||
output += `${pc.cyan(`${stepCounter++}.`)} ${runCmd} dev\n`;
|
||||
output += `${pc.cyan(
|
||||
`${stepCounter++}.`,
|
||||
)} cd apps/server && ${runCmd} run cf-typegen\n\n`;
|
||||
} else {
|
||||
output += "\n";
|
||||
if (serverDeploy === "wrangler") {
|
||||
output += `${pc.cyan(`${stepCounter++}.`)} cd apps/server && ${runCmd} cf-typegen\n`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -151,8 +165,10 @@ export async function displayPostInstallInstructions(
|
||||
if (tauriInstructions) output += `\n${tauriInstructions.trim()}\n`;
|
||||
if (lintingInstructions) output += `\n${lintingInstructions.trim()}\n`;
|
||||
if (pwaInstructions) output += `\n${pwaInstructions.trim()}\n`;
|
||||
if (workersDeployInstructions)
|
||||
output += `\n${workersDeployInstructions.trim()}\n`;
|
||||
if (wranglerDeployInstructions)
|
||||
output += `\n${wranglerDeployInstructions.trim()}\n`;
|
||||
if (alchemyDeployInstructions)
|
||||
output += `\n${alchemyDeployInstructions.trim()}\n`;
|
||||
if (starlightInstructions) output += `\n${starlightInstructions.trim()}\n`;
|
||||
|
||||
if (noOrmWarning) output += `\n${noOrmWarning.trim()}\n`;
|
||||
@@ -202,10 +218,11 @@ async function getDatabaseInstructions(
|
||||
database: Database,
|
||||
orm?: ORM,
|
||||
runCmd?: string,
|
||||
runtime?: Runtime,
|
||||
_runtime?: Runtime,
|
||||
dbSetup?: DatabaseSetup,
|
||||
serverDeploy?: string,
|
||||
): Promise<string> {
|
||||
const instructions = [];
|
||||
const instructions: string[] = [];
|
||||
|
||||
if (dbSetup === "docker") {
|
||||
const dockerStatus = await getDockerStatus(database);
|
||||
@@ -216,7 +233,7 @@ async function getDatabaseInstructions(
|
||||
}
|
||||
}
|
||||
|
||||
if (runtime === "workers" && dbSetup === "d1") {
|
||||
if (serverDeploy === "wrangler" && dbSetup === "d1") {
|
||||
const packageManager = runCmd === "npm run" ? "npm" : runCmd || "npm";
|
||||
|
||||
instructions.push(
|
||||
@@ -249,7 +266,9 @@ async function getDatabaseInstructions(
|
||||
`${packageManager} wrangler d1 migrations apply YOUR_DB_NAME`,
|
||||
)}`,
|
||||
);
|
||||
instructions.push("");
|
||||
}
|
||||
|
||||
if (dbSetup === "d1" && serverDeploy === "alchemy") {
|
||||
}
|
||||
|
||||
if (orm === "prisma") {
|
||||
@@ -281,7 +300,9 @@ async function getDatabaseInstructions(
|
||||
`${pc.cyan("•")} Start docker container: ${`${runCmd} db:start`}`,
|
||||
);
|
||||
}
|
||||
if (dbSetup !== "d1") {
|
||||
instructions.push(`${pc.cyan("•")} Apply schema: ${`${runCmd} db:push`}`);
|
||||
}
|
||||
instructions.push(`${pc.cyan("•")} Database UI: ${`${runCmd} db:studio`}`);
|
||||
if (database === "sqlite" && dbSetup !== "d1") {
|
||||
instructions.push(
|
||||
@@ -343,6 +364,47 @@ function getBunWebNativeWarning(): string {
|
||||
)} 'bun' might cause issues with web + native apps in a monorepo.\n Use 'pnpm' if problems arise.`;
|
||||
}
|
||||
|
||||
function getWorkersDeployInstructions(runCmd?: string): string {
|
||||
return `\n${pc.bold("Deploy frontend to Cloudflare Workers:")}\n${pc.cyan("•")} Deploy: ${`cd apps/web && ${runCmd} run deploy`}`;
|
||||
function getWranglerDeployInstructions(
|
||||
runCmd?: string,
|
||||
webDeploy?: string,
|
||||
serverDeploy?: string,
|
||||
): string {
|
||||
const instructions: string[] = [];
|
||||
|
||||
if (webDeploy === "wrangler") {
|
||||
instructions.push(
|
||||
`${pc.bold("Deploy web to Cloudflare Workers:")}\n${pc.cyan("•")} Deploy: ${`cd apps/web && ${runCmd} run deploy`}`,
|
||||
);
|
||||
}
|
||||
if (serverDeploy === "wrangler") {
|
||||
instructions.push(
|
||||
`${pc.bold("Deploy server to Cloudflare Workers:")}\n${pc.cyan("•")} Deploy: ${`cd apps/server && ${runCmd} run deploy`}`,
|
||||
);
|
||||
}
|
||||
|
||||
return instructions.length ? `\n${instructions.join("\n")}` : "";
|
||||
}
|
||||
|
||||
function getAlchemyDeployInstructions(
|
||||
runCmd?: string,
|
||||
webDeploy?: string,
|
||||
serverDeploy?: string,
|
||||
): string {
|
||||
const instructions: string[] = [];
|
||||
|
||||
if (webDeploy === "alchemy" && serverDeploy !== "alchemy") {
|
||||
instructions.push(
|
||||
`${pc.bold("Deploy web to Alchemy:")}\n${pc.cyan("•")} Deploy: ${`cd apps/web && ${runCmd} deploy`}`,
|
||||
);
|
||||
} else if (serverDeploy === "alchemy" && webDeploy !== "alchemy") {
|
||||
instructions.push(
|
||||
`${pc.bold("Deploy server to Alchemy:")}\n${pc.cyan("•")} Deploy: ${`cd apps/server && ${runCmd} deploy`}`,
|
||||
);
|
||||
} else if (webDeploy === "alchemy" && serverDeploy === "alchemy") {
|
||||
instructions.push(
|
||||
`${pc.bold("Deploy to Alchemy:")}\n${pc.cyan("•")} Deploy: ${`${runCmd} deploy`}`,
|
||||
);
|
||||
}
|
||||
|
||||
return instructions.length ? `\n${instructions.join("\n")}` : "";
|
||||
}
|
||||
@@ -1,8 +1,5 @@
|
||||
import path from "node:path";
|
||||
import { spinner } from "@clack/prompts";
|
||||
import { execa } from "execa";
|
||||
import fs from "fs-extra";
|
||||
import pc from "picocolors";
|
||||
import type { Backend, ProjectConfig } from "../../types";
|
||||
import { addPackageDependency } from "../../utils/add-package-deps";
|
||||
|
||||
@@ -23,43 +20,6 @@ export async function setupRuntime(config: ProjectConfig) {
|
||||
await setupBunRuntime(serverDir, backend);
|
||||
} else if (runtime === "node") {
|
||||
await setupNodeRuntime(serverDir, backend);
|
||||
} else if (runtime === "workers") {
|
||||
await setupWorkersRuntime(serverDir);
|
||||
}
|
||||
}
|
||||
|
||||
export async function generateCloudflareWorkerTypes(config: ProjectConfig) {
|
||||
if (config.runtime !== "workers") {
|
||||
return;
|
||||
}
|
||||
|
||||
const serverDir = path.join(config.projectDir, "apps/server");
|
||||
|
||||
if (!(await fs.pathExists(serverDir))) {
|
||||
return;
|
||||
}
|
||||
|
||||
const s = spinner();
|
||||
|
||||
try {
|
||||
s.start("Generating Cloudflare Workers types...");
|
||||
|
||||
const runCmd =
|
||||
config.packageManager === "npm" ? "npm" : config.packageManager;
|
||||
await execa(runCmd, ["run", "cf-typegen"], {
|
||||
cwd: serverDir,
|
||||
});
|
||||
|
||||
s.stop("Cloudflare Workers types generated successfully!");
|
||||
} catch {
|
||||
s.stop(pc.yellow("Failed to generate Cloudflare Workers types"));
|
||||
const managerCmd =
|
||||
config.packageManager === "npm"
|
||||
? "npm run"
|
||||
: `${config.packageManager} run`;
|
||||
console.warn(
|
||||
`Note: You can manually run 'cd apps/server && ${managerCmd} cf-typegen' in the project directory later`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -114,26 +74,3 @@ async function setupNodeRuntime(serverDir: string, backend: Backend) {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async function setupWorkersRuntime(serverDir: string) {
|
||||
const packageJsonPath = path.join(serverDir, "package.json");
|
||||
if (!(await fs.pathExists(packageJsonPath))) return;
|
||||
|
||||
const packageJson = await fs.readJson(packageJsonPath);
|
||||
|
||||
packageJson.scripts = {
|
||||
...packageJson.scripts,
|
||||
dev: "wrangler dev --port=3000",
|
||||
start: "wrangler dev",
|
||||
deploy: "wrangler deploy",
|
||||
build: "wrangler deploy --dry-run",
|
||||
"cf-typegen": "wrangler types --env-interface CloudflareBindings",
|
||||
};
|
||||
|
||||
await fs.writeJson(packageJsonPath, packageJson, { spaces: 2 });
|
||||
|
||||
await addPackageDependency({
|
||||
devDependencies: ["wrangler", "@types/node"],
|
||||
projectDir: serverDir,
|
||||
});
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
import path from "node:path";
|
||||
import fs from "fs-extra";
|
||||
import { globby } from "globby";
|
||||
import { glob } from "tinyglobby";
|
||||
import { PKG_ROOT } from "../../constants";
|
||||
import type { ProjectConfig } from "../../types";
|
||||
import { processTemplate } from "../../utils/template-processor";
|
||||
@@ -13,7 +13,7 @@ export async function processAndCopyFiles(
|
||||
overwrite = true,
|
||||
ignorePatterns?: string[],
|
||||
) {
|
||||
const sourceFiles = await globby(sourcePattern, {
|
||||
const sourceFiles = await glob(sourcePattern, {
|
||||
cwd: baseSourceDir,
|
||||
dot: true,
|
||||
onlyFiles: true,
|
||||
@@ -788,19 +788,6 @@ export async function handleExtras(projectDir: string, context: ProjectConfig) {
|
||||
await processAndCopyFiles("_npmrc.hbs", extrasDir, projectDir, context);
|
||||
}
|
||||
}
|
||||
|
||||
if (context.runtime === "workers") {
|
||||
const runtimeWorkersDir = path.join(PKG_ROOT, "templates/runtime/workers");
|
||||
if (await fs.pathExists(runtimeWorkersDir)) {
|
||||
await processAndCopyFiles(
|
||||
"**/*",
|
||||
runtimeWorkersDir,
|
||||
projectDir,
|
||||
context,
|
||||
false,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function setupDockerComposeTemplates(
|
||||
@@ -827,16 +814,91 @@ export async function setupDeploymentTemplates(
|
||||
projectDir: string,
|
||||
context: ProjectConfig,
|
||||
) {
|
||||
if (context.webDeploy === "none") {
|
||||
return;
|
||||
if (context.webDeploy === "alchemy" || context.serverDeploy === "alchemy") {
|
||||
if (context.webDeploy === "alchemy" && context.serverDeploy === "alchemy") {
|
||||
const alchemyTemplateSrc = path.join(
|
||||
PKG_ROOT,
|
||||
"templates/deploy/alchemy",
|
||||
);
|
||||
if (await fs.pathExists(alchemyTemplateSrc)) {
|
||||
await processAndCopyFiles(
|
||||
"alchemy.run.ts.hbs",
|
||||
alchemyTemplateSrc,
|
||||
projectDir,
|
||||
context,
|
||||
);
|
||||
const serverAppDir = path.join(projectDir, "apps/server");
|
||||
if (await fs.pathExists(serverAppDir)) {
|
||||
await processAndCopyFiles(
|
||||
"env.d.ts.hbs",
|
||||
alchemyTemplateSrc,
|
||||
serverAppDir,
|
||||
context,
|
||||
);
|
||||
await processAndCopyFiles(
|
||||
"wrangler.jsonc.hbs",
|
||||
alchemyTemplateSrc,
|
||||
serverAppDir,
|
||||
context,
|
||||
);
|
||||
}
|
||||
|
||||
if (context.webDeploy === "workers") {
|
||||
}
|
||||
} else {
|
||||
if (context.webDeploy === "alchemy") {
|
||||
const alchemyTemplateSrc = path.join(
|
||||
PKG_ROOT,
|
||||
"templates/deploy/alchemy",
|
||||
);
|
||||
const webAppDir = path.join(projectDir, "apps/web");
|
||||
if (!(await fs.pathExists(webAppDir))) {
|
||||
return;
|
||||
if (
|
||||
(await fs.pathExists(alchemyTemplateSrc)) &&
|
||||
(await fs.pathExists(webAppDir))
|
||||
) {
|
||||
await processAndCopyFiles(
|
||||
"alchemy.run.ts.hbs",
|
||||
alchemyTemplateSrc,
|
||||
webAppDir,
|
||||
context,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (context.serverDeploy === "alchemy") {
|
||||
const alchemyTemplateSrc = path.join(
|
||||
PKG_ROOT,
|
||||
"templates/deploy/alchemy",
|
||||
);
|
||||
const serverAppDir = path.join(projectDir, "apps/server");
|
||||
if (
|
||||
(await fs.pathExists(alchemyTemplateSrc)) &&
|
||||
(await fs.pathExists(serverAppDir))
|
||||
) {
|
||||
await processAndCopyFiles(
|
||||
"alchemy.run.ts.hbs",
|
||||
alchemyTemplateSrc,
|
||||
serverAppDir,
|
||||
context,
|
||||
);
|
||||
await processAndCopyFiles(
|
||||
"env.d.ts.hbs",
|
||||
alchemyTemplateSrc,
|
||||
serverAppDir,
|
||||
context,
|
||||
);
|
||||
await processAndCopyFiles(
|
||||
"wrangler.jsonc.hbs",
|
||||
alchemyTemplateSrc,
|
||||
serverAppDir,
|
||||
context,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (context.webDeploy !== "none" && context.webDeploy !== "alchemy") {
|
||||
const webAppDir = path.join(projectDir, "apps/web");
|
||||
if (await fs.pathExists(webAppDir)) {
|
||||
const frontends = context.frontend;
|
||||
|
||||
const templateMap: Record<string, string> = {
|
||||
@@ -853,7 +915,7 @@ export async function setupDeploymentTemplates(
|
||||
if (templateMap[f]) {
|
||||
const deployTemplateSrc = path.join(
|
||||
PKG_ROOT,
|
||||
`templates/deploy/web/${templateMap[f]}`,
|
||||
`templates/deploy/${context.webDeploy}/web/${templateMap[f]}`,
|
||||
);
|
||||
if (await fs.pathExists(deployTemplateSrc)) {
|
||||
await processAndCopyFiles(
|
||||
@@ -867,3 +929,22 @@ export async function setupDeploymentTemplates(
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (context.serverDeploy !== "none" && context.serverDeploy !== "alchemy") {
|
||||
const serverAppDir = path.join(projectDir, "apps/server");
|
||||
if (await fs.pathExists(serverAppDir)) {
|
||||
const deployTemplateSrc = path.join(
|
||||
PKG_ROOT,
|
||||
`templates/deploy/${context.serverDeploy}/server`,
|
||||
);
|
||||
if (await fs.pathExists(deployTemplateSrc)) {
|
||||
await processAndCopyFiles(
|
||||
"**/*",
|
||||
deployTemplateSrc,
|
||||
serverAppDir,
|
||||
context,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,13 +1,11 @@
|
||||
import path from "node:path";
|
||||
import type { ProjectConfig } from "../../types";
|
||||
import {
|
||||
addEnvVariablesToFile,
|
||||
type EnvVariable,
|
||||
} from "../project-generation/env-setup";
|
||||
import { addEnvVariablesToFile, type EnvVariable } from "../core/env-setup";
|
||||
|
||||
export async function setupCloudflareD1(config: ProjectConfig) {
|
||||
const { projectDir } = config;
|
||||
const { projectDir, serverDeploy } = config;
|
||||
|
||||
if (serverDeploy === "wrangler") {
|
||||
const envPath = path.join(projectDir, "apps/server", ".env");
|
||||
|
||||
const variables: EnvVariable[] = [
|
||||
@@ -32,3 +30,4 @@ export async function setupCloudflareD1(config: ProjectConfig) {
|
||||
await addEnvVariablesToFile(envPath, variables);
|
||||
} catch (_err) {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
import path from "node:path";
|
||||
import type { Database, ProjectConfig } from "../../types";
|
||||
import {
|
||||
addEnvVariablesToFile,
|
||||
type EnvVariable,
|
||||
} from "../project-generation/env-setup";
|
||||
import { addEnvVariablesToFile, type EnvVariable } from "../core/env-setup";
|
||||
|
||||
export async function setupDockerCompose(config: ProjectConfig) {
|
||||
const { database, projectDir, projectName } = config;
|
||||
|
||||
@@ -6,10 +6,7 @@ import fs from "fs-extra";
|
||||
import pc from "picocolors";
|
||||
import type { ProjectConfig } from "../../types";
|
||||
import { commandExists } from "../../utils/command-exists";
|
||||
import {
|
||||
addEnvVariablesToFile,
|
||||
type EnvVariable,
|
||||
} from "../project-generation/env-setup";
|
||||
import { addEnvVariablesToFile, type EnvVariable } from "../core/env-setup";
|
||||
|
||||
type MongoDBConfig = {
|
||||
connectionString: string;
|
||||
|
||||
@@ -7,10 +7,7 @@ import pc from "picocolors";
|
||||
import type { PackageManager, ProjectConfig } from "../../types";
|
||||
import { exitCancelled } from "../../utils/errors";
|
||||
import { getPackageExecutionCommand } from "../../utils/package-runner";
|
||||
import {
|
||||
addEnvVariablesToFile,
|
||||
type EnvVariable,
|
||||
} from "../project-generation/env-setup";
|
||||
import { addEnvVariablesToFile, type EnvVariable } from "../core/env-setup";
|
||||
|
||||
type NeonConfig = {
|
||||
connectionString: string;
|
||||
|
||||
@@ -8,10 +8,7 @@ import type { ORM, PackageManager, ProjectConfig } from "../../types";
|
||||
import { addPackageDependency } from "../../utils/add-package-deps";
|
||||
import { exitCancelled } from "../../utils/errors";
|
||||
import { getPackageExecutionCommand } from "../../utils/package-runner";
|
||||
import {
|
||||
addEnvVariablesToFile,
|
||||
type EnvVariable,
|
||||
} from "../project-generation/env-setup";
|
||||
import { addEnvVariablesToFile, type EnvVariable } from "../core/env-setup";
|
||||
|
||||
type PrismaConfig = {
|
||||
databaseUrl: string;
|
||||
@@ -253,9 +250,8 @@ export async function setupPrismaPostgres(config: ProjectConfig) {
|
||||
if (prismaConfig) {
|
||||
await writeEnvFile(projectDir, prismaConfig);
|
||||
|
||||
await addDotenvImportToPrismaConfig(projectDir);
|
||||
|
||||
if (orm === "prisma") {
|
||||
await addDotenvImportToPrismaConfig(projectDir);
|
||||
await addPrismaAccelerateExtension(serverDir);
|
||||
}
|
||||
log.success(
|
||||
|
||||
@@ -6,10 +6,7 @@ import fs from "fs-extra";
|
||||
import pc from "picocolors";
|
||||
import type { PackageManager, ProjectConfig } from "../../types";
|
||||
import { getPackageExecutionCommand } from "../../utils/package-runner";
|
||||
import {
|
||||
addEnvVariablesToFile,
|
||||
type EnvVariable,
|
||||
} from "../project-generation/env-setup";
|
||||
import { addEnvVariablesToFile, type EnvVariable } from "../core/env-setup";
|
||||
|
||||
async function writeSupabaseEnvFile(projectDir: string, databaseUrl: string) {
|
||||
try {
|
||||
|
||||
@@ -7,10 +7,7 @@ import pc from "picocolors";
|
||||
import type { ProjectConfig } from "../../types";
|
||||
import { commandExists } from "../../utils/command-exists";
|
||||
import { exitCancelled } from "../../utils/errors";
|
||||
import {
|
||||
addEnvVariablesToFile,
|
||||
type EnvVariable,
|
||||
} from "../project-generation/env-setup";
|
||||
import { addEnvVariablesToFile, type EnvVariable } from "../core/env-setup";
|
||||
|
||||
type TursoConfig = {
|
||||
dbUrl: string;
|
||||
|
||||
@@ -0,0 +1,66 @@
|
||||
import path from "node:path";
|
||||
import fs from "fs-extra";
|
||||
import type { PackageManager, ProjectConfig } from "../../../types";
|
||||
import { addPackageDependency } from "../../../utils/add-package-deps";
|
||||
import { setupAlchemyServerDeploy } from "../server-deploy-setup";
|
||||
import { setupNextAlchemyDeploy } from "./alchemy-next-setup";
|
||||
import { setupNuxtAlchemyDeploy } from "./alchemy-nuxt-setup";
|
||||
import { setupReactRouterAlchemyDeploy } from "./alchemy-react-router-setup";
|
||||
import { setupSolidAlchemyDeploy } from "./alchemy-solid-setup";
|
||||
import { setupSvelteAlchemyDeploy } from "./alchemy-svelte-setup";
|
||||
import { setupTanStackRouterAlchemyDeploy } from "./alchemy-tanstack-router-setup";
|
||||
import { setupTanStackStartAlchemyDeploy } from "./alchemy-tanstack-start-setup";
|
||||
|
||||
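// Unified Alchemy deploy: root-level deps and deploy/destroy scripts, server app setup, then the matching frontend-specific Alchemy setup.
|
||||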
export async function setupCombinedAlchemyDeploy(
|
||||
projectDir: string,
|
||||
packageManager: PackageManager,
|
||||
config: ProjectConfig,
|
||||
) {
|
||||
await addPackageDependency({
|
||||
devDependencies: ["alchemy", "dotenv"],
|
||||
projectDir,
|
||||
});
|
||||
|
||||
const rootPkgPath = path.join(projectDir, "package.json");
|
||||
if (await fs.pathExists(rootPkgPath)) {
|
||||
const pkg = await fs.readJson(rootPkgPath);
|
||||
|
||||
pkg.scripts = {
|
||||
...pkg.scripts,
|
||||
deploy: "alchemy deploy",
|
||||
destroy: "alchemy destroy",
|
||||
"alchemy:dev": "alchemy dev",
|
||||
};
|
||||
await fs.writeJson(rootPkgPath, pkg, { spaces: 2 });
|
||||
}
|
||||
|
||||
const serverDir = path.join(projectDir, "apps/server");
|
||||
if (await fs.pathExists(serverDir)) {
|
||||
await setupAlchemyServerDeploy(serverDir, packageManager);
|
||||
}
|
||||
|
||||
const frontend = config.frontend;
|
||||
const isNext = frontend.includes("next");
|
||||
const isNuxt = frontend.includes("nuxt");
|
||||
const isSvelte = frontend.includes("svelte");
|
||||
const isTanstackRouter = frontend.includes("tanstack-router");
|
||||
const isTanstackStart = frontend.includes("tanstack-start");
|
||||
const isReactRouter = frontend.includes("react-router");
|
||||
const isSolid = frontend.includes("solid");
|
||||
|
||||
if (isNext) {
|
||||
await setupNextAlchemyDeploy(projectDir, packageManager);
|
||||
} else if (isNuxt) {
|
||||
await setupNuxtAlchemyDeploy(projectDir, packageManager);
|
||||
} else if (isSvelte) {
|
||||
await setupSvelteAlchemyDeploy(projectDir, packageManager);
|
||||
} else if (isTanstackStart) {
|
||||
await setupTanStackStartAlchemyDeploy(projectDir, packageManager);
|
||||
} else if (isTanstackRouter) {
|
||||
await setupTanStackRouterAlchemyDeploy(projectDir, packageManager);
|
||||
} else if (isReactRouter) {
|
||||
await setupReactRouterAlchemyDeploy(projectDir, packageManager);
|
||||
} else if (isSolid) {
|
||||
await setupSolidAlchemyDeploy(projectDir, packageManager);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,30 @@
|
||||
import path from "node:path";
|
||||
import fs from "fs-extra";
|
||||
import type { PackageManager } from "../../../types";
|
||||
import { addPackageDependency } from "../../../utils/add-package-deps";
|
||||
|
||||
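// Next.js web app: add alchemy/dotenv dev dependencies and Alchemy deploy scripts.
|
||||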
export async function setupNextAlchemyDeploy(
|
||||
projectDir: string,
|
||||
_packageManager: PackageManager,
|
||||
) {
|
||||
const webAppDir = path.join(projectDir, "apps/web");
|
||||
if (!(await fs.pathExists(webAppDir))) return;
|
||||
|
||||
await addPackageDependency({
|
||||
devDependencies: ["alchemy", "dotenv"],
|
||||
projectDir: webAppDir,
|
||||
});
|
||||
|
||||
const pkgPath = path.join(webAppDir, "package.json");
|
||||
if (await fs.pathExists(pkgPath)) {
|
||||
const pkg = await fs.readJson(pkgPath);
|
||||
|
||||
pkg.scripts = {
|
||||
...pkg.scripts,
|
||||
deploy: "alchemy deploy",
|
||||
destroy: "alchemy destroy",
|
||||
"alchemy:dev": "alchemy dev",
|
||||
};
|
||||
await fs.writeJson(pkgPath, pkg, { spaces: 2 });
|
||||
}
|
||||
}
|
||||
104
apps/cli/src/helpers/deployment/alchemy/alchemy-nuxt-setup.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
import path from "node:path";
|
||||
import fs from "fs-extra";
|
||||
import { IndentationText, Node, Project, QuoteKind } from "ts-morph";
|
||||
import type { PackageManager } from "../../../types";
|
||||
import { addPackageDependency } from "../../../utils/add-package-deps";
|
||||
|
||||
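// Nuxt web app: add deps/scripts, then patch nuxt.config.ts via ts-morph (cloudflare_module Nitro preset, nitro-cloudflare-dev module).
|
||||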
export async function setupNuxtAlchemyDeploy(
|
||||
projectDir: string,
|
||||
_packageManager: PackageManager,
|
||||
) {
|
||||
const webAppDir = path.join(projectDir, "apps/web");
|
||||
if (!(await fs.pathExists(webAppDir))) return;
|
||||
|
||||
await addPackageDependency({
|
||||
devDependencies: ["alchemy", "nitro-cloudflare-dev", "dotenv"],
|
||||
projectDir: webAppDir,
|
||||
});
|
||||
|
||||
const pkgPath = path.join(webAppDir, "package.json");
|
||||
if (await fs.pathExists(pkgPath)) {
|
||||
const pkg = await fs.readJson(pkgPath);
|
||||
|
||||
pkg.scripts = {
|
||||
...pkg.scripts,
|
||||
deploy: "alchemy deploy",
|
||||
destroy: "alchemy destroy",
|
||||
"alchemy:dev": "alchemy dev",
|
||||
};
|
||||
await fs.writeJson(pkgPath, pkg, { spaces: 2 });
|
||||
}
|
||||
|
||||
const nuxtConfigPath = path.join(webAppDir, "nuxt.config.ts");
|
||||
if (!(await fs.pathExists(nuxtConfigPath))) return;
|
||||
|
||||
try {
|
||||
const project = new Project({
|
||||
manipulationSettings: {
|
||||
indentationText: IndentationText.TwoSpaces,
|
||||
quoteKind: QuoteKind.Double,
|
||||
},
|
||||
});
|
||||
|
||||
project.addSourceFileAtPath(nuxtConfigPath);
|
||||
const sourceFile = project.getSourceFileOrThrow(nuxtConfigPath);
|
||||
|
||||
const exportAssignment = sourceFile.getExportAssignment(
|
||||
(d) => !d.isExportEquals(),
|
||||
);
|
||||
if (!exportAssignment) return;
|
||||
|
||||
const defineConfigCall = exportAssignment.getExpression();
|
||||
if (
|
||||
!Node.isCallExpression(defineConfigCall) ||
|
||||
defineConfigCall.getExpression().getText() !== "defineNuxtConfig"
|
||||
)
|
||||
return;
|
||||
|
||||
let configObject = defineConfigCall.getArguments()[0];
|
||||
if (!configObject) {
|
||||
configObject = defineConfigCall.addArgument("{}");
|
||||
}
|
||||
|
||||
if (Node.isObjectLiteralExpression(configObject)) {
|
||||
if (!configObject.getProperty("nitro")) {
|
||||
configObject.addPropertyAssignment({
|
||||
name: "nitro",
|
||||
initializer: `{
|
||||
preset: "cloudflare_module",
|
||||
cloudflare: {
|
||||
deployConfig: true,
|
||||
nodeCompat: true
|
||||
}
|
||||
}`,
|
||||
});
|
||||
}
|
||||
|
||||
const modulesProperty = configObject.getProperty("modules");
|
||||
if (modulesProperty && Node.isPropertyAssignment(modulesProperty)) {
|
||||
const initializer = modulesProperty.getInitializer();
|
||||
if (Node.isArrayLiteralExpression(initializer)) {
|
||||
const hasModule = initializer
|
||||
.getElements()
|
||||
.some(
|
||||
(el) =>
|
||||
el.getText() === '"nitro-cloudflare-dev"' ||
|
||||
el.getText() === "'nitro-cloudflare-dev'",
|
||||
);
|
||||
if (!hasModule) {
|
||||
initializer.addElement('"nitro-cloudflare-dev"');
|
||||
}
|
||||
}
|
||||
} else if (!modulesProperty) {
|
||||
configObject.addPropertyAssignment({
|
||||
name: "modules",
|
||||
initializer: '["nitro-cloudflare-dev"]',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
await project.save();
|
||||
} catch (error) {
|
||||
console.warn("Failed to update nuxt.config.ts:", error);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,168 @@
|
||||
import path from "node:path";
|
||||
import fs from "fs-extra";
|
||||
import { IndentationText, Node, Project, QuoteKind } from "ts-morph";
|
||||
import type { PackageManager } from "../../../types";
|
||||
import { addPackageDependency } from "../../../utils/add-package-deps";
|
||||
|
||||
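// React Router web app: add deps/scripts, register the alchemy() Vite plugin, and enable unstable_viteEnvironmentApi.
|
||||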
export async function setupReactRouterAlchemyDeploy(
|
||||
projectDir: string,
|
||||
_packageManager: PackageManager,
|
||||
) {
|
||||
const webAppDir = path.join(projectDir, "apps/web");
|
||||
if (!(await fs.pathExists(webAppDir))) return;
|
||||
|
||||
await addPackageDependency({
|
||||
devDependencies: ["alchemy", "@cloudflare/vite-plugin", "dotenv"],
|
||||
projectDir: webAppDir,
|
||||
});
|
||||
|
||||
const pkgPath = path.join(webAppDir, "package.json");
|
||||
if (await fs.pathExists(pkgPath)) {
|
||||
const pkg = await fs.readJson(pkgPath);
|
||||
|
||||
pkg.scripts = {
|
||||
...pkg.scripts,
|
||||
deploy: "alchemy deploy",
|
||||
destroy: "alchemy destroy",
|
||||
"alchemy:dev": "alchemy dev",
|
||||
};
|
||||
await fs.writeJson(pkgPath, pkg, { spaces: 2 });
|
||||
}
|
||||
|
||||
const viteConfigPath = path.join(webAppDir, "vite.config.ts");
|
||||
if (await fs.pathExists(viteConfigPath)) {
|
||||
try {
|
||||
const project = new Project({
|
||||
manipulationSettings: {
|
||||
indentationText: IndentationText.TwoSpaces,
|
||||
quoteKind: QuoteKind.Double,
|
||||
},
|
||||
});
|
||||
|
||||
project.addSourceFileAtPath(viteConfigPath);
|
||||
const sourceFile = project.getSourceFileOrThrow(viteConfigPath);
|
||||
|
||||
const alchemyImport = sourceFile.getImportDeclaration(
|
||||
"alchemy/cloudflare/react-router",
|
||||
);
|
||||
if (!alchemyImport) {
|
||||
sourceFile.addImportDeclaration({
|
||||
moduleSpecifier: "alchemy/cloudflare/react-router",
|
||||
defaultImport: "alchemy",
|
||||
});
|
||||
}
|
||||
|
||||
const exportAssignment = sourceFile.getExportAssignment(
|
||||
(d) => !d.isExportEquals(),
|
||||
);
|
||||
if (!exportAssignment) return;
|
||||
|
||||
const defineConfigCall = exportAssignment.getExpression();
|
||||
if (
|
||||
!Node.isCallExpression(defineConfigCall) ||
|
||||
defineConfigCall.getExpression().getText() !== "defineConfig"
|
||||
)
|
||||
return;
|
||||
|
||||
let configObject = defineConfigCall.getArguments()[0];
|
||||
if (!configObject) {
|
||||
configObject = defineConfigCall.addArgument("{}");
|
||||
}
|
||||
|
||||
if (Node.isObjectLiteralExpression(configObject)) {
|
||||
const pluginsProperty = configObject.getProperty("plugins");
|
||||
if (pluginsProperty && Node.isPropertyAssignment(pluginsProperty)) {
|
||||
const initializer = pluginsProperty.getInitializer();
|
||||
if (Node.isArrayLiteralExpression(initializer)) {
|
||||
const hasCloudflarePlugin = initializer
|
||||
.getElements()
|
||||
.some((el) => el.getText().includes("cloudflare("));
|
||||
|
||||
if (!hasCloudflarePlugin) {
|
||||
initializer.addElement("alchemy()");
|
||||
}
|
||||
}
|
||||
} else if (!pluginsProperty) {
|
||||
configObject.addPropertyAssignment({
|
||||
name: "plugins",
|
||||
initializer: "[alchemy()]",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
await project.save();
|
||||
} catch (error) {
|
||||
console.warn("Failed to update vite.config.ts:", error);
|
||||
}
|
||||
}
|
||||
|
||||
const reactRouterConfigPath = path.join(webAppDir, "react-router.config.ts");
|
||||
if (await fs.pathExists(reactRouterConfigPath)) {
|
||||
try {
|
||||
const project = new Project({
|
||||
manipulationSettings: {
|
||||
indentationText: IndentationText.TwoSpaces,
|
||||
quoteKind: QuoteKind.Double,
|
||||
},
|
||||
});
|
||||
|
||||
project.addSourceFileAtPath(reactRouterConfigPath);
|
||||
const sourceFile = project.getSourceFileOrThrow(reactRouterConfigPath);
|
||||
|
||||
const exportAssignment = sourceFile.getExportAssignment(
|
||||
(d) => !d.isExportEquals(),
|
||||
);
|
||||
if (!exportAssignment) return;
|
||||
|
||||
const configExpression = exportAssignment.getExpression();
|
||||
let configObject: Node | undefined;
|
||||
|
||||
if (Node.isObjectLiteralExpression(configExpression)) {
|
||||
configObject = configExpression;
|
||||
} else if (Node.isSatisfiesExpression(configExpression)) {
|
||||
const expression = configExpression.getExpression();
|
||||
if (Node.isObjectLiteralExpression(expression)) {
|
||||
configObject = expression;
|
||||
}
|
||||
}
|
||||
|
||||
if (!configObject || !Node.isObjectLiteralExpression(configObject))
|
||||
return;
|
||||
|
||||
const futureProperty = configObject.getProperty("future");
|
||||
|
||||
if (!futureProperty) {
|
||||
configObject.addPropertyAssignment({
|
||||
name: "future",
|
||||
initializer: `{
|
||||
unstable_viteEnvironmentApi: true,
|
||||
}`,
|
||||
});
|
||||
} else if (Node.isPropertyAssignment(futureProperty)) {
|
||||
const futureInitializer = futureProperty.getInitializer();
|
||||
|
||||
if (Node.isObjectLiteralExpression(futureInitializer)) {
|
||||
const viteEnvApiProp = futureInitializer.getProperty(
|
||||
"unstable_viteEnvironmentApi",
|
||||
);
|
||||
|
||||
if (!viteEnvApiProp) {
|
||||
futureInitializer.addPropertyAssignment({
|
||||
name: "unstable_viteEnvironmentApi",
|
||||
initializer: "true",
|
||||
});
|
||||
} else if (Node.isPropertyAssignment(viteEnvApiProp)) {
|
||||
const value = viteEnvApiProp.getInitializer()?.getText();
|
||||
if (value === "false") {
|
||||
viteEnvApiProp.setInitializer("true");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await project.save();
|
||||
} catch (error) {
|
||||
console.warn("Failed to update react-router.config.ts:", error);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,30 @@
|
||||
import path from "node:path";
|
||||
import fs from "fs-extra";
|
||||
import type { PackageManager } from "../../../types";
|
||||
import { addPackageDependency } from "../../../utils/add-package-deps";
|
||||
|
||||
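// Solid web app: add alchemy/dotenv dev dependencies and Alchemy deploy scripts.
|
||||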
export async function setupSolidAlchemyDeploy(
|
||||
projectDir: string,
|
||||
_packageManager: PackageManager,
|
||||
) {
|
||||
const webAppDir = path.join(projectDir, "apps/web");
|
||||
if (!(await fs.pathExists(webAppDir))) return;
|
||||
|
||||
await addPackageDependency({
|
||||
devDependencies: ["alchemy", "dotenv"],
|
||||
projectDir: webAppDir,
|
||||
});
|
||||
|
||||
const pkgPath = path.join(webAppDir, "package.json");
|
||||
if (await fs.pathExists(pkgPath)) {
|
||||
const pkg = await fs.readJson(pkgPath);
|
||||
|
||||
pkg.scripts = {
|
||||
...pkg.scripts,
|
||||
deploy: "alchemy deploy",
|
||||
destroy: "alchemy destroy",
|
||||
"alchemy:dev": "alchemy dev",
|
||||
};
|
||||
await fs.writeJson(pkgPath, pkg, { spaces: 2 });
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,98 @@
|
||||
import path from "node:path";
|
||||
import fs from "fs-extra";
|
||||
import { IndentationText, Node, Project, QuoteKind } from "ts-morph";
|
||||
import type { PackageManager } from "../../../types";
|
||||
import { addPackageDependency } from "../../../utils/add-package-deps";
|
||||
|
||||
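// SvelteKit web app: add deps/scripts and rewrite svelte.config.js to use the Alchemy Cloudflare adapter.
|
||||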
export async function setupSvelteAlchemyDeploy(
|
||||
projectDir: string,
|
||||
_packageManager: PackageManager,
|
||||
) {
|
||||
const webAppDir = path.join(projectDir, "apps/web");
|
||||
if (!(await fs.pathExists(webAppDir))) return;
|
||||
|
||||
await addPackageDependency({
|
||||
devDependencies: ["alchemy", "@sveltejs/adapter-cloudflare", "dotenv"],
|
||||
projectDir: webAppDir,
|
||||
});
|
||||
|
||||
const pkgPath = path.join(webAppDir, "package.json");
|
||||
if (await fs.pathExists(pkgPath)) {
|
||||
const pkg = await fs.readJson(pkgPath);
|
||||
|
||||
pkg.scripts = {
|
||||
...pkg.scripts,
|
||||
deploy: "alchemy deploy",
|
||||
destroy: "alchemy destroy",
|
||||
"alchemy:dev": "alchemy dev",
|
||||
};
|
||||
await fs.writeJson(pkgPath, pkg, { spaces: 2 });
|
||||
}
|
||||
|
||||
const svelteConfigPath = path.join(webAppDir, "svelte.config.js");
|
||||
if (!(await fs.pathExists(svelteConfigPath))) return;
|
||||
|
||||
try {
|
||||
const project = new Project({
|
||||
manipulationSettings: {
|
||||
indentationText: IndentationText.TwoSpaces,
|
||||
quoteKind: QuoteKind.Single,
|
||||
},
|
||||
});
|
||||
|
||||
project.addSourceFileAtPath(svelteConfigPath);
|
||||
const sourceFile = project.getSourceFileOrThrow(svelteConfigPath);
|
||||
|
||||
const importDeclarations = sourceFile.getImportDeclarations();
|
||||
const adapterImport = importDeclarations.find((imp) =>
|
||||
imp.getModuleSpecifierValue().includes("@sveltejs/adapter"),
|
||||
);
|
||||
|
||||
if (adapterImport) {
|
||||
adapterImport.setModuleSpecifier("alchemy/cloudflare/sveltekit");
|
||||
adapterImport.removeDefaultImport();
|
||||
adapterImport.setDefaultImport("alchemy");
|
||||
} else {
|
||||
sourceFile.insertImportDeclaration(0, {
|
||||
moduleSpecifier: "alchemy/cloudflare/sveltekit",
|
||||
defaultImport: "alchemy",
|
||||
});
|
||||
}
|
||||
|
||||
const configVariable = sourceFile.getVariableDeclaration("config");
|
||||
if (configVariable) {
|
||||
const initializer = configVariable.getInitializer();
|
||||
if (Node.isObjectLiteralExpression(initializer)) {
|
||||
updateAdapterInConfig(initializer);
|
||||
}
|
||||
}
|
||||
|
||||
await project.save();
|
||||
} catch (error) {
|
||||
console.warn("Failed to update svelte.config.js:", error);
|
||||
}
|
||||
}
|
||||
|
||||
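// Replaces the adapter(...) call under kit.adapter with alchemy(...) in the Svelte config object.
|
||||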
function updateAdapterInConfig(configObject: Node): void {
|
||||
if (!Node.isObjectLiteralExpression(configObject)) return;
|
||||
|
||||
const kitProperty = configObject.getProperty("kit");
|
||||
if (kitProperty && Node.isPropertyAssignment(kitProperty)) {
|
||||
const kitInitializer = kitProperty.getInitializer();
|
||||
if (Node.isObjectLiteralExpression(kitInitializer)) {
|
||||
const adapterProperty = kitInitializer.getProperty("adapter");
|
||||
if (adapterProperty && Node.isPropertyAssignment(adapterProperty)) {
|
||||
const initializer = adapterProperty.getInitializer();
|
||||
if (Node.isCallExpression(initializer)) {
|
||||
const expression = initializer.getExpression();
|
||||
if (
|
||||
Node.isIdentifier(expression) &&
|
||||
expression.getText() === "adapter"
|
||||
) {
|
||||
expression.replaceWithText("alchemy");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,30 @@
|
||||
import path from "node:path";
|
||||
import fs from "fs-extra";
|
||||
import type { PackageManager } from "../../../types";
|
||||
import { addPackageDependency } from "../../../utils/add-package-deps";
|
||||
|
||||
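// TanStack Router web app: add alchemy/dotenv dev dependencies and Alchemy deploy scripts.
|
||||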
export async function setupTanStackRouterAlchemyDeploy(
|
||||
projectDir: string,
|
||||
_packageManager: PackageManager,
|
||||
) {
|
||||
const webAppDir = path.join(projectDir, "apps/web");
|
||||
if (!(await fs.pathExists(webAppDir))) return;
|
||||
|
||||
await addPackageDependency({
|
||||
devDependencies: ["alchemy", "dotenv"],
|
||||
projectDir: webAppDir,
|
||||
});
|
||||
|
||||
const pkgPath = path.join(webAppDir, "package.json");
|
||||
if (await fs.pathExists(pkgPath)) {
|
||||
const pkg = await fs.readJson(pkgPath);
|
||||
|
||||
pkg.scripts = {
|
||||
...pkg.scripts,
|
||||
deploy: "alchemy deploy",
|
||||
destroy: "alchemy destroy",
|
||||
"alchemy:dev": "alchemy dev",
|
||||
};
|
||||
await fs.writeJson(pkgPath, pkg, { spaces: 2 });
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,158 @@
|
||||
import path from "node:path";
|
||||
import fs from "fs-extra";
|
||||
import { IndentationText, Node, Project, QuoteKind } from "ts-morph";
|
||||
import type { PackageManager } from "../../../types";
|
||||
import { addPackageDependency } from "../../../utils/add-package-deps";
|
||||
|
||||
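// TanStack Start web app: add deps/scripts, wire the alchemy() Vite plugin with a cloudflare-module target, and write a nitro.config.ts workaround.
|
||||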
export async function setupTanStackStartAlchemyDeploy(
|
||||
projectDir: string,
|
||||
_packageManager: PackageManager,
|
||||
) {
|
||||
const webAppDir = path.join(projectDir, "apps/web");
|
||||
if (!(await fs.pathExists(webAppDir))) return;
|
||||
|
||||
await addPackageDependency({
|
||||
devDependencies: ["alchemy", "nitropack", "dotenv"],
|
||||
projectDir: webAppDir,
|
||||
});
|
||||
|
||||
const pkgPath = path.join(webAppDir, "package.json");
|
||||
if (await fs.pathExists(pkgPath)) {
|
||||
const pkg = await fs.readJson(pkgPath);
|
||||
|
||||
pkg.scripts = {
|
||||
...pkg.scripts,
|
||||
deploy: "alchemy deploy",
|
||||
destroy: "alchemy destroy",
|
||||
"alchemy:dev": "alchemy dev",
|
||||
};
|
||||
await fs.writeJson(pkgPath, pkg, { spaces: 2 });
|
||||
}
|
||||
|
||||
const viteConfigPath = path.join(webAppDir, "vite.config.ts");
|
||||
if (await fs.pathExists(viteConfigPath)) {
|
||||
try {
|
||||
const project = new Project({
|
||||
manipulationSettings: {
|
||||
indentationText: IndentationText.TwoSpaces,
|
||||
quoteKind: QuoteKind.Double,
|
||||
},
|
||||
});
|
||||
|
||||
project.addSourceFileAtPath(viteConfigPath);
|
||||
const sourceFile = project.getSourceFileOrThrow(viteConfigPath);
|
||||
|
||||
const alchemyImport = sourceFile.getImportDeclaration(
|
||||
"alchemy/cloudflare/tanstack-start",
|
||||
);
|
||||
if (!alchemyImport) {
|
||||
sourceFile.addImportDeclaration({
|
||||
moduleSpecifier: "alchemy/cloudflare/tanstack-start",
|
||||
defaultImport: "alchemy",
|
||||
});
|
||||
} else {
|
||||
alchemyImport.setModuleSpecifier("alchemy/cloudflare/tanstack-start");
|
||||
}
|
||||
|
||||
const exportAssignment = sourceFile.getExportAssignment(
|
||||
(d) => !d.isExportEquals(),
|
||||
);
|
||||
if (!exportAssignment) return;
|
||||
|
||||
const defineConfigCall = exportAssignment.getExpression();
|
||||
if (
|
||||
!Node.isCallExpression(defineConfigCall) ||
|
||||
defineConfigCall.getExpression().getText() !== "defineConfig"
|
||||
)
|
||||
return;
|
||||
|
||||
let configObject = defineConfigCall.getArguments()[0];
|
||||
if (!configObject) {
|
||||
configObject = defineConfigCall.addArgument("{}");
|
||||
}
|
||||
|
||||
if (Node.isObjectLiteralExpression(configObject)) {
|
||||
if (!configObject.getProperty("build")) {
|
||||
configObject.addPropertyAssignment({
|
||||
name: "build",
|
||||
initializer: `{
|
||||
target: "esnext",
|
||||
rollupOptions: {
|
||||
external: ["node:async_hooks", "cloudflare:workers"],
|
||||
},
|
||||
}`,
|
||||
});
|
||||
}
|
||||
|
||||
const pluginsProperty = configObject.getProperty("plugins");
|
||||
if (pluginsProperty && Node.isPropertyAssignment(pluginsProperty)) {
|
||||
const initializer = pluginsProperty.getInitializer();
|
||||
if (Node.isArrayLiteralExpression(initializer)) {
|
||||
const hasShim = initializer
|
||||
.getElements()
|
||||
.some((el) => el.getText().includes("alchemy"));
|
||||
if (!hasShim) {
|
||||
initializer.addElement("alchemy()");
|
||||
}
|
||||
|
||||
const tanstackElements = initializer
|
||||
.getElements()
|
||||
.filter((el) => el.getText().includes("tanstackStart"));
|
||||
|
||||
tanstackElements.forEach((element) => {
|
||||
if (Node.isCallExpression(element)) {
|
||||
const args = element.getArguments();
|
||||
if (args.length === 0) {
|
||||
element.addArgument(`{
|
||||
target: "cloudflare-module",
|
||||
customViteReactPlugin: true,
|
||||
}`);
|
||||
} else if (
|
||||
args.length === 1 &&
|
||||
Node.isObjectLiteralExpression(args[0])
|
||||
) {
|
||||
const configObj = args[0];
|
||||
if (!configObj.getProperty("target")) {
|
||||
configObj.addPropertyAssignment({
|
||||
name: "target",
|
||||
initializer: '"cloudflare-module"',
|
||||
});
|
||||
}
|
||||
if (!configObj.getProperty("customViteReactPlugin")) {
|
||||
configObj.addPropertyAssignment({
|
||||
name: "customViteReactPlugin",
|
||||
initializer: "true",
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
} else {
|
||||
configObject.addPropertyAssignment({
|
||||
name: "plugins",
|
||||
initializer: "[alchemy()]",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
await project.save();
|
||||
} catch (error) {
|
||||
console.warn("Failed to update vite.config.ts:", error);
|
||||
}
|
||||
}
|
||||
|
||||
// workaround for tanstack start + workers
|
||||
const nitroConfigPath = path.join(webAppDir, "nitro.config.ts");
|
||||
const nitroConfigContent = `import { defineNitroConfig } from "nitropack/config";
|
||||
|
||||
export default defineNitroConfig({
|
||||
preset: "cloudflare-module",
|
||||
cloudflare: {
|
||||
nodeCompat: true,
|
||||
},
|
||||
});
|
||||
`;
|
||||
|
||||
await fs.writeFile(nitroConfigPath, nitroConfigContent, "utf-8");
|
||||
}
|
||||
7
apps/cli/src/helpers/deployment/alchemy/index.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
export { setupNextAlchemyDeploy } from "./alchemy-next-setup";
|
||||
export { setupNuxtAlchemyDeploy } from "./alchemy-nuxt-setup";
|
||||
export { setupReactRouterAlchemyDeploy } from "./alchemy-react-router-setup";
|
||||
export { setupSolidAlchemyDeploy } from "./alchemy-solid-setup";
|
||||
export { setupSvelteAlchemyDeploy } from "./alchemy-svelte-setup";
|
||||
export { setupTanStackRouterAlchemyDeploy } from "./alchemy-tanstack-router-setup";
|
||||
export { setupTanStackStartAlchemyDeploy } from "./alchemy-tanstack-start-setup";
|
||||
111
apps/cli/src/helpers/deployment/server-deploy-setup.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
import path from "node:path";
|
||||
import { log, spinner } from "@clack/prompts";
|
||||
import { execa } from "execa";
|
||||
import fs from "fs-extra";
|
||||
import pc from "picocolors";
|
||||
import type { PackageManager, ProjectConfig } from "../../types";
|
||||
import { addPackageDependency } from "../../utils/add-package-deps";
|
||||
|
||||
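// Server deployment entry point; the combined web+server Alchemy case is handled by setupWebDeploy instead.
|
||||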
export async function setupServerDeploy(config: ProjectConfig) {
|
||||
const { serverDeploy, webDeploy, projectDir } = config;
|
||||
const { packageManager } = config;
|
||||
|
||||
if (serverDeploy === "none") return;
|
||||
|
||||
if (serverDeploy === "alchemy" && webDeploy === "alchemy") {
|
||||
return;
|
||||
}
|
||||
|
||||
const serverDir = path.join(projectDir, "apps/server");
|
||||
if (!(await fs.pathExists(serverDir))) return;
|
||||
|
||||
if (serverDeploy === "wrangler") {
|
||||
await setupWorkersServerDeploy(serverDir, packageManager);
|
||||
await generateCloudflareWorkerTypes({ serverDir, packageManager });
|
||||
} else if (serverDeploy === "alchemy") {
|
||||
await setupAlchemyServerDeploy(serverDir, packageManager);
|
||||
}
|
||||
}
|
||||
|
||||
async function setupWorkersServerDeploy(
|
||||
serverDir: string,
|
||||
_packageManager: PackageManager,
|
||||
) {
|
||||
const packageJsonPath = path.join(serverDir, "package.json");
|
||||
if (!(await fs.pathExists(packageJsonPath))) return;
|
||||
|
||||
const packageJson = await fs.readJson(packageJsonPath);
|
||||
|
||||
packageJson.scripts = {
|
||||
...packageJson.scripts,
|
||||
dev: "wrangler dev --port=3000",
|
||||
start: "wrangler dev",
|
||||
deploy: "wrangler deploy",
|
||||
build: "wrangler deploy --dry-run",
|
||||
"cf-typegen": "wrangler types --env-interface CloudflareBindings",
|
||||
};
|
||||
|
||||
await fs.writeJson(packageJsonPath, packageJson, { spaces: 2 });
|
||||
|
||||
await addPackageDependency({
|
||||
devDependencies: ["wrangler", "@types/node", "@cloudflare/workers-types"],
|
||||
projectDir: serverDir,
|
||||
});
|
||||
}
|
||||
|
||||
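// Runs the cf-typegen script in the server app and only warns if type generation fails.
|
||||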
async function generateCloudflareWorkerTypes({
|
||||
serverDir,
|
||||
packageManager,
|
||||
}: {
|
||||
serverDir: string;
|
||||
packageManager: ProjectConfig["packageManager"];
|
||||
}) {
|
||||
if (!(await fs.pathExists(serverDir))) return;
|
||||
const s = spinner();
|
||||
try {
|
||||
s.start("Generating Cloudflare Workers types...");
|
||||
const runCmd = packageManager === "npm" ? "npm" : packageManager;
|
||||
await execa(runCmd, ["run", "cf-typegen"], { cwd: serverDir });
|
||||
s.stop("Cloudflare Workers types generated successfully!");
|
||||
} catch {
|
||||
s.stop(pc.yellow("Failed to generate Cloudflare Workers types"));
|
||||
const managerCmd = `${packageManager} run`;
|
||||
log.warn(
|
||||
`Note: You can manually run 'cd apps/server && ${managerCmd} cf-typegen' in the project directory later`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function setupAlchemyServerDeploy(
|
||||
serverDir: string,
|
||||
_packageManager: PackageManager,
|
||||
) {
|
||||
if (!(await fs.pathExists(serverDir))) return;
|
||||
|
||||
await addPackageDependency({
|
||||
devDependencies: [
|
||||
"alchemy",
|
||||
"wrangler",
|
||||
"@types/node",
|
||||
"@cloudflare/workers-types",
|
||||
"dotenv",
|
||||
],
|
||||
projectDir: serverDir,
|
||||
});
|
||||
|
||||
const packageJsonPath = path.join(serverDir, "package.json");
|
||||
if (await fs.pathExists(packageJsonPath)) {
|
||||
const packageJson = await fs.readJson(packageJsonPath);
|
||||
|
||||
packageJson.scripts = {
|
||||
...packageJson.scripts,
|
||||
dev: "wrangler dev --port=3000",
|
||||
build: "wrangler deploy --dry-run",
|
||||
deploy: "alchemy deploy",
|
||||
destroy: "alchemy destroy",
|
||||
"alchemy:dev": "alchemy dev",
|
||||
};
|
||||
|
||||
await fs.writeJson(packageJsonPath, packageJson, { spaces: 2 });
|
||||
}
|
||||
}
|
||||
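(Editor's note, not part of the diff: a minimal usage sketch of the helper above, assuming a partial ProjectConfig cast since the real type carries more required fields.)

// Illustrative only — field values are assumptions.
import { setupServerDeploy } from "./helpers/deployment/server-deploy-setup";

await setupServerDeploy({
  projectDir: "/tmp/my-app",   // hypothetical scaffold directory
  packageManager: "bun",
  serverDeploy: "wrangler",    // wrangler path: rewrites scripts, then runs cf-typegen
  webDeploy: "none",
} as ProjectConfig);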
94
apps/cli/src/helpers/deployment/web-deploy-setup.ts
Normal file
@@ -0,0 +1,94 @@
import path from "node:path";
import fs from "fs-extra";
import type { PackageManager, ProjectConfig } from "../../types";
import { setupCombinedAlchemyDeploy } from "./alchemy/alchemy-combined-setup";
import { setupNextAlchemyDeploy } from "./alchemy/alchemy-next-setup";
import { setupNuxtAlchemyDeploy } from "./alchemy/alchemy-nuxt-setup";
import { setupReactRouterAlchemyDeploy } from "./alchemy/alchemy-react-router-setup";
import { setupSolidAlchemyDeploy } from "./alchemy/alchemy-solid-setup";
import { setupSvelteAlchemyDeploy } from "./alchemy/alchemy-svelte-setup";
import { setupTanStackRouterAlchemyDeploy } from "./alchemy/alchemy-tanstack-router-setup";
import { setupTanStackStartAlchemyDeploy } from "./alchemy/alchemy-tanstack-start-setup";
import { setupNextWorkersDeploy } from "./workers/workers-next-setup";
import { setupNuxtWorkersDeploy } from "./workers/workers-nuxt-setup";
import { setupSvelteWorkersDeploy } from "./workers/workers-svelte-setup";
import { setupTanstackStartWorkersDeploy } from "./workers/workers-tanstack-start-setup";
import { setupWorkersVitePlugin } from "./workers/workers-vite-setup";

export async function setupWebDeploy(config: ProjectConfig) {
const { webDeploy, serverDeploy, frontend, projectDir } = config;
const { packageManager } = config;

if (webDeploy === "none") return;

if (webDeploy !== "wrangler" && webDeploy !== "alchemy") return;

if (webDeploy === "alchemy" && serverDeploy === "alchemy") {
await setupCombinedAlchemyDeploy(projectDir, packageManager, config);
return;
}

const isNext = frontend.includes("next");
const isNuxt = frontend.includes("nuxt");
const isSvelte = frontend.includes("svelte");
const isTanstackRouter = frontend.includes("tanstack-router");
const isTanstackStart = frontend.includes("tanstack-start");
const isReactRouter = frontend.includes("react-router");
const isSolid = frontend.includes("solid");

if (webDeploy === "wrangler") {
if (isNext) {
await setupNextWorkersDeploy(projectDir, packageManager);
} else if (isNuxt) {
await setupNuxtWorkersDeploy(projectDir, packageManager);
} else if (isSvelte) {
await setupSvelteWorkersDeploy(projectDir, packageManager);
} else if (isTanstackStart) {
await setupTanstackStartWorkersDeploy(projectDir, packageManager);
} else if (isTanstackRouter || isReactRouter || isSolid) {
await setupWorkersWebDeploy(projectDir, packageManager);
}
} else if (webDeploy === "alchemy") {
if (isNext) {
await setupNextAlchemyDeploy(projectDir, packageManager);
} else if (isNuxt) {
await setupNuxtAlchemyDeploy(projectDir, packageManager);
} else if (isSvelte) {
await setupSvelteAlchemyDeploy(projectDir, packageManager);
} else if (isTanstackStart) {
await setupTanStackStartAlchemyDeploy(projectDir, packageManager);
} else if (isTanstackRouter) {
await setupTanStackRouterAlchemyDeploy(projectDir, packageManager);
} else if (isReactRouter) {
await setupReactRouterAlchemyDeploy(projectDir, packageManager);
} else if (isSolid) {
await setupSolidAlchemyDeploy(projectDir, packageManager);
}
}
}

async function setupWorkersWebDeploy(
projectDir: string,
pkgManager: PackageManager,
) {
const webAppDir = path.join(projectDir, "apps/web");

if (!(await fs.pathExists(webAppDir))) {
return;
}

const packageJsonPath = path.join(webAppDir, "package.json");
if (await fs.pathExists(packageJsonPath)) {
const packageJson = await fs.readJson(packageJsonPath);

packageJson.scripts = {
...packageJson.scripts,
"wrangler:dev": "wrangler dev --port=3001",
deploy: `${pkgManager} run build && wrangler deploy`,
};

await fs.writeJson(packageJsonPath, packageJson, { spaces: 2 });
}

await setupWorkersVitePlugin(projectDir);
}
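(Editor's note, not part of the diff: a sketch of how the frontend flags drive the branch selection above; config values are hypothetical.)

// Picks the wrangler branch for a TanStack Router web app (no combined Alchemy path).
import { setupWebDeploy } from "./helpers/deployment/web-deploy-setup";

await setupWebDeploy({
  projectDir: "/tmp/my-app",
  packageManager: "pnpm",
  frontend: ["tanstack-router"],
  webDeploy: "wrangler",
  serverDeploy: "wrangler",
} as ProjectConfig);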
@@ -0,0 +1,34 @@
import path from "node:path";
import fs from "fs-extra";
import type { PackageManager } from "../../../types";
import { addPackageDependency } from "../../../utils/add-package-deps";

export async function setupNextWorkersDeploy(
projectDir: string,
_packageManager: PackageManager,
) {
const webAppDir = path.join(projectDir, "apps/web");
if (!(await fs.pathExists(webAppDir))) return;

await addPackageDependency({
dependencies: ["@opennextjs/cloudflare"],
devDependencies: ["wrangler"],
projectDir: webAppDir,
});

const packageJsonPath = path.join(webAppDir, "package.json");
if (await fs.pathExists(packageJsonPath)) {
const pkg = await fs.readJson(packageJsonPath);

pkg.scripts = {
...pkg.scripts,
preview: "opennextjs-cloudflare build && opennextjs-cloudflare preview",
deploy: "opennextjs-cloudflare build && opennextjs-cloudflare deploy",
upload: "opennextjs-cloudflare build && opennextjs-cloudflare upload",
"cf-typegen":
"wrangler types --env-interface CloudflareEnv cloudflare-env.d.ts",
};

await fs.writeJson(packageJsonPath, pkg, { spaces: 2 });
}
}
@@ -8,9 +8,9 @@ import {
type PropertyAssignment,
SyntaxKind,
} from "ts-morph";
import type { PackageManager } from "../../types";
import { addPackageDependency } from "../../utils/add-package-deps";
import { tsProject } from "../../utils/ts-morph";
import type { PackageManager } from "../../../types";
import { addPackageDependency } from "../../../utils/add-package-deps";
import { tsProject } from "../../../utils/ts-morph";

export async function setupNuxtWorkersDeploy(
projectDir: string,
@@ -1,9 +1,9 @@
import path from "node:path";
import fs from "fs-extra";
import type { ImportDeclaration } from "ts-morph";
import type { PackageManager } from "../../types";
import { addPackageDependency } from "../../utils/add-package-deps";
import { tsProject } from "../../utils/ts-morph";
import type { PackageManager } from "../../../types";
import { addPackageDependency } from "../../../utils/add-package-deps";
import { tsProject } from "../../../utils/ts-morph";

export async function setupSvelteWorkersDeploy(
projectDir: string,
@@ -6,9 +6,9 @@ import {
type ObjectLiteralExpression,
SyntaxKind,
} from "ts-morph";
import type { PackageManager } from "../../types";
import { addPackageDependency } from "../../utils/add-package-deps";
import { ensureArrayProperty, tsProject } from "../../utils/ts-morph";
import type { PackageManager } from "../../../types";
import { addPackageDependency } from "../../../utils/add-package-deps";
import { ensureArrayProperty, tsProject } from "../../../utils/ts-morph";

export async function setupTanstackStartWorkersDeploy(
projectDir: string,
@@ -6,8 +6,8 @@ import {
type ObjectLiteralExpression,
SyntaxKind,
} from "ts-morph";
import { addPackageDependency } from "../../utils/add-package-deps";
import { ensureArrayProperty, tsProject } from "../../utils/ts-morph";
import { addPackageDependency } from "../../../utils/add-package-deps";
import { ensureArrayProperty, tsProject } from "../../../utils/ts-morph";

export async function setupWorkersVitePlugin(projectDir: string) {
const webAppDir = path.join(projectDir, "apps/web");
@@ -1,284 +0,0 @@
import path from "node:path";
import fs from "fs-extra";
import type { AvailableDependencies } from "../../constants";
import type { Frontend, ProjectConfig } from "../../types";
import { addPackageDependency } from "../../utils/add-package-deps";

export async function setupApi(config: ProjectConfig) {
const { api, projectName, frontend, backend, packageManager, projectDir } =
config;
const isConvex = backend === "convex";
const webDir = path.join(projectDir, "apps/web");
const nativeDir = path.join(projectDir, "apps/native");
const webDirExists = await fs.pathExists(webDir);
const nativeDirExists = await fs.pathExists(nativeDir);

const hasReactWeb = frontend.some((f) =>
["tanstack-router", "react-router", "tanstack-start", "next"].includes(f),
);
const hasNuxtWeb = frontend.includes("nuxt");
const hasSvelteWeb = frontend.includes("svelte");
const hasSolidWeb = frontend.includes("solid");

if (!isConvex && api !== "none") {
const serverDir = path.join(projectDir, "apps/server");
const serverDirExists = await fs.pathExists(serverDir);

if (serverDirExists) {
if (api === "orpc") {
await addPackageDependency({
dependencies: ["@orpc/server", "@orpc/client"],
projectDir: serverDir,
});
} else if (api === "trpc") {
await addPackageDependency({
dependencies: ["@trpc/server", "@trpc/client"],
projectDir: serverDir,
});
if (config.backend === "hono") {
await addPackageDependency({
dependencies: ["@hono/trpc-server"],
projectDir: serverDir,
});
} else if (config.backend === "elysia") {
await addPackageDependency({
dependencies: ["@elysiajs/trpc"],
projectDir: serverDir,
});
}
}
} else {
}

if (webDirExists) {
if (hasReactWeb) {
if (api === "orpc") {
await addPackageDependency({
dependencies: ["@orpc/tanstack-query", "@orpc/client"],
projectDir: webDir,
});
} else if (api === "trpc") {
await addPackageDependency({
dependencies: [
"@trpc/tanstack-react-query",
"@trpc/client",
"@trpc/server",
],
projectDir: webDir,
});
}
} else if (hasNuxtWeb) {
if (api === "orpc") {
await addPackageDependency({
dependencies: [
"@tanstack/vue-query",
"@tanstack/vue-query-devtools",
"@orpc/tanstack-query",
"@orpc/client",
],
projectDir: webDir,
});
}
} else if (hasSvelteWeb) {
if (api === "orpc") {
await addPackageDependency({
dependencies: [
"@orpc/tanstack-query",
"@orpc/client",
"@tanstack/svelte-query",
],
projectDir: webDir,
});
}
} else if (hasSolidWeb) {
if (api === "orpc") {
await addPackageDependency({
dependencies: [
"@orpc/tanstack-query",
"@orpc/client",
"@tanstack/solid-query",
],
projectDir: webDir,
});
}
}
}

if (nativeDirExists) {
if (api === "trpc") {
await addPackageDependency({
dependencies: [
"@trpc/tanstack-react-query",
"@trpc/client",
"@trpc/server",
],
projectDir: nativeDir,
});
} else if (api === "orpc") {
await addPackageDependency({
dependencies: ["@orpc/tanstack-query", "@orpc/client"],
projectDir: nativeDir,
});
}
}
}

const reactBasedFrontends: Frontend[] = [
"react-router",
"tanstack-router",
"tanstack-start",
"next",
"native-nativewind",
"native-unistyles",
];
const needsSolidQuery = frontend.includes("solid");
const needsReactQuery = frontend.some((f) => reactBasedFrontends.includes(f));

if (needsReactQuery && !isConvex) {
const reactQueryDeps: AvailableDependencies[] = ["@tanstack/react-query"];
const reactQueryDevDeps: AvailableDependencies[] = [
"@tanstack/react-query-devtools",
];

const hasReactWeb = frontend.some(
(f) =>
f !== "native-nativewind" &&
f !== "native-unistyles" &&
reactBasedFrontends.includes(f),
);
const hasNative =
frontend.includes("native-nativewind") ||
frontend.includes("native-unistyles");

if (hasReactWeb && webDirExists) {
const webPkgJsonPath = path.join(webDir, "package.json");
if (await fs.pathExists(webPkgJsonPath)) {
try {
await addPackageDependency({
dependencies: reactQueryDeps,
devDependencies: reactQueryDevDeps,
projectDir: webDir,
});
} catch (_error) {}
} else {
}
}

if (hasNative && nativeDirExists) {
const nativePkgJsonPath = path.join(nativeDir, "package.json");
if (await fs.pathExists(nativePkgJsonPath)) {
try {
await addPackageDependency({
dependencies: reactQueryDeps,
projectDir: nativeDir,
});
} catch (_error) {}
} else {
}
}
}

if (needsSolidQuery && !isConvex) {
const solidQueryDeps: AvailableDependencies[] = ["@tanstack/solid-query"];
const solidQueryDevDeps: AvailableDependencies[] = [
"@tanstack/solid-query-devtools",
];

if (webDirExists) {
const webPkgJsonPath = path.join(webDir, "package.json");
if (await fs.pathExists(webPkgJsonPath)) {
try {
await addPackageDependency({
dependencies: solidQueryDeps,
devDependencies: solidQueryDevDeps,
projectDir: webDir,
});
} catch (_error) {}
}
}
}

if (isConvex) {
if (webDirExists) {
const webPkgJsonPath = path.join(webDir, "package.json");
if (await fs.pathExists(webPkgJsonPath)) {
try {
const webDepsToAdd: AvailableDependencies[] = ["convex"];
if (frontend.includes("tanstack-start")) {
webDepsToAdd.push("@convex-dev/react-query");
}
if (hasSvelteWeb) {
webDepsToAdd.push("convex-svelte");
}
if (hasNuxtWeb) {
webDepsToAdd.push("convex-nuxt");
webDepsToAdd.push("convex-vue");
}
await addPackageDependency({
dependencies: webDepsToAdd,
projectDir: webDir,
});
} catch (_error) {}
} else {
}
}

if (nativeDirExists) {
const nativePkgJsonPath = path.join(nativeDir, "package.json");
if (await fs.pathExists(nativePkgJsonPath)) {
try {
await addPackageDependency({
dependencies: ["convex"],
projectDir: nativeDir,
});
} catch (_error) {}
} else {
}
}

const backendPackageName = `@${projectName}/backend`;
const backendWorkspaceVersion =
packageManager === "npm" ? "*" : "workspace:*";
const addWorkspaceDepManually = async (
pkgJsonPath: string,
depName: string,
depVersion: string,
) => {
try {
const pkgJson = await fs.readJson(pkgJsonPath);
if (!pkgJson.dependencies) {
pkgJson.dependencies = {};
}
if (pkgJson.dependencies[depName] !== depVersion) {
pkgJson.dependencies[depName] = depVersion;
await fs.writeJson(pkgJsonPath, pkgJson, { spaces: 2 });
} else {
}
} catch (_error) {}
};

if (webDirExists) {
const webPkgJsonPath = path.join(webDir, "package.json");
if (await fs.pathExists(webPkgJsonPath)) {
await addWorkspaceDepManually(
webPkgJsonPath,
backendPackageName,
backendWorkspaceVersion,
);
} else {
}
}

if (nativeDirExists) {
const nativePkgJsonPath = path.join(nativeDir, "package.json");
if (await fs.pathExists(nativePkgJsonPath)) {
await addWorkspaceDepManually(
nativePkgJsonPath,
backendPackageName,
backendWorkspaceVersion,
);
} else {
}
}
}
}
@@ -1,93 +0,0 @@
import path from "node:path";
import fs from "fs-extra";
import type { PackageManager, ProjectConfig } from "../../types";
import { addPackageDependency } from "../../utils/add-package-deps";
import { setupNuxtWorkersDeploy } from "./workers-nuxt-setup";
import { setupSvelteWorkersDeploy } from "./workers-svelte-setup";
import { setupTanstackStartWorkersDeploy } from "./workers-tanstack-start-setup";
import { setupWorkersVitePlugin } from "./workers-vite-setup";

export async function setupWebDeploy(config: ProjectConfig) {
const { webDeploy, frontend, projectDir } = config;
const { packageManager } = config;

if (webDeploy === "none") return;

if (webDeploy !== "workers") return;

const isNext = frontend.includes("next");
const isNuxt = frontend.includes("nuxt");
const isSvelte = frontend.includes("svelte");
const isTanstackRouter = frontend.includes("tanstack-router");
const isTanstackStart = frontend.includes("tanstack-start");
const isReactRouter = frontend.includes("react-router");
const isSolid = frontend.includes("solid");

if (isNext) {
await setupNextWorkersDeploy(projectDir, packageManager);
} else if (isNuxt) {
await setupNuxtWorkersDeploy(projectDir, packageManager);
} else if (isSvelte) {
await setupSvelteWorkersDeploy(projectDir, packageManager);
} else if (isTanstackStart) {
await setupTanstackStartWorkersDeploy(projectDir, packageManager);
} else if (isTanstackRouter || isReactRouter || isSolid) {
await setupWorkersWebDeploy(projectDir, packageManager);
}
}

async function setupWorkersWebDeploy(
projectDir: string,
pkgManager: PackageManager,
) {
const webAppDir = path.join(projectDir, "apps/web");

if (!(await fs.pathExists(webAppDir))) {
return;
}

const packageJsonPath = path.join(webAppDir, "package.json");
if (await fs.pathExists(packageJsonPath)) {
const packageJson = await fs.readJson(packageJsonPath);

packageJson.scripts = {
...packageJson.scripts,
"wrangler:dev": "wrangler dev --port=3001",
deploy: `${pkgManager} run build && wrangler deploy`,
};

await fs.writeJson(packageJsonPath, packageJson, { spaces: 2 });
}

await setupWorkersVitePlugin(projectDir);
}

async function setupNextWorkersDeploy(
projectDir: string,
_packageManager: PackageManager,
) {
const webAppDir = path.join(projectDir, "apps/web");
if (!(await fs.pathExists(webAppDir))) return;

await addPackageDependency({
dependencies: ["@opennextjs/cloudflare"],
devDependencies: ["wrangler"],
projectDir: webAppDir,
});

const packageJsonPath = path.join(webAppDir, "package.json");
if (await fs.pathExists(packageJsonPath)) {
const pkg = await fs.readJson(packageJsonPath);

pkg.scripts = {
...pkg.scripts,
preview: "opennextjs-cloudflare build && opennextjs-cloudflare preview",
deploy: "opennextjs-cloudflare build && opennextjs-cloudflare deploy",
upload: "opennextjs-cloudflare build && opennextjs-cloudflare upload",
"cf-typegen":
"wrangler types --env-interface CloudflareEnv cloudflare-env.d.ts",
};

await fs.writeJson(packageJsonPath, pkg, { spaces: 2 });
}
}
@@ -5,7 +5,7 @@ import z from "zod";
import {
addAddonsHandler,
createProjectHandler,
} from "./helpers/project-generation/command-handlers";
} from "./helpers/core/command-handlers";
import {
type AddInput,
type Addons,
@@ -35,6 +35,8 @@ import {
ProjectNameSchema,
type Runtime,
RuntimeSchema,
type ServerDeploy,
ServerDeploySchema,
type WebDeploy,
WebDeploySchema,
} from "./types";
@@ -88,6 +90,7 @@ export const router = t.router({
runtime: RuntimeSchema.optional(),
api: APISchema.optional(),
webDeploy: WebDeploySchema.optional(),
serverDeploy: ServerDeploySchema.optional(),
directoryConflict: DirectoryConflictSchema.optional(),
renderTitle: z.boolean().optional(),
disableAnalytics: z
@@ -120,6 +123,7 @@ export const router = t.router({
z.object({
addons: z.array(AddonsSchema).optional().default([]),
webDeploy: WebDeploySchema.optional(),
serverDeploy: ServerDeploySchema.optional(),
projectDir: z.string().optional(),
install: z
.boolean()
@@ -251,6 +255,7 @@ export type {
DatabaseSetup,
API,
WebDeploy,
ServerDeploy,
DirectoryConflict,
CreateInput,
AddInput,

@@ -11,6 +11,7 @@ import type {
PackageManager,
ProjectConfig,
Runtime,
ServerDeploy,
WebDeploy,
} from "../types";
import { exitCancelled } from "../utils/errors";
@@ -27,6 +28,7 @@ import { getinstallChoice } from "./install";
import { getORMChoice } from "./orm";
import { getPackageManagerChoice } from "./package-manager";
import { getRuntimeChoice } from "./runtime";
import { getServerDeploymentChoice } from "./server-deploy";
import { getDeploymentChoice } from "./web-deploy";

type PromptGroupResults = {
@@ -44,6 +46,7 @@ type PromptGroupResults = {
packageManager: PackageManager;
install: boolean;
webDeploy: WebDeploy;
serverDeploy: ServerDeploy;
};

export async function gatherConfig(
@@ -97,6 +100,13 @@ export async function gatherConfig(
results.backend,
results.frontend,
),
serverDeploy: ({ results }) =>
getServerDeploymentChoice(
flags.serverDeploy,
results.runtime,
results.backend,
results.webDeploy,
),
git: () => getGitChoice(flags.git),
packageManager: () => getPackageManagerChoice(flags.packageManager),
install: () => getinstallChoice(flags.install),
@@ -144,5 +154,6 @@ export async function gatherConfig(
dbSetup: result.dbSetup,
api: result.api,
webDeploy: result.webDeploy,
serverDeploy: result.serverDeploy,
};
}

@@ -45,8 +45,8 @@ export async function getProjectName(initialName?: string): Promise<string> {
let counter = 1;

while (
fs.pathExistsSync(path.resolve(process.cwd(), defaultName)) &&
fs.readdirSync(path.resolve(process.cwd(), defaultName)).length > 0
(await fs.pathExists(path.resolve(process.cwd(), defaultName))) &&
(await fs.readdir(path.resolve(process.cwd(), defaultName))).length > 0
) {
defaultName = `${DEFAULT_CONFIG.projectName}-${counter}`;
counter++;
129
apps/cli/src/prompts/server-deploy.ts
Normal file
@@ -0,0 +1,129 @@
import { isCancel, select } from "@clack/prompts";
import { DEFAULT_CONFIG } from "../constants";
import type { Backend, Runtime, ServerDeploy, WebDeploy } from "../types";
import { exitCancelled } from "../utils/errors";

type DeploymentOption = {
value: ServerDeploy;
label: string;
hint: string;
};

function getDeploymentDisplay(deployment: ServerDeploy): {
label: string;
hint: string;
} {
if (deployment === "wrangler") {
return {
label: "Wrangler",
hint: "Deploy to Cloudflare Workers using Wrangler",
};
}
if (deployment === "alchemy") {
return {
label: "Alchemy",
hint: "Deploy to Cloudflare Workers using Alchemy",
};
}
return {
label: deployment,
hint: `Add ${deployment} deployment`,
};
}

export async function getServerDeploymentChoice(
deployment?: ServerDeploy,
runtime?: Runtime,
backend?: Backend,
webDeploy?: WebDeploy,
): Promise<ServerDeploy> {
if (deployment !== undefined) return deployment;

if (backend === "none" || backend === "convex") {
return "none";
}

const options: DeploymentOption[] = [];

if (runtime === "workers") {
["alchemy", "wrangler"].forEach((deploy) => {
const { label, hint } = getDeploymentDisplay(deploy as ServerDeploy);
options.unshift({
value: deploy as ServerDeploy,
label,
hint,
});
});
} else {
options.push({ value: "none", label: "None", hint: "Manual setup" });
}

const response = await select<ServerDeploy>({
message: "Select server deployment",
options,
initialValue:
webDeploy === "alchemy"
? "alchemy"
: runtime === "workers"
? "wrangler"
: DEFAULT_CONFIG.serverDeploy,
});

if (isCancel(response)) return exitCancelled("Operation cancelled");

return response;
}

export async function getServerDeploymentToAdd(
runtime?: Runtime,
existingDeployment?: ServerDeploy,
): Promise<ServerDeploy> {
const options: DeploymentOption[] = [];

if (runtime === "workers") {
if (existingDeployment !== "wrangler") {
const { label, hint } = getDeploymentDisplay("wrangler");
options.push({
value: "wrangler",
label,
hint,
});
}

if (existingDeployment !== "alchemy") {
const { label, hint } = getDeploymentDisplay("alchemy");
options.push({
value: "alchemy",
label,
hint,
});
}
}

if (existingDeployment && existingDeployment !== "none") {
return "none";
}

if (options.length > 0) {
options.push({
value: "none",
label: "None",
hint: "Skip deployment setup",
});
}

if (options.length === 0) {
return "none";
}

const response = await select<ServerDeploy>({
message: "Select server deployment",
options,
initialValue:
runtime === "workers" ? "wrangler" : DEFAULT_CONFIG.serverDeploy,
});

if (isCancel(response)) return exitCancelled("Operation cancelled");

return response;
}
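(Editor's note, not part of the diff: how the prompt above is typically wired from the config-gathering step; argument values are hypothetical.)

// With no --server-deploy flag, the user is prompted; since the web deploy was
// already chosen as "alchemy", the select preselects "alchemy".
const serverDeploy = await getServerDeploymentChoice(
  undefined,   // flags.serverDeploy not provided
  "workers",
  "hono",
  "alchemy",
);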
@@ -18,12 +18,18 @@ function getDeploymentDisplay(deployment: WebDeploy): {
label: string;
hint: string;
} {
if (deployment === "workers") {
if (deployment === "wrangler") {
return {
label: "Cloudflare Workers",
label: "Wrangler",
hint: "Deploy to Cloudflare Workers using Wrangler",
};
}
if (deployment === "alchemy") {
return {
label: "Alchemy",
hint: "Deploy to Cloudflare Workers using Alchemy",
};
}
return {
label: deployment,
hint: `Add ${deployment} deployment`,
@@ -41,14 +47,16 @@ export async function getDeploymentChoice(
return "none";
}

const options: DeploymentOption[] = [
{
value: "workers",
label: "Cloudflare Workers",
hint: "Deploy to Cloudflare Workers using Wrangler",
const options: DeploymentOption[] = ["wrangler", "alchemy", "none"].map(
(deploy) => {
const { label, hint } = getDeploymentDisplay(deploy as WebDeploy);
return {
value: deploy as WebDeploy,
label,
hint,
};
},
{ value: "none", label: "None", hint: "Manual setup" },
];
);

const response = await select<WebDeploy>({
message: "Select web deployment",
@@ -71,10 +79,19 @@ export async function getDeploymentToAdd(

const options: DeploymentOption[] = [];

if (existingDeployment !== "workers") {
const { label, hint } = getDeploymentDisplay("workers");
if (existingDeployment !== "wrangler") {
const { label, hint } = getDeploymentDisplay("wrangler");
options.push({
value: "workers",
value: "wrangler",
label,
hint,
});
}

if (existingDeployment !== "alchemy") {
const { label, hint } = getDeploymentDisplay("alchemy");
options.push({
value: "alchemy",
label,
hint,
});

@@ -104,10 +104,15 @@ export const ProjectNameSchema = z
export type ProjectName = z.infer<typeof ProjectNameSchema>;

export const WebDeploySchema = z
.enum(["workers", "none"])
.enum(["wrangler", "alchemy", "none"])
.describe("Web deployment");
export type WebDeploy = z.infer<typeof WebDeploySchema>;

export const ServerDeploySchema = z
.enum(["wrangler", "alchemy", "none"])
.describe("Server deployment");
export type ServerDeploy = z.infer<typeof ServerDeploySchema>;

export const DirectoryConflictSchema = z
.enum(["merge", "overwrite", "increment", "error"])
.describe("How to handle existing directory conflicts");
@@ -132,6 +137,7 @@ export type CreateInput = {
runtime?: Runtime;
api?: API;
webDeploy?: WebDeploy;
serverDeploy?: ServerDeploy;
directoryConflict?: DirectoryConflict;
renderTitle?: boolean;
disableAnalytics?: boolean;
@@ -140,6 +146,7 @@ export type CreateInput = {
export type AddInput = {
addons?: Addons[];
webDeploy?: WebDeploy;
serverDeploy?: ServerDeploy;
projectDir?: string;
install?: boolean;
packageManager?: PackageManager;
@@ -167,6 +174,7 @@ export interface ProjectConfig {
dbSetup: DatabaseSetup;
api: API;
webDeploy: WebDeploy;
serverDeploy: ServerDeploy;
}

export interface BetterTStackConfig {
@@ -184,6 +192,7 @@ export interface BetterTStackConfig {
dbSetup: DatabaseSetup;
api: API;
webDeploy: WebDeploy;
serverDeploy: ServerDeploy;
}

export interface InitResult {

@@ -5,13 +5,19 @@ import { isTelemetryEnabled } from "./telemetry";
const POSTHOG_API_KEY = process.env.POSTHOG_API_KEY || "";
const POSTHOG_HOST = process.env.POSTHOG_HOST;

function generateSessionId() {
const rand = Math.random().toString(36).slice(2);
const now = Date.now().toString(36);
return `cli_${now}${rand}`;
}

export async function trackProjectCreation(
config: ProjectConfig,
disableAnalytics = false,
) {
if (!isTelemetryEnabled() || disableAnalytics) return;

const sessionId = `cli_${crypto.randomUUID().replace(/-/g, "")}`;
const sessionId = generateSessionId();
// biome-ignore lint/correctness/noUnusedVariables: `projectName`, `projectDir`, and `relativePath` are not used in the event properties
const { projectName, projectDir, relativePath, ...safeConfig } = config;

@@ -21,8 +27,8 @@ export async function trackProjectCreation(
properties: {
...safeConfig,
cli_version: getLatestCLIVersion(),
node_version: process.version,
platform: process.platform,
node_version: typeof process !== "undefined" ? process.version : "",
platform: typeof process !== "undefined" ? process.platform : "",
$ip: null,
},
distinct_id: sessionId,

@@ -22,6 +22,7 @@ export async function writeBtsConfig(projectConfig: ProjectConfig) {
dbSetup: projectConfig.dbSetup,
api: projectConfig.api,
webDeploy: projectConfig.webDeploy,
serverDeploy: projectConfig.serverDeploy,
};

const baseContent = {
@@ -40,6 +41,7 @@ export async function writeBtsConfig(projectConfig: ProjectConfig) {
dbSetup: btsConfig.dbSetup,
api: btsConfig.api,
webDeploy: btsConfig.webDeploy,
serverDeploy: btsConfig.serverDeploy,
};

let configContent = JSON.stringify(baseContent);
@@ -91,7 +93,9 @@ export async function readBtsConfig(

export async function updateBtsConfig(
projectDir: string,
updates: Partial<Pick<BetterTStackConfig, "addons" | "webDeploy">>,
updates: Partial<
Pick<BetterTStackConfig, "addons" | "webDeploy" | "serverDeploy">
>,
) {
try {
const configPath = path.join(projectDir, BTS_CONFIG_FILE);
@@ -1,9 +1,11 @@
import type {
Addons,
API,
Backend,
CLIInput,
Frontend,
ProjectConfig,
ServerDeploy,
WebDeploy,
} from "../types";
import { validateAddonCompatibility } from "./addon-compatibility";
@@ -252,6 +254,21 @@ export function validateWebDeployRequiresWebFrontend(
}
}

export function validateServerDeployRequiresBackend(
serverDeploy: ServerDeploy | undefined,
backend: Backend | undefined,
) {
if (
serverDeploy &&
serverDeploy !== "none" &&
(!backend || backend === "none")
) {
exitWithError(
"'--server-deploy' requires a backend. Please select a backend or set '--server-deploy none'.",
);
}
}

export function validateAddonsAgainstFrontends(
addons: Addons[] = [],
frontends: Frontend[] = [],
@@ -297,3 +314,31 @@ export function validateExamplesCompatibility(
);
}
}

export function validateAlchemyCompatibility(
webDeploy: WebDeploy | undefined,
serverDeploy: ServerDeploy | undefined,
frontends: Frontend[] = [],
) {
const isAlchemyWebDeploy = webDeploy === "alchemy";
const isAlchemyServerDeploy = serverDeploy === "alchemy";

if (isAlchemyWebDeploy || isAlchemyServerDeploy) {
const incompatibleFrontends = frontends.filter(
(f) => f === "next" || f === "react-router",
);

if (incompatibleFrontends.length > 0) {
const deployType =
isAlchemyWebDeploy && isAlchemyServerDeploy
? "web and server deployment"
: isAlchemyWebDeploy
? "web deployment"
: "server deployment";

exitWithError(
`Alchemy ${deployType} is temporarily not compatible with ${incompatibleFrontends.join(" and ")} frontend(s). Please choose a different frontend or deployment option.`,
);
}
}
}
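(Editor's note, not part of the diff: a sketch of the new compatibility check in action; the argument values are hypothetical.)

// Exits the CLI with an error: Alchemy web deployment is temporarily
// incompatible with the "next" frontend.
validateAlchemyCompatibility("alchemy", "none", ["next", "native-nativewind"]);

// Passes silently: TanStack Router is supported by the Alchemy path.
validateAlchemyCompatibility("alchemy", "alchemy", ["tanstack-router"]);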
134
apps/cli/src/utils/config-processing.ts
Normal file
@@ -0,0 +1,134 @@
import path from "node:path";
import type {
API,
Backend,
CLIInput,
Database,
DatabaseSetup,
ORM,
PackageManager,
ProjectConfig,
Runtime,
ServerDeploy,
WebDeploy,
} from "../types";

export function processArrayOption<T>(
options: (T | "none")[] | undefined,
): T[] {
if (!options || options.length === 0) return [];
if (options.includes("none" as T | "none")) return [];
return options.filter((item): item is T => item !== "none");
}

export function deriveProjectName(
projectName?: string,
projectDirectory?: string,
): string {
if (projectName) {
return projectName;
}
if (projectDirectory) {
return path.basename(path.resolve(process.cwd(), projectDirectory));
}
return "";
}

export function processFlags(
options: CLIInput,
projectName?: string,
): Partial<ProjectConfig> {
const config: Partial<ProjectConfig> = {};

if (options.api) {
config.api = options.api as API;
}

if (options.backend) {
config.backend = options.backend as Backend;
}

if (options.database) {
config.database = options.database as Database;
}

if (options.orm) {
config.orm = options.orm as ORM;
}

if (options.auth !== undefined) {
config.auth = options.auth;
}

if (options.git !== undefined) {
config.git = options.git;
}

if (options.install !== undefined) {
config.install = options.install;
}

if (options.runtime) {
config.runtime = options.runtime as Runtime;
}

if (options.dbSetup) {
config.dbSetup = options.dbSetup as DatabaseSetup;
}

if (options.packageManager) {
config.packageManager = options.packageManager as PackageManager;
}

if (options.webDeploy) {
config.webDeploy = options.webDeploy as WebDeploy;
}

if (options.serverDeploy) {
config.serverDeploy = options.serverDeploy as ServerDeploy;
}

const derivedName = deriveProjectName(projectName, options.projectDirectory);
if (derivedName) {
config.projectName = projectName || derivedName;
}

if (options.frontend && options.frontend.length > 0) {
config.frontend = processArrayOption(options.frontend);
}

if (options.addons && options.addons.length > 0) {
config.addons = processArrayOption(options.addons);
}

if (options.examples && options.examples.length > 0) {
config.examples = processArrayOption(options.examples);
}

return config;
}

export function getProvidedFlags(options: CLIInput): Set<string> {
return new Set(
Object.keys(options).filter(
(key) => options[key as keyof CLIInput] !== undefined,
),
);
}

export function validateNoneExclusivity<T>(
options: (T | "none")[] | undefined,
optionName: string,
): void {
if (!options || options.length === 0) return;

if (options.includes("none" as T | "none") && options.length > 1) {
throw new Error(`Cannot combine 'none' with other ${optionName}.`);
}
}

export function validateArrayOptions(options: CLIInput): void {
validateNoneExclusivity(options.frontend, "frontend options");
validateNoneExclusivity(options.addons, "addons");
validateNoneExclusivity(options.examples, "examples");
}
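(Editor's note, not part of the diff: a sketch of how these helpers compose; the CLIInput values are hypothetical and the cast is only for illustration.)

const options = {
  backend: "hono",
  frontend: ["tanstack-router"],
  addons: ["none"],
  serverDeploy: "alchemy",
} as unknown as CLIInput;

validateArrayOptions(options);                 // ok: "none" is not mixed with others
const provided = getProvidedFlags(options);    // Set { "backend", "frontend", "addons", "serverDeploy" }
const partial = processFlags(options, "my-app");
// partial.addons === [] (the lone "none" collapses), partial.serverDeploy === "alchemy"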
333
apps/cli/src/utils/config-validation.ts
Normal file
@@ -0,0 +1,333 @@
import type {
CLIInput,
Database,
DatabaseSetup,
ProjectConfig,
Runtime,
} from "../types";
import {
coerceBackendPresets,
ensureSingleWebAndNative,
incompatibleFlagsForBackend,
isWebFrontend,
validateAddonsAgainstFrontends,
validateApiFrontendCompatibility,
validateExamplesCompatibility,
validateServerDeployRequiresBackend,
validateWebDeployRequiresWebFrontend,
validateWorkersCompatibility,
validateAlchemyCompatibility,
} from "./compatibility-rules";
import { exitWithError } from "./errors";

export function validateDatabaseOrmAuth(
cfg: Partial<ProjectConfig>,
flags?: Set<string>,
): void {
const db = cfg.database;
const orm = cfg.orm;
const has = (k: string) => (flags ? flags.has(k) : true);

if (has("orm") && has("database") && orm === "mongoose" && db !== "mongodb") {
exitWithError(
"Mongoose ORM requires MongoDB database. Please use '--database mongodb' or choose a different ORM.",
);
}

if (has("orm") && has("database") && orm === "drizzle" && db === "mongodb") {
exitWithError(
"Drizzle ORM does not support MongoDB. Please use '--orm mongoose' or '--orm prisma' or choose a different database.",
);
}

if (
has("database") &&
has("orm") &&
db === "mongodb" &&
orm &&
orm !== "mongoose" &&
orm !== "prisma" &&
orm !== "none"
) {
exitWithError(
"MongoDB database requires Mongoose or Prisma ORM. Please use '--orm mongoose' or '--orm prisma' or choose a different database.",
);
}

if (has("database") && has("orm") && db && db !== "none" && orm === "none") {
exitWithError(
"Database selection requires an ORM. Please choose '--orm drizzle', '--orm prisma', or '--orm mongoose'.",
);
}

if (has("orm") && has("database") && orm && orm !== "none" && db === "none") {
exitWithError(
"ORM selection requires a database. Please choose a database or set '--orm none'.",
);
}

if (has("auth") && has("database") && cfg.auth && db === "none") {
exitWithError(
"Authentication requires a database. Please choose a database or set '--no-auth'.",
);
}

if (cfg.auth && db === "none") {
exitWithError(
"Authentication requires a database. Please choose a database or set '--no-auth'.",
);
}

if (orm && orm !== "none" && db === "none") {
exitWithError(
"ORM selection requires a database. Please choose a database or set '--orm none'.",
);
}
}

export function validateDatabaseSetup(
config: Partial<ProjectConfig>,
providedFlags: Set<string>,
): void {
const { dbSetup, database, runtime } = config;

if (
providedFlags.has("dbSetup") &&
providedFlags.has("database") &&
dbSetup &&
dbSetup !== "none" &&
database === "none"
) {
exitWithError(
"Database setup requires a database. Please choose a database or set '--db-setup none'.",
);
}

const setupValidations: Record<
DatabaseSetup,
{ database?: Database; runtime?: Runtime; errorMessage: string }
> = {
turso: {
database: "sqlite",
errorMessage:
"Turso setup requires SQLite database. Please use '--database sqlite' or choose a different setup.",
},
neon: {
database: "postgres",
errorMessage:
"Neon setup requires PostgreSQL database. Please use '--database postgres' or choose a different setup.",
},
"prisma-postgres": {
database: "postgres",
errorMessage:
"Prisma PostgreSQL setup requires PostgreSQL database. Please use '--database postgres' or choose a different setup.",
},
"mongodb-atlas": {
database: "mongodb",
errorMessage:
"MongoDB Atlas setup requires MongoDB database. Please use '--database mongodb' or choose a different setup.",
},
supabase: {
database: "postgres",
errorMessage:
"Supabase setup requires PostgreSQL database. Please use '--database postgres' or choose a different setup.",
},
d1: {
database: "sqlite",
runtime: "workers",
errorMessage:
"Cloudflare D1 setup requires SQLite database and Cloudflare Workers runtime.",
},
docker: {
errorMessage:
"Docker setup is not compatible with SQLite database or Cloudflare Workers runtime.",
},
none: { errorMessage: "" },
};

if (dbSetup && dbSetup !== "none") {
const validation = setupValidations[dbSetup];

if (validation.database && database !== validation.database) {
exitWithError(validation.errorMessage);
}

if (validation.runtime && runtime !== validation.runtime) {
exitWithError(validation.errorMessage);
}

if (dbSetup === "docker") {
if (database === "sqlite") {
exitWithError(
"Docker setup is not compatible with SQLite database. SQLite is file-based and doesn't require Docker. Please use '--database postgres', '--database mysql', '--database mongodb', or choose a different setup.",
);
}
if (runtime === "workers") {
exitWithError(
"Docker setup is not compatible with Cloudflare Workers runtime. Workers runtime uses serverless databases (D1) and doesn't support local Docker containers. Please use '--db-setup d1' for SQLite or choose a different runtime.",
);
}
}
}
}

export function validateBackendConstraints(
config: Partial<ProjectConfig>,
providedFlags: Set<string>,
options: CLIInput,
): void {
const { backend } = config;

if (
providedFlags.has("backend") &&
backend &&
backend !== "convex" &&
backend !== "none"
) {
if (providedFlags.has("runtime") && options.runtime === "none") {
exitWithError(
"'--runtime none' is only supported with '--backend convex' or '--backend none'. Please choose 'bun', 'node', or remove the --runtime flag.",
);
}
}

if (backend === "convex" || backend === "none") {
const incompatibleFlags = incompatibleFlagsForBackend(
backend,
providedFlags,
options,
);
if (incompatibleFlags.length > 0) {
exitWithError(
`The following flags are incompatible with '--backend ${backend}': ${incompatibleFlags.join(
", ",
)}. Please remove them.`,
);
}

if (
backend === "convex" &&
providedFlags.has("frontend") &&
options.frontend
) {
const incompatibleFrontends = options.frontend.filter(
(f) => f === "solid",
);
if (incompatibleFrontends.length > 0) {
exitWithError(
`The following frontends are not compatible with '--backend convex': ${incompatibleFrontends.join(
", ",
)}. Please choose a different frontend or backend.`,
);
}
}

coerceBackendPresets(config);
}
}

export function validateFrontendConstraints(
config: Partial<ProjectConfig>,
providedFlags: Set<string>,
): void {
const { frontend } = config;

if (frontend && frontend.length > 0) {
ensureSingleWebAndNative(frontend);

if (
providedFlags.has("api") &&
providedFlags.has("frontend") &&
config.api
) {
validateApiFrontendCompatibility(config.api, frontend);
}
}

const hasWebFrontendFlag = (frontend ?? []).some((f) => isWebFrontend(f));
validateWebDeployRequiresWebFrontend(config.webDeploy, hasWebFrontendFlag);
}

export function validateApiConstraints(
config: Partial<ProjectConfig>,
options: CLIInput,
): void {
if (config.api === "none") {
if (
options.examples &&
!(options.examples.length === 1 && options.examples[0] === "none") &&
options.backend !== "convex"
) {
exitWithError(
"Cannot use '--examples' when '--api' is set to 'none'. Please remove the --examples flag or choose an API type.",
);
}
}
}

export function validateFullConfig(
config: Partial<ProjectConfig>,
providedFlags: Set<string>,
options: CLIInput,
): void {
validateDatabaseOrmAuth(config, providedFlags);
validateDatabaseSetup(config, providedFlags);

validateBackendConstraints(config, providedFlags, options);

validateFrontendConstraints(config, providedFlags);

validateApiConstraints(config, options);

validateServerDeployRequiresBackend(config.serverDeploy, config.backend);

validateWorkersCompatibility(providedFlags, options, config);

if (config.addons && config.addons.length > 0) {
validateAddonsAgainstFrontends(config.addons, config.frontend);
config.addons = [...new Set(config.addons)];
}

validateExamplesCompatibility(
config.examples ?? [],
config.backend,
config.database,
config.frontend ?? [],
);

validateAlchemyCompatibility(
config.webDeploy,
config.serverDeploy,
config.frontend ?? [],
);
}

export function validateConfigForProgrammaticUse(
config: Partial<ProjectConfig>,
): void {
try {
validateDatabaseOrmAuth(config);

if (config.frontend && config.frontend.length > 0) {
ensureSingleWebAndNative(config.frontend);
}

validateApiFrontendCompatibility(config.api, config.frontend);

if (config.addons && config.addons.length > 0) {
validateAddonsAgainstFrontends(config.addons, config.frontend);
}

validateExamplesCompatibility(
config.examples ?? [],
config.backend,
config.database,
config.frontend ?? [],
);
} catch (error) {
if (error instanceof Error) {
throw error;
}
throw new Error(String(error));
}
}
@@ -107,6 +107,12 @@ export function displayConfig(config: Partial<ProjectConfig>) {
);
}

if (config.serverDeploy !== undefined) {
configDisplay.push(
`${pc.blue("Server Deployment:")} ${String(config.serverDeploy)}`,
);
}

if (configDisplay.length === 0) {
return pc.yellow("No configuration selected.");
}
61
apps/cli/src/utils/format-with-biome.ts
Normal file
@@ -0,0 +1,61 @@
import path from "node:path";
import { Biome } from "@biomejs/js-api/nodejs";
import fs from "fs-extra";
import { glob } from "tinyglobby";

export async function formatProjectWithBiome(projectDir: string) {
const biome = new Biome();
const { projectKey } = biome.openProject(projectDir);

biome.applyConfiguration(projectKey, {
formatter: {
enabled: true,
indentStyle: "tab",
},
javascript: {
formatter: {
quoteStyle: "double",
},
},
});

const files = await glob("**/*", {
cwd: projectDir,
dot: true,
absolute: true,
onlyFiles: true,
});

for (const filePath of files) {
try {
const ext = path.extname(filePath).toLowerCase();
const supported = new Set([
".ts",
".tsx",
".js",
".jsx",
".cjs",
".mjs",
".cts",
".mts",
".json",
".jsonc",
".md",
".mdx",
".css",
".scss",
".html",
]);
if (!supported.has(ext)) continue;

const original = await fs.readFile(filePath, "utf8");
const result = biome.formatContent(projectKey, original, { filePath });
const content = result?.content;
if (typeof content !== "string") continue;
if (content.length === 0 && original.length > 0) continue;
if (content !== original) {
await fs.writeFile(filePath, content);
}
} catch {}
}
}
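(Editor's note, not part of the diff: a minimal usage sketch of the formatter helper above; the target path is hypothetical.)

// Run after template files are written so generated code matches the
// project's Biome settings (tabs, double quotes).
import path from "node:path";
import { formatProjectWithBiome } from "./utils/format-with-biome";

await formatProjectWithBiome(path.join(process.cwd(), "my-app"));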
@@ -30,6 +30,7 @@ export function generateReproducibleCommand(config: ProjectConfig): string {

flags.push(`--db-setup ${config.dbSetup}`);
flags.push(`--web-deploy ${config.webDeploy}`);
flags.push(`--server-deploy ${config.serverDeploy}`);
flags.push(config.git ? "--git" : "--no-git");
flags.push(`--package-manager ${config.packageManager}`);
flags.push(config.install ? "--install" : "--no-install");

@@ -14,8 +14,9 @@ export async function handleDirectoryConflict(
}> {
while (true) {
const resolvedPath = path.resolve(process.cwd(), currentPathInput);
const dirExists = fs.pathExistsSync(resolvedPath);
const dirIsNotEmpty = dirExists && fs.readdirSync(resolvedPath).length > 0;
const dirExists = await fs.pathExists(resolvedPath);
const dirIsNotEmpty =
dirExists && (await fs.readdir(resolvedPath)).length > 0;

if (!dirIsNotEmpty) {
return { finalPathInput: currentPathInput, shouldClearDirectory: false };
47
apps/cli/src/utils/project-name-validation.ts
Normal file
47
apps/cli/src/utils/project-name-validation.ts
Normal file
@@ -0,0 +1,47 @@
import path from "node:path";
import { ProjectNameSchema } from "../types";
import { exitWithError } from "./errors";

export function validateProjectName(name: string): void {
	const result = ProjectNameSchema.safeParse(name);
	if (!result.success) {
		exitWithError(
			`Invalid project name: ${
				result.error.issues[0]?.message || "Invalid project name"
			}`,
		);
	}
}

export function validateProjectNameThrow(name: string): void {
	const result = ProjectNameSchema.safeParse(name);
	if (!result.success) {
		throw new Error(`Invalid project name: ${result.error.issues[0]?.message}`);
	}
}

export function extractAndValidateProjectName(
	projectName?: string,
	projectDirectory?: string,
	throwOnError = false,
): string {
	const derivedName =
		projectName ||
		(projectDirectory
			? path.basename(path.resolve(process.cwd(), projectDirectory))
			: "");

	if (!derivedName) {
		return "";
	}

	const nameToValidate = projectName ? path.basename(projectName) : derivedName;

	if (throwOnError) {
		validateProjectNameThrow(nameToValidate);
	} else {
		validateProjectName(nameToValidate);
	}

	return projectName || derivedName;
}
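A minimal usage sketch for the new helper; the call sites and argument values below are hypothetical and only illustrate its two error modes:

```typescript
// Hypothetical calls to extractAndValidateProjectName, not taken from the repository.
import { extractAndValidateProjectName } from "./utils/project-name-validation";

// Explicit name: only its basename is validated, the original value is returned.
const fromFlag = extractAndValidateProjectName("my-app", undefined, false);
// -> "my-app"; an invalid name calls exitWithError and terminates the CLI.

// Directory only: the resolved directory's basename becomes the project name.
const fromDir = extractAndValidateProjectName(undefined, "./apps/demo", true);
// -> "demo"; with throwOnError = true an invalid name throws instead of exiting.
```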
@@ -1,514 +1,85 @@
import path from "node:path";
import type { CLIInput, ProjectConfig } from "./types";
import {
	type API,
	type Backend,
	type CLIInput,
	type Database,
	type DatabaseSetup,
	type ORM,
	type PackageManager,
	type ProjectConfig,
	ProjectNameSchema,
	type Runtime,
	type WebDeploy,
} from "./types";
	getProvidedFlags,
	processFlags,
	validateArrayOptions,
} from "./utils/config-processing";
import {
	coerceBackendPresets,
	ensureSingleWebAndNative,
	incompatibleFlagsForBackend,
	isWebFrontend,
	validateAddonsAgainstFrontends,
	validateApiFrontendCompatibility,
	validateExamplesCompatibility,
	validateWebDeployRequiresWebFrontend,
	validateWorkersCompatibility,
} from "./utils/compatibility-rules";
	validateConfigForProgrammaticUse,
	validateFullConfig,
} from "./utils/config-validation";
import { exitWithError } from "./utils/errors";

function processArrayOption<T>(options: (T | "none")[] | undefined): T[] {
	if (!options || options.length === 0) return [];
	if (options.includes("none" as T | "none")) return [];
	return options.filter((item): item is T => item !== "none");
}

function deriveProjectName(
	projectName?: string,
	projectDirectory?: string,
): string {
	if (projectName) {
		return projectName;
	}
	if (projectDirectory) {
		return path.basename(path.resolve(process.cwd(), projectDirectory));
	}
	return "";
}

function validateProjectName(name: string): void {
	const result = ProjectNameSchema.safeParse(name);
	if (!result.success) {
		exitWithError(
			`Invalid project name: ${
				result.error.issues[0]?.message || "Invalid project name"
			}`,
		);
	}
}
import { extractAndValidateProjectName } from "./utils/project-name-validation";

export function processAndValidateFlags(
	options: CLIInput,
	providedFlags: Set<string>,
	projectName?: string,
): Partial<ProjectConfig> {
	const config: Partial<ProjectConfig> = {};

	if (options.api) {
		config.api = options.api as API;
		if (options.api === "none") {
			if (
				options.examples &&
				!(options.examples.length === 1 && options.examples[0] === "none") &&
				options.backend !== "convex"
			) {
				exitWithError(
					"Cannot use '--examples' when '--api' is set to 'none'. Please remove the --examples flag or choose an API type.",
	if (options.yolo) {
		const cfg = processFlags(options, projectName);
		const validatedProjectName = extractAndValidateProjectName(
			projectName,
			options.projectDirectory,
			true,
		);
		if (validatedProjectName) {
			cfg.projectName = validatedProjectName;
		}
	}
		return cfg;
	}

	if (options.backend) {
		config.backend = options.backend as Backend;
	try {
		validateArrayOptions(options);
	} catch (error) {
		exitWithError(error instanceof Error ? error.message : String(error));
	}

	if (
		providedFlags.has("backend") &&
		config.backend &&
		config.backend !== "convex" &&
		config.backend !== "none"
	) {
		if (providedFlags.has("runtime") && options.runtime === "none") {
			exitWithError(
				`'--runtime none' is only supported with '--backend convex' or '--backend none'. Please choose 'bun', 'node', or remove the --runtime flag.`,
	const config = processFlags(options, projectName);

	const validatedProjectName = extractAndValidateProjectName(
		projectName,
		options.projectDirectory,
		false,
	);
	}
	if (validatedProjectName) {
		config.projectName = validatedProjectName;
	}

	if (options.database) {
		config.database = options.database as Database;
	}
	if (options.orm) {
		config.orm = options.orm as ORM;
	}
	if (options.auth !== undefined) {
		config.auth = options.auth;
	}
	if (options.git !== undefined) {
		config.git = options.git;
	}
	if (options.install !== undefined) {
		config.install = options.install;
	}
	if (options.runtime) {
		config.runtime = options.runtime as Runtime;
	}
	if (options.dbSetup) {
		config.dbSetup = options.dbSetup as DatabaseSetup;
	}
	if (options.packageManager) {
		config.packageManager = options.packageManager as PackageManager;
	}

	if (options.webDeploy) {
		config.webDeploy = options.webDeploy as WebDeploy;
	}

	const derivedName = deriveProjectName(projectName, options.projectDirectory);
	if (derivedName) {
		const nameToValidate = projectName
			? path.basename(projectName)
			: derivedName;
		validateProjectName(nameToValidate);
		config.projectName = projectName || derivedName;
	}

	if (options.frontend && options.frontend.length > 0) {
		if (options.frontend.includes("none")) {
			if (options.frontend.length > 1) {
				exitWithError(`Cannot combine 'none' with other frontend options.`);
			}
			config.frontend = [];
		} else {
			const validOptions = processArrayOption(options.frontend);
			ensureSingleWebAndNative(validOptions);
			config.frontend = validOptions;
		}
	}

	if (
		providedFlags.has("api") &&
		providedFlags.has("frontend") &&
		config.api &&
		config.frontend &&
		config.frontend.length > 0
	) {
		validateApiFrontendCompatibility(config.api, config.frontend);
	}
	if (options.addons && options.addons.length > 0) {
		if (options.addons.includes("none")) {
			if (options.addons.length > 1) {
				exitWithError(`Cannot combine 'none' with other addons.`);
			}
			config.addons = [];
		} else {
			config.addons = processArrayOption(options.addons);
		}
	}
	if (options.examples && options.examples.length > 0) {
		if (options.examples.includes("none")) {
			if (options.examples.length > 1) {
				exitWithError("Cannot combine 'none' with other examples.");
			}
			config.examples = [];
		} else {
			config.examples = processArrayOption(options.examples);
			if (options.examples.includes("none") && config.backend !== "convex") {
				config.examples = [];
			}
		}
	}

	if (config.backend === "convex" || config.backend === "none") {
		const incompatibleFlags = incompatibleFlagsForBackend(
			config.backend,
			providedFlags,
			options,
		);
		if (incompatibleFlags.length > 0) {
			exitWithError(
				`The following flags are incompatible with '--backend ${config.backend}': ${incompatibleFlags.join(
					", ",
				)}. Please remove them.`,
			);
		}

		if (
			config.backend === "convex" &&
			providedFlags.has("frontend") &&
			options.frontend
		) {
			const incompatibleFrontends = options.frontend.filter(
				(f) => f === "solid",
			);
			if (incompatibleFrontends.length > 0) {
				exitWithError(
					`The following frontends are not compatible with '--backend convex': ${incompatibleFrontends.join(
						", ",
					)}. Please choose a different frontend or backend.`,
				);
			}
		}

		coerceBackendPresets(config);
	}

	if (
		providedFlags.has("orm") &&
		providedFlags.has("database") &&
		config.orm === "mongoose" &&
		config.database !== "mongodb"
	) {
		exitWithError(
			"Mongoose ORM requires MongoDB database. Please use '--database mongodb' or choose a different ORM.",
		);
	}

	if (
		providedFlags.has("database") &&
		providedFlags.has("orm") &&
		config.database === "mongodb" &&
		config.orm &&
		config.orm !== "mongoose" &&
		config.orm !== "prisma"
	) {
		exitWithError(
			"MongoDB database requires Mongoose or Prisma ORM. Please use '--orm mongoose' or '--orm prisma' or choose a different database.",
		);
	}

	if (
		providedFlags.has("orm") &&
		providedFlags.has("database") &&
		config.orm === "drizzle" &&
		config.database === "mongodb"
	) {
		exitWithError(
			"Drizzle ORM does not support MongoDB. Please use '--orm mongoose' or '--orm prisma' or choose a different database.",
		);
	}

	if (
		providedFlags.has("database") &&
		providedFlags.has("orm") &&
		config.database &&
		config.database !== "none" &&
		config.orm === "none"
	) {
		exitWithError(
			"Database selection requires an ORM. Please choose '--orm drizzle', '--orm prisma', or '--orm mongoose'.",
		);
	}

	if (
		providedFlags.has("orm") &&
		providedFlags.has("database") &&
		config.orm &&
		config.orm !== "none" &&
		config.database === "none"
	) {
		exitWithError(
			"ORM selection requires a database. Please choose a database or set '--orm none'.",
		);
	}

	if (
		providedFlags.has("auth") &&
		providedFlags.has("database") &&
		config.auth &&
		config.database === "none"
	) {
		exitWithError(
			"Authentication requires a database. Please choose a database or set '--no-auth'.",
		);
	}

	if (
		providedFlags.has("dbSetup") &&
		providedFlags.has("database") &&
		config.dbSetup &&
		config.dbSetup !== "none" &&
		config.database === "none"
	) {
		exitWithError(
			"Database setup requires a database. Please choose a database or set '--db-setup none'.",
		);
	}

	if (
		providedFlags.has("dbSetup") &&
		(config.database ? providedFlags.has("database") : true) &&
		config.dbSetup === "turso" &&
		config.database !== "sqlite"
	) {
		exitWithError(
			"Turso setup requires SQLite database. Please use '--database sqlite' or choose a different setup.",
		);
	}

	if (
		providedFlags.has("dbSetup") &&
		(config.database ? providedFlags.has("database") : true) &&
		config.dbSetup === "neon" &&
		config.database !== "postgres"
	) {
		exitWithError(
			"Neon setup requires PostgreSQL database. Please use '--database postgres' or choose a different setup.",
		);
	}

	if (
		providedFlags.has("dbSetup") &&
		(config.database ? providedFlags.has("database") : true) &&
		config.dbSetup === "prisma-postgres" &&
		config.database !== "postgres"
	) {
		exitWithError(
			"Prisma PostgreSQL setup requires PostgreSQL database. Please use '--database postgres' or choose a different setup.",
		);
	}

	if (
		providedFlags.has("dbSetup") &&
		(config.database ? providedFlags.has("database") : true) &&
		config.dbSetup === "mongodb-atlas" &&
		config.database !== "mongodb"
	) {
		exitWithError(
			"MongoDB Atlas setup requires MongoDB database. Please use '--database mongodb' or choose a different setup.",
		);
	}

	if (
		providedFlags.has("dbSetup") &&
		(config.database ? providedFlags.has("database") : true) &&
		config.dbSetup === "supabase" &&
		config.database !== "postgres"
	) {
		exitWithError(
			"Supabase setup requires PostgreSQL database. Please use '--database postgres' or choose a different setup.",
		);
	}

if (config.dbSetup === "d1") {
|
||||
if (
|
||||
(providedFlags.has("dbSetup") && providedFlags.has("database")) ||
|
||||
(providedFlags.has("dbSetup") && !config.database)
|
||||
) {
|
||||
if (config.database !== "sqlite") {
|
||||
exitWithError(
|
||||
"Cloudflare D1 setup requires SQLite database. Please use '--database sqlite' or choose a different setup.",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
(providedFlags.has("dbSetup") && providedFlags.has("runtime")) ||
|
||||
(providedFlags.has("dbSetup") && !config.runtime)
|
||||
) {
|
||||
if (config.runtime !== "workers") {
|
||||
exitWithError(
|
||||
"Cloudflare D1 setup requires the Cloudflare Workers runtime. Please use '--runtime workers' or choose a different setup.",
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
providedFlags.has("dbSetup") &&
|
||||
providedFlags.has("database") &&
|
||||
config.dbSetup === "docker" &&
|
||||
config.database === "sqlite"
|
||||
) {
|
||||
exitWithError(
|
||||
"Docker setup is not compatible with SQLite database. SQLite is file-based and doesn't require Docker. Please use '--database postgres', '--database mysql', '--database mongodb', or choose a different setup.",
|
||||
);
|
||||
}
|
||||
|
||||
if (
|
||||
providedFlags.has("dbSetup") &&
|
||||
providedFlags.has("runtime") &&
|
||||
config.dbSetup === "docker" &&
|
||||
config.runtime === "workers"
|
||||
) {
|
||||
exitWithError(
|
||||
"Docker setup is not compatible with Cloudflare Workers runtime. Workers runtime uses serverless databases (D1) and doesn't support local Docker containers. Please use '--db-setup d1' for SQLite or choose a different runtime.",
|
||||
);
|
||||
}
|
||||
|
||||
validateWorkersCompatibility(providedFlags, options, config);
|
||||
|
||||
const hasWebFrontendFlag = (config.frontend ?? []).some((f) =>
|
||||
isWebFrontend(f),
|
||||
);
|
||||
validateWebDeployRequiresWebFrontend(config.webDeploy, hasWebFrontendFlag);
|
||||
validateFullConfig(config, providedFlags, options);
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
export function validateConfigCompatibility(config: Partial<ProjectConfig>) {
	const effectiveDatabase = config.database;
	const effectiveBackend = config.backend;
	const effectiveFrontend = config.frontend;
	const effectiveApi = config.api;

	validateApiFrontendCompatibility(effectiveApi, effectiveFrontend);

	if (config.addons && config.addons.length > 0) {
		validateAddonsAgainstFrontends(config.addons, effectiveFrontend);
		config.addons = [...new Set(config.addons)];
	}

	validateExamplesCompatibility(
		config.examples ?? [],
		effectiveBackend,
		effectiveDatabase,
		effectiveFrontend ?? [],
	);
}

export function processProvidedFlagsWithoutValidation(
	options: CLIInput,
	projectName?: string,
): Partial<ProjectConfig> {
	const config: Partial<ProjectConfig> = {};
	const config = processFlags(options, projectName);

	if (options.api) {
		config.api = options.api as API;
	}

	if (options.backend) {
		config.backend = options.backend as Backend;
	}

	if (options.database) {
		config.database = options.database as Database;
	}

	if (options.orm) {
		config.orm = options.orm as ORM;
	}

	if (options.auth !== undefined) {
		config.auth = options.auth;
	}

	if (options.git !== undefined) {
		config.git = options.git;
	}

	if (options.install !== undefined) {
		config.install = options.install;
	}

	if (options.runtime) {
		config.runtime = options.runtime as Runtime;
	}

	if (options.dbSetup) {
		config.dbSetup = options.dbSetup as DatabaseSetup;
	}

	if (options.packageManager) {
		config.packageManager = options.packageManager as PackageManager;
	}

	if (options.webDeploy) {
		config.webDeploy = options.webDeploy as WebDeploy;
	}

	const derivedName = deriveProjectName(projectName, options.projectDirectory);
	if (derivedName) {
		const nameToValidate = projectName
			? path.basename(projectName)
			: derivedName;
		const result = ProjectNameSchema.safeParse(nameToValidate);
		if (!result.success) {
			throw new Error(
				`Invalid project name: ${result.error.issues[0]?.message}`,
	const validatedProjectName = extractAndValidateProjectName(
		projectName,
		options.projectDirectory,
		true,
	);
		}
		config.projectName = projectName || derivedName;
	}

	if (options.frontend && options.frontend.length > 0) {
		config.frontend = processArrayOption(options.frontend);
	}

	if (options.addons && options.addons.length > 0) {
		config.addons = processArrayOption(options.addons);
	}

	if (options.examples && options.examples.length > 0) {
		config.examples = processArrayOption(options.examples);
	if (validatedProjectName) {
		config.projectName = validatedProjectName;
	}

	return config;
}

export function getProvidedFlags(options: CLIInput): Set<string> {
	return new Set(
		Object.keys(options).filter(
			(key) => options[key as keyof CLIInput] !== undefined,
		),
	);
export function validateConfigCompatibility(
	config: Partial<ProjectConfig>,
	providedFlags?: Set<string>,
	options?: CLIInput,
) {
	if (options?.yolo) return;
	if (options && providedFlags) {
		validateFullConfig(config, providedFlags, options);
	} else {
		validateConfigForProgrammaticUse(config);
	}
}

export { getProvidedFlags };
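Read together, the refactor funnels every entry point through the same helpers. A rough sketch of how the pieces compose; the import path and flag values are hypothetical, only the function names and signatures come from the diff above:

```typescript
// Hypothetical driver code, not taken from the repository.
import type { CLIInput } from "./types";
import {
	getProvidedFlags,
	processAndValidateFlags,
	validateConfigCompatibility,
} from "./validation"; // assumed module path

const options = { backend: "convex", frontend: ["tanstack-router"] } as CLIInput;

const providedFlags = getProvidedFlags(options); // Set { "backend", "frontend" }
const config = processAndValidateFlags(options, providedFlags, "my-app");
validateConfigCompatibility(config, providedFlags, options); // skipped entirely when options.yolo is set
```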

@@ -21,6 +21,7 @@
	"!bts.jsonc",
	"!**/.expo",
	"!**/.wrangler",
	"!**/.alchemy",
	"!**/wrangler.jsonc",
	"!**/.source"
]

@@ -4,7 +4,7 @@
	"type": "stdio",
	"command": "npx",
	"args": ["-y", "@upstash/context7-mcp"]
-}{{#if (or (eq runtime "workers") (eq webDeploy "workers"))}},
+}{{#if (or (eq runtime "workers") (eq webDeploy "wrangler"))}},
"cloudflare": {
	"command": "npx",
	"args": ["mcp-remote", "https://docs.mcp.cloudflare.com/sse"]
@@ -16,6 +16,7 @@
	"!bts.jsonc",
	"!**/.expo",
	"!**/.wrangler",
	"!**/.alchemy",
	"!**/wrangler.jsonc",
	"!**/.source"
]

@@ -21,6 +21,13 @@ export const auth = betterAuth({
	],
	emailAndPassword: {
		enabled: true,
	},
	advanced: {
		defaultCookieAttributes: {
			sameSite: "none",
			secure: true,
			httpOnly: true,
		},
	}
	{{#if (or (includes frontend "native-nativewind") (includes frontend "native-unistyles"))}}
	, plugins: [expo()]
@@ -54,8 +61,13 @@ export const auth = betterAuth({
	emailAndPassword: {
		enabled: true,
	},
	secret: process.env.BETTER_AUTH_SECRET,
	baseURL: process.env.BETTER_AUTH_URL,
	advanced: {
		defaultCookieAttributes: {
			sameSite: "none",
			secure: true,
			httpOnly: true,
		},
	},
	{{#if (or (includes frontend "native-nativewind") (includes frontend "native-unistyles"))}}
	plugins: [expo()],
	{{/if}}
@@ -85,6 +97,13 @@ export const auth = betterAuth({
	},
	secret: env.BETTER_AUTH_SECRET,
	baseURL: env.BETTER_AUTH_URL,
	advanced: {
		defaultCookieAttributes: {
			sameSite: "none",
			secure: true,
			httpOnly: true,
		},
	},
	{{#if (or (includes frontend "native-nativewind") (includes frontend "native-unistyles"))}}
	plugins: [expo()],
	{{/if}}
@@ -110,6 +129,13 @@ export const auth = betterAuth({
	],
	emailAndPassword: {
		enabled: true,
	},
	advanced: {
		defaultCookieAttributes: {
			sameSite: "none",
			secure: true,
			httpOnly: true,
		},
	}
	{{#if (or (includes frontend "native-nativewind") (includes frontend "native-unistyles"))}}
	, plugins: [expo()]
@@ -133,6 +159,13 @@ export const auth = betterAuth({
	],
	emailAndPassword: {
		enabled: true,
	},
	advanced: {
		defaultCookieAttributes: {
			sameSite: "none",
			secure: true,
			httpOnly: true,
		},
	}
	{{#if (or (includes frontend "native-nativewind") (includes frontend "native-unistyles"))}}
	, plugins: [expo()]
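Each of these hunks makes the same addition for every runtime variant of the auth template, presumably so frontends served from other origins can send session cookies. Stripped of Handlebars, the resulting Better Auth options look roughly like this (a sketch with most options elided, not a verbatim template output):

```typescript
// Sketch of the cookie settings these hunks add.
import { betterAuth } from "better-auth";

export const auth = betterAuth({
	emailAndPassword: {
		enabled: true,
	},
	// Added in this commit: cross-site cookie attributes
	// (sameSite "none" requires the secure flag).
	advanced: {
		defaultCookieAttributes: {
			sameSite: "none",
			secure: true,
			httpOnly: true,
		},
	},
});
```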

@@ -16,6 +16,7 @@ dist/
.idea/usage.statistics.xml
.idea/shelf
.wrangler
.alchemy
/.next/
.vercel

@@ -20,7 +20,7 @@
{{else if (eq runtime "bun")}}
	"bun"
{{else if (eq runtime "workers")}}
	"./worker-configuration",
	"@cloudflare/workers-types",
	"node"
{{else}}
	"node",

@@ -1,2 +1,4 @@
node_modules
.turbo
.alchemy
.env
@@ -7,11 +7,13 @@ export default defineConfig({
	// DOCS: https://orm.drizzle.team/docs/guides/d1-http-with-drizzle-kit
	dialect: "sqlite",
	driver: "d1-http",
	{{#if (eq serverDeploy "wrangler")}}
	dbCredentials: {
		accountId: process.env.CLOUDFLARE_ACCOUNT_ID!,
		databaseId: process.env.CLOUDFLARE_DATABASE_ID!,
		token: process.env.CLOUDFLARE_D1_TOKEN!,
	},
	{{/if}}
{{else}}
	dialect: "turso",
	dbCredentials: {

apps/cli/templates/deploy/alchemy/alchemy.run.ts.hbs (new file, 208 lines)
@@ -0,0 +1,208 @@
import alchemy from "alchemy";
{{#if (eq webDeploy "alchemy")}}
{{#if (includes frontend "next")}}
import { Next } from "alchemy/cloudflare";
{{else if (includes frontend "nuxt")}}
import { Nuxt } from "alchemy/cloudflare";
{{else if (includes frontend "svelte")}}
import { SvelteKit } from "alchemy/cloudflare";
{{else if (includes frontend "tanstack-start")}}
import { TanStackStart } from "alchemy/cloudflare";
{{else if (includes frontend "tanstack-router")}}
import { Vite } from "alchemy/cloudflare";
{{else if (includes frontend "react-router")}}
import { ReactRouter } from "alchemy/cloudflare";
{{else if (includes frontend "solid")}}
import { Vite } from "alchemy/cloudflare";
{{/if}}
{{/if}}
{{#if (eq serverDeploy "alchemy")}}
import { Worker, WranglerJson } from "alchemy/cloudflare";
{{#if (eq dbSetup "d1")}}
import { D1Database } from "alchemy/cloudflare";
{{/if}}
{{/if}}
{{#if (and (eq serverDeploy "alchemy") (eq dbSetup "d1"))}}
import { Exec } from "alchemy/os";
{{/if}}
import { config } from "dotenv";

{{#if (and (eq webDeploy "alchemy") (eq serverDeploy "alchemy"))}}
config({ path: "./.env" });
config({ path: "./apps/web/.env" });
config({ path: "./apps/server/.env" });
{{else if (or (eq webDeploy "alchemy") (eq serverDeploy "alchemy"))}}
config({ path: "./.env" });
{{/if}}

const app = await alchemy("{{projectName}}");

{{#if (and (eq serverDeploy "alchemy") (eq dbSetup "d1"))}}
await Exec("db-generate", {
	{{#if (and (eq webDeploy "alchemy") (eq serverDeploy "alchemy"))}}cwd: "apps/server",{{/if}}
	command: "{{packageManager}} run db:generate",
});

const db = await D1Database("database", {
	name: `${app.name}-${app.stage}-db`,
	migrationsDir: "apps/server/src/db/migrations",
});
{{/if}}

{{#if (eq webDeploy "alchemy")}}
{{#if (includes frontend "next")}}
export const web = await Next("web", {
	{{#if (eq serverDeploy "alchemy")}}cwd: "apps/web",{{/if}}
	name: `${app.name}-${app.stage}-web`,
	bindings: {
		{{#if (eq backend "convex")}}
		NEXT_PUBLIC_CONVEX_URL: process.env.NEXT_PUBLIC_CONVEX_URL || "",
		{{else}}
		NEXT_PUBLIC_SERVER_URL: process.env.NEXT_PUBLIC_SERVER_URL || "",
		{{/if}}
	},
	dev: {
		command: "{{packageManager}} run dev"
	}
});
{{else if (includes frontend "nuxt")}}
export const web = await Nuxt("web", {
	{{#if (eq serverDeploy "alchemy")}}cwd: "apps/web",{{/if}}
	name: `${app.name}-${app.stage}-web`,
	bindings: {
		{{#if (eq backend "convex")}}
		NUXT_PUBLIC_CONVEX_URL: process.env.NUXT_PUBLIC_CONVEX_URL || "",
		{{else}}
		NUXT_PUBLIC_SERVER_URL: process.env.NUXT_PUBLIC_SERVER_URL || "",
		{{/if}}
	},
	dev: {
		command: "{{packageManager}} run dev"
	}
});
{{else if (includes frontend "svelte")}}
export const web = await SvelteKit("web", {
	{{#if (eq serverDeploy "alchemy")}}cwd: "apps/web",{{/if}}
	name: `${app.name}-${app.stage}-web`,
	bindings: {
		{{#if (eq backend "convex")}}
		PUBLIC_CONVEX_URL: process.env.PUBLIC_CONVEX_URL || "",
		{{else}}
		PUBLIC_SERVER_URL: process.env.PUBLIC_SERVER_URL || "",
		{{/if}}
	},
	dev: {
		command: "{{packageManager}} run dev"
	}
});
{{else if (includes frontend "tanstack-start")}}
export const web = await TanStackStart("web", {
	{{#if (eq serverDeploy "alchemy")}}cwd: "apps/web",{{/if}}
	name: `${app.name}-${app.stage}-web`,
	bindings: {
		{{#if (eq backend "convex")}}
		VITE_CONVEX_URL: process.env.VITE_CONVEX_URL || "",
		{{else}}
		VITE_SERVER_URL: process.env.VITE_SERVER_URL || "",
		{{/if}}
	},
	dev: {
		command: "{{packageManager}} run dev"
	}
});
{{else if (includes frontend "tanstack-router")}}
export const web = await Vite("web", {
	{{#if (eq serverDeploy "alchemy")}}cwd: "apps/web",{{/if}}
	name: `${app.name}-${app.stage}-web`,
	assets: "dist",
	bindings: {
		{{#if (eq backend "convex")}}
		VITE_CONVEX_URL: process.env.VITE_CONVEX_URL || "",
		{{else}}
		VITE_SERVER_URL: process.env.VITE_SERVER_URL || "",
		{{/if}}
	},
	dev: {
		command: "{{packageManager}} run dev"
	}
});
{{else if (includes frontend "react-router")}}
export const web = await ReactRouter("web", {
	{{#if (eq serverDeploy "alchemy")}}cwd: "apps/web",{{/if}}
	name: `${app.name}-${app.stage}-web`,
	bindings: {
		{{#if (eq backend "convex")}}
		VITE_CONVEX_URL: process.env.VITE_CONVEX_URL || "",
		{{else}}
		VITE_SERVER_URL: process.env.VITE_SERVER_URL || "",
		{{/if}}
	},
	dev: {
		command: "{{packageManager}} run dev"
	}
});
{{else if (includes frontend "solid")}}
export const web = await Vite("web", {
	{{#if (eq serverDeploy "alchemy")}}cwd: "apps/web",{{/if}}
	name: `${app.name}-${app.stage}-web`,
	assets: "dist",
	bindings: {
		{{#if (eq backend "convex")}}
		VITE_CONVEX_URL: process.env.VITE_CONVEX_URL || "",
		{{else}}
		VITE_SERVER_URL: process.env.VITE_SERVER_URL || "",
		{{/if}}
	},
	dev: {
		command: "{{packageManager}} run dev"
	}
});
{{/if}}
{{/if}}

{{#if (eq serverDeploy "alchemy")}}
export const server = await Worker("server", {
	{{#if (eq webDeploy "alchemy")}}cwd: "apps/server",{{/if}}
	name: `${app.name}-${app.stage}`,
	entrypoint: "src/index.ts",
	compatibility: "node",
	bindings: {
		{{#if (eq dbSetup "d1")}}
		DB: db,
		{{else if (and (ne database "none") (ne dbSetup "none"))}}
		DATABASE_URL: alchemy.secret(process.env.DATABASE_URL),
		{{/if}}
		CORS_ORIGIN: process.env.CORS_ORIGIN || "",
		{{#if auth}}
		BETTER_AUTH_SECRET: alchemy.secret(process.env.BETTER_AUTH_SECRET),
		BETTER_AUTH_URL: process.env.BETTER_AUTH_URL || "",
		{{/if}}
		{{#if (includes examples "ai")}}
		GOOGLE_GENERATIVE_AI_API_KEY: alchemy.secret(process.env.GOOGLE_GENERATIVE_AI_API_KEY),
		{{/if}}
		{{#if (eq dbSetup "turso")}}
		DATABASE_AUTH_TOKEN: alchemy.secret(process.env.DATABASE_AUTH_TOKEN),
		{{/if}}
	},
	dev: {
		port: 3000,
	},
});

await WranglerJson("wrangler", {
	worker: server,
});
{{/if}}

{{#if (and (eq webDeploy "alchemy") (eq serverDeploy "alchemy"))}}
console.log(`Web -> ${web.url}`);
console.log(`Server -> ${server.url}`);
{{else if (eq webDeploy "alchemy")}}
console.log(`Web -> ${web.url}`);
{{else if (eq serverDeploy "alchemy")}}
console.log(`Server -> ${server.url}`);
{{/if}}

await app.finalize();
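For one concrete configuration, the rendered script boils down to the sketch below. Assumptions: serverDeploy "alchemy", dbSetup "d1", auth enabled, no web deploy, bun as the package manager, and a placeholder project name; everything else is taken from the template above.

```typescript
// Sketch of the rendered alchemy.run.ts for a server-only Workers deploy with D1.
import alchemy from "alchemy";
import { D1Database, Worker, WranglerJson } from "alchemy/cloudflare";
import { Exec } from "alchemy/os";
import { config } from "dotenv";

config({ path: "./.env" });

const app = await alchemy("my-app"); // placeholder project name

// Generate migrations before provisioning, then bind a D1 database to the Worker.
await Exec("db-generate", { command: "bun run db:generate" });
const db = await D1Database("database", {
	name: `${app.name}-${app.stage}-db`,
	migrationsDir: "apps/server/src/db/migrations",
});

export const server = await Worker("server", {
	name: `${app.name}-${app.stage}`,
	entrypoint: "src/index.ts",
	compatibility: "node",
	bindings: {
		DB: db,
		CORS_ORIGIN: process.env.CORS_ORIGIN || "",
		BETTER_AUTH_SECRET: alchemy.secret(process.env.BETTER_AUTH_SECRET),
		BETTER_AUTH_URL: process.env.BETTER_AUTH_URL || "",
	},
	dev: { port: 3000 },
});

// Emit a wrangler.jsonc that mirrors the Worker so `wrangler dev` keeps working.
await WranglerJson("wrangler", { worker: server });

console.log(`Server -> ${server.url}`);
await app.finalize();
```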
apps/cli/templates/deploy/alchemy/env.d.ts.hbs (new file, 20 lines)
@@ -0,0 +1,20 @@
// This file infers types for the cloudflare:workers environment from your Alchemy Worker.
// @see https://alchemy.run/concepts/bindings/#type-safe-bindings

{{#if (eq webDeploy "alchemy")}}
import type { server } from "../../alchemy.run";
{{else}}
import type { server } from "./alchemy.run";
{{/if}}

export type CloudflareEnv = typeof server.Env;

declare global {
	type Env = CloudflareEnv;
}

declare module "cloudflare:workers" {
	namespace Cloudflare {
		export interface Env extends CloudflareEnv {}
	}
}
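The declaration merge above is what makes the Worker's bindings typed on the server side. A hypothetical consumer, not taken from the templates, assuming the code reads its environment via the cloudflare:workers module:

```typescript
// Hypothetical server-side usage; relies only on the declarations above.
import { env } from "cloudflare:workers";

// `env` is typed as the Worker's bindings (CloudflareEnv), so this property
// access is checked at compile time. CORS_ORIGIN is one of the bindings
// declared in the alchemy.run.ts template.
const corsOrigin: string = env.CORS_ORIGIN;
export default corsOrigin;
```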
apps/cli/templates/deploy/alchemy/wrangler.jsonc.hbs (new file, 11 lines)
@@ -0,0 +1,11 @@
// This is a temporary wrangler.jsonc file that will be overwritten by alchemy
// It's only here so that `wrangler dev` can work or use alchemy dev instead
{
	"name": "{{projectName}}",
	"main": "src/index.ts",
	"compatibility_date": "2025-08-16",
	"compatibility_flags": [
		"nodejs_compat",
		"nodejs_compat_populate_process_env"
	]
}

@@ -3,7 +3,7 @@
 * https://developers.cloudflare.com/workers/wrangler/configuration/
 */
{
-	"$schema": "../../node_modules/wrangler/config-schema.json",
+	"$schema": "./node_modules/wrangler/config-schema.json",
	"name": "{{projectName}}",
	"main": "./.output/server/index.mjs",
	"compatibility_date": "2025-07-01",
@@ -1,5 +1,5 @@
{
-	"$schema": "../../node_modules/wrangler/config-schema.json",
+	"$schema": "./node_modules/wrangler/config-schema.json",
	"main": ".open-next/worker.js",
	"name": "{{projectName}}",
	"compatibility_date": "2025-07-05",
@@ -1,5 +1,5 @@
{
-	"$schema": "../../node_modules/wrangler/config-schema.json",
+	"$schema": "./node_modules/wrangler/config-schema.json",
	"name": "{{projectName}}",
	"compatibility_date": "2025-04-03",
	"assets": {
@@ -1,5 +1,5 @@
{
-	"$schema": "../../node_modules/wrangler/config-schema.json",
+	"$schema": "./node_modules/wrangler/config-schema.json",
	"name": "{{projectName}}",
	"compatibility_date": "2025-04-03",
	"assets": {
@@ -1,5 +1,5 @@
{
-	"$schema": "../../node_modules/wrangler/config-schema.json",
+	"$schema": "./node_modules/wrangler/config-schema.json",
	"name": "{{projectName}}",
	"main": ".output/server/index.mjs",
	"compatibility_date": "2025-07-05",
@@ -1,5 +1,5 @@
{
-	"$schema": "../../node_modules/wrangler/config-schema.json",
+	"$schema": "./node_modules/wrangler/config-schema.json",
	"name": "{{projectName}}",
	"compatibility_date": "2025-04-03",
	"assets": {
@@ -3,7 +3,7 @@
 * https://developers.cloudflare.com/workers/wrangler/configuration/
 */
{
-	"$schema": "../../node_modules/wrangler/config-schema.json",
+	"$schema": "./node_modules/wrangler/config-schema.json",
	"name": "{{projectName}}",
	"main": ".svelte-kit/cloudflare/_worker.js",
	"compatibility_date": "2025-07-05",

@@ -5,6 +5,8 @@
.nitro
.cache
dist
.wrangler
.alchemy

# Node dependencies
node_modules
@@ -22,3 +24,4 @@ logs
.env
.env.*
!.env.example

@@ -12,9 +12,7 @@
	},
	{
		"path": "./.nuxt/tsconfig.node.json"
-	}
-	{{#unless (or (eq backend "convex") (eq backend "none"))}}
-	,
+	}{{#unless (or (eq backend "convex") (eq backend "none"))}},
	{
		"path": "../server"
	}

@@ -26,6 +26,7 @@
.vercel
.netlify
.wrangler
.alchemy

# Environment & local files
.env*

Some files were not shown because too many files have changed in this diff.