diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 0000000..74baffc --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,3 @@ +{ + "recommendations": ["denoland.vscode-deno"] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..af62c23 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,24 @@ +{ + "deno.enablePaths": [ + "supabase/functions" + ], + "deno.lint": true, + "deno.unstable": [ + "bare-node-builtins", + "byonm", + "sloppy-imports", + "unsafe-proto", + "webgpu", + "broadcast-channel", + "worker-options", + "cron", + "kv", + "ffi", + "fs", + "http", + "net" + ], + "[typescript]": { + "editor.defaultFormatter": "denoland.vscode-deno" + } +} diff --git a/endpoints/postmark (localhost).bru b/endpoints/postmark (localhost).bru index de35350..8f7f284 100644 --- a/endpoints/postmark (localhost).bru +++ b/endpoints/postmark (localhost).bru @@ -17,6 +17,13 @@ body:json { "Email": "billing@creditplus.com", "Name": "CreditPlus Billing Department" }, + "ToFull": [ + { + "Email": "franpessano1@gmail.com", + "Name": "", + "MailboxHash": "ahoy" + } + ], "Subject": "Outstanding Utility Bill Payment Due" } } diff --git a/endpoints/postmark (vercel).bru b/endpoints/postmark (vercel).bru index 0c75e18..3fe2982 100644 --- a/endpoints/postmark (vercel).bru +++ b/endpoints/postmark (vercel).bru @@ -17,6 +17,13 @@ body:json { "Email": "billing@creditplus.com", "Name": "CreditPlus Billing Department" }, + "ToFull": [ + { + "Email": "franpessano1@gmail.com", + "Name": "", + "MailboxHash": "ahoy" + } + ], "Subject": "Outstanding Utility Bill Payment Due" } } diff --git a/package.json b/package.json index 97a3472..e60e496 100644 --- a/package.json +++ b/package.json @@ -69,5 +69,13 @@ "tailwindcss-animate": "^1.0.7", "vaul": "^1.0.0", "zod": "^3.23.8" + }, + "pnpm": { + "onlyBuiltDependencies": [ + "supabase" + ] + }, + "devDependencies": { + "supabase": "^2.24.3" } } diff --git a/src/components/Configuration.tsx b/src/components/Configuration.tsx new file mode 100644 index 0000000..7413cf9 --- /dev/null +++ b/src/components/Configuration.tsx @@ -0,0 +1,400 @@ +import React, { useEffect, useState } from "react"; +import { + supabase, + type AdditionalEmail, + type UserProfile, + type EmailProcessingUsage, +} from "../lib/supabase"; +import { Button } from "@/components/ui/button"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { + Card, + CardContent, + CardDescription, + CardHeader, + CardTitle, +} from "@/components/ui/card"; +import { Badge } from "@/components/ui/badge"; +import { Progress } from "@/components/ui/progress"; +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { Separator } from "@/components/ui/separator"; +import { + Settings, + Mail, + Plus, + Trash2, + Check, + X, + TrendingUp, + Infinity, + AlertCircle, +} from "lucide-react"; +import { toast } from "../hooks/use-toast"; + +export function Configuration() { + const [profile, setProfile] = useState(null); + const [additionalEmails, setAdditionalEmails] = useState( + [] + ); + const [usage, setUsage] = useState(null); + const [loading, setLoading] = useState(true); + const [newEmail, setNewEmail] = useState(""); + const [addingEmail, setAddingEmail] = useState(false); + + useEffect(() => { + fetchUserData(); + }, []); + + const fetchUserData = async () => { + try { + const { + data: { user }, + } = await supabase.auth.getUser(); + if (!user) return; + + // Fetch user 
profile + const { data: profileData } = await supabase + .from("user_profiles") + .select("*") + .eq("user_id", user.id) + .single(); + + // Fetch additional emails + const { data: emailsData } = await supabase + .from("additional_emails") + .select("*") + .eq("user_id", user.id) + .order("created_at", { ascending: false }); + + // Fetch current month usage + const currentMonth = new Date().toISOString().slice(0, 7); // YYYY-MM + const { data: usageData } = await supabase + .from("email_processing_usage") + .select("*") + .eq("user_id", user.id) + .eq("month_year", currentMonth) + .single(); + + setProfile(profileData); + setAdditionalEmails(emailsData || []); + setUsage(usageData); + } catch (error) { + console.error("Error fetching user data:", error); + } finally { + setLoading(false); + } + }; + + const addAdditionalEmail = async () => { + if (!newEmail || !profile) return; + + setAddingEmail(true); + try { + const { + data: { user }, + } = await supabase.auth.getUser(); + if (!user) return; + + const { data, error } = await supabase + .from("additional_emails") + .insert({ + user_id: user.id, + email_address: newEmail.trim().toLowerCase(), + }) + .select() + .single(); + + if (error) throw error; + + setAdditionalEmails([data, ...additionalEmails]); + setNewEmail(""); + toast({ + title: "Email added successfully", + description: "Additional email has been added to your account.", + }); + } catch (error: any) { + toast({ + title: "Error adding email", + description: error.message, + variant: "destructive", + }); + } finally { + setAddingEmail(false); + } + }; + + const removeAdditionalEmail = async (emailId: string) => { + try { + const { error } = await supabase + .from("additional_emails") + .delete() + .eq("id", emailId); + + if (error) throw error; + + setAdditionalEmails( + additionalEmails.filter((email) => email.id !== emailId) + ); + toast({ + title: "Email removed", + description: "Additional email has been removed from your account.", + }); + } catch (error: any) { + toast({ + title: "Error removing email", + description: error.message, + variant: "destructive", + }); + } + }; + + const getUsagePercentage = () => { + if (!profile || !usage) return 0; + return Math.min( + (usage.emails_processed / profile.email_processing_limit) * 100, + 100 + ); + }; + + const getRemainingEmails = () => { + if (!profile || !usage) return profile?.email_processing_limit || 1000; + return Math.max(profile.email_processing_limit - usage.emails_processed, 0); + }; + + if (loading) { + return ( +
+
+ + Loading configuration... +
+
+ ); + } + + return ( +
+
+ {/* Header */} +
+

+ + Configuration +

+

+ Manage your account settings and email processing options +

+
+ +
+ {/* Email Processing Usage */} + + + + + Email Processing Usage + + + Track your monthly email processing usage and limits + + + +
+
+

+ Emails Processed This Month +

+

+ {usage?.emails_processed || 0} /{" "} + {profile?.email_processing_limit || 1000} +

+
+
+

Remaining

+

+ {getRemainingEmails() === + (profile?.email_processing_limit || 1000) ? ( + + + Unlimited + + ) : ( + getRemainingEmails() + )} +

+
+
+ +
+
+ Progress + {getUsagePercentage().toFixed(1)}% +
+ +
+ + {getUsagePercentage() > 80 && ( + + + + You're approaching your monthly email processing limit. + Consider upgrading your plan if you need to process more + emails. + + + )} +
+
+ + {/* Additional Emails */} + + + + + Additional Email Addresses + + + Add additional email addresses to process debt emails from + multiple accounts + + + + {/* Add new email */} +
+
+ + setNewEmail(e.target.value)} + onKeyPress={(e) => { + if (e.key === "Enter") { + addAdditionalEmail(); + } + }} + /> +
+ +
+ + + + {/* List of additional emails */} +
+ {additionalEmails.length === 0 ? ( +
+ +

No additional emails

+

+ Add additional email addresses to expand your debt + processing capabilities. +

+
+ ) : ( + additionalEmails.map((email) => ( +
+
+ +
+

{email.email_address}

+

+ Added{" "} + {new Date(email.created_at).toLocaleDateString()} +

+
+
+
+ + {email.verified ? ( + <> + + Verified + + ) : ( + <> + + Unverified + + )} + + +
+
+ )) + )} +
+
+
+ + {/* Account Information */} + + + Account Information + + Your account details and settings + + + +
+
+ +

+ {profile?.email_processing_limit || 1000} emails/month +

+
+
+ +

+ {profile?.created_at + ? new Date(profile.created_at).toLocaleDateString() + : "N/A"} +

+
+
+ + + {profile?.onboarding_completed ? "Completed" : "Pending"} + +
+
+ +

+ {additionalEmails.length} configured +

+
+
+
+
+
+
+
+ ); +} diff --git a/src/components/Dashboard.tsx b/src/components/Dashboard.tsx index 18bf3d5..b366dc9 100644 --- a/src/components/Dashboard.tsx +++ b/src/components/Dashboard.tsx @@ -1,8 +1,9 @@ import React, { useEffect, useState } from "react"; -import { supabase, type Debt } from "../lib/supabase"; +import { supabase, type Debt, type UserProfile } from "../lib/supabase"; import { Button } from "@/components/ui/button"; import { DebtCard } from "./DebtCard"; import { DebtTimeline } from "./DebtTimeline"; +import { OnboardingDialog } from "./OnboardingDialog"; import { Card, CardContent, @@ -22,11 +23,14 @@ import { RefreshCw, BarChart3, LogOut, + Settings, } from "lucide-react"; export function Dashboard() { const [debts, setDebts] = useState([]); const [loading, setLoading] = useState(true); + const [userProfile, setUserProfile] = useState(null); + const [showOnboarding, setShowOnboarding] = useState(false); const [stats, setStats] = useState({ totalDebts: 0, totalAmount: 0, @@ -35,6 +39,7 @@ export function Dashboard() { }); useEffect(() => { + fetchUserProfile(); fetchDebts(); setupRealtimeSubscription(); }, []); @@ -43,11 +48,41 @@ export function Dashboard() { calculateStats(); }, [debts]); + const fetchUserProfile = async () => { + try { + const { + data: { user }, + } = await supabase.auth.getUser(); + if (!user) return; + + const { data: profile } = await supabase + .from("user_profiles") + .select("*") + .eq("user_id", user.id) + .single(); + + setUserProfile(profile); + + // Show onboarding if user hasn't completed it + if (profile && !profile.onboarding_completed) { + setShowOnboarding(true); + } + } catch (error) { + console.error("Error fetching user profile:", error); + } + }; + const fetchDebts = async () => { try { + const { + data: { user }, + } = await supabase.auth.getUser(); + if (!user) return; + const { data, error } = await supabase .from("debts") .select("*") + .eq("user_id", user.id) .order("created_at", { ascending: false }); if (error) throw error; @@ -111,6 +146,12 @@ export function Dashboard() { }); }; + const handleOnboardingComplete = () => { + setShowOnboarding(false); + // Refresh user profile to reflect onboarding completion + fetchUserProfile(); + }; + const handleSignOut = async () => { await supabase.auth.signOut(); window.location.href = "/"; @@ -152,13 +193,18 @@ export function Dashboard() {

- - InboxNegotiator Dashboard + Dashboard

AI-powered debt resolution platform with real-time updates

+
{/* Stats Cards */} @@ -290,6 +336,12 @@ export function Dashboard() {

Real-time updates powered by Supabase

+ + {/* Onboarding Dialog */} + ); } diff --git a/src/components/Navbar.tsx b/src/components/Navbar.tsx index b167611..496df08 100644 --- a/src/components/Navbar.tsx +++ b/src/components/Navbar.tsx @@ -1,104 +1,118 @@ -import React, { useEffect, useState } from 'react'; -import { supabase } from '../lib/supabase'; -import type { User } from '@supabase/supabase-js'; -import { Button } from '@/components/ui/button'; -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuSeparator, - DropdownMenuTrigger -} from '@/components/ui/dropdown-menu'; -import { Avatar, AvatarFallback } from '@/components/ui/avatar'; -import { BarChart3, LogOut, User as UserIcon } from 'lucide-react'; -import { ModeToggle } from './ModeToggle'; +import React, { useEffect, useState } from "react"; +import { supabase } from "../lib/supabase"; +import type { User } from "@supabase/supabase-js"; +import { Button } from "@/components/ui/button"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuSeparator, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { Avatar, AvatarFallback } from "@/components/ui/avatar"; +import { BarChart3, LogOut, User as UserIcon, Settings } from "lucide-react"; +import { ModeToggle } from "./ModeToggle"; export function Navbar() { - const [user, setUser] = useState(null); + const [user, setUser] = useState(null); - useEffect(() => { - supabase.auth.getSession().then(({ data: { session } }) => { - setUser(session?.user ?? null); - }); + useEffect(() => { + supabase.auth.getSession().then(({ data: { session } }) => { + setUser(session?.user ?? null); + }); - const { data: { subscription } } = supabase.auth.onAuthStateChange( - (event, session) => { - setUser(session?.user ?? null); - } - ); + const { + data: { subscription }, + } = supabase.auth.onAuthStateChange((event, session) => { + setUser(session?.user ?? 
null); + }); - return () => subscription.unsubscribe(); - }, []); + return () => subscription.unsubscribe(); + }, []); - const handleSignOut = async () => { - await supabase.auth.signOut(); - window.location.href = '/'; - }; + const handleSignOut = async () => { + await supabase.auth.signOut(); + window.location.href = "/"; + }; - const getInitials = (email: string) => { - return email.substring(0, 2).toUpperCase(); - }; + const getInitials = (email: string) => { + return email.substring(0, 2).toUpperCase(); + }; - return ( - + ); +} diff --git a/src/components/OnboardingDialog.tsx b/src/components/OnboardingDialog.tsx new file mode 100644 index 0000000..15c1e3f --- /dev/null +++ b/src/components/OnboardingDialog.tsx @@ -0,0 +1,235 @@ +import React, { useState } from "react"; +import { supabase } from "../lib/supabase"; +import { Button } from "@/components/ui/button"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { Mail, Plus, CheckCircle, ArrowRight, UserCheck } from "lucide-react"; +import { toast } from "../hooks/use-toast"; + +interface OnboardingDialogProps { + open: boolean; + onComplete: () => void; +} + +export function OnboardingDialog({ open, onComplete }: OnboardingDialogProps) { + const [step, setStep] = useState<"welcome" | "email" | "complete">("welcome"); + const [email, setEmail] = useState(""); + const [loading, setLoading] = useState(false); + const [skipEmail, setSkipEmail] = useState(false); + + const handleAddEmail = async () => { + if (!email && !skipEmail) return; + + setLoading(true); + try { + const { + data: { user }, + } = await supabase.auth.getUser(); + if (!user) return; + + if (email && !skipEmail) { + const { error } = await supabase.from("additional_emails").insert({ + user_id: user.id, + email_address: email.trim().toLowerCase(), + }); + + if (error) throw error; + } + + // Mark onboarding as completed + const { error: profileError } = await supabase + .from("user_profiles") + .update({ onboarding_completed: true }) + .eq("user_id", user.id); + + if (profileError) throw profileError; + + setStep("complete"); + } catch (error: any) { + toast({ + title: "Error", + description: error.message, + variant: "destructive", + }); + } finally { + setLoading(false); + } + }; + + const handleComplete = () => { + onComplete(); + }; + + const handleSkipEmail = () => { + setSkipEmail(true); + }; + + return ( + {}}> + + {/* Hide close button */} + {step === "welcome" && ( + <> + + + + Welcome to InboxNegotiator! + + + + Your account has been created successfully. Let's get you set up + to start processing debt emails with AI assistance. + + +
+ + + + InboxNegotiator helps you automatically negotiate debt + settlements by processing emails sent to your configured email + addresses. + + + + {/*
+

What you can do:

+
    +
  • Process up to 1,000 debt emails per month
  • AI-powered debt amount and vendor extraction
  • Automated negotiation responses
  • Real-time tracking and analytics
+
*/} + +
+ +
+
+ + )} + + {step === "email" && ( + <> + + + + Add Additional Email (Optional) + + + + Would you like to add an additional email address to process debt + emails from multiple accounts? + + + You can attach here the Postmark inbound email address for the + additional email.{" "} + + Postmark Inbound Email Parsing Guide + + + +
+
+ + setEmail(e.target.value)} + onKeyPress={(e) => { + if (e.key === "Enter" && email) { + handleAddEmail(); + } + }} + /> +

+ You can always add more email addresses later in the + configuration page. +

+
+ +
+ + +
+
+ + )} + + {step === "complete" && ( + <> + + + + Setup Complete! + + + Your account is now ready to process debt emails with AI + assistance. + + + +
+ + + + Start by forwarding debt collection emails to your configured + addresses. Our AI will automatically extract debt information + and begin the negotiation process. + + + +
+

Next steps:

+
    +
  • Forward debt emails to your monitored addresses
  • Monitor negotiations in your dashboard
  • Review AI-generated settlement offers
  • Track your savings and progress
+
+ +
+ +
+
+ + )} +
+
+ ); +} diff --git a/src/lib/supabase-admin.ts b/src/lib/supabase-admin.ts index 9835764..1c06475 100644 --- a/src/lib/supabase-admin.ts +++ b/src/lib/supabase-admin.ts @@ -1,4 +1,5 @@ import { createClient } from "@supabase/supabase-js"; +import type { SupabaseClient } from "@supabase/supabase-js"; /** * Creates a Supabase client with service role key for server-side operations @@ -49,3 +50,104 @@ export function handleDatabaseError(error: any) { originalError: process.env.NODE_ENV === "development" ? error : undefined, }; } + +/** + * Find user ID by email address (primary or additional email) + * First checks public.users table, then additional_emails if needed + */ +export async function getUserIdByEmail( + email: string, + supabaseAdmin?: SupabaseClient +): Promise { + const client = supabaseAdmin || createSupabaseAdmin(); + + try { + // First try to find user by primary email in public.users table + const { data: primaryUser, error: primaryError } = await client + .from("users") + .select("id") + .eq("email", email.toLowerCase()) + .maybeSingle(); + + if (primaryError) { + console.error("Error finding user by primary email:", primaryError); + } + + if (primaryUser) { + return primaryUser.id; + } + + // If not found, check additional emails + const { data: additionalEmail, error: additionalError } = await client + .from("additional_emails") + .select("user_id") + .eq("email_address", email.toLowerCase()) + // TODO: START REQUIRING VERIFIED ADDITIONAL EMAILS + // .eq("verified", true) + .eq("verified", false) + .maybeSingle(); + + if (additionalError) { + console.error("Error finding user by additional email:", additionalError); + return null; + } + + return additionalEmail?.user_id || null; + } catch (error) { + console.error("Error in getUserIdByEmail:", error); + return null; + } +} + +/** + * Get full user information by email address (primary or additional email) + * First checks public.users table, then additional_emails if needed + */ +export async function getUserByEmail( + email: string, + supabaseAdmin?: SupabaseClient +) { + const client = supabaseAdmin || createSupabaseAdmin(); + + try { + // First try to find user by primary email in public.users table + const { data: primaryUser, error: primaryError } = await client + .from("users") + .select("*") + .eq("email", email.toLowerCase()) + .maybeSingle(); + + if (primaryError) { + console.error("Error finding user by primary email:", primaryError); + } + + if (primaryUser) { + return primaryUser; + } + + // If not found, check additional emails and join with users table + const { data: userViaAdditionalEmail, error: additionalError } = await client + .from("additional_emails") + .select(` + user_id, + users!inner ( + id, + email, + created_at + ) + `) + .eq("email_address", email.toLowerCase()) + .eq("verified", true) + .maybeSingle(); + + if (additionalError) { + console.error("Error finding user by additional email:", additionalError); + return null; + } + + return userViaAdditionalEmail?.users || null; + } catch (error) { + console.error("Error in getUserByEmail:", error); + return null; + } +} diff --git a/src/lib/supabase.ts b/src/lib/supabase.ts index c216175..75df668 100644 --- a/src/lib/supabase.ts +++ b/src/lib/supabase.ts @@ -9,6 +9,12 @@ if (!supabaseUrl || !supabaseAnonKey) { export const supabase = createClient(supabaseUrl, supabaseAnonKey); +export type User = { + id: string; + email: string; + created_at: string; +}; + export type Debt = { id: string; created_at: string; @@ -19,6 +25,10 @@ export type Debt = { 
status: 'received' | 'negotiating' | 'settled' | 'failed' | 'opted_out'; negotiated_plan: string | null; projected_savings: number; + user_id: string; + description?: string | null; + due_date?: string | null; + metadata?: Record | null; }; export type AuditLog = { @@ -27,4 +37,33 @@ export type AuditLog = { debt_id: string; action: string; details: Record; +}; + +export type UserProfile = { + id: string; + user_id: string; + created_at: string; + updated_at: string; + onboarding_completed: boolean; + first_login_at: string | null; + email_processing_limit: number; +}; + +export type AdditionalEmail = { + id: string; + user_id: string; + email_address: string; + verified: boolean; + verification_token: string | null; + created_at: string; + updated_at: string; +}; + +export type EmailProcessingUsage = { + id: string; + user_id: string; + month_year: string; + emails_processed: number; + created_at: string; + updated_at: string; }; \ No newline at end of file diff --git a/src/pages/api/postmark.ts b/src/pages/api/postmark.ts index 09ce319..416d9d7 100644 --- a/src/pages/api/postmark.ts +++ b/src/pages/api/postmark.ts @@ -1,16 +1,15 @@ import type { APIRoute } from "astro"; -import { supabase } from "../../lib/supabase"; import { createSupabaseAdmin, handleDatabaseError, + getUserIdByEmail, } from "../../lib/supabase-admin"; import { generateObject } from "ai"; import { createGoogleGenerativeAI, - google, - type GoogleGenerativeAIProviderOptions, } from "@ai-sdk/google"; import { z } from "zod"; +import type { SupabaseClient } from "@supabase/supabase-js"; // Schema for debt information extraction const debtSchema = z.object({ @@ -71,6 +70,23 @@ async function parseDebtWithAI(emailText: string, fromEmail: string) { } } + +// Function to increment email processing usage +async function incrementEmailUsage(userId: string, supabaseAdmin: SupabaseClient) { + try { + // Call the database function to increment usage + const { error } = await supabaseAdmin.rpc('increment_email_usage', { + target_user_id: userId + }); + + if (error) { + console.error("Error incrementing email usage:", error); + } + } catch (error) { + console.error("Error calling increment_email_usage:", error); + } +} + export const POST: APIRoute = async ({ request }) => { try { // Create service role client for webhook operations (bypasses RLS) @@ -102,6 +118,17 @@ export const POST: APIRoute = async ({ request }) => { const optOutKeywords = ["STOP", "UNSUBSCRIBE", "OPT-OUT", "REMOVE"]; const textBody = data.TextBody || data.HtmlBody || ""; const fromEmail = data.FromFull?.Email || data.From || "unknown"; + const toEmail = data.ToFull?.[0]?.Email || data.To || ""; + + // Find the user who should receive this debt + const userId = await getUserIdByEmail(toEmail, supabaseAdmin); + if (!userId) { + console.warn(`No user found for email: ${toEmail}`); + return new Response("No matching user found", { status: 200 }); + } + + // Increment email processing usage + await incrementEmailUsage(userId, supabaseAdmin); const hasOptOut = optOutKeywords.some((keyword) => textBody.toUpperCase().includes(keyword) @@ -110,6 +137,7 @@ export const POST: APIRoute = async ({ request }) => { if (hasOptOut) { // Log opt-out and don't process further const { error } = await supabaseAdmin.from("debts").insert({ + user_id: userId, vendor: fromEmail, amount: 0, raw_email: textBody, @@ -146,6 +174,7 @@ export const POST: APIRoute = async ({ request }) => { const { data: insertedDebt, error: insertError } = await supabaseAdmin .from("debts") .insert({ + 
user_id: userId, vendor: debtInfo.vendor, amount: debtInfo.amount, raw_email: textBody, @@ -156,6 +185,7 @@ export const POST: APIRoute = async ({ request }) => { isDebtCollection: debtInfo.isDebtCollection, subject: data.Subject, fromEmail: fromEmail, + toEmail: toEmail, }, }) .select() diff --git a/src/pages/configuration.astro b/src/pages/configuration.astro new file mode 100644 index 0000000..5d4aa65 --- /dev/null +++ b/src/pages/configuration.astro @@ -0,0 +1,15 @@ +--- +import "@/styles/globals.css"; +import Layout from "../layouts/Layout.astro"; +import { Configuration as ConfigComponent } from "../components/Configuration"; +import { Navbar } from "../components/Navbar"; +import { AuthGuard } from "../components/AuthGuard"; +--- + + + + + + + + diff --git a/supabase/.gitignore b/supabase/.gitignore new file mode 100644 index 0000000..ad9264f --- /dev/null +++ b/supabase/.gitignore @@ -0,0 +1,8 @@ +# Supabase +.branches +.temp + +# dotenvx +.env.keys +.env.local +.env.*.local diff --git a/supabase/config.toml b/supabase/config.toml new file mode 100644 index 0000000..0b9d501 --- /dev/null +++ b/supabase/config.toml @@ -0,0 +1,315 @@ +# For detailed configuration reference documentation, visit: +# https://supabase.com/docs/guides/local-development/cli/config +# A string used to distinguish different Supabase projects on the same host. Defaults to the +# working directory name when running `supabase init`. +project_id = "inbox-negotiator" + +[api] +enabled = true +# Port to use for the API URL. +port = 54321 +# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API +# endpoints. `public` and `graphql_public` schemas are included by default. +schemas = ["public", "graphql_public"] +# Extra schemas to add to the search_path of every request. +extra_search_path = ["public", "extensions"] +# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size +# for accidental or malicious requests. +max_rows = 1000 + +[api.tls] +# Enable HTTPS endpoints locally using a self-signed certificate. +enabled = false + +[db] +# Port to use for the local database URL. +port = 54322 +# Port used by db diff command to initialize the shadow database. +shadow_port = 54320 +# The database major version to use. This has to be the same as your remote database's. Run `SHOW +# server_version;` on the remote database to check. +major_version = 15 + +[db.pooler] +enabled = false +# Port to use for the local connection pooler. +port = 54329 +# Specifies when a server connection can be reused by other clients. +# Configure one of the supported pooler modes: `transaction`, `session`. +pool_mode = "transaction" +# How many server connections to allow per user/database pair. +default_pool_size = 20 +# Maximum number of client connections allowed. +max_client_conn = 100 + +# [db.vault] +# secret_key = "env(SECRET_VALUE)" + +[db.migrations] +# Specifies an ordered list of schema files that describe your database. +# Supports glob patterns relative to supabase directory: "./schemas/*.sql" +schema_paths = [] + +[db.seed] +# If enabled, seeds the database after migrations during a db reset. +enabled = true +# Specifies an ordered list of seed files to load during db reset. +# Supports glob patterns relative to supabase directory: "./seeds/*.sql" +sql_paths = ["./seed.sql"] + +[realtime] +enabled = true +# Bind realtime via either IPv4 or IPv6. (default: IPv4) +# ip_version = "IPv6" +# The maximum length in bytes of HTTP request headers. 
(default: 4096) +# max_header_length = 4096 + +[studio] +enabled = true +# Port to use for Supabase Studio. +port = 54323 +# External URL of the API server that frontend connects to. +api_url = "http://127.0.0.1" +# OpenAI API Key to use for Supabase AI in the Supabase Studio. +openai_api_key = "env(OPENAI_API_KEY)" + +# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they +# are monitored, and you can view the emails that would have been sent from the web interface. +[inbucket] +enabled = true +# Port to use for the email testing server web interface. +port = 54324 +# Uncomment to expose additional ports for testing user applications that send emails. +# smtp_port = 54325 +# pop3_port = 54326 +# admin_email = "admin@email.com" +# sender_name = "Admin" + +[storage] +enabled = true +# The maximum file size allowed (e.g. "5MB", "500KB"). +file_size_limit = "50MiB" + +# Image transformation API is available to Supabase Pro plan. +# [storage.image_transformation] +# enabled = true + +# Uncomment to configure local storage buckets +# [storage.buckets.images] +# public = false +# file_size_limit = "50MiB" +# allowed_mime_types = ["image/png", "image/jpeg"] +# objects_path = "./images" + +[auth] +enabled = true +# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used +# in emails. +site_url = "http://127.0.0.1:3000" +# A list of *exact* URLs that auth providers are permitted to redirect to post authentication. +additional_redirect_urls = ["https://127.0.0.1:3000"] +# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week). +jwt_expiry = 3600 +# If disabled, the refresh token will never expire. +enable_refresh_token_rotation = true +# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds. +# Requires enable_refresh_token_rotation = true. +refresh_token_reuse_interval = 10 +# Allow/disallow new user signups to your project. +enable_signup = true +# Allow/disallow anonymous sign-ins to your project. +enable_anonymous_sign_ins = false +# Allow/disallow testing manual linking of accounts +enable_manual_linking = false +# Passwords shorter than this value will be rejected as weak. Minimum 6, recommended 8 or more. +minimum_password_length = 6 +# Passwords that do not meet the following requirements will be rejected as weak. Supported values +# are: `letters_digits`, `lower_upper_letters_digits`, `lower_upper_letters_digits_symbols` +password_requirements = "" + +[auth.rate_limit] +# Number of emails that can be sent per hour. Requires auth.email.smtp to be enabled. +email_sent = 2 +# Number of SMS messages that can be sent per hour. Requires auth.sms to be enabled. +sms_sent = 30 +# Number of anonymous sign-ins that can be made per hour per IP address. Requires enable_anonymous_sign_ins = true. +anonymous_users = 30 +# Number of sessions that can be refreshed in a 5 minute interval per IP address. +token_refresh = 150 +# Number of sign up and sign-in requests that can be made in a 5 minute interval per IP address (excludes anonymous users). +sign_in_sign_ups = 30 +# Number of OTP / Magic link verifications that can be made in a 5 minute interval per IP address. +token_verifications = 30 +# Number of Web3 logins that can be made in a 5 minute interval per IP address. +web3 = 30 + +# Configure one of the supported captcha providers: `hcaptcha`, `turnstile`. 
+# [auth.captcha] +# enabled = true +# provider = "hcaptcha" +# secret = "" + +[auth.email] +# Allow/disallow new user signups via email to your project. +enable_signup = true +# If enabled, a user will be required to confirm any email change on both the old, and new email +# addresses. If disabled, only the new email is required to confirm. +double_confirm_changes = true +# If enabled, users need to confirm their email address before signing in. +enable_confirmations = false +# If enabled, users will need to reauthenticate or have logged in recently to change their password. +secure_password_change = false +# Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email. +max_frequency = "1s" +# Number of characters used in the email OTP. +otp_length = 6 +# Number of seconds before the email OTP expires (defaults to 1 hour). +otp_expiry = 3600 + +# Use a production-ready SMTP server +# [auth.email.smtp] +# enabled = true +# host = "smtp.sendgrid.net" +# port = 587 +# user = "apikey" +# pass = "env(SENDGRID_API_KEY)" +# admin_email = "admin@email.com" +# sender_name = "Admin" + +# Uncomment to customize email template +# [auth.email.template.invite] +# subject = "You have been invited" +# content_path = "./supabase/templates/invite.html" + +[auth.sms] +# Allow/disallow new user signups via SMS to your project. +enable_signup = false +# If enabled, users need to confirm their phone number before signing in. +enable_confirmations = false +# Template for sending OTP to users +template = "Your code is {{ .Code }}" +# Controls the minimum amount of time that must pass before sending another sms otp. +max_frequency = "5s" + +# Use pre-defined map of phone number to OTP for testing. +# [auth.sms.test_otp] +# 4152127777 = "123456" + +# Configure logged in session timeouts. +# [auth.sessions] +# Force log out after the specified duration. +# timebox = "24h" +# Force log out if the user has been inactive longer than the specified duration. +# inactivity_timeout = "8h" + +# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used. +# [auth.hook.custom_access_token] +# enabled = true +# uri = "pg-functions:////" + +# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`. +[auth.sms.twilio] +enabled = false +account_sid = "" +message_service_sid = "" +# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead: +auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)" + +# Multi-factor-authentication is available to Supabase Pro plan. +[auth.mfa] +# Control how many MFA factors can be enrolled at once per user. +max_enrolled_factors = 10 + +# Control MFA via App Authenticator (TOTP) +[auth.mfa.totp] +enroll_enabled = false +verify_enabled = false + +# Configure MFA via Phone Messaging +[auth.mfa.phone] +enroll_enabled = false +verify_enabled = false +otp_length = 6 +template = "Your code is {{ .Code }}" +max_frequency = "5s" + +# Configure MFA via WebAuthn +# [auth.mfa.web_authn] +# enroll_enabled = true +# verify_enabled = true + +# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`, +# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`, +# `twitter`, `slack`, `spotify`, `workos`, `zoom`. +[auth.external.apple] +enabled = false +client_id = "" +# DO NOT commit your OAuth provider secret to git. 
Use environment variable substitution instead: +secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)" +# Overrides the default auth redirectUrl. +redirect_uri = "" +# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure, +# or any other third-party OIDC providers. +url = "" +# If enabled, the nonce check will be skipped. Required for local sign in with Google auth. +skip_nonce_check = false + +# Allow Solana wallet holders to sign in to your project via the Sign in with Solana (SIWS, EIP-4361) standard. +# You can configure "web3" rate limit in the [auth.rate_limit] section and set up [auth.captcha] if self-hosting. +[auth.web3.solana] +enabled = false + +# Use Firebase Auth as a third-party provider alongside Supabase Auth. +[auth.third_party.firebase] +enabled = false +# project_id = "my-firebase-project" + +# Use Auth0 as a third-party provider alongside Supabase Auth. +[auth.third_party.auth0] +enabled = false +# tenant = "my-auth0-tenant" +# tenant_region = "us" + +# Use AWS Cognito (Amplify) as a third-party provider alongside Supabase Auth. +[auth.third_party.aws_cognito] +enabled = false +# user_pool_id = "my-user-pool-id" +# user_pool_region = "us-east-1" + +# Use Clerk as a third-party provider alongside Supabase Auth. +[auth.third_party.clerk] +enabled = false +# Obtain from https://clerk.com/setup/supabase +# domain = "example.clerk.accounts.dev" + +[edge_runtime] +enabled = true +# Configure one of the supported request policies: `oneshot`, `per_worker`. +# Use `oneshot` for hot reload, or `per_worker` for load testing. +policy = "oneshot" +# Port to attach the Chrome inspector for debugging edge functions. +inspector_port = 8083 +# The Deno major version to use. +deno_version = 1 + +# [edge_runtime.secrets] +# secret_key = "env(SECRET_VALUE)" + +[analytics] +enabled = true +port = 54327 +# Configure one of the supported backends: `postgres`, `bigquery`. +backend = "postgres" + +# Experimental features may be deprecated any time +[experimental] +# Configures Postgres storage engine to use OrioleDB (S3) +orioledb_version = "" +# Configures S3 bucket URL, eg. .s3-.amazonaws.com +s3_host = "env(S3_HOST)" +# Configures S3 bucket region, eg. us-east-1 +s3_region = "env(S3_REGION)" +# Configures AWS_ACCESS_KEY_ID for S3 bucket +s3_access_key = "env(S3_ACCESS_KEY)" +# Configures AWS_SECRET_ACCESS_KEY for S3 bucket +s3_secret_key = "env(S3_SECRET_KEY)" diff --git a/supabase/migrations/20250607001000_add_user_features.sql b/supabase/migrations/20250607001000_add_user_features.sql new file mode 100644 index 0000000..7c2ed57 --- /dev/null +++ b/supabase/migrations/20250607001000_add_user_features.sql @@ -0,0 +1,183 @@ +/* + # User Features Migration + + 1. New Tables + - `user_profiles` - Track user onboarding and additional info + - `additional_emails` - Store additional email addresses per user + - `email_processing_usage` - Track email processing usage + + 2. Security + - Enable RLS on all new tables + - Add policies for user-specific data access + + 3. 
Performance + - Add indexes for optimal querying +*/ + +-- Create user_profiles table for tracking onboarding and user preferences +CREATE TABLE IF NOT EXISTS user_profiles ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + user_id uuid REFERENCES auth.users(id) ON DELETE CASCADE NOT NULL, + created_at timestamptz DEFAULT now(), + updated_at timestamptz DEFAULT now(), + onboarding_completed boolean DEFAULT false, + first_login_at timestamptz, + email_processing_limit integer DEFAULT 1000, -- monthly limit + UNIQUE(user_id) +); + +-- Create additional_emails table +CREATE TABLE IF NOT EXISTS additional_emails ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + user_id uuid REFERENCES auth.users(id) ON DELETE CASCADE NOT NULL, + email_address text NOT NULL, + verified boolean DEFAULT false, + verification_token text, + created_at timestamptz DEFAULT now(), + updated_at timestamptz DEFAULT now(), + UNIQUE(email_address), + CHECK (email_address ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}$') +); + +-- Create email_processing_usage table to track usage +CREATE TABLE IF NOT EXISTS email_processing_usage ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + user_id uuid REFERENCES auth.users(id) ON DELETE CASCADE NOT NULL, + month_year text NOT NULL, -- format: YYYY-MM + emails_processed integer DEFAULT 0, + created_at timestamptz DEFAULT now(), + updated_at timestamptz DEFAULT now(), + UNIQUE(user_id, month_year) +); + +-- Enable RLS +ALTER TABLE user_profiles ENABLE ROW LEVEL SECURITY; +ALTER TABLE additional_emails ENABLE ROW LEVEL SECURITY; +ALTER TABLE email_processing_usage ENABLE ROW LEVEL SECURITY; + +-- Add user_id column to debts table for proper user association +ALTER TABLE debts ADD COLUMN IF NOT EXISTS user_id uuid REFERENCES auth.users(id) ON DELETE CASCADE; + +-- Update debts policies to be user-specific +DROP POLICY IF EXISTS "Allow all operations on debts" ON debts; +CREATE POLICY "Users can manage their own debts" + ON debts + FOR ALL + TO authenticated + USING (auth.uid() = user_id) + WITH CHECK (auth.uid() = user_id); + +-- Create policies for user_profiles +CREATE POLICY "Users can view their own profile" + ON user_profiles + FOR SELECT + TO authenticated + USING (auth.uid() = user_id); + +CREATE POLICY "Users can insert their own profile" + ON user_profiles + FOR INSERT + TO authenticated + WITH CHECK (auth.uid() = user_id); + +CREATE POLICY "Users can update their own profile" + ON user_profiles + FOR UPDATE + TO authenticated + USING (auth.uid() = user_id) + WITH CHECK (auth.uid() = user_id); + +-- Create policies for additional_emails +CREATE POLICY "Users can manage their own additional emails" + ON additional_emails + FOR ALL + TO authenticated + USING (auth.uid() = user_id) + WITH CHECK (auth.uid() = user_id); + +-- Create policies for email_processing_usage +CREATE POLICY "Users can view their own usage" + ON email_processing_usage + FOR SELECT + TO authenticated + USING (auth.uid() = user_id); + +CREATE POLICY "Service can manage usage records" + ON email_processing_usage + FOR ALL + TO service_role + USING (true) + WITH CHECK (true); + +-- Create indexes for performance +CREATE INDEX IF NOT EXISTS idx_user_profiles_user_id ON user_profiles(user_id); +CREATE INDEX IF NOT EXISTS idx_additional_emails_user_id ON additional_emails(user_id); +CREATE INDEX IF NOT EXISTS idx_additional_emails_email ON additional_emails(email_address); +CREATE INDEX IF NOT EXISTS idx_email_processing_usage_user_id ON email_processing_usage(user_id); +CREATE INDEX IF NOT EXISTS 
idx_email_processing_usage_month ON email_processing_usage(user_id, month_year); +CREATE INDEX IF NOT EXISTS idx_debts_user_id ON debts(user_id); + +-- Create function to update updated_at timestamp +CREATE OR REPLACE FUNCTION update_updated_at_column() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = now(); + RETURN NEW; +END; +$$ language 'plpgsql'; + +-- Create triggers for updated_at +DROP TRIGGER IF EXISTS update_user_profiles_updated_at ON user_profiles; +CREATE TRIGGER update_user_profiles_updated_at + BEFORE UPDATE ON user_profiles + FOR EACH ROW + EXECUTE FUNCTION update_updated_at_column(); + +DROP TRIGGER IF EXISTS update_additional_emails_updated_at ON additional_emails; +CREATE TRIGGER update_additional_emails_updated_at + BEFORE UPDATE ON additional_emails + FOR EACH ROW + EXECUTE FUNCTION update_updated_at_column(); + +DROP TRIGGER IF EXISTS update_email_processing_usage_updated_at ON email_processing_usage; +CREATE TRIGGER update_email_processing_usage_updated_at + BEFORE UPDATE ON email_processing_usage + FOR EACH ROW + EXECUTE FUNCTION update_updated_at_column(); + +-- Function to create user profile on signup +CREATE OR REPLACE FUNCTION handle_new_user_add_user_profiles() +RETURNS TRIGGER AS $$ +BEGIN + INSERT INTO public.user_profiles (user_id, first_login_at) + VALUES (NEW.id, now()); + RETURN NEW; +END; +$$ language 'plpgsql' security definer; + +-- Trigger to create user profile when user signs up +DROP TRIGGER IF EXISTS on_auth_user_created_add_user_profiles ON auth.users; +CREATE TRIGGER on_auth_user_created_add_user_profiles + AFTER INSERT ON auth.users + FOR EACH ROW + EXECUTE FUNCTION handle_new_user_add_user_profiles(); + +-- Function to increment email processing usage +CREATE OR REPLACE FUNCTION increment_email_usage(target_user_id uuid) +RETURNS void AS $$ +DECLARE + current_month text := to_char(now(), 'YYYY-MM'); +BEGIN + INSERT INTO public.email_processing_usage (user_id, month_year, emails_processed) + VALUES (target_user_id, current_month, 1) + ON CONFLICT (user_id, month_year) + DO UPDATE SET + emails_processed = email_processing_usage.emails_processed + 1, + updated_at = now(); +END; +$$ language 'plpgsql'; + +-- Enable real-time for new tables +ALTER publication supabase_realtime ADD TABLE user_profiles; +ALTER publication supabase_realtime ADD TABLE additional_emails; +ALTER publication supabase_realtime ADD TABLE email_processing_usage; diff --git a/supabase/migrations/20250607006000_create_public_users_table.sql b/supabase/migrations/20250607006000_create_public_users_table.sql new file mode 100644 index 0000000..7d07a42 --- /dev/null +++ b/supabase/migrations/20250607006000_create_public_users_table.sql @@ -0,0 +1,41 @@ +-- Create a public users table that mirrors relevant auth.users data +-- This avoids the need for SECURITY DEFINER functions + +-- Create the public users table +CREATE TABLE public.users ( + id UUID REFERENCES auth.users NOT NULL PRIMARY KEY, + email TEXT NOT NULL, + created_at TIMESTAMPTZ DEFAULT NOW() NOT NULL +); + +-- Create indexes for performance +CREATE INDEX idx_users_email ON public.users(email); + +-- Create RLS policies for the users table +ALTER TABLE public.users ENABLE ROW LEVEL SECURITY; + +-- Users can only read their own data +CREATE POLICY "Users can view own profile" ON public.users + FOR SELECT USING (auth.uid() = id); + +-- Create a function to handle new user creation +CREATE OR REPLACE FUNCTION public.handle_new_user_add_public_users() +RETURNS trigger AS $$ +BEGIN + INSERT INTO public.users (id, email, 
created_at) + VALUES (new.id, new.email, new.created_at); + RETURN new; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Create trigger to automatically create public.users record when auth.users is created +-- Drop existing trigger if it exists +DROP TRIGGER IF EXISTS on_auth_user_created_add_public_users ON auth.users; + +CREATE TRIGGER on_auth_user_created_add_public_users + AFTER INSERT ON auth.users + FOR EACH ROW EXECUTE PROCEDURE public.handle_new_user_add_public_users(); + +-- Grant necessary permissions +GRANT SELECT ON public.users TO authenticated; +GRANT SELECT ON public.users TO anon;
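
One note on the two migrations above: both `handle_new_user_*` triggers only fire for sign-ups that happen after the migrations are applied, so accounts created earlier end up with no `public.users` or `user_profiles` row, and `getUserIdByEmail` / the dashboard profile fetch come back empty for them. A minimal backfill sketch, assuming pre-existing accounts should be covered as well (this is not part of the diff):

-- Hypothetical backfill for accounts created before the triggers existed
INSERT INTO public.users (id, email, created_at)
SELECT id, email, created_at
FROM auth.users
WHERE email IS NOT NULL
ON CONFLICT (id) DO NOTHING;

INSERT INTO public.user_profiles (user_id, first_login_at)
SELECT id, now()
FROM auth.users
ON CONFLICT (user_id) DO NOTHING;

Relatedly, `email_processing_limit` is stored and surfaced in the Configuration UI, but the webhook only increments usage and never checks the limit. One possible guard, sketched as a Postgres function the webhook could call via `rpc()` before inserting a debt (the function name and exact shape are assumptions, not something this diff defines):

-- Hypothetical guard: returns true while the user is still under their monthly limit
CREATE OR REPLACE FUNCTION public.under_email_limit(target_user_id uuid)
RETURNS boolean AS $$
DECLARE
  current_month text := to_char(now(), 'YYYY-MM');
  processed integer;
  monthly_limit integer;
BEGIN
  SELECT COALESCE(u.emails_processed, 0), COALESCE(p.email_processing_limit, 1000)
    INTO processed, monthly_limit
    FROM public.user_profiles p
    LEFT JOIN public.email_processing_usage u
      ON u.user_id = p.user_id AND u.month_year = current_month
    WHERE p.user_id = target_user_id;

  RETURN COALESCE(processed, 0) < COALESCE(monthly_limit, 1000);
END;
$$ LANGUAGE plpgsql;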