Pre-process analytics data at build time

Author: Aman Varshney
Date: 2025-06-04 02:14:47 +05:30
parent b449e19e10
commit ba678139f5
4 changed files with 201 additions and 138 deletions
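
In short: the analytics page previously downloaded the raw CSV export and parsed it in the browser on every visit. This commit moves that work to build time: a new script fetches and normalizes the CSV once, writes the result to public/analytics-data.json, and the page now loads that static file with a single fetch. A minimal sketch of the consumer side is shown next, assuming the generated file is in place; the trimmed ProcessedAnalyticsData shape mirrors the one defined in scripts/generate-analytics.ts below, and loadDashboardData is an illustrative name, not part of the commit.

interface ProcessedAnalyticsData {
  data: unknown[]; // full per-record type omitted in this sketch
  lastUpdated: string | null;
  generatedAt: string;
  totalRecords: number;
}

// Illustrative helper: read the pre-generated file that Next.js serves from /public.
async function loadDashboardData(): Promise<ProcessedAnalyticsData> {
  const response = await fetch("/analytics-data.json");
  return (await response.json()) as ProcessedAnalyticsData;
}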

apps/web/.gitignore

@@ -25,4 +25,6 @@ yarn-error.log*
 # others
 .env*.local
 .vercel
-next-env.d.ts
+next-env.d.ts
+/public/analytics-data.json

apps/web/package.json

@@ -3,11 +3,12 @@
"version": "0.0.0",
"private": true,
"scripts": {
"build": "next build",
"build": "bun scripts/generate-analytics.ts && next build",
"dev": "next dev --turbopack",
"start": "next start",
"check": "biome check --write .",
"postinstall": "fumadocs-mdx"
"postinstall": "fumadocs-mdx",
"generate-analytics": "bun scripts/generate-analytics.ts"
},
"dependencies": {
"@radix-ui/react-dialog": "^1.1.14",

apps/web/scripts/generate-analytics.ts

@@ -0,0 +1,183 @@
import { existsSync, mkdirSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import Papa from "papaparse";

interface AnalyticsData {
  date: string;
  hour: number;
  cli_version: string;
  node_version: string;
  platform: string;
  backend: string;
  database: string;
  orm: string;
  dbSetup: string;
  auth: string;
  api: string;
  packageManager: string;
  frontend0: string;
  frontend1: string;
  examples0: string;
  examples1: string;
  addons: string[];
  git: string;
  install: string;
  runtime: string;
}

interface CSVRow {
  [key: string]: string;
}

interface ProcessedAnalyticsData {
  data: AnalyticsData[];
  lastUpdated: string | null;
  generatedAt: string;
  totalRecords: number;
}
async function generateAnalyticsData(): Promise<void> {
  try {
    console.log("🔄 Fetching analytics data...");
    const response = await fetch("https://r2.amanv.dev/export.csv");
    const csvText = await response.text();

    console.log("📊 Processing CSV data...");
    let processedData: AnalyticsData[] = [];

    // Papa.parse runs synchronously for string input, so processedData is
    // populated before the code below executes.
    Papa.parse<CSVRow>(csvText, {
      header: true,
      complete: (results) => {
        try {
          processedData = results.data
            .map((row): AnalyticsData | null => {
              const timestamp = row["*.timestamp"] || new Date().toISOString();
              const date = timestamp.includes("T")
                ? timestamp.split("T")[0]
                : timestamp.split(" ")[0];

              let hour = 0;
              try {
                const timestampDate = new Date(timestamp);
                if (!Number.isNaN(timestampDate.getTime())) {
                  hour = timestampDate.getUTCHours();
                }
              } catch {
                hour = 0;
              }

              // Collapse the flattened addon columns into a single array.
              const addons = [
                row["*.properties.addons.0"],
                row["*.properties.addons.1"],
                row["*.properties.addons.2"],
                row["*.properties.addons.3"],
                row["*.properties.addons.4"],
                row["*.properties.addons.5"],
              ].filter(Boolean);

              return {
                date,
                hour,
                cli_version: row["*.properties.cli_version"] || "unknown",
                node_version: row["*.properties.node_version"] || "unknown",
                platform: row["*.properties.platform"] || "unknown",
                backend: row["*.properties.backend"] || "none",
                database: row["*.properties.database"] || "none",
                orm: row["*.properties.orm"] || "none",
                dbSetup: row["*.properties.dbSetup"] || "none",
                auth:
                  row["*.properties.auth"] === "True" ? "enabled" : "disabled",
                api: row["*.properties.api"] || "none",
                packageManager: row["*.properties.packageManager"] || "unknown",
                frontend0: row["*.properties.frontend.0"] || "",
                frontend1: row["*.properties.frontend.1"] || "",
                examples0: row["*.properties.examples.0"] || "",
                examples1: row["*.properties.examples.1"] || "",
                addons,
                git:
                  row["*.properties.git"] === "True" ? "enabled" : "disabled",
                install:
                  row["*.properties.install"] === "True"
                    ? "enabled"
                    : "disabled",
                runtime: row["*.properties.runtime"] || "unknown",
              };
            })
            .filter((item): item is AnalyticsData =>
              Boolean(item?.date && item?.platform !== "unknown"),
            );
        } catch (error) {
          console.error("Error parsing CSV:", error);
        }
      },
      error: (error: unknown) => {
        console.error("Papa Parse error:", error);
      },
    });

    // Find the most recent timestamp in the raw CSV for the
    // "last updated" label shown on the analytics page.
    const lines = csvText.split("\n");
    const timestampColumn = lines[0]
      .split(",")
      .findIndex((header) => header.includes("timestamp"));

    let lastUpdated: string | null = null;
    if (timestampColumn !== -1) {
      const timestamps = lines
        .slice(1)
        .filter((line) => line.trim())
        .map((line) => {
          const columns = line.split(",");
          return columns[timestampColumn]?.replace(/"/g, "");
        })
        .filter(Boolean)
        .map((timestamp) => new Date(timestamp))
        .filter((date) => !Number.isNaN(date.getTime()));

      if (timestamps.length > 0) {
        const mostRecentDate = new Date(
          Math.max(...timestamps.map((d) => d.getTime())),
        );
        lastUpdated = mostRecentDate.toLocaleDateString("en-US", {
          year: "numeric",
          month: "short",
          day: "numeric",
          hour: "2-digit",
          minute: "2-digit",
          timeZone: "UTC",
        });
      }
    }

    const analyticsData: ProcessedAnalyticsData = {
      data: processedData,
      lastUpdated,
      generatedAt: new Date().toISOString(),
      totalRecords: processedData.length,
    };

    // Write the pre-processed data where Next.js serves static assets.
    const publicDir = join(process.cwd(), "public");
    if (!existsSync(publicDir)) {
      mkdirSync(publicDir, { recursive: true });
    }

    const outputPath = join(publicDir, "analytics-data.json");
    writeFileSync(outputPath, JSON.stringify(analyticsData, null, 2));

    console.log(
      `✅ Generated analytics data with ${processedData.length} records`,
    );
    console.log(`📁 Saved to: ${outputPath}`);
    console.log(`🕒 Last data update: ${lastUpdated}`);
  } catch (error) {
    console.error("❌ Error generating analytics data:", error);
    process.exit(1);
  }
}

// Run directly (e.g. via "bun scripts/generate-analytics.ts") or import
// generateAnalyticsData from another script.
if (process.argv[1]?.endsWith("generate-analytics.ts")) {
  await generateAnalyticsData();
}

export { generateAnalyticsData };
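
The generator can be run on its own (package.json wires it up as the generate-analytics script and as the first step of build), or imported from another build task through the named export above. A small usage sketch, assuming a hypothetical prebuild.ts sitting next to this script:

// prebuild.ts (hypothetical): regenerate the static analytics JSON before other build steps.
import { generateAnalyticsData } from "./generate-analytics";

await generateAnalyticsData();
console.log("public/analytics-data.json is ready");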

apps/web analytics page component (AnalyticsPage)

@@ -12,7 +12,6 @@ import { format, parseISO } from "date-fns";
 import { Cpu, Download, Terminal, TrendingUp, Users } from "lucide-react";
 import Image from "next/image";
 import Link from "next/link";
-import Papa from "papaparse";
 import { useCallback, useEffect, useState } from "react";
 import {
   Area,
@@ -409,147 +408,28 @@ export default function AnalyticsPage() {
   const [lastUpdated, setLastUpdated] = useState<string | null>(null);
   const [loadingLastUpdated, setLoadingLastUpdated] = useState(true);

-  const loadCSVData = useCallback(async () => {
+  const loadAnalyticsData = useCallback(async () => {
     try {
-      const response = await fetch("https://r2.amanv.dev/export.csv");
-      const csvText = await response.text();
+      const response = await fetch("/analytics-data.json");
+      const analyticsData = await response.json();

-      Papa.parse(csvText, {
-        header: true,
-        complete: (results) => {
-          try {
-            const parsedData = (results.data as Record<string, string>[])
-              .map((row) => {
-                const timestamp =
-                  row["*.timestamp"] || new Date().toISOString();
-                const date = timestamp.includes("T")
-                  ? timestamp.split("T")[0]
-                  : timestamp.split(" ")[0];
+      setData(analyticsData.data || []);
+      setLastUpdated(analyticsData.lastUpdated || null);

-                let hour = 0;
-                try {
-                  const timestampDate = new Date(timestamp);
-                  if (!Number.isNaN(timestampDate.getTime())) {
-                    hour = timestampDate.getUTCHours();
-                  }
-                } catch {
-                  hour = 0;
-                }
-                const addons = [
-                  row["*.properties.addons.0"],
-                  row["*.properties.addons.1"],
-                  row["*.properties.addons.2"],
-                  row["*.properties.addons.3"],
-                  row["*.properties.addons.4"],
-                  row["*.properties.addons.5"],
-                ].filter(Boolean);
-                return {
-                  date,
-                  hour,
-                  cli_version: row["*.properties.cli_version"] || "unknown",
-                  node_version: row["*.properties.node_version"] || "unknown",
-                  platform: row["*.properties.platform"] || "unknown",
-                  backend: row["*.properties.backend"] || "none",
-                  database: row["*.properties.database"] || "none",
-                  orm: row["*.properties.orm"] || "none",
-                  dbSetup: row["*.properties.dbSetup"] || "none",
-                  auth:
-                    row["*.properties.auth"] === "True"
-                      ? "enabled"
-                      : "disabled",
-                  api: row["*.properties.api"] || "none",
-                  packageManager:
-                    row["*.properties.packageManager"] || "unknown",
-                  frontend0: row["*.properties.frontend.0"] || "",
-                  frontend1: row["*.properties.frontend.1"] || "",
-                  examples0: row["*.properties.examples.0"] || "",
-                  examples1: row["*.properties.examples.1"] || "",
-                  addons,
-                  git:
-                    row["*.properties.git"] === "True" ? "enabled" : "disabled",
-                  install:
-                    row["*.properties.install"] === "True"
-                      ? "enabled"
-                      : "disabled",
-                  runtime: row["*.properties.runtime"] || "unknown",
-                };
-              })
-              .filter((item): item is AnalyticsData =>
-                Boolean(item.date && item.platform !== "unknown"),
-              );
-            if (parsedData.length > 0) {
-              setData(parsedData);
-              console.log(`Loaded ${parsedData.length} records from CSV`);
-            }
-          } catch (error: unknown) {
-            console.error("Error parsing CSV:", error);
-          }
-        },
-        error: (error: unknown) => {
-          console.error("Papa Parse error:", error);
-        },
-      });
+      console.log(
+        `Loaded ${analyticsData.data?.length || 0} records from static JSON`,
+      );
+      console.log(`Data generated at: ${analyticsData.generatedAt}`);
     } catch (error: unknown) {
-      console.error("Error loading CSV:", error);
-    }
-  }, []);
-
-  const fetchLastUpdated = useCallback(async () => {
-    try {
-      const response = await fetch("https://r2.amanv.dev/export.csv");
-      const csvText = await response.text();
-      const lines = csvText.split("\n");
-      const timestampColumn = lines[0]
-        .split(",")
-        .findIndex((header) => header.includes("timestamp"));
-      if (timestampColumn !== -1) {
-        const timestamps = lines
-          .slice(1)
-          .filter((line) => line.trim())
-          .map((line) => {
-            const columns = line.split(",");
-            return columns[timestampColumn]?.replace(/"/g, "");
-          })
-          .filter(Boolean)
-          .map((timestamp) => new Date(timestamp))
-          .filter((date) => !Number.isNaN(date.getTime()));
-        if (timestamps.length > 0) {
-          const mostRecentDate = new Date(
-            Math.max(...timestamps.map((d) => d.getTime())),
-          );
-          setLastUpdated(
-            mostRecentDate.toLocaleDateString("en-US", {
-              year: "numeric",
-              month: "short",
-              day: "numeric",
-              hour: "2-digit",
-              minute: "2-digit",
-              timeZone: "UTC",
-            }),
-          );
-        } else {
-          setLastUpdated("NO_DATA_FOUND");
-        }
-      } else {
-        setLastUpdated("TIMESTAMP_COLUMN_NOT_FOUND");
-      }
-    } catch (error) {
-      console.error("Error fetching last updated date:", error);
-      setLastUpdated("ERROR_PARSING_CSV");
+      console.error("Error loading analytics data:", error);
     } finally {
       setLoadingLastUpdated(false);
     }
   }, []);

   useEffect(() => {
-    loadCSVData();
-    fetchLastUpdated();
-  }, [loadCSVData, fetchLastUpdated]);
+    loadAnalyticsData();
+  }, [loadAnalyticsData]);

   const getPlatformData = () => {
     const platformCounts = data.reduce(
@@ -676,7 +556,6 @@ export default function AnalyticsPage() {
   const getFrontendData = () => {
     const frontendCounts = data.reduce(
       (acc, item) => {
-        // Count frontend0 if it exists and is not empty
         if (
           item.frontend0 &&
           item.frontend0 !== "none" &&
@@ -684,7 +563,6 @@ export default function AnalyticsPage() {
         ) {
           acc[item.frontend0] = (acc[item.frontend0] || 0) + 1;
         }
-        // Count frontend1 if it exists and is not empty
         if (
           item.frontend1 &&
           item.frontend1 !== "none" &&
@@ -967,7 +845,6 @@ export default function AnalyticsPage() {
         );
         const backend = item.backend || "none";

-        // Build the combo string with all frontends + backend
         const parts = [...frontends];
         if (backend !== "none") {
           parts.push(backend);