// Admin stats page server load (mirror of https://github.com/pupperpowell/bibdle.git).
//
// Computes:
// - Overall return rate: % of all-time players who played more than once
// - New player return rate: 7-day rolling avg of daily first-timer return
//   rates, with velocity vs the prior 7 days
// - 7-day and 30-day retention over time: per-cohort-day retention series
import { db } from '$lib/server/db';
import { dailyCompletions, user } from '$lib/server/db/schema';
import { eq, gte, count, countDistinct, avg, asc, min } from 'drizzle-orm';
import type { PageServerLoad } from './$types';
function estDateStr(daysAgo = 0): string {
|
||
const estNow = new Date(Date.now() - 5 * 60 * 60 * 1000); // UTC-5
|
||
estNow.setUTCDate(estNow.getUTCDate() - daysAgo);
|
||
return estNow.toISOString().slice(0, 10);
|
||
}
function prevDay(d: string): string {
|
||
const dt = new Date(d + 'T00:00:00Z');
|
||
dt.setUTCDate(dt.getUTCDate() - 1);
|
||
return dt.toISOString().slice(0, 10);
|
||
}
function addDays(d: string, n: number): string {
|
||
const dt = new Date(d + 'T00:00:00Z');
|
||
dt.setUTCDate(dt.getUTCDate() + n);
|
||
return dt.toISOString().slice(0, 10);
|
||
}
export const load: PageServerLoad = async () => {
|
||
const todayEst = estDateStr(0);
|
||
const yesterdayEst = estDateStr(1);
|
||
const sevenDaysAgo = estDateStr(7);
|
||
|
||
// Three weekly windows for first + second derivative calculations
|
||
// Week A: last 7 days (indices 0–6)
|
||
// Week B: 7–13 days ago (indices 7–13)
|
||
// Week C: 14–20 days ago (indices 14–20)
|
||
const weekAStart = estDateStr(6);
|
||
const weekBEnd = estDateStr(7);
|
||
const weekBStart = estDateStr(13);
|
||
const weekCEnd = estDateStr(14);
|
||
const weekCStart = estDateStr(20);
|
||
|
||
// ── Scalar stats ──────────────────────────────────────────────────────────
|
||
|
||
const [{ todayCount }] = await db
|
||
.select({ todayCount: count() })
|
||
.from(dailyCompletions)
|
||
.where(eq(dailyCompletions.date, todayEst));
|
||
|
||
const [{ totalCount }] = await db
|
||
.select({ totalCount: count() })
|
||
.from(dailyCompletions);
|
||
|
||
const [{ uniquePlayers }] = await db
|
||
.select({ uniquePlayers: countDistinct(dailyCompletions.anonymousId) })
|
||
.from(dailyCompletions);
|
||
|
||
const [{ weeklyPlayers }] = await db
|
||
.select({ weeklyPlayers: countDistinct(dailyCompletions.anonymousId) })
|
||
.from(dailyCompletions)
|
||
.where(gte(dailyCompletions.date, sevenDaysAgo));
|
||
|
||
const todayPlayers = await db
|
||
.selectDistinct({ id: dailyCompletions.anonymousId })
|
||
.from(dailyCompletions)
|
||
.where(eq(dailyCompletions.date, todayEst));
|
||
|
||
const yesterdayPlayers = await db
|
||
.selectDistinct({ id: dailyCompletions.anonymousId })
|
||
.from(dailyCompletions)
|
||
.where(eq(dailyCompletions.date, yesterdayEst));
|
||
|
||
const todaySet = new Set(todayPlayers.map((r) => r.id));
|
||
const activeStreaks = yesterdayPlayers.filter((r) => todaySet.has(r.id)).length;
|
||
|
||
const [{ avgGuessesRaw }] = await db
|
||
.select({ avgGuessesRaw: avg(dailyCompletions.guessCount) })
|
||
.from(dailyCompletions)
|
||
.where(eq(dailyCompletions.date, todayEst));
|
||
|
||
const avgGuessesToday = avgGuessesRaw != null ? parseFloat(avgGuessesRaw) : null;
|
||
|
||
const [{ registeredUsers }] = await db
|
||
.select({ registeredUsers: count() })
|
||
.from(user);
|
||
|
||
const avgCompletionsPerPlayer =
|
||
uniquePlayers > 0 ? Math.round((totalCount / uniquePlayers) * 100) / 100 : null;
|
||
|
||
// ── 21-day completions per day (covers all three weekly windows) ──────────
|
||
|
||
const rawPerDay21 = await db
|
||
.select({ date: dailyCompletions.date, dayCount: count() })
|
||
.from(dailyCompletions)
|
||
.where(gte(dailyCompletions.date, weekCStart))
|
||
.groupBy(dailyCompletions.date)
|
||
.orderBy(asc(dailyCompletions.date));
|
||
|
||
const counts21 = new Map(rawPerDay21.map((r) => [r.date, r.dayCount]));
|
||
|
||
// Build indexed array: index 0 = today, index 20 = 20 days ago
|
||
const completionsPerDay: number[] = [];
|
||
for (let i = 0; i <= 20; i++) {
|
||
completionsPerDay.push(counts21.get(estDateStr(i)) ?? 0);
|
||
}
|
||
|
||
// last14Days for the trend chart (most recent first)
|
||
const last14Days: { date: string; count: number }[] = [];
|
||
for (let i = 0; i <= 13; i++) {
|
||
last14Days.push({ date: estDateStr(i), count: completionsPerDay[i] });
|
||
}
|
||
|
||
// Weekly totals from the indexed array
|
||
const weekATotal = completionsPerDay.slice(0, 7).reduce((a, b) => a + b, 0);
|
||
const weekBTotal = completionsPerDay.slice(7, 14).reduce((a, b) => a + b, 0);
|
||
const weekCTotal = completionsPerDay.slice(14, 21).reduce((a, b) => a + b, 0);
|
||
|
||
// First derivative: avg daily completions change (week A vs week B)
|
||
const completionsVelocity = Math.round(((weekATotal - weekBTotal) / 7) * 10) / 10;
|
||
// Second derivative: is velocity itself increasing or decreasing?
|
||
const completionsAcceleration =
|
||
Math.round((((weekATotal - weekBTotal) - (weekBTotal - weekCTotal)) / 7) * 10) / 10;
|
||
|
||
// ── 90-day per-user data (reused for streaks + weekly user sets) ──────────
|
||
|
||
const ninetyDaysAgo = estDateStr(90);
|
||
const recentCompletions = await db
|
||
.select({ anonymousId: dailyCompletions.anonymousId, date: dailyCompletions.date })
|
||
.from(dailyCompletions)
|
||
.where(gte(dailyCompletions.date, ninetyDaysAgo))
|
||
.orderBy(asc(dailyCompletions.date));
|
||
|
||
// Group dates by user (ascending) and users by date
|
||
const userDatesMap = new Map<string, string[]>();
|
||
const dateUsersMap = new Map<string, Set<string>>();
|
||
for (const row of recentCompletions) {
|
||
const arr = userDatesMap.get(row.anonymousId);
|
||
if (arr) arr.push(row.date);
|
||
else userDatesMap.set(row.anonymousId, [row.date]);
|
||
|
||
let s = dateUsersMap.get(row.date);
|
||
if (!s) { s = new Set(); dateUsersMap.set(row.date, s); }
|
||
s.add(row.anonymousId);
|
||
}
|
||
|
||
// ── Streak distribution ───────────────────────────────────────────────────
|
||
|
||
const streakDistribution = new Map<number, number>();
|
||
for (const dates of userDatesMap.values()) {
|
||
const desc = dates.slice().reverse();
|
||
if (desc[0] !== todayEst && desc[0] !== yesterdayEst) continue;
|
||
let streak = 1;
|
||
let cur = desc[0];
|
||
for (let i = 1; i < desc.length; i++) {
|
||
if (desc[i] === prevDay(cur)) {
|
||
streak++;
|
||
cur = desc[i];
|
||
} else {
|
||
break;
|
||
}
|
||
}
|
||
if (streak >= 2) {
|
||
streakDistribution.set(streak, (streakDistribution.get(streak) ?? 0) + 1);
|
||
}
|
||
}
|
||
|
||
const streakChart = Array.from(streakDistribution.entries())
|
||
.sort((a, b) => a[0] - b[0])
|
||
.map(([days, userCount]) => ({ days, count: userCount }));
|
||
|
||
// ── Weekly user sets (for user-based velocity + churn) ───────────────────
|
||
|
||
const weekAUsers = new Set<string>();
|
||
const weekBUsers = new Set<string>();
|
||
const weekCUsers = new Set<string>();
|
||
|
||
for (const [userId, dates] of userDatesMap) {
|
||
if (dates.some((d) => d >= weekAStart)) weekAUsers.add(userId);
|
||
if (dates.some((d) => d >= weekBStart && d <= weekBEnd)) weekBUsers.add(userId);
|
||
if (dates.some((d) => d >= weekCStart && d <= weekCEnd)) weekCUsers.add(userId);
|
||
}
|
||
|
||
// First derivative: weekly unique users change
|
||
const userVelocity = weekAUsers.size - weekBUsers.size;
|
||
// Second derivative: is user growth speeding up or slowing down?
|
||
const userAcceleration =
|
||
weekAUsers.size - weekBUsers.size - (weekBUsers.size - weekCUsers.size);
|
||
|
||
// ── New players + churn ───────────────────────────────────────────────────
|
||
// New players: anonymousIds whose first-ever completion falls in the last 7 days.
|
||
// Checking against all-time data (not just the 90-day window) ensures accuracy.
|
||
const firstDates = await db
|
||
.select({
|
||
anonymousId: dailyCompletions.anonymousId,
|
||
firstDate: min(dailyCompletions.date),
|
||
totalCompletions: count()
|
||
})
|
||
.from(dailyCompletions)
|
||
.groupBy(dailyCompletions.anonymousId);
|
||
|
||
const newUsers7d = firstDates.filter((r) => r.firstDate != null && r.firstDate >= weekAStart).length;
|
||
|
||
// Churned: played in week B but not at all in week A
|
||
const churned7d = [...weekBUsers].filter((id) => !weekAUsers.has(id)).length;
|
||
|
||
// Net growth = truly new arrivals minus departures
|
||
const netGrowth7d = newUsers7d - churned7d;
|
||
|
||
// ── Return rate ───────────────────────────────────────────────────────────
|
||
// "Return rate": % of all-time unique players who have ever played more than once.
|
||
const playersWithReturn = firstDates.filter((r) => r.totalCompletions >= 2).length;
|
||
const overallReturnRate =
|
||
firstDates.length > 0
|
||
? Math.round((playersWithReturn / firstDates.length) * 1000) / 10
|
||
: null;
|
||
|
||
// Daily new-player return rate: for each day D, what % of first-time players
|
||
// on D ever came back (i.e. totalCompletions >= 2)?
|
||
const dailyNewPlayerReturn = new Map<string, { cohort: number; returned: number }>();
|
||
for (const r of firstDates) {
|
||
if (!r.firstDate) continue;
|
||
const existing = dailyNewPlayerReturn.get(r.firstDate) ?? { cohort: 0, returned: 0 };
|
||
existing.cohort++;
|
||
if (r.totalCompletions >= 2) existing.returned++;
|
||
dailyNewPlayerReturn.set(r.firstDate, existing);
|
||
}
|
||
|
||
// Build chronological array of daily rates (oldest first, days 60→1 ago)
|
||
// Days with fewer than 3 new players get rate=null to exclude from rolling avg
|
||
const dailyReturnRates: { date: string; cohort: number; rate: number | null }[] = [];
|
||
for (let i = 60; i >= 1; i--) {
|
||
const dateD = estDateStr(i);
|
||
const d = dailyNewPlayerReturn.get(dateD);
|
||
dailyReturnRates.push({
|
||
date: dateD,
|
||
cohort: d?.cohort ?? 0,
|
||
rate: d && d.cohort >= 3 ? Math.round((d.returned / d.cohort) * 1000) / 10 : null
|
||
});
|
||
}
|
||
|
||
// 7-day trailing rolling average of the daily rates
|
||
// Index 0 = 60 days ago, index 59 = yesterday
|
||
const newPlayerReturnSeries = dailyReturnRates.map((r, idx) => {
|
||
const window = dailyReturnRates
|
||
.slice(Math.max(0, idx - 6), idx + 1)
|
||
.filter((d) => d.rate !== null);
|
||
const avg =
|
||
window.length > 0
|
||
? Math.round((window.reduce((sum, d) => sum + (d.rate ?? 0), 0) / window.length) * 10) /
|
||
10
|
||
: null;
|
||
return { date: r.date, cohort: r.cohort, rate: r.rate, rollingAvg: avg };
|
||
});
|
||
|
||
// Velocity: avg of last 7 complete days (idx 53–59) vs prior 7 (idx 46–52)
|
||
const recentWindow = newPlayerReturnSeries.slice(53).filter((d) => d.rate !== null);
|
||
const priorWindow = newPlayerReturnSeries.slice(46, 53).filter((d) => d.rate !== null);
|
||
const current7dReturnAvg =
|
||
recentWindow.length > 0
|
||
? Math.round(
|
||
(recentWindow.reduce((a, d) => a + (d.rate ?? 0), 0) / recentWindow.length) * 10
|
||
) / 10
|
||
: null;
|
||
const prior7dReturnAvg =
|
||
priorWindow.length > 0
|
||
? Math.round(
|
||
(priorWindow.reduce((a, d) => a + (d.rate ?? 0), 0) / priorWindow.length) * 10
|
||
) / 10
|
||
: null;
|
||
const returnRateChange =
|
||
current7dReturnAvg !== null && prior7dReturnAvg !== null
|
||
? Math.round((current7dReturnAvg - prior7dReturnAvg) * 10) / 10
|
||
: null;
|
||
|
||
// ── Retention over time ───────────────────────────────────────────────────
|
||
// For each cohort day D, retention = % of that day's players who played
|
||
// again within the next N days. Only compute for days where D+N is in the past.
|
||
|
||
function retentionSeries(
|
||
windowDays: number,
|
||
seriesLength: number
|
||
): { date: string; rate: number; cohortSize: number }[] {
|
||
// Earliest computable cohort day: today - (windowDays + 1)
|
||
// We use index windowDays+1 through windowDays+seriesLength
|
||
const series: { date: string; rate: number; cohortSize: number }[] = [];
|
||
for (let i = windowDays + 1; i <= windowDays + seriesLength; i++) {
|
||
const dateD = estDateStr(i);
|
||
const cohort = dateUsersMap.get(dateD);
|
||
if (!cohort || cohort.size < 3) continue; // skip tiny cohorts
|
||
let retained = 0;
|
||
for (const userId of cohort) {
|
||
for (let j = 1; j <= windowDays; j++) {
|
||
if (dateUsersMap.get(addDays(dateD, j))?.has(userId)) {
|
||
retained++;
|
||
break;
|
||
}
|
||
}
|
||
}
|
||
series.push({
|
||
date: dateD,
|
||
rate: Math.round((retained / cohort.size) * 1000) / 10,
|
||
cohortSize: cohort.size
|
||
});
|
||
}
|
||
series.reverse(); // chronological (oldest first)
|
||
return series;
|
||
}
|
||
|
||
const retention7dSeries = retentionSeries(7, 30);
|
||
const retention30dSeries = retentionSeries(30, 30);
|
||
|
||
return {
|
||
todayEst,
|
||
stats: {
|
||
todayCount,
|
||
totalCount,
|
||
uniquePlayers,
|
||
weeklyPlayers,
|
||
activeStreaks,
|
||
avgGuessesToday,
|
||
registeredUsers,
|
||
avgCompletionsPerPlayer
|
||
},
|
||
growth: {
|
||
completionsVelocity,
|
||
completionsAcceleration,
|
||
userVelocity,
|
||
userAcceleration,
|
||
newUsers7d,
|
||
churned7d,
|
||
netGrowth7d
|
||
},
|
||
last14Days,
|
||
streakChart,
|
||
retention7dSeries,
|
||
retention30dSeries,
|
||
overallReturnRate,
|
||
newPlayerReturnSeries: newPlayerReturnSeries.slice(-30),
|
||
newPlayerReturnVelocity: {
|
||
current7dAvg: current7dReturnAvg,
|
||
prior7dAvg: prior7dReturnAvg,
|
||
change: returnRateChange
|
||
}
|
||
};
|
||
};
|