Testing a theory

This commit is contained in:
Christopher Cookman 2026-01-25 14:37:11 -07:00
commit 41fbc1270a
9 changed files with 2590 additions and 0 deletions

142
.gitignore vendored Normal file
View file

@@ -0,0 +1,142 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.*
!.env.example
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
.output
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Sveltekit cache directory
.svelte-kit/
# vitepress build output
**/.vitepress/dist
# vitepress cache directory
**/.vitepress/cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# Firebase cache directory
.firebase/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v3
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
# Vite files
vite.config.js.timestamp-*
vite.config.ts.timestamp-*
.vite/
database.db

49
index.js Normal file
View file

@@ -0,0 +1,49 @@
require("dotenv").config({ quiet: true });

// --- Database setup --------------------------------------------------------
// The Database object is created synchronously; the callback only reports
// open errors, so global.db is assigned before any route module loads.
const sqlite3 = require('sqlite3').verbose();
const dbFile = process.env.DB_FILE || 'database.db';
global.db = new sqlite3.Database(dbFile, (err) => {
    if (err) {
        console.error('Could not connect to database', err);
        process.exit(1); // We simply CANNOT continue without a database
    } else {
        console.log('Connected to database');
        require("./migrations.js")(global.db);
    }
});

// --- Express setup ----------------------------------------------------------
const express = require('express');
const app = express();
const port = process.env.SERVER_PORT || 3000;
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
app.use(express.static('public'));

// --- Route loading ----------------------------------------------------------
const fs = require('fs');
const path = require('path');
const routesPath = path.join(__dirname, 'routes');

/**
 * Recursively loads route modules from `dir`.
 * A module is mounted only when it exports both `path` (mount point) and
 * `router` (an express.Router); any other .js file is silently skipped.
 * @param {string} dir - absolute directory to scan
 */
function loadRoutes(dir) {
    // Sort entries so the mount order is deterministic across platforms
    // (fs.readdirSync order is filesystem-dependent).
    const entries = fs.readdirSync(dir, { withFileTypes: true })
        .sort((a, b) => a.name.localeCompare(b.name));
    for (const entry of entries) {
        const fullPath = path.join(dir, entry.name);
        if (entry.isDirectory()) {
            loadRoutes(fullPath);
        } else if (entry.name.endsWith('.js')) {
            const route = require(fullPath);
            if (route.path && route.router) {
                app.use(route.path, route.router);
                console.log(`Loaded route: ${route.path} from ${fullPath}`);
            }
        }
    }
}

// Fix: previously a missing routes/ directory crashed the process with an
// unhandled ENOENT thrown by fs.readdirSync.
if (fs.existsSync(routesPath)) {
    loadRoutes(routesPath);
} else {
    console.warn('No routes directory found:', routesPath);
}

app.listen(port, () => {
    console.log(`Listening on ${port}`);
});

81
migrations.js Normal file
View file

@@ -0,0 +1,81 @@
const fs = require('fs');
const path = require('path');

/**
 * Applies pending SQL migrations from ./migrations, in lexicographic
 * filename order, tracking applied files in a `migrations` table so each
 * file runs at most once. Any failure exits the process.
 *
 * NOTE(review): all work below runs on sqlite3's async statement queue.
 * db.serialize() keeps directly-queued statements ordered, but each db.exec
 * is only queued from inside its db.get callback, so a migration's SQL runs
 * after all the existence checks — confirm this ordering is acceptable if
 * later migrations depend on earlier ones having fully applied.
 *
 * @param {import('sqlite3').Database} db - an open sqlite3 connection
 */
module.exports = function(db) {
    const migrationsDir = path.join(__dirname, 'migrations');
    // A missing migrations directory is not an error — nothing to apply.
    if (!fs.existsSync(migrationsDir)) {
        console.log('No migrations directory found:', migrationsDir);
        return;
    }
    // Lexicographic sort gives the execution order (e.g. 001_, 002_, ...).
    const sqlFiles = fs.readdirSync(migrationsDir)
        .filter(f => f.toLowerCase().endsWith('.sql'))
        .sort();
    if (sqlFiles.length === 0) {
        console.log('No .sql migration files found in', migrationsDir);
        return;
    }
    db.serialize(() => {
        // Ensure migrations table exists
        db.run(
            `CREATE TABLE IF NOT EXISTS migrations (
                name TEXT PRIMARY KEY,
                applied_at TEXT NOT NULL
            )`,
            (err) => {
                if (err) {
                    console.error('Could not create migrations table', err);
                    process.exit(1);
                }
                // Apply each migration in order if not already applied
                sqlFiles.forEach(file => {
                    const name = file;
                    const fullPath = path.join(migrationsDir, file);
                    db.get('SELECT name FROM migrations WHERE name = ?', [name], (err, row) => {
                        if (err) {
                            console.error('Failed checking migration', name, err);
                            process.exit(1);
                        }
                        // A row means this migration was already applied.
                        if (row) {
                            console.log('Skipping applied migration:', name);
                            return;
                        }
                        let sql;
                        try {
                            sql = fs.readFileSync(fullPath, 'utf8');
                        } catch (readErr) {
                            console.error('Failed reading migration file', fullPath, readErr);
                            process.exit(1);
                        }
                        // exec (not run) so multi-statement migration files work.
                        db.exec(sql, function(execErr) {
                            if (execErr) {
                                console.error('Failed applying migration', name, execErr);
                                process.exit(1);
                            }
                            // Record success only after the SQL applied cleanly.
                            db.run(
                                "INSERT INTO migrations(name, applied_at) VALUES(?, datetime('now'))",
                                [name],
                                function(insertErr) {
                                    if (insertErr) {
                                        console.error('Failed recording migration', name, insertErr);
                                        process.exit(1);
                                    }
                                    console.log('Applied migration:', name);
                                }
                            );
                        });
                    });
                });
            }
        );
    });
};

10
migrations/001_init.sql Normal file
View file

@@ -0,0 +1,10 @@
-- One row per game-server session, keyed by the server's id.
CREATE TABLE IF NOT EXISTS analytics (
    id TEXT PRIMARY KEY NOT NULL,               -- server id supplied at startup
    placeId INTEGER NOT NULL,                   -- place this server belongs to
    revision INTEGER NOT NULL,                  -- place revision reported at startup
    startupTime INTEGER NOT NULL,               -- startup timestamp (units set by the reporting client -- TODO confirm)
    serverDuration INTEGER NOT NULL DEFAULT 0,  -- last reported uptime, updated by heartbeat/shutdown
    endTime INTEGER DEFAULT NULL,               -- set by the shutdown route; NULL while running
    shutdownReason TEXT DEFAULT NULL,           -- set by the shutdown route
    allPlayers BLOB NOT NULL DEFAULT '{}'       -- JSON-stringified player data (NOTE(review): TEXT would be more conventional for JSON)
)

2227
package-lock.json generated Normal file

File diff suppressed because it is too large Load diff

18
package.json Normal file
View file

@@ -0,0 +1,18 @@
{
"name": "roblox-analytics",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"type": "commonjs",
"dependencies": {
"dotenv": "^17.2.3",
"express": "^5.2.1",
"sqlite3": "^5.1.7"
}
}

21
routes/api/heartbeat.js Normal file
View file

@@ -0,0 +1,21 @@
const express = require('express');
const router = express.Router();
const db = global.db;

/**
 * POST / — periodic heartbeat from a running game server.
 * Body: { serverId, totalPlayers, currentPlayers, duration }
 * Updates serverDuration and the stored player list for the server's row.
 *
 * NOTE(review): totalPlayers is validated but never persisted here —
 * confirm whether it should be stored or dropped from the contract.
 * NOTE(review): index.js mounts modules exporting { path, router }; the
 * export statement is expected elsewhere in this file — confirm it exists.
 */
router.post('/', (req, res) => {
    const { serverId, totalPlayers, currentPlayers, duration } = req.body;
    if (!serverId || totalPlayers === undefined || currentPlayers === undefined || duration === undefined) {
        return res.status(400).json({ error: 'Missing required fields' });
    }
    db.run(
        'UPDATE analytics SET serverDuration = ?, allPlayers = ? WHERE id = ?',
        [duration, JSON.stringify(currentPlayers), serverId],
        function (err) {
            if (err) {
                console.error('Failed to record heartbeat data', err);
                return res.status(500).json({ error: 'Database error' });
            }
            // Fix: previously an UPDATE that matched no row (unknown serverId)
            // still reported success; this.changes is the matched-row count.
            if (this.changes === 0) {
                return res.status(404).json({ error: 'Unknown serverId' });
            }
            return res.status(200).json({ message: 'Heartbeat data recorded' });
        }
    );
});

20
routes/api/shutdown.js Normal file
View file

@@ -0,0 +1,20 @@
const express = require('express');
const router = express.Router();
const db = global.db;

/**
 * POST / — final report sent when a game server shuts down.
 * Body: { serverId, serverStartTime, shutdownReason, shutdownTime, duration, totalPlayers }
 * Records end time, reason, final duration, and the full player set.
 *
 * NOTE(review): serverStartTime is validated but never persisted here —
 * confirm whether it belongs in this contract.
 */
router.post('/', (req, res) => {
    const { serverId, serverStartTime, shutdownReason, shutdownTime, duration, totalPlayers } = req.body;
    // Fix: numeric fields now use == null so a legitimate value of 0 is not
    // rejected (the old truthiness checks treated 0 as "missing"), matching
    // the existing `duration === undefined` style.
    if (!serverId || serverStartTime == null || !shutdownReason || shutdownTime == null
        || duration === undefined || totalPlayers === undefined) {
        return res.status(400).json({ error: 'Missing required fields' });
    }
    db.run(
        'UPDATE analytics SET endTime = ?, shutdownReason = ?, serverDuration = ?, allPlayers = ? WHERE id = ?',
        [shutdownTime, shutdownReason, duration, JSON.stringify(totalPlayers), serverId],
        function (err) {
            if (err) {
                console.error('Failed to record shutdown data', err);
                return res.status(500).json({ error: 'Database error' });
            }
            // Fix: previously an UPDATE matching no row (unknown serverId)
            // still reported success.
            if (this.changes === 0) {
                return res.status(404).json({ error: 'Unknown serverId' });
            }
            return res.status(200).json({ message: 'Shutdown data recorded' });
        }
    );
});

22
routes/api/startup.js Normal file
View file

@@ -0,0 +1,22 @@
const express = require('express');
const router = express.Router();
const db = global.db;

/**
 * POST / — registers a newly started game server.
 * Body: { serverId, serverStartTime, revision, placeId }
 * Inserts the initial analytics row; later heartbeat/shutdown posts update it.
 */
router.post('/', (req, res) => {
    const { serverId, serverStartTime, revision, placeId } = req.body;
    // Fix: `!revision` rejected a legitimate revision of 0; numeric fields
    // now use == null (missing/null only).
    if (!serverId || serverStartTime == null || revision == null || placeId == null) {
        return res.status(400).json({ error: 'Missing required fields' });
    }
    console.log(`Recording startup: ${serverId} at ${serverStartTime} for place ${placeId} rev ${revision}`);
    db.run(
        'INSERT INTO analytics (id, placeId, revision, startupTime) VALUES (?, ?, ?, ?)',
        [serverId, placeId, revision, serverStartTime],
        function (err) {
            if (err) {
                // id is the PRIMARY KEY: a duplicate startup post violates the
                // constraint. Report it as a conflict instead of a generic 500.
                if (err.code === 'SQLITE_CONSTRAINT') {
                    return res.status(409).json({ error: 'Server already registered' });
                }
                console.error('Failed to record startup data', err);
                return res.status(500).json({ error: 'Database error' });
            }
            return res.status(200).json({ message: 'Startup data recorded' });
        }
    );
});