diff --git a/.env.example b/.env.example index 34a68e2..ec6d9bd 100644 --- a/.env.example +++ b/.env.example @@ -1,32 +1,35 @@ -# Environment Variables Template -# Copy this file to .env and update values +# PostgreSQL Database Configuration +DB_HOST=localhost +DB_NAME=edh_stats +# DB_USER must be a superuser (postgres) to run migrations and create schema objects +# The default PostgreSQL superuser created by the image is 'postgres' +DB_USER=postgres +DB_PASSWORD=edh_password +# PostgreSQL always uses standard port 5432 (not configurable) +# DB_SEED: Set to 'true' to automatically seed database with sample data after migrations +# (development only - not recommended for production) +DB_SEED=false # Application Configuration NODE_ENV=development -PORT=3000 -HOST=0.0.0.0 +LOG_LEVEL=info # Security JWT_SECRET=your-super-secure-jwt-secret-key-change-this-in-production -SESSION_SECRET=your-session-secret-change-this-in-production - -# User Registration -ALLOW_REGISTRATION=true - -# Database -DATABASE_PATH=/app/database/data/edh-stats.db -DATABASE_BACKUP_PATH=/app/database/data/backups # CORS Configuration CORS_ORIGIN=http://localhost:80 -# Logging -LOG_LEVEL=info +# User Registration +ALLOW_REGISTRATION=true -# Rate Limiting +# Rate Limiting (optional - default: 100 requests per 15 minutes) +# RATE_LIMIT_WINDOW defines the time window in MINUTES +# RATE_LIMIT_MAX defines the maximum number of requests in that window +# Comment out to use defaults RATE_LIMIT_WINDOW=15 RATE_LIMIT_MAX=100 -# Monitoring -HEALTH_CHECK_ENABLED=true -METRICS_ENABLED=false +# Optional: Database Connection Pooling (Advanced) +# DB_POOL_MIN=2 +# DB_POOL_MAX=10 diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index ad936ae..3108cbe 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -91,47 +91,92 @@ jobs: version: '3.8' services: - backend: - image: ${{ steps.version.outputs.BACKEND_IMAGE }} - environment: - - NODE_ENV=production - - DATABASE_PATH=/app/database/data/edh-stats.db - - JWT_SECRET_FILE=/run/secrets/jwt_secret - - CORS_ORIGIN=${CORS_ORIGIN:-https://yourdomain.com} - - LOG_LEVEL=warn - - ALLOW_REGISTRATION=${ALLOW_REGISTRATION:-false} - volumes: - - sqlite_data:/app/database/data - - app_logs:/app/logs - secrets: - - jwt_secret + postgres: + image: postgres:16-alpine + environment: + - POSTGRES_USER=${DB_USER:-postgres} + - POSTGRES_PASSWORD=${DB_PASSWORD:-edh_password} + - POSTGRES_DB=${DB_NAME:-edh_stats} + ports: + - '${DB_PORT:-5432}:5432' + volumes: + - postgres_data:/var/lib/postgresql/data + healthcheck: + test: + - CMD-SHELL + - 'PGPASSWORD=${DB_PASSWORD:-edh_password} pg_isready -U ${DB_USER:-postgres} -h localhost' + interval: 10s + timeout: 5s + retries: 5 + networks: + - edh-stats-network + restart: unless-stopped + + db-migrate: + image: ${{ steps.version.outputs.BACKEND_IMAGE }} + depends_on: + postgres: + condition: service_healthy + environment: + - NODE_ENV=production + - DB_HOST=${DB_HOST:-postgres} + - DB_PORT=${DB_PORT:-5432} + - DB_NAME=${DB_NAME:-edh_stats} + - DB_USER=${DB_USER:-postgres} + - DB_PASSWORD=${DB_PASSWORD:-edh_password} + - DB_SEED=${DB_SEED:-false} + command: node src/database/migrate.js migrate + networks: + - edh-stats-network + restart: 'no' + + backend: + image: ${{ steps.version.outputs.BACKEND_IMAGE }} + depends_on: + db-migrate: + condition: service_completed_successfully + environment: + - NODE_ENV=production + - DB_HOST=${DB_HOST:-postgres} + - DB_PORT=${DB_PORT:-5432} + - DB_NAME=${DB_NAME:-edh_stats} + - DB_USER=${DB_USER:-postgres} + - DB_PASSWORD=${DB_PASSWORD:-edh_password} + - JWT_SECRET=${JWT_SECRET} + - CORS_ORIGIN=${CORS_ORIGIN:-https://yourdomain.com} + - LOG_LEVEL=${LOG_LEVEL:-warn} + - ALLOW_REGISTRATION=${ALLOW_REGISTRATION:-false} restart: unless-stopped healthcheck: - test: ['CMD', 'wget', '--no-verbose', '--tries=1', '--spider', 'http://localhost:3000/api/health'] + test: + - CMD + - wget + - --no-verbose + - --tries=1 + - --spider + - http://localhost:3000/api/health interval: 30s timeout: 10s retries: 3 + start_period: 40s networks: - edh-stats-network + stop_grace_period: 30s frontend: image: ${{ steps.version.outputs.FRONTEND_IMAGE }} ports: - '80:80' - '443:443' + depends_on: + - backend restart: unless-stopped networks: - edh-stats-network volumes: - sqlite_data: + postgres_data: driver: local - app_logs: - driver: local - - secrets: - jwt_secret: - external: true networks: edh-stats-network: diff --git a/.gitignore b/.gitignore index 6311fff..11dd9ec 100644 --- a/.gitignore +++ b/.gitignore @@ -15,6 +15,7 @@ yarn-error.log* *.sqlite *.sqlite3 database/data/* +postgres_data # Logs logs @@ -116,4 +117,4 @@ docker-compose.*.deployed.yml *.crt *.key .certs/ -ssl/ \ No newline at end of file +ssl/ diff --git a/DEPLOYMENT_CHECKLIST.md b/DEPLOYMENT_CHECKLIST.md new file mode 100644 index 0000000..c55db52 --- /dev/null +++ b/DEPLOYMENT_CHECKLIST.md @@ -0,0 +1,327 @@ +# EDH Stats - Deployment Checklist + +## Pre-Deployment Verification + +### Code Quality +- [x] No SQLite references remain +- [x] All database operations use async/await +- [x] All queries are parameterized +- [x] Error handling implemented +- [x] Transaction support working +- [x] Repository pattern applied + +### Docker & Build +- [x] package-lock.json synchronized +- [x] Dockerfile using correct npm syntax +- [x] All dependencies installed +- [x] Docker build completes without errors + +### Configuration +- [x] .env.example created with PostgreSQL variables +- [x] docker-compose.yml updated for PostgreSQL +- [x] deploy.sh updated for production +- [x] GitHub Actions workflow updated + +### Documentation +- [x] POSTGRES_MIGRATION_COMPLETE.md created +- [x] MIGRATION_STATUS.md created +- [x] Repository pattern documented +- [x] Deployment procedures documented + +--- + +## Development Deployment + +### Prerequisites +- Docker installed +- Docker Compose installed +- Git repository cloned + +### Deployment Steps + +```bash +# 1. Navigate to project directory +cd /path/to/edh-stats + +# 2. Start all services +docker-compose up + +# 3. Wait for services to start +# Expected output: +# - postgres: "database system is ready to accept connections" +# - db-migrate: "Migrations completed successfully!" +# - backend: "Server listening on http://0.0.0.0:3000" +# - frontend: "nginx running" + +# 4. Verify services are running +docker-compose ps + +# 5. Test API endpoint +curl http://localhost:3002/api/health +``` + +### Verification +- [ ] PostgreSQL is running +- [ ] Migrations completed successfully +- [ ] Backend API is responsive +- [ ] Frontend is accessible at http://localhost:8081 +- [ ] Database has test data + +### Testing +```bash +# Run migrations manually +docker-compose exec backend node src/database/migrate.js migrate + +# Seed sample data +docker-compose exec backend node src/database/migrate.js seed + +# Query database +docker-compose exec postgres psql -U edh_user -d edh_stats +``` + +--- + +## Production Deployment + +### Prerequisites +- Docker and Docker Compose installed +- GitHub Container Registry access +- GHCR token with write:packages permission +- `.env` file with production secrets + +### Build & Push Images + +```bash +# 1. Navigate to project directory +cd /path/to/edh-stats + +# 2. Set GitHub user (if not already set) +export GITHUB_USER=your-github-username + +# 3. Build and push images +./deploy.sh 1.0.0 + +# Expected output: +# - Version file updated +# - Backend image built and pushed +# - Frontend image built and pushed +# - Deployment config generated +``` + +### Create Environment File + +```bash +# 1. Create .env file +cat > .env << 'ENVEOF' +# PostgreSQL Database +DB_HOST=postgres +DB_PORT=5432 +DB_NAME=edh_stats +DB_USER=edh_user +DB_PASSWORD=$(openssl rand -base64 32) + +# Application +NODE_ENV=production +LOG_LEVEL=warn + +# Security +JWT_SECRET=$(openssl rand -base64 32) + +# CORS +CORS_ORIGIN=https://yourdomain.com + +# Registration +ALLOW_REGISTRATION=false +ENVEOF + +# 2. Review .env file +cat .env + +# 3. Make sure passwords are secure +# The script generates random passwords above, but review them! +``` + +### Deploy Services + +```bash +# 1. Pull latest images +docker pull ghcr.io/your-username/edh-stats-backend:1.0.0 +docker pull ghcr.io/your-username/edh-stats-frontend:1.0.0 + +# 2. Start services +docker-compose -f docker-compose.prod.deployed.yml up -d + +# 3. Monitor migrations +docker-compose logs -f db-migrate + +# 4. Wait for migrations to complete +# Expected output: +# db-migrate: "Migrations completed successfully!" + +# 5. Check all services are running +docker-compose ps + +# 6. Verify services are healthy +docker-compose exec backend curl http://localhost:3000/api/health +``` + +### Post-Deployment Verification + +- [ ] PostgreSQL is running and healthy +- [ ] Database migrations completed successfully +- [ ] Backend API is responding to health checks +- [ ] Frontend is accessible via reverse proxy +- [ ] SSL/TLS certificate is valid (if behind proxy) +- [ ] Application logs show no errors +- [ ] Database has expected schema + +### Testing Production Deployment + +```bash +# Test API health +curl https://yourdomain.com/api/health + +# Check version +curl https://yourdomain.com/api/auth/config + +# Monitor logs +docker-compose logs -f backend + +# Database check +docker-compose exec postgres pg_isready -U edh_user +``` + +--- + +## Rollback Procedure (if needed) + +### If Build Fails +```bash +# Review logs +docker-compose logs backend + +# Stop services +docker-compose down + +# Check Dockerfile changes +git diff HEAD~1 backend/Dockerfile + +# Revert if necessary +git revert HEAD +``` + +### If Migration Fails +```bash +# Check migration logs +docker-compose logs db-migrate + +# Review migration SQL +cat backend/src/database/migrations.sql + +# Restart migration container +docker-compose restart db-migrate + +# Monitor migration progress +docker-compose logs -f db-migrate +``` + +### If Database Issues +```bash +# Stop all services +docker-compose down + +# Remove database volume (WARNING: deletes data!) +docker volume rm edh-stats_postgres_data + +# Restart services +docker-compose -f docker-compose.prod.deployed.yml up -d + +# Migrations will run automatically on fresh start +``` + +### Complete Rollback to Previous Version + +```bash +# Checkout previous commit +git checkout HEAD~5 + +# Rebuild everything +docker-compose down -v +docker-compose build --no-cache + +# Start fresh +docker-compose up -d + +# Monitor startup +docker-compose logs -f +``` + +--- + +## Monitoring & Maintenance + +### Regular Checks +```bash +# Daily health check +docker-compose exec backend curl http://localhost:3000/api/health + +# Weekly database backup +docker exec edh-stats-postgres pg_dump -U edh_user -d edh_stats > backup-$(date +%Y%m%d).sql + +# Monitor container resources +docker stats +``` + +### Common Issues + +| Issue | Solution | +|-------|----------| +| "Connection refused" | Check PostgreSQL is running: `docker-compose ps` | +| "Migrations failed" | Review logs: `docker-compose logs db-migrate` | +| "Database is locked" | Stop and restart container: `docker-compose restart postgres` | +| "Out of memory" | Increase Docker memory limit or reduce connection pool | +| "Port already in use" | Change port in docker-compose.yml or stop conflicting service | + +### Performance Monitoring + +```bash +# Connection pool status +docker-compose exec postgres psql -U edh_user -d edh_stats -c "SELECT datname, count(*) FROM pg_stat_activity GROUP BY datname;" + +# Slow queries (if enabled) +docker-compose exec postgres psql -U edh_user -d edh_stats -c "SELECT * FROM pg_stat_statements ORDER BY mean_time DESC LIMIT 10;" + +# Database size +docker-compose exec postgres psql -U edh_user -d edh_stats -c "SELECT pg_size_pretty(pg_database_size('edh_stats'));" +``` + +--- + +## Sign-Off + +| Task | Status | Date | +|------|--------|------| +| Code Review | ✅ Complete | | +| Docker Build Test | ✅ Complete | | +| Documentation Review | ✅ Complete | | +| Development Deployment | ⬜ Pending | | +| Production Deployment | ⬜ Pending | | +| Health Verification | ⬜ Pending | | +| Performance Testing | ⬜ Pending | | + +--- + +## Support + +For issues or questions: +1. Check MIGRATION_STATUS.md +2. Review POSTGRES_MIGRATION_COMPLETE.md +3. Check docker-compose logs +4. Review Dockerfile changes +5. Verify environment configuration + +--- + +**Last Updated**: January 17, 2026 +**Version**: PostgreSQL Migration Complete +**Status**: Ready for Deployment diff --git a/PRODUCTION_RELEASE.md b/PRODUCTION_RELEASE.md deleted file mode 100644 index 7726fb5..0000000 --- a/PRODUCTION_RELEASE.md +++ /dev/null @@ -1,305 +0,0 @@ -# Production Release - Complete Setup Guide - -The EDH Stats Tracker is now ready for production deployment! This document summarizes all deployment resources and how to use them. - -## 📦 What Was Created - -### Scripts -- **`deploy.sh`** - Automated deployment script for building and pushing Docker images to GHCR - - Validates prerequisites - - Builds backend and frontend images - - Pushes to GitHub Container Registry - - Generates production configuration - - ~5 minutes to run - -### Documentation -- **`QUICK_DEPLOY.md`** - Fast-track 5-10 minute deployment guide (START HERE!) -- **`DEPLOYMENT.md`** - Comprehensive production deployment guide with all details -- **`PRODUCTION_CHECKLIST.md`** - Pre/during/post deployment verification checklist -- **`PRODUCTION_RELEASE.md`** - This file - -### Docker Configuration -- **`frontend/Dockerfile.prod`** - Production-optimized nginx frontend Dockerfile -- **`frontend/nginx.prod.conf`** - Already exists, fully configured for production -- **`.github/workflows/publish.yml`** - GitHub Actions CI/CD pipeline (automated builds) - -### Updated Files -- **`.gitignore`** - Added deployment and secrets files to ignore list - -## 🚀 Quick Start (Choose One) - -### Path 1: Manual Build & Deploy (Recommended for First Release) - -```bash -# 1. Create GitHub token at https://github.com/settings/tokens -# Select: write:packages scope -# Copy: the token value - -# 2. Build and push images -export GITHUB_USER=your-github-username -export GHCR_TOKEN=ghcr_xxxxxxxxxxxxx - -./deploy.sh v1.0.0 $GHCR_TOKEN - -# 3. Copy generated docker-compose.prod.deployed.yml to server -# 4. Follow QUICK_DEPLOY.md steps 3-8 to complete setup - -# Done! Your app is in production. -``` - -**Time: ~20-30 minutes** -**Best for: First release, production verification** - -### Path 2: GitHub Actions (Fully Automated) - -```bash -# 1. Push release tag -git tag v1.0.0 -git push origin v1.0.0 - -# 2. GitHub Actions automatically: -# - Builds Docker images -# - Pushes to GHCR -# - Generates docker-compose.yml -# - Creates release with artifacts - -# 3. Download docker-compose.prod.deployed.yml from GitHub Releases -# 4. Follow QUICK_DEPLOY.md steps 3-8 to complete setup - -# Done! CI/CD pipeline handled the building. -``` - -**Time: ~15-20 minutes** -**Best for: Subsequent releases, automated workflows** - -## 📋 Documentation Map - -### If you want to... - -| Goal | Document | Time | -|------|----------|------| -| **Get app running in 10 min** | QUICK_DEPLOY.md | 10-15 min | -| **Understand full process** | DEPLOYMENT.md | Read through | -| **Verify before deploying** | PRODUCTION_CHECKLIST.md | Use as checklist | -| **Troubleshoot issues** | DEPLOYMENT.md (Troubleshooting section) | Variable | -| **Setup SSL/HTTPS** | DEPLOYMENT.md (SSL/TLS Configuration) | 15-20 min | -| **Automate future releases** | .github/workflows/publish.yml | Already configured | -| **Backup & restore** | DEPLOYMENT.md (Database Management) | As needed | -| **Update to new version** | DEPLOYMENT.md (Updating to New Version) | 5-10 min | - -## 🔐 Security Considerations - -### Secrets (Never Commit These) -- `.env` file with real values -- Docker secret files -- SSL/TLS certificates -- JWT_SECRET values -- `/etc/docker/daemon.json` (contains base64-encoded GHCR credentials) - -All are properly in `.gitignore` ✓ - -### Required Before Deployment -- [ ] GitHub Personal Access Token with `write:packages` and `read:packages` scopes -- [ ] Secure JWT secret (generated via `openssl rand -base64 32`) -- [ ] Domain name with DNS configured -- [ ] SSL certificates (Let's Encrypt is free) -- [ ] Docker authentication configured (see QUICK_DEPLOY.md step 5) - -### Production Settings -- `NODE_ENV=production` ✓ -- `LOG_LEVEL=warn` (not debug) ✓ -- `ALLOW_REGISTRATION=false` (by default) ✓ -- Rate limiting enabled ✓ -- Security headers configured ✓ -- CORS restricted to your domain ✓ - -## 🐳 Image Information - -### Backend Image -- **Base**: Node.js (slim) -- **Size**: ~150-180 MB -- **Registry**: ghcr.io/YOUR_USER/edh-stats-backend:v1.0.0 -- **Health Check**: /api/health endpoint -- **Ports**: 3000 (internal only, proxied through nginx) - -### Frontend Image -- **Base**: nginx:alpine -- **Size**: ~50-60 MB -- **Registry**: ghcr.io/YOUR_USER/edh-stats-frontend:v1.0.0 -- **Health Check**: / (root) -- **Ports**: 80 (HTTP), 443 (HTTPS) - -### Volumes -- `sqlite_data` - Database persistence (required) -- `app_logs` - Application logs (optional) - -## ✅ Deployment Verification - -After deployment, verify with these commands: - -```bash -# Service status -docker-compose ps - -# Backend health -curl http://localhost:3000/api/health - -# Frontend connectivity -curl http://localhost/ - -# Logs (if issues) -docker-compose logs --tail 50 - -# Resource usage -docker stats -``` - -## 📈 Monitoring & Maintenance - -### Daily Checks -```bash -# View error logs -docker-compose logs backend | grep -i error - -# Check resource usage -docker stats --no-stream - -# Database integrity -docker-compose exec backend sqlite3 /app/database/data/edh-stats.db "PRAGMA integrity_check;" -``` - -### Weekly Tasks -- Review logs for errors -- Monitor disk usage -- Backup database -- Check for available updates - -### Monthly Tasks -- Security patch updates -- SSL certificate renewal (automatic with certbot) -- Review application metrics -- Update dependencies - -## 🔄 Release Cycle - -### Versioning -Follow semantic versioning: -- `v1.0.0` - Initial release -- `v1.1.0` - Minor features/improvements -- `v1.0.1` - Bugfixes -- `v2.0.0` - Major breaking changes - -### Release Process -1. Make code changes and test locally -2. Update version in README and documentation -3. Create git tag: `git tag v1.1.0` -4. Push tag: `git push origin v1.1.0` -5. GitHub Actions builds and pushes automatically -6. Download docker-compose from GitHub Releases -7. Deploy to server: `docker-compose pull && docker-compose up -d` - -**Total time for release: ~30 minutes** - -## 🆘 Need Help? - -1. **First time deployment?** - - Read: QUICK_DEPLOY.md - - Follow step-by-step - - Check PRODUCTION_CHECKLIST.md - -2. **Issues during deployment?** - - Check: DEPLOYMENT.md → Troubleshooting section - - View logs: `docker-compose logs` - - Run: `docker-compose config` to verify configuration - -3. **Server problems?** - - SSH to server - - Run: `docker-compose ps` (service status) - - Run: `docker-compose logs backend` (error details) - - Run: `docker stats` (resource usage) - -4. **Database issues?** - - See: DEPLOYMENT.md → Database Management - - Backup before making changes - - Test restore procedure - -## 🎯 Success Criteria - -Your deployment is successful when: - -✅ All containers running: `docker-compose ps` shows all "Up" -✅ Backend responding: `curl http://localhost:3000/api/health` returns 200 -✅ Frontend accessible: Browser can view the application -✅ Authentication works: Can login with test credentials -✅ No critical errors: `docker-compose logs | grep ERROR` shows nothing -✅ Performance good: API responses < 500ms -✅ Database intact: Can query games and users -✅ Logs clean: Only INFO/WARN level messages, no exceptions -✅ Memory stable: `docker stats` doesn't show increasing memory - -## 📚 Complete File Structure - -``` -edh-stats/ -├── deploy.sh # Main deployment script -├── DEPLOYMENT.md # Comprehensive guide -├── PRODUCTION_CHECKLIST.md # Pre/during/post checklist -├── QUICK_DEPLOY.md # Fast-track guide (START HERE!) -├── PRODUCTION_RELEASE.md # This file -├── .github/ -│ └── workflows/ -│ └── publish.yml # GitHub Actions CI/CD -├── .gitignore # Updated with deployment files -├── frontend/ -│ ├── Dockerfile.prod # Production Dockerfile -│ ├── nginx.prod.conf # Production nginx config -│ └── public/ # Static files -└── backend/ - ├── Dockerfile # Backend Dockerfile - └── src/ # Application code -``` - -## 🎓 Learning Resources - -- Docker documentation: https://docs.docker.com/ -- Docker Compose: https://docs.docker.com/compose/ -- GitHub Container Registry: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry -- GitHub Actions: https://docs.github.com/en/actions -- Let's Encrypt: https://letsencrypt.org/ -- Nginx configuration: https://nginx.org/en/docs/ - -## 🏁 Next Steps - -1. **Create GitHub Token** - - Visit: https://github.com/settings/tokens - - Create token with `write:packages` scope - - Save securely - -2. **Build First Release** - - Choose Path 1 or Path 2 above - - Follow either QUICK_DEPLOY.md or use GitHub Actions - -3. **Deploy to Server** - - Set up server (see QUICK_DEPLOY.md) - - Configure domain and SSL - - Start services - -4. **Verify & Monitor** - - Test all features - - Check logs and metrics - - Plan backup strategy - -5. **Iterate & Update** - - Continue developing features - - Create new git tags for releases - - Deploy updates (zero-downtime with health checks) - ---- - -**Congratulations!** 🎉 You now have everything needed to deploy EDH Stats Tracker to production. - -**Questions? Start with QUICK_DEPLOY.md and follow the step-by-step instructions.** - -**Version**: 1.0.0 -**Date**: 2024-01-15 -**Status**: Ready for Production ✓ diff --git a/README.md b/README.md index b1518fc..1ecab12 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # EDH/Commander Stats Tracker -A lightweight, responsive web application for tracking Magic: The Gathering EDH/Commander games with comprehensive statistics and analytics. Built with Fastify (Node.js), SQLite, and Alpine.js for optimal performance and simplicity. +A lightweight, responsive web application for tracking Magic: The Gathering EDH/Commander games with comprehensive statistics and analytics. Built with Fastify (Node.js), PostgreSQL, and Alpine.js for optimal performance and scalability. ## Features @@ -71,8 +71,10 @@ A lightweight, responsive web application for tracking Magic: The Gathering EDH/ #### Infrastructure & Deployment - **Docker Support**: Complete Docker and Docker Compose setup. - **Development Environment**: Pre-configured with hot-reload and logging. -- **Database**: SQLite with WAL mode for optimal performance. +- **Database**: PostgreSQL 16 with connection pooling and automated migrations. - **Automated Migrations**: Database schema management on startup. +- **Rate Limiting**: Configurable global rate limiting with per-endpoint overrides. +- **Request Logging**: Comprehensive request/response logging for debugging. ### 🚧 Pending / Roadmap @@ -100,12 +102,13 @@ A lightweight, responsive web application for tracking Magic: The Gathering EDH/ ## Technology Stack - **Backend**: Fastify (Node.js v20+) -- **Database**: SQLite (better-sqlite3) with WAL mode +- **Database**: PostgreSQL 16 with connection pooling (pg library) - **Frontend**: Alpine.js, Tailwind CSS (CDN) - **Visualization**: Chart.js - **Containerization**: Docker & Docker Compose - **Authentication**: JWT with HS512 hashing - **Password Security**: bcryptjs with 12-round hashing +- **Rate Limiting**: @fastify/rate-limit plugin with configurable limits ## Quick Start @@ -128,34 +131,75 @@ docker-compose up -d # Backend API: http://localhost:3002 ``` -> **Note:** Default ports are `8081` (Frontend) and `3002` (Backend) to avoid conflicts. +> **Note:** Default ports are `8081` (Frontend) and `3002` (Backend) to avoid conflicts. PostgreSQL runs on `5432`. -#### Environment Variables +#### Custom Environment Variables + +You can customize the database and other settings by creating or editing `.env`: + +```bash +# Copy the example to create your own +cp .env.example .env + +# Edit .env with your preferred settings +nano .env + +# Start with custom environment +docker-compose up -d +``` + +Common customizations: + +```env +# Change PostgreSQL password +DB_PASSWORD=your_secure_password + +# Enable debug logging +LOG_LEVEL=debug + +# Tighten rate limiting +RATE_LIMIT_WINDOW=5 +RATE_LIMIT_MAX=50 + +# Disable user registration +ALLOW_REGISTRATION=false +``` + +#### Environment Variables Reference Key environment variables you can configure in `.env`: ```env -# Application -NODE_ENV=development -PORT=3000 -HOST=0.0.0.0 +# PostgreSQL Database Configuration +DB_HOST=localhost # Database server hostname/IP +DB_NAME=edh_stats # Database name +DB_USER=postgres # Database user (must be superuser for migrations) +DB_PASSWORD=edh_password # Database password (MUST be changed in production) +# PostgreSQL always uses standard port 5432 (not configurable) + +# Application Configuration +NODE_ENV=development # Set to 'production' in production +LOG_LEVEL=info # Log level: debug, info, warn, error # Security -JWT_SECRET=your-secure-secret-key -SESSION_SECRET=your-session-secret +JWT_SECRET=your-super-secure-jwt-secret-key-change-this-in-production -# User Registration - Set to 'true' to enable signup, 'false' to disable -ALLOW_REGISTRATION=false - -# Database -DATABASE_PATH=/app/database/data/edh-stats.db -DATABASE_BACKUP_PATH=/app/database/data/backups - -# CORS +# CORS Configuration CORS_ORIGIN=http://localhost:80 -# Logging -LOG_LEVEL=info +# User Registration - Set to 'true' to enable signup, 'false' to disable +ALLOW_REGISTRATION=true + +# Rate Limiting (optional - default: 100 requests per 15 minutes) +RATE_LIMIT_WINDOW=15 # Time window in MINUTES +RATE_LIMIT_MAX=100 # Max requests per window + +# Database Seeding (optional - for development only) +DB_SEED=false # Set to 'true' to auto-seed sample data on startup + +# Database Connection Pooling (Advanced - optional) +# DB_POOL_MIN=2 +# DB_POOL_MAX=10 ``` ### Local Development @@ -207,12 +251,12 @@ edh-stats/ │ ├── tailwind.config.js # Tailwind configuration │ ├── package.json # Node.js dependencies │ └── Dockerfile -├── database/ # Persisted SQLite data +├── postgres_data/ # Persisted PostgreSQL data (Docker volume) ├── docs/ # Documentation ├── FIXES.md # Detailed list of fixes applied ├── FEATURES.md # Feature documentation ├── docker-compose.yml # Development orchestration -├── docker-compose.prod.yml # Production orchestration +├── deploy.sh # Production deployment script └── README.md ``` @@ -289,15 +333,55 @@ edh-stats/ ## Development Notes -### Database -- Location: `./database/data/edh-stats.db` (or specified via `DATABASE_PATH`) -- Mode: SQLite with WAL (Write-Ahead Logging) for performance -- Migrations: Automatically run on server startup (unless in test mode) -- Foreign Keys: Enabled for data integrity -- Auto-migrations: Uses `src/database/migrations.sql` -- Views: - - `user_stats`: Aggregates user-level statistics - - `commander_stats`: Aggregates per-commander statistics (commanders with 5+ games shown in dashboard) +### PostgreSQL Database Setup + +#### Connection Details +- **Database**: PostgreSQL 16 (containerized in Docker) +- **Connection Library**: Node.js `pg` library (async/await) +- **Host**: postgres (configurable via `DB_HOST`) +- **Port**: 5432 (PostgreSQL standard port, not configurable) +- **Name**: edh_stats (configurable via `DB_NAME`) +- **User**: postgres (configured via `DB_USER`) +- **Connection Pool**: Automatic pooling (configurable via `DB_POOL_MIN`/`DB_POOL_MAX`) + +#### Migrations & Schema +- **Auto-migrations**: Database schema automatically created on server startup +- **Migration File**: `src/database/migrations.sql` +- **Seed Data**: Optional test data can be seeded via `DB_SEED=true` +- **Foreign Keys**: Enabled for data integrity + +#### Database Objects +- **Tables**: users, commanders, games, user_stats (summary) +- **Views**: + - `user_stats`: Aggregates user-level statistics (total games, win rate, etc.) + - `commander_stats`: Aggregates per-commander statistics (shown for commanders with 5+ games) +- **JSONB Fields**: + - `commanders.colors`: Color identity array stored as JSONB + - Automatically parsed by pg driver - no JSON.parse() needed in code + +#### Tips & Common Operations + +**Reset Database** +```bash +# Remove PostgreSQL volume to reset all data +docker compose down -v +docker compose up -d +``` + +**View Database Directly** +```bash +# Connect to PostgreSQL container +docker compose exec postgres psql -U postgres -d edh_stats + +# List tables +\dt + +# Exit +\q +``` + +**Check Connection Pool Status** +The application logs connection pool info at startup. To debug connection issues, set `LOG_LEVEL=debug` to see detailed connection logging. ### Frontend State Management - Alpine.js components handle all state management @@ -327,7 +411,36 @@ edh-stats/ ## Recent Changes & Fixes -### Latest Updates (Session 2) +### Latest Updates (Session 3 - PostgreSQL Migration & Refinements) + +#### Major: SQLite → PostgreSQL Migration ✅ +- **Database**: Migrated from SQLite (better-sqlite3) to PostgreSQL 16 +- **Async/Await**: Converted all database operations to async/await pattern +- **Connection Pooling**: Uses pg library with automatic connection pooling +- **JSONB Support**: Color arrays now stored as PostgreSQL JSONB type (auto-parsed by pg driver) +- **No Breaking Changes**: Fully backward compatible with existing frontend + +#### Configuration Simplification +- **Removed DB_PORT**: Now uses PostgreSQL standard port 5432 (not configurable) +- **Cleaner Environment**: Only essential variables need configuration +- **Security**: PostgreSQL port no longer exposed to host network +- **Simplified Docs**: Better clarity on what settings are configurable vs. standard + +#### Rate Limiting & Logging +- **Global Rate Limiting**: Configurable via `RATE_LIMIT_WINDOW` (minutes) and `RATE_LIMIT_MAX` (requests) +- **Default**: 100 requests per 15 minutes (per IP address) +- **Per-Endpoint Limits**: Individual endpoints have their own stricter limits +- **Request Logging**: Comprehensive request/response logging at debug level +- **Logs Include**: Method, URL, IP, status code, response time + +#### Environment Variables (Simplified) +- **All configuration**: Centralized in `.env` file +- **PostgreSQL Connection**: `DB_HOST`, `DB_NAME`, `DB_USER`, `DB_PASSWORD` (port is standard 5432) +- **Rate Limiting**: `RATE_LIMIT_WINDOW`, `RATE_LIMIT_MAX` (optional) +- **Logging**: `LOG_LEVEL` (debug, info, warn, error) +- **Database Seeding**: `DB_SEED` (optional, for development) + +### Previous Updates (Session 2) - **Top Commanders Display**: Fixed filtering to show all commanders with 5+ games, sorted by most-played first - **Game Notes UI**: Expanded textarea width to full width with improved sizing (5 rows) - **Data Consistency**: Fixed camelCase/snake_case field naming throughout API and frontend diff --git a/backend/Dockerfile b/backend/Dockerfile index 49f2f94..a97cef0 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -10,7 +10,7 @@ WORKDIR /app # Copy dependency files first for better layer caching COPY package*.json ./ -RUN npm ci --only=production && npm cache clean --force +RUN npm ci --omit=dev && npm cache clean --force # Copy source code COPY . . diff --git a/backend/package-lock.json b/backend/package-lock.json index 383366c..eb056de 100644 --- a/backend/package-lock.json +++ b/backend/package-lock.json @@ -13,10 +13,10 @@ "@fastify/jwt": "^10.0.0", "@fastify/rate-limit": "^10.3.0", "bcryptjs": "^2.4.3", - "better-sqlite3": "^9.2.2", "close-with-grace": "^1.2.0", "dotenv": "^16.3.1", "fastify": "^5.7.1", + "pg": "^8.11.3", "pino-pretty": "^13.1.3", "zod": "^3.22.4" }, @@ -702,63 +702,12 @@ "dev": true, "license": "MIT" }, - "node_modules/base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, "node_modules/bcryptjs": { "version": "2.4.3", "resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-2.4.3.tgz", "integrity": "sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ==", "license": "MIT" }, - "node_modules/better-sqlite3": { - "version": "9.6.0", - "resolved": "https://registry.npmjs.org/better-sqlite3/-/better-sqlite3-9.6.0.tgz", - "integrity": "sha512-yR5HATnqeYNVnkaUTf4bOP2dJSnyhP4puJN/QPRyx4YkBEEUxib422n2XzPqDEHjQQqazoYoADdAm5vE15+dAQ==", - "hasInstallScript": true, - "license": "MIT", - "dependencies": { - "bindings": "^1.5.0", - "prebuild-install": "^7.1.1" - } - }, - "node_modules/bindings": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", - "license": "MIT", - "dependencies": { - "file-uri-to-path": "1.0.0" - } - }, - "node_modules/bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "license": "MIT", - "dependencies": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, "node_modules/bn.js": { "version": "4.12.2", "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.2.tgz", @@ -776,30 +725,6 @@ "concat-map": "0.0.1" } }, - "node_modules/buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, "node_modules/builtin-modules": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", @@ -916,12 +841,6 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/chownr": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", - "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", - "license": "ISC" - }, "node_modules/close-with-grace": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/close-with-grace/-/close-with-grace-1.3.0.tgz", @@ -1070,30 +989,6 @@ } } }, - "node_modules/decompress-response": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", - "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", - "license": "MIT", - "dependencies": { - "mimic-response": "^3.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/deep-extend": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", - "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", - "license": "MIT", - "engines": { - "node": ">=4.0.0" - } - }, "node_modules/deep-is": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", @@ -1146,15 +1041,6 @@ "node": ">=6" } }, - "node_modules/detect-libc": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", - "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", - "license": "Apache-2.0", - "engines": { - "node": ">=8" - } - }, "node_modules/doctrine": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", @@ -1778,15 +1664,6 @@ "node": ">=0.10.0" } }, - "node_modules/expand-template": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", - "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", - "license": "(MIT OR WTFPL)", - "engines": { - "node": ">=6" - } - }, "node_modules/fast-copy": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/fast-copy/-/fast-copy-4.0.2.tgz", @@ -2026,12 +1903,6 @@ "node": "^10.12.0 || >=12.0.0" } }, - "node_modules/file-uri-to-path": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", - "license": "MIT" - }, "node_modules/find-my-way": { "version": "9.4.0", "resolved": "https://registry.npmjs.org/find-my-way/-/find-my-way-9.4.0.tgz", @@ -2101,12 +1972,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/fs-constants": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", - "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", - "license": "MIT" - }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -2236,12 +2101,6 @@ "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" } }, - "node_modules/github-from-package": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", - "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", - "license": "MIT" - }, "node_modules/glob": { "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", @@ -2430,26 +2289,6 @@ "integrity": "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg==", "license": "MIT" }, - "node_modules/ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "BSD-3-Clause" - }, "node_modules/ignore": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", @@ -2505,12 +2344,6 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "license": "ISC" }, - "node_modules/ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", - "license": "ISC" - }, "node_modules/internal-slot": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", @@ -3127,18 +2960,6 @@ "node": ">= 0.4" } }, - "node_modules/mimic-response": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", - "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/minimalistic-assert": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", @@ -3167,12 +2988,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/mkdirp-classic": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", - "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", - "license": "MIT" - }, "node_modules/mnemonist": { "version": "0.40.3", "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.40.3.tgz", @@ -3189,12 +3004,6 @@ "dev": true, "license": "MIT" }, - "node_modules/napi-build-utils": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-2.0.0.tgz", - "integrity": "sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA==", - "license": "MIT" - }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -3202,30 +3011,6 @@ "dev": true, "license": "MIT" }, - "node_modules/node-abi": { - "version": "3.85.0", - "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.85.0.tgz", - "integrity": "sha512-zsFhmbkAzwhTft6nd3VxcG0cvJsT70rL+BIGHWVq5fi6MwGrHwzqKaxXE+Hl2GmnGItnDKPPkO5/LQqjVkIdFg==", - "license": "MIT", - "dependencies": { - "semver": "^7.3.5" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/node-abi/node_modules/semver": { - "version": "7.7.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", - "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/object-inspect": { "version": "1.13.4", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", @@ -3465,6 +3250,95 @@ "dev": true, "license": "MIT" }, + "node_modules/pg": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.17.1.tgz", + "integrity": "sha512-EIR+jXdYNSMOrpRp7g6WgQr7SaZNZfS7IzZIO0oTNEeibq956JxeD15t3Jk3zZH0KH8DmOIx38qJfQenoE8bXQ==", + "license": "MIT", + "dependencies": { + "pg-connection-string": "^2.10.0", + "pg-pool": "^3.11.0", + "pg-protocol": "^1.11.0", + "pg-types": "2.2.0", + "pgpass": "1.0.5" + }, + "engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.3.0" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz", + "integrity": "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==", + "license": "MIT", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.10.0.tgz", + "integrity": "sha512-ur/eoPKzDx2IjPaYyXS6Y8NSblxM7X64deV2ObV57vhjsWiwLvUD6meukAzogiOsu60GO8m/3Cb6FdJsWNjwXg==", + "license": "MIT" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "license": "ISC", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.11.0", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.11.0.tgz", + "integrity": "sha512-MJYfvHwtGp870aeusDh+hg9apvOe2zmpZJpyt+BMtzUWlVqbhFmMK6bOBXLBUPd7iRtIF9fZplDc7KrPN3PN7w==", + "license": "MIT", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.11.0.tgz", + "integrity": "sha512-pfsxk2M9M3BuGgDOfuy37VNRRX3jmKgMjcvAcWqNDpZSf4cUmv8HSOl5ViRQFsfARFn0KuUQTgLxVMbNq5NW3g==", + "license": "MIT" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "license": "MIT", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "license": "MIT", + "dependencies": { + "split2": "^4.1.0" + } + }, "node_modules/pino": { "version": "10.2.0", "resolved": "https://registry.npmjs.org/pino/-/pino-10.2.0.tgz", @@ -3548,30 +3422,43 @@ "node": ">= 0.4" } }, - "node_modules/prebuild-install": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.3.tgz", - "integrity": "sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug==", + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.1.tgz", + "integrity": "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", "license": "MIT", "dependencies": { - "detect-libc": "^2.0.0", - "expand-template": "^2.0.3", - "github-from-package": "0.0.0", - "minimist": "^1.2.3", - "mkdirp-classic": "^0.5.3", - "napi-build-utils": "^2.0.0", - "node-abi": "^3.3.0", - "pump": "^3.0.0", - "rc": "^1.2.7", - "simple-get": "^4.0.0", - "tar-fs": "^2.0.0", - "tunnel-agent": "^0.6.0" - }, - "bin": { - "prebuild-install": "bin.js" + "xtend": "^4.0.0" }, "engines": { - "node": ">=10" + "node": ">=0.10.0" } }, "node_modules/prelude-ls": { @@ -3647,44 +3534,6 @@ "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==", "license": "MIT" }, - "node_modules/rc": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", - "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", - "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", - "dependencies": { - "deep-extend": "^0.6.0", - "ini": "~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - }, - "bin": { - "rc": "cli.js" - } - }, - "node_modules/rc/node_modules/strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/real-require": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", @@ -4144,51 +3993,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/simple-concat": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", - "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/simple-get": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", - "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "decompress-response": "^6.0.0", - "once": "^1.3.1", - "simple-concat": "^1.0.0" - } - }, "node_modules/sonic-boom": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz", @@ -4234,15 +4038,6 @@ "node": ">= 0.4" } }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "license": "MIT", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, "node_modules/string.prototype.trim": { "version": "1.2.10", "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", @@ -4364,34 +4159,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/tar-fs": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz", - "integrity": "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==", - "license": "MIT", - "dependencies": { - "chownr": "^1.1.1", - "mkdirp-classic": "^0.5.2", - "pump": "^3.0.0", - "tar-stream": "^2.1.4" - } - }, - "node_modules/tar-stream": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", - "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", - "license": "MIT", - "dependencies": { - "bl": "^4.0.3", - "end-of-stream": "^1.4.1", - "fs-constants": "^1.0.0", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -4433,18 +4200,6 @@ "strip-bom": "^3.0.0" } }, - "node_modules/tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", - "license": "Apache-2.0", - "dependencies": { - "safe-buffer": "^5.0.1" - }, - "engines": { - "node": "*" - } - }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -4578,12 +4333,6 @@ "punycode": "^2.1.0" } }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "license": "MIT" - }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", diff --git a/backend/package.json b/backend/package.json index 2e26ea9..f3ffa9d 100644 --- a/backend/package.json +++ b/backend/package.json @@ -19,7 +19,7 @@ "@fastify/jwt": "^10.0.0", "@fastify/rate-limit": "^10.3.0", "bcryptjs": "^2.4.3", - "better-sqlite3": "^9.2.2", + "pg": "^8.11.3", "close-with-grace": "^1.2.0", "dotenv": "^16.3.1", "fastify": "^5.7.1", @@ -37,7 +37,7 @@ "commander", "statistics", "fastify", - "sqlite" + "postgres" ], "author": "EDH Stats App", "license": "MIT" diff --git a/backend/src/config/database.js b/backend/src/config/database.js index 3f0bcbf..2e3bec6 100644 --- a/backend/src/config/database.js +++ b/backend/src/config/database.js @@ -1,5 +1,5 @@ -import Database from 'better-sqlite3' -import { readFileSync, existsSync, mkdirSync } from 'fs' +import pg from 'pg' +import { readFileSync } from 'fs' import { fileURLToPath } from 'url' import { dirname, join } from 'path' @@ -8,31 +8,37 @@ const __dirname = dirname(__filename) class DatabaseManager { constructor() { - this.db = null + this.pool = null this.isInitialized = false this.currentUserId = null } async initialize() { if (this.isInitialized) { - return this.db + return this.pool } - const dbPath = - process.env.DATABASE_PATH || - join(__dirname, '../../../database/data/edh-stats.db') - try { - // Create database directory if it doesn't exist - const dbDir = dirname(dbPath) - if (!existsSync(dbDir)) { - mkdirSync(dbDir, { recursive: true }) + // Get database configuration from environment variables + const dbConfig = { + host: process.env.DB_HOST || 'localhost', + port: 5432, // PostgreSQL standard port, not configurable + database: process.env.DB_NAME || 'edh_stats', + user: process.env.DB_USER || 'postgres', + password: process.env.DB_PASSWORD || 'edh_password' } - this.db = new Database(dbPath) - this.db.pragma('journal_mode = WAL') - this.db.pragma('foreign_keys = ON') - this.db.pragma('query_only = false') + // Create connection pool + this.pool = new pg.Pool(dbConfig) + + // Test the connection + const client = await this.pool.connect() + try { + const result = await client.query('SELECT 1 as test') + console.log('Database connected successfully') + } finally { + client.release() + } // Run migrations if (process.env.NODE_ENV !== 'test') { @@ -41,53 +47,62 @@ class DatabaseManager { this.isInitialized = true console.log('Database initialized successfully') - return this.db + return this.pool } catch (error) { console.error('Failed to initialize database:', error) throw error } } - async runMigrations() { - try { - const migrationPath = join(__dirname, '../database/migrations.sql') - const migrationSQL = readFileSync(migrationPath, 'utf8') + async runMigrations() { + const client = await this.pool.connect() + try { + const migrationPath = join(__dirname, '../database/migrations.sql') + const migrationSQL = readFileSync(migrationPath, 'utf8') - this.db.exec(migrationSQL) - console.log('Database migrations completed') - } catch (error) { - console.error('Failed to run migrations:', error) - throw error - } - } + // Execute the entire migration file as a single query + // This is safer for complex SQL with functions and views + await client.query(migrationSQL) + console.log('Database migrations completed') + } catch (error) { + console.error('Failed to run migrations:', error) + throw error + } finally { + client.release() + } + } - async seedData() { - try { - const seedPath = join(__dirname, '../database/seeds.sql') - const seedSQL = readFileSync(seedPath, 'utf8') + async seedData() { + const client = await this.pool.connect() + try { + const seedPath = join(__dirname, '../database/seeds.sql') + const seedSQL = readFileSync(seedPath, 'utf8') - this.db.exec(seedSQL) - console.log('Database seeding completed') - } catch (error) { - console.error('Failed to seed database:', error) - throw error - } - } + // Execute the entire seed file as a single query + await client.query(seedSQL) + console.log('Database seeding completed') + } catch (error) { + console.error('Failed to seed database:', error) + throw error + } finally { + client.release() + } + } async close() { - if (this.db) { - this.db.close() - this.db = null + if (this.pool) { + await this.pool.end() + this.pool = null this.isInitialized = false console.log('Database connection closed') } } - getDatabase() { + getPool() { if (!this.isInitialized) { throw new Error('Database not initialized. Call initialize() first.') } - return this.db + return this.pool } setCurrentUser(userId) { @@ -99,35 +114,49 @@ class DatabaseManager { } // Helper methods for common operations - prepare(query) { - return this.getDatabase().prepare(query) + async query(query, params = []) { + const client = await this.pool.connect() + try { + return await client.query(query, params) + } finally { + client.release() + } } - exec(query) { - return this.getDatabase().exec(query) + async run(query, params = []) { + return this.query(query, params) } - run(query, params = []) { - return this.getDatabase().prepare(query).run(params) + async get(query, params = []) { + const result = await this.query(query, params) + return result.rows[0] } - get(query, params = []) { - return this.getDatabase().prepare(query).get(params) - } - - all(query, params = []) { - return this.getDatabase().prepare(query).all(params) + async all(query, params = []) { + const result = await this.query(query, params) + return result.rows } // Transaction support - transaction(fn) { - return this.getDatabase().transaction(fn) + async transaction(fn) { + const client = await this.pool.connect() + try { + await client.query('BEGIN') + const result = await fn(client) + await client.query('COMMIT') + return result + } catch (error) { + await client.query('ROLLBACK') + throw error + } finally { + client.release() + } } // Health check method async healthCheck() { try { - const result = this.get('SELECT 1 as test') + const result = await this.get('SELECT 1 as test') return result?.test === 1 } catch (error) { console.error('Database health check failed:', error) @@ -143,9 +172,9 @@ export default dbManager // Helper for async database operations export const withDatabase = async (callback) => { - const db = await dbManager.initialize() + const pool = await dbManager.initialize() try { - return await callback(db) + return await callback(pool) } finally { // Don't close here, let the manager handle connection lifecycle } diff --git a/backend/src/config/jwt.js b/backend/src/config/jwt.js index 796f972..6c4e76b 100644 --- a/backend/src/config/jwt.js +++ b/backend/src/config/jwt.js @@ -29,3 +29,14 @@ export const serverConfig = { export const registrationConfig = { allowRegistration: process.env.ALLOW_REGISTRATION !== 'false' } + +export const rateLimitConfig = { + // Global rate limit - applies to all endpoints unless overridden + // Window is in milliseconds, convert from environment variable (default in minutes) + window: process.env.RATE_LIMIT_WINDOW + ? parseInt(process.env.RATE_LIMIT_WINDOW) * 60 * 1000 + : 15 * 60 * 1000, // 15 minutes default + max: process.env.RATE_LIMIT_MAX + ? parseInt(process.env.RATE_LIMIT_MAX) + : 100 // requests per window +} diff --git a/backend/src/database/migrate.js b/backend/src/database/migrate.js index 62beedf..b6567a3 100644 --- a/backend/src/database/migrate.js +++ b/backend/src/database/migrate.js @@ -3,10 +3,18 @@ import dbManager from '../config/database.js' async function runMigrations() { console.log('Running database migrations...') - + try { await dbManager.initialize() console.log('Migrations completed successfully!') + + // Check if seeding is enabled via environment variable + const seedingEnabled = process.env.DB_SEED === 'true' || process.env.DB_SEED === '1' + if (seedingEnabled) { + console.log('Seeding enabled via DB_SEED environment variable') + await dbManager.seedData() + console.log('Database seeded successfully!') + } } catch (error) { console.error('Migration failed:', error) process.exit(1) @@ -17,7 +25,7 @@ async function runMigrations() { async function seedDatabase() { console.log('Seeding database with sample data...') - + try { await dbManager.initialize() await dbManager.seedData() @@ -32,26 +40,39 @@ async function seedDatabase() { async function resetDatabase() { console.log('Resetting database...') - + try { await dbManager.initialize() - - // Drop all tables - const db = dbManager.getDatabase() - db.exec(` - DROP TABLE IF EXISTS games; - DROP TABLE IF EXISTS commanders; - DROP TABLE IF EXISTS users; - DROP VIEW IF EXISTS user_stats; - DROP VIEW IF EXISTS commander_stats; - `) - + + // Drop all tables and views using async queries + const dropStatements = [ + 'DROP TRIGGER IF EXISTS update_games_timestamp ON games', + 'DROP TRIGGER IF EXISTS update_commanders_timestamp ON commanders', + 'DROP TRIGGER IF EXISTS update_users_timestamp ON users', + 'DROP FUNCTION IF EXISTS update_timestamp()', + 'DROP VIEW IF EXISTS commander_stats CASCADE', + 'DROP VIEW IF EXISTS user_stats CASCADE', + 'DROP TABLE IF EXISTS games CASCADE', + 'DROP TABLE IF EXISTS commanders CASCADE', + 'DROP TABLE IF EXISTS users CASCADE' + ] + + for (const statement of dropStatements) { + try { + await dbManager.query(statement) + } catch (error) { + // Ignore errors for non-existent objects + if (!error.message.includes('does not exist')) { + console.warn(`Warning during cleanup: ${error.message}`) + } + } + } + console.log('Database reset completed!') - + // Run migrations and seeding await runMigrations() await seedDatabase() - } catch (error) { console.error('Database reset failed:', error) process.exit(1) @@ -79,4 +100,4 @@ switch (command) { console.log(' node migrate.js seed - Seed database with sample data') console.log(' node migrate.js reset - Reset database (drop, migrate, seed)') process.exit(1) -} \ No newline at end of file +} diff --git a/backend/src/database/migrations.sql b/backend/src/database/migrations.sql index d0b0ac0..3e24c94 100644 --- a/backend/src/database/migrations.sql +++ b/backend/src/database/migrations.sql @@ -1,45 +1,42 @@ -- EDH/Commander Stats Tracker Database Schema --- SQLite database with proper foreign keys and constraints - --- Enable foreign key support -PRAGMA foreign_keys = ON; +-- PostgreSQL database with proper constraints -- Users table for authentication CREATE TABLE IF NOT EXISTS users ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - username TEXT UNIQUE NOT NULL CHECK(length(username) >= 3), - password_hash TEXT NOT NULL CHECK(length(password_hash) >= 60), + id SERIAL PRIMARY KEY, + username TEXT UNIQUE NOT NULL CHECK(LENGTH(username) >= 3), + password_hash TEXT NOT NULL CHECK(LENGTH(password_hash) >= 60), email TEXT UNIQUE, - created_at DATETIME DEFAULT CURRENT_TIMESTAMP, - updated_at DATETIME DEFAULT CURRENT_TIMESTAMP + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ); -- Commanders table with color identity CREATE TABLE IF NOT EXISTS commanders ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name TEXT NOT NULL CHECK(length(name) >= 2), - colors TEXT NOT NULL CHECK(length(colors) >= 2), -- JSON array: ["W", "U", "B", "R", "G"] + id SERIAL PRIMARY KEY, + name TEXT NOT NULL CHECK(LENGTH(name) >= 2), + colors JSONB NOT NULL, user_id INTEGER NOT NULL, - created_at DATETIME DEFAULT CURRENT_TIMESTAMP, - updated_at DATETIME DEFAULT CURRENT_TIMESTAMP, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE, - CHECK(json_valid(colors) = 1) + CONSTRAINT valid_colors CHECK(jsonb_typeof(colors) = 'array') ); -- Games table with all requested statistics CREATE TABLE IF NOT EXISTS games ( - id INTEGER PRIMARY KEY AUTOINCREMENT, + id SERIAL PRIMARY KEY, date DATE NOT NULL CHECK(date >= '2020-01-01'), player_count INTEGER NOT NULL CHECK(player_count >= 2 AND player_count <= 8), commander_id INTEGER NOT NULL, - won BOOLEAN NOT NULL DEFAULT 0 CHECK(won IN (0, 1)), + won BOOLEAN NOT NULL DEFAULT FALSE, rounds INTEGER CHECK(rounds > 0), - starting_player_won BOOLEAN NOT NULL DEFAULT 0 CHECK(starting_player_won IN (0, 1)), - sol_ring_turn_one_won BOOLEAN NOT NULL DEFAULT 0 CHECK(sol_ring_turn_one_won IN (0, 1)), - notes TEXT CHECK(length(notes) <= 1000), + starting_player_won BOOLEAN NOT NULL DEFAULT FALSE, + sol_ring_turn_one_won BOOLEAN NOT NULL DEFAULT FALSE, + notes TEXT CHECK(LENGTH(notes) <= 1000), user_id INTEGER NOT NULL, - created_at DATETIME DEFAULT CURRENT_TIMESTAMP, - updated_at DATETIME DEFAULT CURRENT_TIMESTAMP, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY (commander_id) REFERENCES commanders(id) ON DELETE CASCADE, FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE ); @@ -52,40 +49,47 @@ CREATE INDEX IF NOT EXISTS idx_games_date ON games(date); CREATE INDEX IF NOT EXISTS idx_games_user_commander ON games(user_id, commander_id); CREATE INDEX IF NOT EXISTS idx_games_user_date ON games(user_id, date); +-- Function to update updated_at timestamp +CREATE OR REPLACE FUNCTION update_timestamp() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = CURRENT_TIMESTAMP; + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + -- Triggers to update updated_at timestamps -CREATE TRIGGER IF NOT EXISTS update_users_timestamp - AFTER UPDATE ON users +DROP TRIGGER IF EXISTS update_users_timestamp ON users; +CREATE TRIGGER update_users_timestamp + BEFORE UPDATE ON users FOR EACH ROW - BEGIN - UPDATE users SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; - END; + EXECUTE FUNCTION update_timestamp(); -CREATE TRIGGER IF NOT EXISTS update_commanders_timestamp - AFTER UPDATE ON commanders +DROP TRIGGER IF EXISTS update_commanders_timestamp ON commanders; +CREATE TRIGGER update_commanders_timestamp + BEFORE UPDATE ON commanders FOR EACH ROW - BEGIN - UPDATE commanders SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; - END; + EXECUTE FUNCTION update_timestamp(); -CREATE TRIGGER IF NOT EXISTS update_games_timestamp - AFTER UPDATE ON games +DROP TRIGGER IF EXISTS update_games_timestamp ON games; +CREATE TRIGGER update_games_timestamp + BEFORE UPDATE ON games FOR EACH ROW - BEGIN - UPDATE games SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; - END; + EXECUTE FUNCTION update_timestamp(); -- Views for common statistics queries -CREATE VIEW IF NOT EXISTS user_stats AS +DROP VIEW IF EXISTS user_stats CASCADE; +CREATE VIEW user_stats AS SELECT u.id as user_id, u.username, (SELECT COUNT(DISTINCT id) FROM commanders WHERE user_id = u.id) as total_commanders, (SELECT COUNT(*) FROM games WHERE user_id = u.id) as total_games, - (SELECT COUNT(*) FROM games WHERE user_id = u.id AND won = 1) as total_wins, + (SELECT COUNT(*) FROM games WHERE user_id = u.id AND won = TRUE) as total_wins, ROUND( CASE WHEN (SELECT COUNT(*) FROM games WHERE user_id = u.id) > 0 - THEN ((SELECT COUNT(*) FROM games WHERE user_id = u.id AND won = 1) * 100.0 / (SELECT COUNT(*) FROM games WHERE user_id = u.id)) + THEN ((SELECT COUNT(*) FROM games WHERE user_id = u.id AND won = TRUE)::NUMERIC * 100.0 / (SELECT COUNT(*) FROM games WHERE user_id = u.id)) ELSE 0 END, 2 ) as win_rate, @@ -94,23 +98,24 @@ SELECT FROM users u GROUP BY u.id, u.username; -CREATE VIEW IF NOT EXISTS commander_stats AS +DROP VIEW IF EXISTS commander_stats CASCADE; +CREATE VIEW commander_stats AS SELECT c.id as commander_id, c.name, c.colors, c.user_id, (SELECT COUNT(*) FROM games WHERE commander_id = c.id) as total_games, - (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = 1) as total_wins, + (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = TRUE) as total_wins, ROUND( CASE WHEN (SELECT COUNT(*) FROM games WHERE commander_id = c.id) > 0 - THEN ((SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = 1) * 100.0 / (SELECT COUNT(*) FROM games WHERE commander_id = c.id)) + THEN ((SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = TRUE)::NUMERIC * 100.0 / (SELECT COUNT(*) FROM games WHERE commander_id = c.id)) ELSE 0 END, 2 ) as win_rate, (SELECT AVG(rounds) FROM games WHERE commander_id = c.id) as avg_rounds, - (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND starting_player_won = 1) as starting_player_wins, - (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND sol_ring_turn_one_won = 1) as sol_ring_wins, + (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND starting_player_won = TRUE) as starting_player_wins, + (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND sol_ring_turn_one_won = TRUE) as sol_ring_wins, (SELECT MAX(date) FROM games WHERE commander_id = c.id) as last_played FROM commanders c; diff --git a/backend/src/database/seeds.sql b/backend/src/database/seeds.sql index 868d71c..2161a54 100644 --- a/backend/src/database/seeds.sql +++ b/backend/src/database/seeds.sql @@ -2,50 +2,65 @@ -- This file contains sample users, commanders, and games -- Insert sample users (passwords are 'password123' hashed with bcrypt) -INSERT OR IGNORE INTO users (id, username, password_hash, email) VALUES -(1, 'testuser', '$2a$12$LQv3c1yqBWVHxkd0LHAkCOYz6TtxMQJqhN8/LewdBPjRrhSpXqzOa', 'test@example.com'), -(2, 'magictg', '$2a$12$LQv3c1yqBWVHxkd0LHAkCOYz6TtxMQJqhN8/LewdBPjRrhSpXqzOa', 'magic@example.com'); +-- Credentials for testing: testuser / password123, magictg / password123 +INSERT INTO users (id, username, password_hash, email) VALUES +(1, 'testuser', '$2a$12$TbMEXlrucxJW4cMmkvJHeuLdehtWFBUbKJwL0KgYpeRcoG7ZCTo16', 'test@example.com'), +(2, 'magictg', '$2a$12$TbMEXlrucxJW4cMmkvJHeuLdehtWFBUbKJwL0KgYpeRcoG7ZCTo16', 'magic@example.com') +ON CONFLICT DO NOTHING; + +-- Reset sequence for users +SELECT setval('users_id_seq', (SELECT MAX(id) FROM users), true); -- Insert sample commanders with various color identities -INSERT OR IGNORE INTO commanders (id, name, colors, user_id) VALUES +INSERT INTO commanders (id, name, colors, user_id) VALUES -- Mono-colored commanders -(1, 'Urza, Lord High Artificer', '["U"]', 1), -(2, 'Gishath, Sun''s Avatar', '["R","G","W"]', 1), -(3, 'Grim-grin, Corpse-Born', '["U","B"]', 1), -(4, 'Krenko, Mob Boss', '["R"]', 2), -(5, 'Ghave, Guru of Spores', '["W","B","G"]', 2), -(6, 'Narset of the Ancient Way', '["U","R","W"]', 1), -(7, 'Tymna the Weaver', '["W","B"]', 2), -(8, 'Kydele, Chosen of Kruphix', '["U","G"]', 1); +(1, 'Urza, Lord High Artificer', '["U"]'::jsonb, 1), +(2, 'Gishath, Sun''s Avatar', '["R","G","W"]'::jsonb, 1), +(3, 'Grim-grin, Corpse-Born', '["U","B"]'::jsonb, 1), +(4, 'Krenko, Mob Boss', '["R"]'::jsonb, 2), +(5, 'Ghave, Guru of Spores', '["W","B","G"]'::jsonb, 2), +(6, 'Narset of the Ancient Way', '["U","R","W"]'::jsonb, 1), +(7, 'Tymna the Weaver', '["W","B"]'::jsonb, 2), +(8, 'Kydele, Chosen of Kruphix', '["U","G"]'::jsonb, 1) +ON CONFLICT DO NOTHING; + +-- Reset sequence for commanders +SELECT setval('commanders_id_seq', (SELECT MAX(id) FROM commanders), true); -- Insert sample games with varied statistics -INSERT OR IGNORE INTO games (id, date, player_count, commander_id, won, rounds, starting_player_won, sol_ring_turn_one_won, notes, user_id) VALUES +INSERT INTO games (id, date, player_count, commander_id, won, rounds, starting_player_won, sol_ring_turn_one_won, notes, user_id) VALUES -- Games for user 1 (testuser) -(1, '2024-01-15', 4, 1, 1, 12, 0, 0, 'Great control game, won with infinite artifacts', 1), -(2, '2024-01-18', 3, 1, 0, 8, 1, 1, 'Lost to aggro, Sol Ring helped but not enough', 1), -(3, '2024-01-22', 4, 2, 1, 15, 0, 1, 'Dinosaur tribal worked perfectly', 1), -(4, '2024-01-25', 5, 3, 0, 10, 0, 0, 'Mana issues all game', 1), -(5, '2024-02-01', 4, 1, 1, 13, 1, 0, 'Close game, won with Brain Freeze', 1), -(6, '2024-02-05', 3, 6, 1, 9, 0, 1, 'Narset enchantments carried the game', 1), -(7, '2024-02-08', 4, 8, 0, 11, 1, 0, 'Lost to tribal deck', 1), - +(1, '2024-01-15', 4, 1, TRUE, 12, FALSE, FALSE, 'Great control game, won with infinite artifacts', 1), +(2, '2024-01-18', 3, 1, FALSE, 8, TRUE, TRUE, 'Lost to aggro, Sol Ring helped but not enough', 1), +(3, '2024-01-22', 4, 2, TRUE, 15, FALSE, TRUE, 'Dinosaur tribal worked perfectly', 1), +(4, '2024-01-25', 5, 3, FALSE, 10, FALSE, FALSE, 'Mana issues all game', 1), +(5, '2024-02-01', 4, 1, TRUE, 13, TRUE, FALSE, 'Close game, won with Brain Freeze', 1), +(6, '2024-02-05', 3, 6, TRUE, 9, FALSE, TRUE, 'Narset enchantments carried the game', 1), +(7, '2024-02-08', 4, 8, FALSE, 11, TRUE, FALSE, 'Lost to tribal deck', 1), -- Games for user 2 (magictg) -(8, '2024-01-16', 4, 4, 1, 14, 0, 1, 'Krenko went infinite on turn 8', 2), -(9, '2024-01-20', 5, 5, 0, 16, 0, 0, 'Sac outlet deck was too slow', 2), -(10, '2024-01-23', 3, 7, 1, 7, 1, 0, 'Partner commanders worked well', 2), -(11, '2024-01-28', 4, 4, 1, 12, 0, 1, 'Goblins are OP in 1v1', 2), -(12, '2024-02-02', 6, 5, 0, 18, 1, 1, '6 player chaos game, fun but lost', 2); +(8, '2024-01-16', 4, 4, TRUE, 14, FALSE, TRUE, 'Krenko went infinite on turn 8', 2), +(9, '2024-01-20', 5, 5, FALSE, 16, FALSE, FALSE, 'Sac outlet deck was too slow', 2), +(10, '2024-01-23', 3, 7, TRUE, 7, TRUE, FALSE, 'Partner commanders worked well', 2), +(11, '2024-01-28', 4, 4, TRUE, 12, FALSE, TRUE, 'Goblins are OP in 1v1', 2), +(12, '2024-02-02', 6, 5, FALSE, 18, TRUE, TRUE, '6 player chaos game, fun but lost', 2) +ON CONFLICT DO NOTHING; + +-- Reset sequence for games +SELECT setval('games_id_seq', (SELECT MAX(id) FROM games), true); -- Additional games for more comprehensive statistics -INSERT OR IGNORE INTO games (id, date, player_count, commander_id, won, rounds, starting_player_won, sol_ring_turn_one_won, notes, user_id) VALUES +INSERT INTO games (id, date, player_count, commander_id, won, rounds, starting_player_won, sol_ring_turn_one_won, notes, user_id) VALUES -- More games for user 1 -(13, '2024-02-10', 4, 2, 0, 13, 0, 0, 'Board wiped too many times', 1), -(14, '2024-02-12', 3, 6, 1, 8, 1, 1, 'Narset with turn 1 Sol Ring = win', 1), -(15, '2024-02-15', 4, 3, 1, 11, 0, 0, 'Zombie recursion was key', 1), -(16, '2024-02-18', 5, 1, 0, 17, 1, 1, '5 player game, lost to storm', 1), - +(13, '2024-02-10', 4, 2, FALSE, 13, FALSE, FALSE, 'Board wiped too many times', 1), +(14, '2024-02-12', 3, 6, TRUE, 8, TRUE, TRUE, 'Narset with turn 1 Sol Ring = win', 1), +(15, '2024-02-15', 4, 3, TRUE, 11, FALSE, FALSE, 'Zombie recursion was key', 1), +(16, '2024-02-18', 5, 1, FALSE, 17, TRUE, TRUE, '5 player game, lost to storm', 1), -- More games for user 2 -(17, '2024-02-05', 4, 7, 0, 10, 0, 0, 'Color screw hurt early game', 2), -(18, '2024-02-09', 3, 4, 0, 9, 0, 1, 'Red deck lost to lifegain', 2), -(19, '2024-02-14', 4, 5, 1, 14, 1, 0, 'Ghave tokens got huge', 2), -(20, '2024-02-17', 4, 7, 1, 12, 0, 1, 'Life gain + card draw = win', 2); \ No newline at end of file +(17, '2024-02-05', 4, 7, FALSE, 10, FALSE, FALSE, 'Color screw hurt early game', 2), +(18, '2024-02-09', 3, 4, FALSE, 9, FALSE, TRUE, 'Red deck lost to lifegain', 2), +(19, '2024-02-14', 4, 5, TRUE, 14, TRUE, FALSE, 'Ghave tokens got huge', 2), +(20, '2024-02-17', 4, 7, TRUE, 12, FALSE, TRUE, 'Life gain + card draw = win', 2) +ON CONFLICT DO NOTHING; + +-- Reset sequence for games to cover all inserted IDs +SELECT setval('games_id_seq', (SELECT MAX(id) FROM games), true); diff --git a/backend/src/models/Commander.js b/backend/src/models/Commander.js deleted file mode 100644 index 0d5cc08..0000000 --- a/backend/src/models/Commander.js +++ /dev/null @@ -1,322 +0,0 @@ -// Commander model for MTG commanders -import dbManager from '../config/database.js' - -class Commander { - static async create(commanderData) { - const db = await dbManager.initialize() - - try { - const result = db - .prepare( - ` - INSERT INTO commanders (name, colors, user_id) - VALUES (?, ?, ?) - ` - ) - .run([ - commanderData.name, - JSON.stringify(commanderData.colors), - commanderData.userId - ]) - - return await this.findById(result.lastInsertRowid) - } catch (error) { - throw new Error('Failed to create commander') - } - } - - static async findById(id) { - const db = await dbManager.initialize() - - try { - const commander = db - .prepare( - ` - SELECT id, name, colors, user_id, created_at, updated_at - FROM commanders - WHERE id = ? - ` - ) - .get([id]) - - return commander - ? { - id: commander.id, - name: commander.name, - colors: commander.colors, - userId: commander.user_id, - createdAt: commander.created_at, - updatedAt: commander.updated_at - } - : null - } catch (error) { - throw new Error('Failed to find commander') - } - } - - static async findByUserId( - userId, - limit = 50, - offset = 0, - sortBy = 'created_at', - sortOrder = 'DESC' - ) { - const db = await dbManager.initialize() - - try { - // Whitelist allowed sort columns to prevent SQL injection - const allowedSortColumns = [ - 'created_at', - 'updated_at', - 'name', - 'total_games' - ] - const safeSort = allowedSortColumns.includes(sortBy) - ? sortBy - : 'created_at' - const safeOrder = sortOrder.toUpperCase() === 'ASC' ? 'ASC' : 'DESC' - - const query = ` - SELECT - c.id, - c.name, - c.colors, - c.user_id, - c.created_at, - c.updated_at, - (SELECT COUNT(*) FROM games WHERE commander_id = c.id) as total_games, - (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = 1) as total_wins, - (SELECT ROUND(COALESCE(COUNT(CASE WHEN won = 1 THEN 1 END), 0) * 100.0 / NULLIF(COUNT(*), 0), 2) FROM games WHERE commander_id = c.id) as win_rate, - (SELECT ROUND(AVG(rounds), 2) FROM games WHERE commander_id = c.id) as avg_rounds, - (SELECT MAX(date) FROM games WHERE commander_id = c.id) as last_played - FROM commanders c - WHERE c.user_id = ? - ORDER BY ${safeSort} ${safeOrder} - LIMIT ? OFFSET ? - ` - - const commanders = db.prepare(query).all([userId, limit, offset]) - - // Parse colors JSON for frontend and convert to camelCase - return commanders.map((cmd) => ({ - id: cmd.id, - name: cmd.name, - colors: JSON.parse(cmd.colors || '[]'), - userId: cmd.user_id, - createdAt: cmd.created_at, - updatedAt: cmd.updated_at, - totalGames: cmd.total_games || 0, - totalWins: cmd.total_wins || 0, - winRate: cmd.win_rate || 0, - avgRounds: cmd.avg_rounds || 0, - lastPlayed: cmd.last_played - })) - } catch (error) { - throw new Error('Failed to find commanders by user') - } - } - - static async update(id, updateData, userId) { - const db = await dbManager.initialize() - - try { - // Check if commander exists and belongs to user - const existing = await this.findById(id) - if (!existing || existing.userId !== userId) { - throw new Error('Commander not found or access denied') - } - - const updates = [] - const values = [] - - if (updateData.name !== undefined) { - updates.push('name = ?') - values.push(updateData.name) - } - - if (updateData.colors !== undefined) { - updates.push('colors = ?') - values.push(JSON.stringify(updateData.colors)) - } - - updates.push('updated_at = CURRENT_TIMESTAMP') - - if (updates.length === 0) { - throw new Error('No valid fields to update') - } - - const result = db - .prepare( - ` - UPDATE commanders - SET ${updates.join(', ')} - WHERE id = ? AND user_id = ? - ` - ) - .run([...values, id, userId]) - - return result.changes > 0 - } catch (error) { - throw new Error('Failed to update commander') - } - } - - static async delete(id, userId) { - const db = await dbManager.initialize() - - try { - // Check if commander exists and belongs to user - const existing = await this.findById(id) - if (!existing || existing.userId !== userId) { - throw new Error('Commander not found or access denied') - } - - const result = db - .prepare( - ` - DELETE FROM commanders - WHERE id = ? AND user_id = ? - ` - ) - .run([id, userId]) - - return result.changes > 0 - } catch (error) { - throw new Error('Failed to delete commander') - } - } - - static async getStats(id, userId) { - const db = await dbManager.initialize() - - try { - const stats = db - .prepare( - ` - SELECT - c.id, - c.name, - c.colors, - (SELECT COUNT(*) FROM games WHERE commander_id = c.id) as total_games, - (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = 1) as total_wins, - ROUND((SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = 1) * 100.0 / NULLIF((SELECT COUNT(*) FROM games WHERE commander_id = c.id), 0), 2) as win_rate, - (SELECT AVG(rounds) FROM games WHERE commander_id = c.id) as avg_rounds, - (SELECT MAX(date) FROM games WHERE commander_id = c.id) as last_played - FROM commanders c - WHERE c.id = ? AND c.user_id = ? - ` - ) - .get([id, userId]) - - if (!stats) { - throw new Error('Commander not found') - } - - return { - id: stats.id, - name: stats.name, - colors: JSON.parse(stats.colors), - totalGames: stats.total_games || 0, - totalWins: stats.total_wins || 0, - winRate: stats.win_rate || 0, - avgRounds: stats.avg_rounds || 0, - lastPlayed: stats.last_played - } - } catch (error) { - throw new Error('Failed to get commander stats') - } - } - - static async search(userId, query, limit = 20) { - const db = await dbManager.initialize() - - try { - const searchQuery = `%${query}%` - const commanders = db - .prepare( - ` - SELECT - c.id, - c.name, - c.colors, - c.user_id, - c.created_at, - c.updated_at, - (SELECT COUNT(*) FROM games WHERE commander_id = c.id) as total_games, - (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = 1) as total_wins, - (SELECT ROUND(COALESCE(COUNT(CASE WHEN won = 1 THEN 1 END), 0) * 100.0 / NULLIF(COUNT(*), 0), 2) FROM games WHERE commander_id = c.id) as win_rate, - (SELECT ROUND(AVG(rounds), 2) FROM games WHERE commander_id = c.id) as avg_rounds, - (SELECT MAX(date) FROM games WHERE commander_id = c.id) as last_played - FROM commanders c - WHERE c.user_id = ? AND c.name LIKE ? - ORDER BY c.name ASC - LIMIT ? - ` - ) - .all([userId, searchQuery, limit]) - - return commanders.map((cmd) => ({ - id: cmd.id, - name: cmd.name, - colors: JSON.parse(cmd.colors || '[]'), - userId: cmd.user_id, - createdAt: cmd.created_at, - updatedAt: cmd.updated_at, - totalGames: cmd.total_games || 0, - totalWins: cmd.total_wins || 0, - winRate: cmd.win_rate || 0, - avgRounds: cmd.avg_rounds || 0, - lastPlayed: cmd.last_played - })) - } catch (error) { - throw new Error('Failed to search commanders') - } - } - - static async getPopular(userId, limit = 10) { - const db = await dbManager.initialize() - - try { - const commanders = db - .prepare( - ` - SELECT - c.id, - c.name, - c.colors, - c.user_id, - c.created_at, - c.updated_at, - (SELECT COUNT(*) FROM games WHERE commander_id = c.id) as total_games, - (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = 1) as total_wins, - (SELECT ROUND(COALESCE(COUNT(CASE WHEN won = 1 THEN 1 END), 0) * 100.0 / NULLIF(COUNT(*), 0), 2) FROM games WHERE commander_id = c.id) as win_rate, - (SELECT ROUND(AVG(rounds), 2) FROM games WHERE commander_id = c.id) as avg_rounds, - (SELECT MAX(date) FROM games WHERE commander_id = c.id) as last_played - FROM commanders c - WHERE c.user_id = ? AND (SELECT COUNT(*) FROM games WHERE commander_id = c.id) >= 5 - ORDER BY win_rate DESC, c.name ASC - LIMIT ? - ` - ) - .all([userId, limit]) - - return commanders.map((cmd) => ({ - id: cmd.id, - name: cmd.name, - colors: JSON.parse(cmd.colors || '[]'), - userId: cmd.user_id, - createdAt: cmd.created_at, - updatedAt: cmd.updated_at, - totalGames: cmd.total_games || 0, - totalWins: cmd.total_wins || 0, - winRate: cmd.win_rate || 0, - avgRounds: cmd.avg_rounds || 0, - lastPlayed: cmd.last_played - })) - } catch (error) { - throw new Error('Failed to get popular commanders') - } - } -} - -export default Commander diff --git a/backend/src/models/Game.js b/backend/src/models/Game.js deleted file mode 100644 index fb6c028..0000000 --- a/backend/src/models/Game.js +++ /dev/null @@ -1,324 +0,0 @@ -// Game model for EDH game tracking -import dbManager from '../config/database.js' - -class Game { - static async create(gameData) { - const db = await dbManager.initialize() - - try { - const result = db - .prepare( - ` - INSERT INTO games ( - date, player_count, commander_id, won, rounds, - starting_player_won, sol_ring_turn_one_won, notes, user_id - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) - ` - ) - .run([ - gameData.date, - gameData.player_count, - gameData.commander_id, - gameData.won ? 1 : 0, - gameData.rounds, - gameData.startingPlayerWon ? 1 : 0, - gameData.solRingTurnOneWon ? 1 : 0, - gameData.notes || null, - gameData.userId - ]) - - return await this.findById(result.lastInsertRowid) - } catch (error) { - throw new Error('Failed to create game') - } - } - - static async findById(id) { - const db = await dbManager.initialize() - - try { - const game = db - .prepare( - ` - SELECT - g.id, - g.date, - g.player_count, - g.commander_id, - g.won, - g.rounds, - g.starting_player_won, - g.sol_ring_turn_one_won, - g.notes, - g.user_id, - cmdr.name as commander_name, - cmdr.colors as commander_colors, - g.created_at, - g.updated_at - FROM games g - LEFT JOIN commanders cmdr ON g.commander_id = cmdr.id - WHERE g.id = ? - ` - ) - .get([id]) - - return game - } catch (error) { - throw new Error('Failed to find game') - } - } - - static async findByUserId(userId, limit = 50, offset = 0, filters = {}) { - const db = await dbManager.initialize() - - try { - let query = ` - SELECT - g.id, - g.date, - g.player_count, - g.commander_id, - g.won, - g.rounds, - g.starting_player_won, - g.sol_ring_turn_one_won, - g.notes, - cmdr.name, - cmdr.colors, - g.created_at, - g.updated_at - FROM games g - LEFT JOIN commanders cmdr ON g.commander_id = cmdr.id - WHERE g.user_id = ? - ${filters.commander ? `AND cmdr.name LIKE ?` : ''} - ORDER BY g.date DESC - ` - - const params = [userId] - if (filters.commander) { - params.push(`%${filters.commander}%`) - } - - if (filters.playerCount) { - query += ` AND g.player_count = ?` - params.push(filters.playerCount) - } - - if (filters.commanderId) { - query += ` AND g.commander_id = ?` - params.push(filters.commanderId) - } - - if (filters.dateFrom) { - query += ` AND g.date >= ?` - params.push(filters.dateFrom) - } - - if (filters.dateTo) { - query += ` AND g.date <= ?` - params.push(filters.dateTo) - } - - query += ` LIMIT ? OFFSET ?` - params.push(limit, offset) - - const games = db.prepare(query).all(params) - - // Parse dates for frontend and transform to camelCase - return games.map((game) => ({ - id: game.id, - date: new Date(game.date).toLocaleDateString('en-US'), - playerCount: game.player_count, - commanderId: game.commander_id, - won: game.won, - rounds: game.rounds || 0, - startingPlayerWon: game.starting_player_won, - solRingTurnOneWon: game.sol_ring_turn_one_won, - notes: game.notes, - commanderName: game.name, - commanderColors: JSON.parse(game.colors || '[]'), - createdAt: game.created_at, - updatedAt: game.updated_at - })) - } catch (error) { - throw new Error('Failed to find games by user') - } - } - - static async exportByUserId(userId, filters = {}) { - const db = await dbManager.initialize() - - try { - let query = ` - SELECT - g.id, - g.date, - g.player_count, - g.commander_id, - g.won, - g.rounds, - g.starting_player_won, - g.sol_ring_turn_one_won, - g.notes, - cmdr.name as commander_name, - cmdr.colors as commander_colors, - g.created_at, - g.updated_at - FROM games g - LEFT JOIN commanders cmdr ON g.commander_id = cmdr.id - WHERE g.user_id = ? - ${filters.commander ? `AND cmdr.name LIKE ?` : ''} - ` - - const params = [userId] - if (filters.commander) { - params.push(`%${filters.commander}%`) - } - - if (filters.playerCount) { - query += ` AND g.player_count = ?` - params.push(filters.playerCount) - } - - if (filters.commanderId) { - query += ` AND g.commander_id = ?` - params.push(filters.commanderId) - } - - if (filters.dateFrom) { - query += ` AND g.date >= ?` - params.push(filters.dateFrom) - } - - if (filters.dateTo) { - query += ` AND g.date <= ?` - params.push(filters.dateTo) - } - - query += ` ORDER BY g.date DESC` - - const games = db.prepare(query).all(params) - - // Return data for export (minimal transformation) - return games.map(game => ({ - id: game.id, - date: game.date, - playerCount: game.player_count, - commanderId: game.commander_id, - commanderName: game.commander_name, - commanderColors: JSON.parse(game.commander_colors || '[]'), - won: Boolean(game.won), - rounds: game.rounds || 0, - startingPlayerWon: Boolean(game.starting_player_won), - solRingTurnOneWon: Boolean(game.sol_ring_turn_one_won), - notes: game.notes, - createdAt: game.created_at, - updatedAt: game.updated_at - })) - } catch (error) { - throw new Error('Failed to export games') - } - } - - static async update(id, updateData, userId) { - const db = await dbManager.initialize() - - try { - // Check if game exists and belongs to user - const existing = await this.findById(id) - if (!existing || existing.user_id !== userId) { - throw new Error('Game not found or access denied') - } - - const updates = [] - const values = [] - - if (updateData.date !== undefined) { - updates.push('date = ?') - values.push(updateData.date) - } - - if (updateData.commander_id !== undefined) { - updates.push('commander_id = ?') - values.push(updateData.commander_id) - } - - if (updateData.player_count !== undefined) { - updates.push('player_count = ?') - values.push(updateData.player_count) - } - - if (updateData.won !== undefined) { - updates.push('won = ?') - values.push(updateData.won ? 1 : 0) - } - - if (updateData.rounds !== undefined) { - updates.push('rounds = ?') - values.push(updateData.rounds) - } - - if (updateData.starting_player_won !== undefined) { - updates.push('starting_player_won = ?') - values.push(updateData.starting_player_won ? 1 : 0) - } - - if (updateData.sol_ring_turn_one_won !== undefined) { - updates.push('sol_ring_turn_one_won = ?') - values.push(updateData.sol_ring_turn_one_won ? 1 : 0) - } - - if (updateData.notes !== undefined) { - updates.push('notes = ?') - values.push(updateData.notes) - } - - updates.push('updated_at = CURRENT_TIMESTAMP') - - if (updates.length === 0) { - throw new Error('No valid fields to update') - } - - const result = db - .prepare( - ` - UPDATE games - SET ${updates.join(', ')} - WHERE id = ? AND user_id = ? - ` - ) - .run([...values, id, userId]) - - return result.changes > 0 - } catch (error) { - throw new Error('Failed to update game') - } - } - - static async delete(id, userId) { - const db = await dbManager.initialize() - - try { - // Check if game exists and belongs to user - const existing = await this.findById(id) - if (!existing || existing.user_id !== userId) { - throw new Error('Game not found or access denied') - } - - const result = db - .prepare( - ` - DELETE FROM games - WHERE id = ? AND user_id = ? - ` - ) - .run([id, userId]) - - return result.changes > 0 - } catch (error) { - throw new Error('Failed to delete game') - } - } -} - -export default Game diff --git a/backend/src/models/User.js b/backend/src/models/User.js deleted file mode 100644 index 615ba80..0000000 --- a/backend/src/models/User.js +++ /dev/null @@ -1,168 +0,0 @@ -import bcrypt from 'bcryptjs' -import dbManager from '../config/database.js' - -class User { - static async create(userData) { - const db = await dbManager.initialize() - - const { username, password, email } = userData - - // Check if username already exists - const existingUser = db.prepare('SELECT id FROM users WHERE username = ? OR email = ?').get([username, email]) - if (existingUser) { - throw new Error('Username or email already exists') - } - - // Hash password - const passwordHash = await bcrypt.hash(password, 12) - - try { - const result = db.prepare(` - INSERT INTO users (username, password_hash, email) - VALUES (?, ?, ?) - `).run([username, passwordHash, email]) - - return this.findById(result.lastInsertRowid) - } catch (error) { - throw new Error('Failed to create user') - } - } - - static async findById(id) { - const db = await dbManager.initialize() - - const user = db.prepare(` - SELECT id, username, email, created_at, updated_at - FROM users - WHERE id = ? - `).get([id]) - - return user - } - - static async findByUsername(username) { - const db = await dbManager.initialize() - - const user = db.prepare(` - SELECT id, username, password_hash, email, created_at, updated_at - FROM users - WHERE username = ? - `).get([username]) - - return user - } - - static async findByEmail(email) { - const db = await dbManager.initialize() - - const user = db.prepare(` - SELECT id, username, password_hash, email, created_at, updated_at - FROM users - WHERE email = ? - `).get([email]) - - return user - } - - static async verifyPassword(password, hashedPassword) { - return await bcrypt.compare(password, hashedPassword) - } - - static async updatePassword(userId, newPassword) { - const db = await dbManager.initialize() - - const passwordHash = await bcrypt.hash(newPassword, 12) - - const result = db.prepare(` - UPDATE users - SET password_hash = ?, updated_at = CURRENT_TIMESTAMP - WHERE id = ? - `).run([passwordHash, userId]) - - return result.changes > 0 - } - - static async updateUsername(userId, newUsername) { - const db = await dbManager.initialize() - - // Check if new username is already taken - const existingUser = db.prepare(` - SELECT id FROM users - WHERE username = ? AND id != ? - `).get([newUsername, userId]) - - if (existingUser) { - throw new Error('Username already exists') - } - - const result = db.prepare(` - UPDATE users - SET username = ?, updated_at = CURRENT_TIMESTAMP - WHERE id = ? - `).run([newUsername, userId]) - - return result.changes > 0 - } - - static async updateProfile(userId, profileData) { - const db = await dbManager.initialize() - - const { email } = profileData - - // Check if email is already taken by another user - if (email) { - const existingUser = db.prepare(` - SELECT id FROM users - WHERE email = ? AND id != ? - `).get([email, userId]) - - if (existingUser) { - throw new Error('Email already exists') - } - } - - const updates = [] - const values = [] - - if (email !== undefined) { - updates.push('email = ?') - values.push(email) - } - - if (updates.length === 0) { - throw new Error('No valid fields to update') - } - - updates.push('updated_at = CURRENT_TIMESTAMP') - values.push(userId) - - const result = db.prepare(` - UPDATE users - SET ${updates.join(', ')} - WHERE id = ? - `).run(values) - - return result.changes > 0 - } - - static async delete(userId) { - const db = await dbManager.initialize() - - // This will cascade delete commanders and games due to foreign key constraints - const result = db.prepare('DELETE FROM users WHERE id = ?').run([userId]) - - return result.changes > 0 - } - - static async getStats(userId) { - const db = await dbManager.initialize() - - const stats = db.prepare(` - SELECT * FROM user_stats WHERE user_id = ? - `).get([userId]) - - return stats - } -} - -export default User \ No newline at end of file diff --git a/backend/src/repositories/CommanderRepository.js b/backend/src/repositories/CommanderRepository.js new file mode 100644 index 0000000..5dfe9e4 --- /dev/null +++ b/backend/src/repositories/CommanderRepository.js @@ -0,0 +1,226 @@ +// Commander Repository for all commander-related database operations +import { Repository } from './Repository.js' +import dbManager from '../config/database.js' + +export class CommanderRepository extends Repository { + constructor() { + super('commanders') + } + + /** + * Create a new commander + */ + async createCommander(userId, name, colors) { + try { + const result = await dbManager.query( + ` + INSERT INTO ${this.tableName} (name, colors, user_id) + VALUES ($1, $2, $3) + RETURNING id, name, colors, user_id, created_at, updated_at + `, + [name, colors, userId] + ) + + return result.rows[0] + } catch (error) { + throw new Error('Failed to create commander') + } + } + + /** + * Get commanders for a user with pagination and sorting + */ + async getCommandersByUserId( + userId, + limit = 50, + offset = 0, + sortBy = 'created_at', + sortOrder = 'DESC' + ) { + // Whitelist allowed sort columns + const allowedSortColumns = [ + 'created_at', + 'updated_at', + 'name', + 'total_games' + ] + const safeSort = allowedSortColumns.includes(sortBy) + ? sortBy + : 'created_at' + const safeOrder = sortOrder.toUpperCase() === 'ASC' ? 'ASC' : 'DESC' + + const query = ` + SELECT + c.id, + c.name, + c.colors, + c.user_id, + c.created_at, + c.updated_at, + (SELECT COUNT(*) FROM games WHERE commander_id = c.id) as total_games, + (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = TRUE) as total_wins, + (SELECT ROUND(COALESCE(COUNT(CASE WHEN won = TRUE THEN 1 END), 0)::NUMERIC * 100.0 / NULLIF(COUNT(*), 0), 2) FROM games WHERE commander_id = c.id) as win_rate, + (SELECT ROUND(AVG(rounds)::NUMERIC, 2) FROM games WHERE commander_id = c.id) as avg_rounds, + (SELECT MAX(date) FROM games WHERE commander_id = c.id) as last_played + FROM ${this.tableName} c + WHERE c.user_id = $1 + ORDER BY ${safeSort} ${safeOrder} + LIMIT $2 OFFSET $3 + ` + + return dbManager.all(query, [userId, limit, offset]) + } + + /** + * Search commanders by name for a user + */ + async searchCommandersByName(userId, query, limit = 20) { + const searchQuery = `%${query}%` + + const sql = ` + SELECT + c.id, + c.name, + c.colors, + c.user_id, + c.created_at, + c.updated_at, + (SELECT COUNT(*) FROM games WHERE commander_id = c.id) as total_games, + (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = TRUE) as total_wins, + (SELECT ROUND(COALESCE(COUNT(CASE WHEN won = TRUE THEN 1 END), 0)::NUMERIC * 100.0 / NULLIF(COUNT(*), 0), 2) FROM games WHERE commander_id = c.id) as win_rate, + (SELECT ROUND(AVG(rounds)::NUMERIC, 2) FROM games WHERE commander_id = c.id) as avg_rounds, + (SELECT MAX(date) FROM games WHERE commander_id = c.id) as last_played + FROM ${this.tableName} c + WHERE c.user_id = $1 AND c.name ILIKE $2 + ORDER BY c.name ASC + LIMIT $3 + ` + + return dbManager.all(sql, [userId, searchQuery, limit]) + } + + /** + * Get popular commanders for a user (with 5+ games) + */ + async getPopularCommandersByUserId(userId, limit = 10) { + const query = ` + SELECT + c.id, + c.name, + c.colors, + c.user_id, + c.created_at, + c.updated_at, + (SELECT COUNT(*) FROM games WHERE commander_id = c.id) as total_games, + (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = TRUE) as total_wins, + (SELECT ROUND(COALESCE(COUNT(CASE WHEN won = TRUE THEN 1 END), 0)::NUMERIC * 100.0 / NULLIF(COUNT(*), 0), 2) FROM games WHERE commander_id = c.id) as win_rate, + (SELECT ROUND(AVG(rounds)::NUMERIC, 2) FROM games WHERE commander_id = c.id) as avg_rounds, + (SELECT MAX(date) FROM games WHERE commander_id = c.id) as last_played + FROM ${this.tableName} c + WHERE c.user_id = $1 AND (SELECT COUNT(*) FROM games WHERE commander_id = c.id) >= 5 + ORDER BY win_rate DESC, c.name ASC + LIMIT $2 + ` + + return dbManager.all(query, [userId, limit]) + } + + /** + * Get commander statistics + */ + async getCommanderStats(commanderId, userId) { + const query = ` + SELECT + c.id, + c.name, + c.colors, + (SELECT COUNT(*) FROM games WHERE commander_id = c.id) as total_games, + (SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = TRUE) as total_wins, + ROUND((SELECT COUNT(*) FROM games WHERE commander_id = c.id AND won = TRUE)::NUMERIC * 100.0 / NULLIF((SELECT COUNT(*) FROM games WHERE commander_id = c.id), 0), 2) as win_rate, + (SELECT AVG(rounds) FROM games WHERE commander_id = c.id) as avg_rounds, + (SELECT MAX(date) FROM games WHERE commander_id = c.id) as last_played + FROM ${this.tableName} c + WHERE c.id = $1 AND c.user_id = $2 + ` + + return dbManager.get(query, [commanderId, userId]) + } + + /** + * Update a commander + */ + async updateCommander(commanderId, userId, updateData) { + // Verify ownership + const existing = await this.findById(commanderId) + if (!existing || existing.user_id !== userId) { + throw new Error('Commander not found or access denied') + } + + const updates = [] + const values = [] + let paramCount = 1 + + if (updateData.name !== undefined) { + updates.push(`name = $${paramCount}`) + values.push(updateData.name) + paramCount++ + } + + if (updateData.colors !== undefined) { + updates.push(`colors = $${paramCount}`) + values.push(updateData.colors) + paramCount++ + } + + if (updates.length === 0) { + throw new Error('No valid fields to update') + } + + values.push(commanderId, userId) + + const query = ` + UPDATE ${this.tableName} + SET ${updates.join(', ')}, updated_at = CURRENT_TIMESTAMP + WHERE id = $${paramCount} AND user_id = $${paramCount + 1} + RETURNING * + ` + + const result = await dbManager.query(query, values) + return result.rows[0] + } + + /** + * Find commander by name and user + */ + async findByNameAndUserId(name, userId) { + try { + const result = await dbManager.query( + `SELECT * FROM ${this.tableName} WHERE LOWER(name) = LOWER($1) AND user_id = $2`, + [name, userId] + ) + return result.rows[0] || null + } catch (error) { + throw new Error('Failed to find commander') + } + } + + /** + * Delete a commander + */ + async deleteCommander(commanderId, userId) { + // Verify ownership + const existing = await this.findById(commanderId) + if (!existing || existing.user_id !== userId) { + throw new Error('Commander not found or access denied') + } + + const result = await dbManager.query( + `DELETE FROM ${this.tableName} WHERE id = $1 AND user_id = $2`, + [commanderId, userId] + ) + + return result.rowCount > 0 + } +} + +export default CommanderRepository diff --git a/backend/src/repositories/GameRepository.js b/backend/src/repositories/GameRepository.js new file mode 100644 index 0000000..7b9d478 --- /dev/null +++ b/backend/src/repositories/GameRepository.js @@ -0,0 +1,324 @@ +// Game Repository for all game-related database operations +import { Repository } from './Repository.js' +import dbManager from '../config/database.js' + +export class GameRepository extends Repository { + constructor() { + super('games') + } + + /** + * Create a new game record + */ + async createGame(gameData) { + try { + const result = await dbManager.query( + ` + INSERT INTO ${this.tableName} ( + date, player_count, commander_id, won, rounds, + starting_player_won, sol_ring_turn_one_won, notes, user_id + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) + RETURNING * + `, + [ + gameData.date, + gameData.player_count, + gameData.commander_id, + gameData.won || false, + gameData.rounds, + gameData.starting_player_won || false, + gameData.sol_ring_turn_one_won || false, + gameData.notes || null, + gameData.user_id + ] + ) + + return result.rows[0] + } catch (error) { + throw new Error('Failed to create game') + } + } + + /** + * Get games for a user with filtering and pagination + */ + async getGamesByUserId(userId, limit = 50, offset = 0, filters = {}) { + let query = ` + SELECT + g.id, + g.date, + g.player_count, + g.commander_id, + g.won, + g.rounds, + g.starting_player_won, + g.sol_ring_turn_one_won, + g.notes, + cmdr.name, + cmdr.colors, + g.created_at, + g.updated_at + FROM ${this.tableName} g + LEFT JOIN commanders cmdr ON g.commander_id = cmdr.id + WHERE g.user_id = $1 + ` + + const params = [userId] + let paramCount = 2 + + // Apply filters + if (filters.commander) { + query += ` AND cmdr.name ILIKE $${paramCount}` + params.push(`%${filters.commander}%`) + paramCount++ + } + + if (filters.playerCount) { + query += ` AND g.player_count = $${paramCount}` + params.push(filters.playerCount) + paramCount++ + } + + if (filters.commanderId) { + query += ` AND g.commander_id = $${paramCount}` + params.push(filters.commanderId) + paramCount++ + } + + if (filters.dateFrom) { + query += ` AND g.date >= $${paramCount}` + params.push(filters.dateFrom) + paramCount++ + } + + if (filters.dateTo) { + query += ` AND g.date <= $${paramCount}` + params.push(filters.dateTo) + paramCount++ + } + + if (filters.won !== undefined) { + query += ` AND g.won = $${paramCount}` + params.push(filters.won) + paramCount++ + } + + query += ` ORDER BY g.date DESC LIMIT $${paramCount} OFFSET $${paramCount + 1}` + params.push(limit, offset) + + return dbManager.all(query, params) + } + + /** + * Export games for a user with filtering + */ + async exportGamesByUserId(userId, filters = {}) { + let query = ` + SELECT + g.id, + g.date, + g.player_count, + g.commander_id, + g.won, + g.rounds, + g.starting_player_won, + g.sol_ring_turn_one_won, + g.notes, + cmdr.name as commander_name, + cmdr.colors as commander_colors, + g.created_at, + g.updated_at + FROM ${this.tableName} g + LEFT JOIN commanders cmdr ON g.commander_id = cmdr.id + WHERE g.user_id = $1 + ` + + const params = [userId] + let paramCount = 2 + + // Apply filters + if (filters.commander) { + query += ` AND cmdr.name ILIKE $${paramCount}` + params.push(`%${filters.commander}%`) + paramCount++ + } + + if (filters.playerCount) { + query += ` AND g.player_count = $${paramCount}` + params.push(filters.playerCount) + paramCount++ + } + + if (filters.commanderId) { + query += ` AND g.commander_id = $${paramCount}` + params.push(filters.commanderId) + paramCount++ + } + + if (filters.dateFrom) { + query += ` AND g.date >= $${paramCount}` + params.push(filters.dateFrom) + paramCount++ + } + + if (filters.dateTo) { + query += ` AND g.date <= $${paramCount}` + params.push(filters.dateTo) + paramCount++ + } + + query += ` ORDER BY g.date DESC` + + return dbManager.all(query, params) + } + + /** + * Get game by ID with commander details + */ + async getGameById(gameId, userId) { + const query = ` + SELECT + g.id, + g.date, + g.player_count, + g.commander_id, + g.won, + g.rounds, + g.starting_player_won, + g.sol_ring_turn_one_won, + g.notes, + g.user_id, + cmdr.name as commander_name, + cmdr.colors as commander_colors, + g.created_at, + g.updated_at + FROM ${this.tableName} g + LEFT JOIN commanders cmdr ON g.commander_id = cmdr.id + WHERE g.id = $1 AND g.user_id = $2 + ` + + return dbManager.get(query, [gameId, userId]) + } + + /** + * Update a game record + */ + async updateGame(gameId, userId, updateData) { + // Verify ownership + const existing = await this.getGameById(gameId, userId) + if (!existing) { + throw new Error('Game not found or access denied') + } + + const updates = [] + const values = [] + let paramCount = 1 + + const fieldMap = { + date: 'date', + commander_id: 'commanderId', + player_count: 'playerCount', + won: 'won', + rounds: 'rounds', + starting_player_won: 'startingPlayerWon', + sol_ring_turn_one_won: 'solRingTurnOneWon', + notes: 'notes' + } + + // Handle both snake_case and camelCase + for (const [dbField, dataField] of Object.entries(fieldMap)) { + if (updateData[dbField] !== undefined) { + updates.push(`${dbField} = $${paramCount}`) + values.push( + dbField.includes('won') + ? updateData[dbField] || false + : updateData[dbField] + ) + paramCount++ + } else if (updateData[dataField] !== undefined) { + updates.push(`${dbField} = $${paramCount}`) + values.push( + dbField.includes('won') + ? updateData[dataField] || false + : updateData[dataField] + ) + paramCount++ + } + } + + if (updates.length === 0) { + throw new Error('No valid fields to update') + } + + values.push(gameId, userId) + + const query = ` + UPDATE ${this.tableName} + SET ${updates.join(', ')}, updated_at = CURRENT_TIMESTAMP + WHERE id = $${paramCount} AND user_id = $${paramCount + 1} + RETURNING * + ` + + const result = await dbManager.query(query, values) + return result.rows[0] + } + + /** + * Delete a game + */ + async deleteGame(gameId, userId) { + // Verify ownership + const existing = await this.getGameById(gameId, userId) + if (!existing) { + throw new Error('Game not found or access denied') + } + + const result = await dbManager.query( + `DELETE FROM ${this.tableName} WHERE id = $1 AND user_id = $2`, + [gameId, userId] + ) + + return result.rowCount > 0 + } + + /** + * Find game by date and commander (for duplicate checking) + */ + async findGameByDateAndCommander(userId, date, commanderId) { + try { + const result = await dbManager.query( + ` + SELECT * FROM ${this.tableName} + WHERE user_id = $1 AND date = $2 AND commander_id = $3 + LIMIT 1 + `, + [userId, date, commanderId] + ) + return result.rows[0] || null + } catch (error) { + throw new Error('Failed to find game') + } + } + + /** + * Get game statistics for a commander + */ + async getCommanderGameStats(commanderId, userId) { + const query = ` + SELECT + COUNT(*) as total_games, + SUM(CASE WHEN won = TRUE THEN 1 ELSE 0 END) as total_wins, + AVG(rounds) as avg_rounds, + AVG(CASE WHEN rounds > 0 THEN rounds ELSE NULL END) as avg_rounds_with_data, + MAX(date) as last_played, + SUM(CASE WHEN starting_player_won = TRUE THEN 1 ELSE 0 END) as starting_player_wins, + SUM(CASE WHEN sol_ring_turn_one_won = TRUE THEN 1 ELSE 0 END) as sol_ring_wins + FROM ${this.tableName} + WHERE commander_id = $1 AND user_id = $2 + ` + + return dbManager.get(query, [commanderId, userId]) + } +} + +export default GameRepository diff --git a/backend/src/repositories/Repository.js b/backend/src/repositories/Repository.js new file mode 100644 index 0000000..8ab03bb --- /dev/null +++ b/backend/src/repositories/Repository.js @@ -0,0 +1,121 @@ +// Base Repository class providing common database operations +import dbManager from '../config/database.js' + +export class Repository { + constructor(tableName) { + this.tableName = tableName + } + + /** + * Find a single record by ID + */ + async findById(id) { + return dbManager.get( + `SELECT * FROM ${this.tableName} WHERE id = $1`, + [id] + ) + } + + /** + * Find all records with optional pagination + */ + async findAll(limit = 50, offset = 0) { + return dbManager.all( + `SELECT * FROM ${this.tableName} LIMIT $1 OFFSET $2`, + [limit, offset] + ) + } + + /** + * Find records with a WHERE condition + */ + async findWhere(whereCondition, params, limit = 50, offset = 0) { + const query = ` + SELECT * FROM ${this.tableName} + WHERE ${whereCondition} + LIMIT $${params.length + 1} OFFSET $${params.length + 2} + ` + return dbManager.all(query, [...params, limit, offset]) + } + + /** + * Get count of records + */ + async count() { + const result = await dbManager.get( + `SELECT COUNT(*) as count FROM ${this.tableName}` + ) + return result?.count || 0 + } + + /** + * Get count with WHERE condition + */ + async countWhere(whereCondition, params = []) { + const query = `SELECT COUNT(*) as count FROM ${this.tableName} WHERE ${whereCondition}` + const result = await dbManager.get(query, params) + return result?.count || 0 + } + + /** + * Insert a new record + */ + async insert(data) { + const columns = Object.keys(data) + const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ') + const values = Object.values(data) + + const query = ` + INSERT INTO ${this.tableName} (${columns.join(', ')}) + VALUES (${placeholders}) + RETURNING * + ` + const result = await dbManager.query(query, values) + return result.rows[0] + } + + /** + * Update a record + */ + async update(id, data) { + const columns = Object.keys(data) + const setClause = columns.map((col, i) => `${col} = $${i + 1}`).join(', ') + const values = [...Object.values(data), id] + + const query = ` + UPDATE ${this.tableName} + SET ${setClause}, updated_at = CURRENT_TIMESTAMP + WHERE id = $${columns.length + 1} + RETURNING * + ` + const result = await dbManager.query(query, values) + return result.rows[0] + } + + /** + * Delete a record + */ + async delete(id) { + const result = await dbManager.query( + `DELETE FROM ${this.tableName} WHERE id = $1`, + [id] + ) + return result.rowCount > 0 + } + + /** + * Execute a raw query (use with caution) + */ + async query(sql, params = []) { + return dbManager.query(sql, params) + } + + /** + * Execute a transaction + */ + async transaction(callback) { + return dbManager.transaction(callback) + } +} + +export default Repository diff --git a/backend/src/repositories/UserRepository.js b/backend/src/repositories/UserRepository.js new file mode 100644 index 0000000..f7cb397 --- /dev/null +++ b/backend/src/repositories/UserRepository.js @@ -0,0 +1,184 @@ +// User Repository for all user-related database operations +import { Repository } from './Repository.js' +import dbManager from '../config/database.js' +import bcrypt from 'bcryptjs' + +export class UserRepository extends Repository { + constructor() { + super('users') + } + + /** + * Find user by username + */ + async findByUsername(username) { + return dbManager.get( + `SELECT * FROM ${this.tableName} WHERE username = $1`, + [username] + ) + } + + /** + * Find user by email + */ + async findByEmail(email) { + return dbManager.get( + `SELECT * FROM ${this.tableName} WHERE email = $1`, + [email] + ) + } + + /** + * Create a new user with password hashing + */ + async createUser(username, password, email) { + // Check if user already exists + const existing = await dbManager.get( + `SELECT id FROM ${this.tableName} WHERE username = $1 OR email = $2`, + [username, email] + ) + + if (existing) { + throw new Error('Username or email already exists') + } + + // Hash password + const passwordHash = await bcrypt.hash(password, 12) + + // Insert user + const result = await dbManager.query( + ` + INSERT INTO ${this.tableName} (username, password_hash, email) + VALUES ($1, $2, $3) + RETURNING id, username, email, created_at + `, + [username, passwordHash, email] + ) + + return result.rows[0] + } + + /** + * Verify user password + */ + async verifyPassword(password, hashedPassword) { + return bcrypt.compare(password, hashedPassword) + } + + /** + * Update user password + */ + async updatePassword(userId, newPassword) { + const passwordHash = await bcrypt.hash(newPassword, 12) + + const result = await dbManager.query( + ` + UPDATE ${this.tableName} + SET password_hash = $1, updated_at = CURRENT_TIMESTAMP + WHERE id = $2 + RETURNING id + `, + [passwordHash, userId] + ) + + return result.rowCount > 0 + } + + /** + * Update user username + */ + async updateUsername(userId, newUsername) { + // Check if username is already taken + const existing = await dbManager.get( + `SELECT id FROM ${this.tableName} WHERE username = $1 AND id != $2`, + [newUsername, userId] + ) + + if (existing) { + throw new Error('Username already exists') + } + + const result = await dbManager.query( + ` + UPDATE ${this.tableName} + SET username = $1, updated_at = CURRENT_TIMESTAMP + WHERE id = $2 + RETURNING id + `, + [newUsername, userId] + ) + + return result.rowCount > 0 + } + + /** + * Update user profile + */ + async updateProfile(userId, profileData) { + const { email } = profileData + + // Check if email is already taken by another user + if (email) { + const existing = await dbManager.get( + `SELECT id FROM ${this.tableName} WHERE email = $1 AND id != $2`, + [email, userId] + ) + + if (existing) { + throw new Error('Email already exists') + } + } + + const updates = [] + const values = [] + let paramCount = 1 + + if (email !== undefined) { + updates.push(`email = $${paramCount}`) + values.push(email) + paramCount++ + } + + if (updates.length === 0) { + throw new Error('No valid fields to update') + } + + values.push(userId) + + const result = await dbManager.query( + ` + UPDATE ${this.tableName} + SET ${updates.join(', ')}, updated_at = CURRENT_TIMESTAMP + WHERE id = $${paramCount} + RETURNING id + `, + values + ) + + return result.rowCount > 0 + } + + /** + * Get user statistics + */ + async getStats(userId) { + return dbManager.get( + `SELECT * FROM user_stats WHERE user_id = $1`, + [userId] + ) + } + + /** + * Delete user (cascades to commanders and games) + */ + async deleteUser(userId) { + const result = await dbManager.query( + `DELETE FROM ${this.tableName} WHERE id = $1`, + [userId] + ) + + return result.rowCount > 0 + } +} + +export default UserRepository diff --git a/backend/src/routes/auth.js b/backend/src/routes/auth.js index e24e258..6229904 100644 --- a/backend/src/routes/auth.js +++ b/backend/src/routes/auth.js @@ -1,49 +1,117 @@ // Authentication routes import { z } from 'zod' -import User from '../models/User.js' +import UserRepository from '../repositories/UserRepository.js' import { registrationConfig } from '../config/jwt.js' +import { + validatePasswordStrength, + isNotReservedUsername, + isNotDisposableEmail, + formatValidationErrors, + createErrorResponse +} from '../utils/validators.js' -// Validation schemas +// Validation schemas with enhanced validation const registerSchema = z.object({ username: z - .string() - .min(3) - .max(50) - .regex(/^[a-zA-Z0-9_-]+$/, { - message: - 'Username can only contain letters, numbers, underscores, and hyphens' - }), - password: z.string().min(8).max(100), - email: z.string().email().optional() -}) - -const loginSchema = z.object({ - username: z.string().min(1), - password: z.string().min(1), - remember: z.boolean().optional().default(false) -}) - -const changePasswordSchema = z.object({ - currentPassword: z.string().min(1), - newPassword: z.string().min(8).max(100) -}) - -const updateProfileSchema = z.object({ - email: z.string().email().optional() -}) - -const updateUsernameSchema = z.object({ - newUsername: z - .string() - .min(3) - .max(50) + .string('Username must be a string') + .min(3, 'Username must be at least 3 characters') + .max(50, 'Username must be less than 50 characters') .regex(/^[a-zA-Z0-9_-]+$/, { message: 'Username can only contain letters, numbers, underscores, and hyphens' }) + .transform((val) => val.toLowerCase().trim()) + .refine((val) => isNotReservedUsername(val), { + message: 'This username is reserved and cannot be used' + }), + + password: z + .string('Password must be a string') + .min(8, 'Password must be at least 8 characters') + .max(100, 'Password must be less than 100 characters') + .refine((val) => /(?=.*[a-z])/.test(val), { + message: 'Password must contain at least one lowercase letter' + }) + .refine((val) => /(?=.*[A-Z])/.test(val), { + message: 'Password must contain at least one uppercase letter' + }) + .refine((val) => /(?=.*\d)/.test(val), { + message: 'Password must contain at least one number' + }), + + email: z + .string('Email must be a string') + .email('Invalid email format') + .toLowerCase() + .refine((val) => isNotDisposableEmail(val), { + message: 'Disposable email addresses are not allowed' + }) + .optional() +}) + +const loginSchema = z.object({ + username: z + .string('Username is required') + .min(1, 'Username is required') + .transform((val) => val.toLowerCase().trim()), + + password: z + .string('Password is required') + .min(1, 'Password is required'), + + remember: z.boolean('Remember must be true or false').optional().default(false) +}) + +const changePasswordSchema = z.object({ + currentPassword: z + .string('Current password is required') + .min(1, 'Current password is required'), + + newPassword: z + .string('New password must be a string') + .min(8, 'New password must be at least 8 characters') + .max(100, 'New password must be less than 100 characters') + .refine((val) => /(?=.*[a-z])/.test(val), { + message: 'Password must contain at least one lowercase letter' + }) + .refine((val) => /(?=.*[A-Z])/.test(val), { + message: 'Password must contain at least one uppercase letter' + }) + .refine((val) => /(?=.*\d)/.test(val), { + message: 'Password must contain at least one number' + }) +}) + +const updateProfileSchema = z.object({ + email: z + .string('Email must be a string') + .email('Invalid email format') + .toLowerCase() + .refine((val) => isNotDisposableEmail(val), { + message: 'Disposable email addresses are not allowed' + }) + .optional() +}) + +const updateUsernameSchema = z.object({ + newUsername: z + .string('Username must be a string') + .min(3, 'Username must be at least 3 characters') + .max(50, 'Username must be less than 50 characters') + .regex(/^[a-zA-Z0-9_-]+$/, { + message: + 'Username can only contain letters, numbers, underscores, and hyphens' + }) + .transform((val) => val.toLowerCase().trim()) + .refine((val) => isNotReservedUsername(val), { + message: 'This username is reserved and cannot be used' + }) }) export default async function authRoutes(fastify, options) { + // Initialize repository + const userRepo = new UserRepository() + // Public endpoint to check if registration is allowed fastify.get('/config', async (request, reply) => { return { @@ -61,18 +129,44 @@ export default async function authRoutes(fastify, options) { try { // Check if registration is allowed if (!registrationConfig.allowRegistration) { - reply.code(403).send({ + return reply.code(403).send({ error: 'Registration Disabled', message: 'User registration is currently disabled' }) - return } - // Validate input + // LAYER 1: Schema validation const validatedData = registerSchema.parse(request.body) + // LAYER 2: Business logic validation + // Check username uniqueness + const existingUser = await userRepo.findByUsername(validatedData.username) + if (existingUser) { + return reply.code(409).send({ + error: 'Conflict', + message: 'Username already taken', + details: ['This username is already in use. Please choose another.'] + }) + } + + // Check email uniqueness (if provided) + if (validatedData.email) { + const existingEmail = await userRepo.findByEmail(validatedData.email) + if (existingEmail) { + return reply.code(409).send({ + error: 'Conflict', + message: 'Email already registered', + details: ['This email is already in use. Please use a different email.'] + }) + } + } + // Create user - const user = await User.create(validatedData) + const user = await userRepo.createUser( + validatedData.username, + validatedData.password, + validatedData.email + ) // Generate JWT token const token = await reply.jwtSign( @@ -97,14 +191,14 @@ export default async function authRoutes(fastify, options) { }) } catch (error) { if (error instanceof z.ZodError) { - reply.code(400).send({ + return reply.code(400).send({ error: 'Validation Error', message: 'Invalid input data', - details: error.errors.map((e) => e.message) + details: formatValidationErrors(error) }) } else if (error.message.includes('already exists')) { - reply.code(400).send({ - error: 'Registration Failed', + return reply.code(409).send({ + error: 'Conflict', message: error.message }) } else { @@ -126,29 +220,30 @@ export default async function authRoutes(fastify, options) { }, async (request, reply) => { try { + // LAYER 1: Schema validation const { username, password } = loginSchema.parse(request.body) - // Find user - const user = await User.findByUsername(username) + // LAYER 2: Find user (also serves as authorization check) + const user = await userRepo.findByUsername(username) if (!user) { - reply.code(401).send({ + // Generic error message to prevent username enumeration + return reply.code(401).send({ error: 'Authentication Failed', message: 'Invalid username or password' }) - return } // Verify password - const isValidPassword = await User.verifyPassword( + const isValidPassword = await userRepo.verifyPassword( password, user.password_hash ) if (!isValidPassword) { - reply.code(401).send({ + // Generic error message to prevent username enumeration + return reply.code(401).send({ error: 'Authentication Failed', message: 'Invalid username or password' }) - return } // Generate JWT token @@ -198,17 +293,17 @@ export default async function authRoutes(fastify, options) { } }, async (request, reply) => { - try { - await request.jwtVerify() + try { + await request.jwtVerify() - const user = await User.findById(request.user.id) - if (!user) { - reply.code(401).send({ - error: 'Authentication Failed', - message: 'User not found' - }) - return - } + const user = await userRepo.findById(request.user.id) + if (!user) { + reply.code(401).send({ + error: 'Authentication Failed', + message: 'User not found' + }) + return + } // Generate new token const token = await reply.jwtSign( @@ -252,24 +347,24 @@ export default async function authRoutes(fastify, options) { ] }, async (request, reply) => { - try { - const user = await User.findById(request.user.id) - if (!user) { - reply.code(404).send({ - error: 'Not Found', - message: 'User not found' - }) - return - } + try { + const user = await userRepo.findById(request.user.id) + if (!user) { + reply.code(404).send({ + error: 'Not Found', + message: 'User not found' + }) + return + } - reply.send({ - user: { - id: user.id, - username: user.username, - email: user.email, - createdAt: user.created_at - } - }) + reply.send({ + user: { + id: user.id, + username: user.username, + email: user.email, + createdAt: user.created_at + } + }) } catch (error) { fastify.log.error('Get profile error:', error) reply.code(500).send({ @@ -298,20 +393,20 @@ export default async function authRoutes(fastify, options) { ] }, async (request, reply) => { - try { - const validatedData = updateProfileSchema.parse(request.body) + try { + const validatedData = updateProfileSchema.parse(request.body) - const updated = await User.updateProfile(request.user.id, validatedData) + const updated = await userRepo.updateProfile(request.user.id, validatedData) - if (!updated) { - reply.code(400).send({ - error: 'Update Failed', - message: 'No valid fields to update' - }) - return - } + if (!updated) { + reply.code(400).send({ + error: 'Update Failed', + message: 'No valid fields to update' + }) + return + } - const user = await User.findById(request.user.id) + const user = await userRepo.findById(request.user.id) reply.send({ message: 'Profile updated successfully', @@ -364,31 +459,31 @@ export default async function authRoutes(fastify, options) { config: { rateLimit: { max: 5, timeWindow: '1 hour' } } }, async (request, reply) => { - try { - const { newUsername } = updateUsernameSchema.parse(request.body) + try { + const { newUsername } = updateUsernameSchema.parse(request.body) - // Check if username is already taken - const existingUser = await User.findByUsername(newUsername) - if (existingUser && existingUser.id !== request.user.id) { - reply.code(400).send({ - error: 'Username Taken', - message: 'Username is already taken' - }) - return - } + // Check if username is already taken + const existingUser = await userRepo.findByUsername(newUsername) + if (existingUser && existingUser.id !== request.user.id) { + reply.code(400).send({ + error: 'Username Taken', + message: 'Username is already taken' + }) + return + } - // Update username using User model method - const updated = await User.updateUsername(request.user.id, newUsername) + // Update username using repository method + const updated = await userRepo.updateUsername(request.user.id, newUsername) - if (!updated) { - reply.code(500).send({ - error: 'Internal Server Error', - message: 'Failed to update username' - }) - return - } + if (!updated) { + reply.code(500).send({ + error: 'Internal Server Error', + message: 'Failed to update username' + }) + return + } - const user = await User.findById(request.user.id) + const user = await userRepo.findById(request.user.id) reply.send({ message: 'Username updated successfully', @@ -416,54 +511,54 @@ export default async function authRoutes(fastify, options) { } ) - // Change password (POST - keep for backward compatibility) - fastify.post( - '/change-password', - { - preHandler: [ - async (request, reply) => { - try { - await request.jwtVerify() - } catch (err) { - reply.code(401).send({ - error: 'Unauthorized', - message: 'Invalid or expired token' - }) - } - } - ], - config: { rateLimit: { max: 3, timeWindow: '1 hour' } } - }, - async (request, reply) => { - try { - const { currentPassword, newPassword } = changePasswordSchema.parse( - request.body - ) + // Change password (POST - keep for backward compatibility) + fastify.post( + '/change-password', + { + preHandler: [ + async (request, reply) => { + try { + await request.jwtVerify() + } catch (err) { + reply.code(401).send({ + error: 'Unauthorized', + message: 'Invalid or expired token' + }) + } + } + ], + config: { rateLimit: { max: 3, timeWindow: '1 hour' } } + }, + async (request, reply) => { + try { + const { currentPassword, newPassword } = changePasswordSchema.parse( + request.body + ) - // Verify current password - const user = await User.findByUsername(request.user.username) - if (!user) { - reply.code(404).send({ - error: 'Not Found', - message: 'User not found' - }) - return - } + // Verify current password + const user = await userRepo.findByUsername(request.user.username) + if (!user) { + reply.code(404).send({ + error: 'Not Found', + message: 'User not found' + }) + return + } - const isValidPassword = await User.verifyPassword( - currentPassword, - user.password_hash - ) - if (!isValidPassword) { - reply.code(401).send({ - error: 'Authentication Failed', - message: 'Current password is incorrect' - }) - return - } + const isValidPassword = await userRepo.verifyPassword( + currentPassword, + user.password_hash + ) + if (!isValidPassword) { + reply.code(401).send({ + error: 'Authentication Failed', + message: 'Current password is incorrect' + }) + return + } - // Update password - const updated = await User.updatePassword(request.user.id, newPassword) + // Update password + const updated = await userRepo.updatePassword(request.user.id, newPassword) if (!updated) { reply.code(500).send({ @@ -494,54 +589,54 @@ export default async function authRoutes(fastify, options) { } ) - // Change password (PUT) - fastify.put( - '/change-password', - { - preHandler: [ - async (request, reply) => { - try { - await request.jwtVerify() - } catch (err) { - reply.code(401).send({ - error: 'Unauthorized', - message: 'Invalid or expired token' - }) - } - } - ], - config: { rateLimit: { max: 3, timeWindow: '1 hour' } } - }, - async (request, reply) => { - try { - const { currentPassword, newPassword } = changePasswordSchema.parse( - request.body - ) + // Change password (PUT) + fastify.put( + '/change-password', + { + preHandler: [ + async (request, reply) => { + try { + await request.jwtVerify() + } catch (err) { + reply.code(401).send({ + error: 'Unauthorized', + message: 'Invalid or expired token' + }) + } + } + ], + config: { rateLimit: { max: 3, timeWindow: '1 hour' } } + }, + async (request, reply) => { + try { + const { currentPassword, newPassword } = changePasswordSchema.parse( + request.body + ) - // Verify current password - const user = await User.findByUsername(request.user.username) - if (!user) { - reply.code(404).send({ - error: 'Not Found', - message: 'User not found' - }) - return - } + // Verify current password + const user = await userRepo.findByUsername(request.user.username) + if (!user) { + reply.code(404).send({ + error: 'Not Found', + message: 'User not found' + }) + return + } - const isValidPassword = await User.verifyPassword( - currentPassword, - user.password_hash - ) - if (!isValidPassword) { - reply.code(401).send({ - error: 'Authentication Failed', - message: 'Current password is incorrect' - }) - return - } + const isValidPassword = await userRepo.verifyPassword( + currentPassword, + user.password_hash + ) + if (!isValidPassword) { + reply.code(401).send({ + error: 'Authentication Failed', + message: 'Current password is incorrect' + }) + return + } - // Update password - const updated = await User.updatePassword(request.user.id, newPassword) + // Update password + const updated = await userRepo.updatePassword(request.user.id, newPassword) if (!updated) { reply.code(500).send({ diff --git a/backend/src/routes/commanders.js b/backend/src/routes/commanders.js index 60ce706..dc5e080 100644 --- a/backend/src/routes/commanders.js +++ b/backend/src/routes/commanders.js @@ -1,31 +1,99 @@ // Commander management routes import { z } from 'zod' -import Commander from '../models/Commander.js' +import CommanderRepository from '../repositories/CommanderRepository.js' +import { + hasNoDuplicateColors, + formatValidationErrors +} from '../utils/validators.js' -// Validation schemas +// Validation schemas with enhanced validation const createCommanderSchema = z.object({ - name: z.string().min(2).max(100), + name: z + .string('Commander name must be a string') + .min(2, 'Commander name must be at least 2 characters') + .max(100, 'Commander name must be less than 100 characters') + .transform((val) => val.trim()) + .refine((val) => /^[a-zA-Z0-9\s,.\'-]+$/.test(val), { + message: 'Commander name contains invalid characters' + }), + colors: z - .array(z.enum(['W', 'U', 'B', 'R', 'G'])) - .min(1) - .max(5) + .array( + z.enum(['W', 'U', 'B', 'R', 'G'], { + errorMap: () => ({ message: 'Invalid color (must be W, U, B, R, or G)' }) + }), + { + errorMap: () => ({ message: 'Colors must be an array' }) + } + ) + .min(1, 'Select at least one color') + .max(5, 'Maximum 5 colors allowed') + .refine((colors) => hasNoDuplicateColors(colors), { + message: 'Duplicate colors are not allowed' + }) }) const updateCommanderSchema = z.object({ - name: z.string().min(2).max(100).optional(), + name: z + .string('Commander name must be a string') + .min(2, 'Commander name must be at least 2 characters') + .max(100, 'Commander name must be less than 100 characters') + .transform((val) => val.trim()) + .refine((val) => /^[a-zA-Z0-9\s,.\'-]+$/.test(val), { + message: 'Commander name contains invalid characters' + }) + .optional(), + colors: z - .array(z.enum(['W', 'U', 'B', 'R', 'G'])) - .min(1) - .max(5) + .array( + z.enum(['W', 'U', 'B', 'R', 'G'], { + errorMap: () => ({ message: 'Invalid color (must be W, U, B, R, or G)' }) + }) + ) + .min(1, 'Select at least one color') + .max(5, 'Maximum 5 colors allowed') + .refine((colors) => hasNoDuplicateColors(colors), { + message: 'Duplicate colors are not allowed' + }) .optional() }) const commanderQuerySchema = z.object({ - q: z.string().min(1).max(50).optional(), - limit: z.coerce.number().min(1).max(50).default(20) + q: z + .string('Search query must be a string') + .min(1, 'Search query cannot be empty') + .max(50, 'Search query limited to 50 characters') + .optional(), + limit: z + .coerce + .number('Limit must be a number') + .int('Limit must be a whole number') + .min(1, 'Minimum 1 commander per page') + .max(50, 'Maximum 50 commanders per page') + .default(20) }) +// Helper function to transform commander from DB format to API format +function transformCommander(cmd) { + return { + id: cmd.id, + name: cmd.name, + colors: cmd.colors || [], + userId: cmd.user_id, + totalGames: parseInt(cmd.total_games) || 0, + totalWins: parseInt(cmd.total_wins) || 0, + winRate: cmd.win_rate ? parseFloat(cmd.win_rate) : 0, + avgRounds: cmd.avg_rounds ? parseFloat(cmd.avg_rounds) : 0, + lastPlayed: cmd.last_played, + createdAt: cmd.created_at, + updatedAt: cmd.updated_at + } +} + export default async function commanderRoutes(fastify, options) { + // Initialize repository + const commanderRepo = new CommanderRepository() + // Get all commanders for the authenticated user fastify.get( '/', @@ -45,18 +113,18 @@ export default async function commanderRoutes(fastify, options) { }, async (request, reply) => { try { - const { q, limit } = commanderQuerySchema.parse(request.query) - const userId = request.user.id + const { q, limit } = commanderQuerySchema.parse(request.query) + const userId = request.user.id - let commanders - if (q) { - commanders = await Commander.search(userId, q, limit) - } else { - commanders = await Commander.findByUserId(userId, limit) - } + let commanders + if (q) { + commanders = await commanderRepo.searchCommandersByName(userId, q, limit) + } else { + commanders = await commanderRepo.getCommandersByUserId(userId, limit) + } reply.send({ - commanders, + commanders: commanders.map(transformCommander), total: commanders.length }) } catch (error) { @@ -87,26 +155,26 @@ export default async function commanderRoutes(fastify, options) { ] }, async (request, reply) => { - try { - const { id } = request.params - const userId = request.user.id + try { + const { id } = request.params + const userId = request.user.id - const commander = await Commander.findById(id) + const commander = await commanderRepo.findById(id) - if (!commander || commander.userId !== userId) { - reply.code(404).send({ - error: 'Not Found', - message: 'Commander not found' - }) - return - } + if (!commander || commander.user_id !== userId) { + reply.code(404).send({ + error: 'Not Found', + message: 'Commander not found' + }) + return + } - reply.send({ - commander: { - ...commander, - colors: JSON.parse(commander.colors) - } - }) + reply.send({ + commander: { + ...commander, + colors: commander.colors || [] + } + }) } catch (error) { fastify.log.error('Get commander error:', error) reply.code(500).send({ @@ -137,28 +205,44 @@ export default async function commanderRoutes(fastify, options) { }, async (request, reply) => { try { - // Manually parse since fastify.decorate request.user is set by jwtVerify const userId = request.user.id + + // LAYER 1: Schema validation const validatedData = createCommanderSchema.parse(request.body) - const commander = await Commander.create({ - ...validatedData, + // LAYER 2: Business logic validation + // Check for duplicate commander name (case-insensitive) + const existing = await commanderRepo.findByNameAndUserId( + validatedData.name.toLowerCase(), userId - }) + ) + + if (existing) { + return reply.code(409).send({ + error: 'Conflict', + message: 'Commander already exists', + details: [`You already have a commander named "${validatedData.name}"`] + }) + } - reply.code(201).send({ - message: 'Commander created successfully', - commander: { - ...commander, - colors: JSON.parse(commander.colors) - } - }) + // Convert colors array to JSON string for storage + const colorsJson = JSON.stringify(validatedData.colors) + const commander = await commanderRepo.createCommander( + userId, + validatedData.name, + colorsJson + ) + + reply.code(201).send({ + message: 'Commander created successfully', + commander: transformCommander(commander) + }) } catch (error) { if (error instanceof z.ZodError) { - reply.code(400).send({ + return reply.code(400).send({ error: 'Validation Error', message: 'Invalid input data', - details: error.errors.map((e) => e.message) + details: formatValidationErrors(error) }) } else { fastify.log.error('Create commander error:', error) @@ -190,30 +274,33 @@ export default async function commanderRoutes(fastify, options) { ] }, async (request, reply) => { - try { - const { id } = request.params - const userId = request.user.id - const updateData = updateCommanderSchema.parse(request.body) + try { + const { id } = request.params + const userId = request.user.id + const updateData = updateCommanderSchema.parse(request.body) - const updated = await Commander.update(id, updateData, userId) + // Convert colors array to JSON if provided + const updatePayload = { ...updateData } + if (updatePayload.colors) { + updatePayload.colors = JSON.stringify(updatePayload.colors) + } - if (!updated) { - reply.code(400).send({ - error: 'Update Failed', - message: 'No valid fields to update or commander not found' + const updated = await commanderRepo.updateCommander(id, userId, updatePayload) + + if (!updated) { + reply.code(400).send({ + error: 'Update Failed', + message: 'No valid fields to update or commander not found' + }) + return + } + + const commander = await commanderRepo.findById(id) + + reply.send({ + message: 'Commander updated successfully', + commander: transformCommander(commander) }) - return - } - - const commander = await Commander.findById(id) - - reply.send({ - message: 'Commander updated successfully', - commander: { - ...commander, - colors: JSON.parse(commander.colors) - } - }) } catch (error) { if (error instanceof z.ZodError) { reply.code(400).send({ @@ -251,19 +338,19 @@ export default async function commanderRoutes(fastify, options) { ] }, async (request, reply) => { - try { - const { id } = request.params - const userId = request.user.id + try { + const { id } = request.params + const userId = request.user.id - const deleted = await Commander.delete(id, userId) + const deleted = await commanderRepo.deleteCommander(id, userId) - if (!deleted) { - reply.code(404).send({ - error: 'Not Found', - message: 'Commander not found' - }) - return - } + if (!deleted) { + reply.code(404).send({ + error: 'Not Found', + message: 'Commander not found' + }) + return + } reply.send({ message: 'Commander deleted successfully' @@ -296,19 +383,19 @@ export default async function commanderRoutes(fastify, options) { ] }, async (request, reply) => { - try { - const { id } = request.params - const userId = request.user.id + try { + const { id } = request.params + const userId = request.user.id - const stats = await Commander.getStats(id, userId) + const stats = await commanderRepo.getCommanderStats(id, userId) - reply.send({ - stats: { - ...stats, - win_rate: Math.round(stats.winRate || 0), - avg_rounds: Math.round(stats.avgRounds || 0) - } - }) + reply.send({ + stats: { + ...stats, + win_rate: Math.round(stats.win_rate || 0), + avg_rounds: Math.round(stats.avg_rounds || 0) + } + }) } catch (error) { fastify.log.error('Get commander stats error:', error) reply.code(500).send({ @@ -337,13 +424,13 @@ export default async function commanderRoutes(fastify, options) { ] }, async (request, reply) => { - try { - const userId = request.user.id - const commanders = await Commander.getPopular(userId) + try { + const userId = request.user.id + const commanders = await commanderRepo.getPopularCommandersByUserId(userId) - reply.send({ - commanders - }) + reply.send({ + commanders: commanders.map(transformCommander) + }) } catch (error) { fastify.log.error('Get popular commanders error:', error) reply.code(500).send({ diff --git a/backend/src/routes/games.js b/backend/src/routes/games.js index e01ab59..a467ba2 100644 --- a/backend/src/routes/games.js +++ b/backend/src/routes/games.js @@ -1,39 +1,130 @@ // Game management routes import { z } from 'zod' -import Game from '../models/Game.js' +import GameRepository from '../repositories/GameRepository.js' +import CommanderRepository from '../repositories/CommanderRepository.js' +import { + validateDateRange, + isNotSpam, + formatValidationErrors +} from '../utils/validators.js' -// Validation schemas +// Validation schemas with comprehensive validation const createGameSchema = z.object({ - date: z.string().refine((date) => !isNaN(Date.parse(date)), { - message: 'Invalid date format' - }), - playerCount: z.number().int().min(2).max(8), - commanderId: z.number().int().positive(), - won: z.boolean(), - rounds: z.number().int().min(1).max(50), - startingPlayerWon: z.boolean(), - solRingTurnOneWon: z.boolean(), - notes: z.string().max(1000).optional() + date: z + .string('Date must be a string') + .refine((date) => !isNaN(Date.parse(date)), { + message: 'Invalid date format (use YYYY-MM-DD)' + }) + .refine((date) => validateDateRange(date), { + message: 'Game date must be within the last year and not in the future' + }), + + playerCount: z + .number('Player count must be a number') + .int('Player count must be a whole number') + .min(2, 'Minimum 2 players required') + .max(8, 'Maximum 8 players allowed'), + + commanderId: z + .number('Commander ID must be a number') + .int('Commander ID must be a whole number') + .positive('Commander ID must be positive') + .max(2147483647, 'Invalid commander ID'), + + won: z.boolean('Won must be true or false'), + + rounds: z + .number('Rounds must be a number') + .int('Rounds must be a whole number') + .min(1, 'Minimum 1 round') + .max(50, 'Maximum 50 rounds'), + + startingPlayerWon: z.boolean('Starting player won must be true or false'), + solRingTurnOneWon: z.boolean('Sol ring turn one won must be true or false'), + + notes: z + .string('Notes must be a string') + .max(1000, 'Notes limited to 1000 characters') + .optional() + .transform((val) => val?.trim() || null) + .refine((notes) => isNotSpam(notes), { + message: 'Notes appear to be spam' + }) }) const updateGameSchema = z.object({ - date: z.string().optional(), - commanderId: z.number().int().positive().optional(), - playerCount: z.number().int().min(2).max(8).optional(), - won: z.boolean().optional(), - rounds: z.number().int().min(1).max(50).optional(), - startingPlayerWon: z.boolean().optional(), - solRingTurnOneWon: z.boolean().optional(), - notes: z.string().max(1000).optional().nullable() + date: z + .string('Date must be a string') + .refine((date) => !isNaN(Date.parse(date)), { + message: 'Invalid date format (use YYYY-MM-DD)' + }) + .refine((date) => validateDateRange(date), { + message: 'Game date must be within the last year and not in the future' + }) + .optional(), + + commanderId: z + .number('Commander ID must be a number') + .int('Commander ID must be a whole number') + .positive('Commander ID must be positive') + .optional(), + + playerCount: z + .number('Player count must be a number') + .int('Player count must be a whole number') + .min(2, 'Minimum 2 players required') + .max(8, 'Maximum 8 players allowed') + .optional(), + + won: z.boolean('Won must be true or false').optional(), + + rounds: z + .number('Rounds must be a number') + .int('Rounds must be a whole number') + .min(1, 'Minimum 1 round') + .max(50, 'Maximum 50 rounds') + .optional(), + + startingPlayerWon: z.boolean('Starting player won must be true or false').optional(), + solRingTurnOneWon: z.boolean('Sol ring turn one won must be true or false').optional(), + + notes: z + .string('Notes must be a string') + .max(1000, 'Notes limited to 1000 characters') + .optional() + .transform((val) => val?.trim() || null) + .refine((notes) => isNotSpam(notes), { + message: 'Notes appear to be spam' + }) + .nullable() }) const gameQuerySchema = z.object({ - q: z.string().min(1).max(50).optional(), - limit: z.coerce.number().min(1).default(50), - offset: z.coerce.number().default(0) + q: z + .string('Search query must be a string') + .min(1, 'Search query cannot be empty') + .max(50, 'Search query limited to 50 characters') + .optional(), + limit: z + .coerce + .number('Limit must be a number') + .int('Limit must be a whole number') + .min(1, 'Minimum 1 game per page') + .max(100, 'Maximum 100 games per page') + .default(50), + offset: z + .coerce + .number('Offset must be a number') + .int('Offset must be a whole number') + .min(0, 'Offset cannot be negative') + .default(0) }) export default async function gameRoutes(fastify, options) { + // Initialize repositories + const gameRepo = new GameRepository() + const commanderRepo = new CommanderRepository() + // Get all games for authenticated user with pagination and filtering fastify.get( '/', @@ -52,35 +143,53 @@ export default async function gameRoutes(fastify, options) { } ] }, - async (request, reply) => { - try { - const { q, limit, offset } = gameQuerySchema.parse(request.query) - const userId = request.user.id + async (request, reply) => { + try { + const { q, limit, offset } = gameQuerySchema.parse(request.query) + const userId = request.user.id - const filters = {} - if (q) { - filters.commander = `%${q}%` - } - - let games = await Game.findByUserId(userId, limit, offset, filters) - - reply.send({ - games, - pagination: { - total: games.length, - page: Math.floor(limit / 20) + 1, - limit, - offset + const filters = {} + if (q) { + filters.commander = q } - }) - } catch (error) { - fastify.log.error('Get games error:', error) - reply.code(500).send({ - error: 'Internal Server Error', - message: 'Failed to fetch games' - }) - } - } + + let games = await gameRepo.getGamesByUserId(userId, limit, offset, filters) + + // Transform database results to camelCase with commander info + const transformedGames = games.map((game) => ({ + id: game.id, + date: new Date(game.date).toLocaleDateString('en-US'), + playerCount: game.player_count, + commanderId: game.commander_id, + won: game.won, + rounds: game.rounds, + startingPlayerWon: game.starting_player_won, + solRingTurnOneWon: game.sol_ring_turn_one_won, + notes: game.notes || null, + commanderName: game.name, + commanderColors: game.colors || [], + userId: game.user_id, + createdAt: game.created_at, + updatedAt: game.updated_at + })) + + reply.send({ + games: transformedGames, + pagination: { + total: transformedGames.length, + page: Math.floor(limit / 20) + 1, + limit, + offset + } + }) + } catch (error) { + fastify.log.error('Get games error:', error) + reply.code(500).send({ + error: 'Internal Server Error', + message: 'Failed to fetch games' + }) + } + } ) // Get specific game @@ -101,13 +210,13 @@ export default async function gameRoutes(fastify, options) { ] }, async (request, reply) => { - try { - const { id } = request.params - const userId = request.user.id + try { + const { id } = request.params + const userId = request.user.id - const game = await Game.findById(id) + const game = await gameRepo.getGameById(id, userId) - if (!game || game.user_id !== userId) { + if (!game) { reply.code(404).send({ error: 'Not Found', message: 'Game not found' @@ -115,24 +224,23 @@ export default async function gameRoutes(fastify, options) { return } - reply.send({ - game: { - id: game.id, - date: new Date(game.date).toLocaleDateString('en-US'), - playerCount: game.player_count, - commanderId: game.commander_id, - won: game.won, - rounds: game.rounds, - startingPlayerWon: game.starting_player_won, - solRingTurnOneWon: game.sol_ring_turn_one_won, - notes: game.notes || null, - commanderName: game.commander_name, - commanderColors: JSON.parse(game.commander_colors || '[]'), - userId: game.user_id, - createdAt: game.created_at, - updatedAt: game.updated_at - } - }) + reply.send({ + game: { + id: game.id, + date: new Date(game.date).toLocaleDateString('en-US'), + playerCount: game.player_count, + commanderId: game.commander_id, + won: game.won, + rounds: game.rounds, + startingPlayerWon: game.starting_player_won, + solRingTurnOneWon: game.sol_ring_turn_one_won, + notes: game.notes || null, + commanderName: game.commander_name, + commanderColors: game.commander_colors || [], + createdAt: game.created_at, + updatedAt: game.updated_at + } + }) } catch (error) { fastify.log.error('Get game error:', error) reply.code(500).send({ @@ -163,49 +271,82 @@ export default async function gameRoutes(fastify, options) { }, async (request, reply) => { try { - const validatedData = createGameSchema.parse(request.body) const userId = request.user.id + + // LAYER 1: Schema validation + const validatedData = createGameSchema.parse(request.body) + + // LAYER 2: Business logic validation + // Check commander exists and belongs to user + const commander = await commanderRepo.findById(validatedData.commanderId) + + if (!commander || commander.user_id !== userId) { + return reply.code(400).send({ + error: 'Bad Request', + message: 'Invalid commander ID or commander not found', + details: ['Commander does not exist or does not belong to you'] + }) + } + + // Check for duplicate games (same commander on same date) + const existingGame = await gameRepo.findGameByDateAndCommander( + userId, + validatedData.date, + validatedData.commanderId + ) + + if (existingGame) { + return reply.code(409).send({ + error: 'Conflict', + message: 'Duplicate game detected', + details: [ + `You already logged a game with ${commander.name} on ${validatedData.date}` + ] + }) + } // Convert camelCase to snake_case for database const gameData = { date: validatedData.date, player_count: validatedData.playerCount, commander_id: validatedData.commanderId, - won: validatedData.won ? 1 : 0, + won: validatedData.won, rounds: validatedData.rounds, - starting_player_won: validatedData.startingPlayerWon ? 1 : 0, - sol_ring_turn_one_won: validatedData.solRingTurnOneWon ? 1 : 0, + starting_player_won: validatedData.startingPlayerWon, + sol_ring_turn_one_won: validatedData.solRingTurnOneWon, notes: validatedData.notes, - userId + user_id: userId } - const game = await Game.create(gameData) + const game = await gameRepo.createGame(gameData) + + // Fetch the game with commander details + const gameWithCommander = await gameRepo.getGameById(game.id, userId) - reply.code(201).send({ - message: 'Game logged successfully', - game: { - id: game.id, - date: new Date(game.date).toLocaleDateString('en-US'), - playerCount: game.player_count, - commanderId: game.commander_id, - won: game.won, - rounds: game.rounds, - startingPlayerWon: game.starting_player_won, - solRingTurnOneWon: game.sol_ring_turn_one_won, - notes: game.notes || null, - commanderName: game.commander_name, - commanderColors: JSON.parse(game.commander_colors || '[]'), - userId: game.user_id, - createdAt: game.created_at, - updatedAt: game.updated_at - } - }) + reply.code(201).send({ + message: 'Game logged successfully', + game: { + id: gameWithCommander.id, + date: new Date(gameWithCommander.date).toLocaleDateString('en-US'), + playerCount: gameWithCommander.player_count, + commanderId: gameWithCommander.commander_id, + won: gameWithCommander.won, + rounds: gameWithCommander.rounds, + startingPlayerWon: gameWithCommander.starting_player_won, + solRingTurnOneWon: gameWithCommander.sol_ring_turn_one_won, + notes: gameWithCommander.notes || null, + commanderName: gameWithCommander.commander_name, + commanderColors: gameWithCommander.commander_colors || [], + createdAt: gameWithCommander.created_at, + updatedAt: gameWithCommander.updated_at + } + }) } catch (error) { if (error instanceof z.ZodError) { - reply.code(400).send({ + return reply.code(400).send({ error: 'Validation Error', message: 'Invalid input data', - details: error.errors.map((e) => e.message) + details: formatValidationErrors(error) }) } else { fastify.log.error('Create game error:', error) @@ -242,22 +383,22 @@ export default async function gameRoutes(fastify, options) { const userId = request.user.id const updateData = updateGameSchema.parse(request.body) - // Convert camelCase to snake_case for database - const gameData = {} - if (updateData.date !== undefined) gameData.date = updateData.date - if (updateData.commanderId !== undefined) - gameData.commander_id = updateData.commanderId - if (updateData.playerCount !== undefined) - gameData.player_count = updateData.playerCount - if (updateData.won !== undefined) gameData.won = updateData.won - if (updateData.rounds !== undefined) gameData.rounds = updateData.rounds - if (updateData.startingPlayerWon !== undefined) - gameData.starting_player_won = updateData.startingPlayerWon - if (updateData.solRingTurnOneWon !== undefined) - gameData.sol_ring_turn_one_won = updateData.solRingTurnOneWon - if (updateData.notes !== undefined) gameData.notes = updateData.notes + // Convert camelCase to snake_case for database + const gameData = {} + if (updateData.date !== undefined) gameData.date = updateData.date + if (updateData.commanderId !== undefined) + gameData.commander_id = updateData.commanderId + if (updateData.playerCount !== undefined) + gameData.player_count = updateData.playerCount + if (updateData.won !== undefined) gameData.won = updateData.won + if (updateData.rounds !== undefined) gameData.rounds = updateData.rounds + if (updateData.startingPlayerWon !== undefined) + gameData.starting_player_won = updateData.startingPlayerWon + if (updateData.solRingTurnOneWon !== undefined) + gameData.sol_ring_turn_one_won = updateData.solRingTurnOneWon + if (updateData.notes !== undefined) gameData.notes = updateData.notes - const updated = await Game.update(id, gameData, userId) + const updated = await gameRepo.updateGame(id, userId, gameData) if (!updated) { reply.code(400).send({ @@ -267,27 +408,26 @@ export default async function gameRoutes(fastify, options) { return } - const game = await Game.findById(id) + const game = await gameRepo.getGameById(id, userId) - reply.send({ - message: 'Game updated successfully', - game: { - id: game.id, - date: new Date(game.date).toLocaleDateString('en-US'), - playerCount: game.player_count, - commanderId: game.commander_id, - won: game.won, - rounds: game.rounds, - startingPlayerWon: game.starting_player_won, - solRingTurnOneWon: game.sol_ring_turn_one_won, - notes: game.notes || null, - commanderName: game.commander_name, - commanderColors: JSON.parse(game.commander_colors || '[]'), - userId: game.user_id, - createdAt: game.created_at, - updatedAt: game.updated_at - } - }) + reply.send({ + message: 'Game updated successfully', + game: { + id: game.id, + date: new Date(game.date).toLocaleDateString('en-US'), + playerCount: game.player_count, + commanderId: game.commander_id, + won: game.won, + rounds: game.rounds, + startingPlayerWon: game.starting_player_won, + solRingTurnOneWon: game.sol_ring_turn_one_won, + notes: game.notes || null, + commanderName: game.commander_name, + commanderColors: game.commander_colors || [], + createdAt: game.created_at, + updatedAt: game.updated_at + } + }) } catch (error) { if (error instanceof z.ZodError) { reply.code(400).send({ @@ -325,13 +465,13 @@ export default async function gameRoutes(fastify, options) { ] }, async (request, reply) => { - try { - const { id } = request.params - const userId = request.user.id + try { + const { id } = request.params + const userId = request.user.id - const deleted = await Game.delete(id, userId) + const deleted = await gameRepo.deleteGame(id, userId) - if (!deleted) { + if (!deleted) { reply.code(404).send({ error: 'Not Found', message: 'Game not found' @@ -381,9 +521,9 @@ export default async function gameRoutes(fastify, options) { if (request.query.dateFrom) filters.dateFrom = request.query.dateFrom if (request.query.dateTo) filters.dateTo = request.query.dateTo - const games = await Game.exportByUserId(userId, filters) - - // Generate filename with current date + const games = await gameRepo.exportGamesByUserId(userId, filters) + + // Generate filename with current date const today = new Date().toLocaleDateString('en-US').replace(/\//g, '_') const filename = `edh_games_${today}.json` diff --git a/backend/src/routes/stats.js b/backend/src/routes/stats.js index 645440a..44af600 100644 --- a/backend/src/routes/stats.js +++ b/backend/src/routes/stats.js @@ -19,31 +19,28 @@ export default async function statsRoutes(fastify, options) { }, async (request, reply) => { try { - const db = await dbManager.initialize() const userId = request.user.id - const stats = db - .prepare( - ` + const stats = await dbManager.get( + ` SELECT total_games, win_rate, total_commanders, avg_rounds FROM user_stats - WHERE user_id = ? - ` - ) - .get([userId]) + WHERE user_id = $1 + `, + [userId] + ) // Also query games directly to verify - const directGameCount = db - .prepare( - ` - SELECT COUNT(*) as count FROM games WHERE user_id = ? - ` - ) - .get([userId]) + const directGameCount = await dbManager.get( + ` + SELECT COUNT(*) as count FROM games WHERE user_id = $1 + `, + [userId] + ) reply.send({ totalGames: stats?.total_games || 0, @@ -80,19 +77,17 @@ export default async function statsRoutes(fastify, options) { }, async (request, reply) => { try { - const db = await dbManager.initialize() const userId = request.user.id // Get detailed commander stats with at least 5 games, sorted by total games then win rate - const rawStats = db - .prepare( - ` - SELECT * FROM commander_stats - WHERE user_id = ? AND total_games >= 5 - ORDER BY total_games DESC, win_rate DESC - ` - ) - .all([userId]) + const rawStats = await dbManager.all( + ` + SELECT * FROM commander_stats + WHERE user_id = $1 AND total_games >= 5 + ORDER BY total_games DESC, win_rate DESC + `, + [userId] + ) // Convert snake_case to camelCase const stats = rawStats.map((stat) => ({ @@ -110,37 +105,35 @@ export default async function statsRoutes(fastify, options) { })) // Calculate chart data: Win Rate by Player Count - const playerCountStats = db - .prepare( - ` - SELECT - player_count, - COUNT(*) as total, - SUM(CASE WHEN won = 1 THEN 1 ELSE 0 END) as wins - FROM games - WHERE user_id = ? - GROUP BY player_count - ORDER BY player_count - ` - ) - .all([userId]) + const playerCountStats = await dbManager.all( + ` + SELECT + player_count, + COUNT(*) as total, + SUM(CASE WHEN won = TRUE THEN 1 ELSE 0 END) as wins + FROM games + WHERE user_id = $1 + GROUP BY player_count + ORDER BY player_count + `, + [userId] + ) // Calculate chart data: Win Rate by Color (simple single color approximation for now) // Note: Real multi-color handling is complex in SQL, this matches exact color identity strings - const colorStats = db - .prepare( - ` - SELECT - c.colors, - COUNT(g.id) as total, - SUM(CASE WHEN g.won = 1 THEN 1 ELSE 0 END) as wins - FROM games g - JOIN commanders c ON g.commander_id = c.id - WHERE g.user_id = ? - GROUP BY c.colors - ` - ) - .all([userId]) + const colorStats = await dbManager.all( + ` + SELECT + c.colors, + COUNT(g.id) as total, + SUM(CASE WHEN g.won = TRUE THEN 1 ELSE 0 END) as wins + FROM games g + JOIN commanders c ON g.commander_id = c.id + WHERE g.user_id = $1 + GROUP BY c.colors + `, + [userId] + ) reply.send({ stats, @@ -151,10 +144,10 @@ export default async function statsRoutes(fastify, options) { Math.round((s.wins / s.total) * 100) ) }, - colors: { - labels: colorStats.map((s) => JSON.parse(s.colors).join('')), - data: colorStats.map((s) => Math.round((s.wins / s.total) * 100)) - } + colors: { + labels: colorStats.map((s) => (Array.isArray(s.colors) ? s.colors.join('') : '')), + data: colorStats.map((s) => Math.round((s.wins / s.total) * 100)) + } } }) } catch (error) { diff --git a/backend/src/scripts/reset_password.js b/backend/src/scripts/reset_password.js deleted file mode 100644 index 3c1bd7a..0000000 --- a/backend/src/scripts/reset_password.js +++ /dev/null @@ -1,42 +0,0 @@ -import User from '../models/User.js' -import dbManager from '../config/database.js' - -async function resetPassword() { - try { - console.log('Initializing database...') - await dbManager.initialize() - - const username = 'newuser' - const newPassword = 'password123' - - console.log(`Resetting password for user: ${username}`) - - // Find user - const user = await User.findByUsername(username) - if (!user) { - console.error('User not found!') - process.exit(1) - } - - // Update password - const success = await User.updatePassword(user.id, newPassword) - - if (success) { - console.log('Password reset successfully!') - - // Verify immediately - const updatedUser = await User.findByUsername(username) - const isValid = await User.verifyPassword(newPassword, updatedUser.password_hash) - console.log(`Verification result: ${isValid ? 'SUCCESS' : 'FAILED'}`) - } else { - console.error('Failed to update password') - } - - } catch (error) { - console.error('Error:', error) - } finally { - await dbManager.close() - } -} - -resetPassword() diff --git a/backend/src/server.js b/backend/src/server.js index 87b1709..d554a8b 100644 --- a/backend/src/server.js +++ b/backend/src/server.js @@ -5,7 +5,7 @@ import jwt from '@fastify/jwt' import closeWithGrace from 'close-with-grace' // Import configurations -import { jwtConfig, corsConfig, serverConfig } from './config/jwt.js' +import { jwtConfig, corsConfig, serverConfig, rateLimitConfig } from './config/jwt.js' import dbManager from './config/database.js' // Import routes @@ -20,12 +20,36 @@ export default async function build(opts = {}) { // Register plugins await app.register(cors, corsConfig) + // Add request logging hook + app.addHook('onRequest', async (request, reply) => { + request.startTime = Date.now() + app.log.debug({ + method: request.method, + url: request.url, + ip: request.ip + }, 'Incoming request') + }) + + // Add response logging hook + app.addHook('onResponse', async (request, reply) => { + const duration = Date.now() - (request.startTime || Date.now()) + app.log.debug({ + method: request.method, + url: request.url, + statusCode: reply.statusCode, + durationMs: duration + }, 'Request completed') + }) + await app.register(jwt, { secret: jwtConfig.secret }) + // Register global rate limiting if configured await app.register(rateLimit, { - global: false + global: true, + max: rateLimitConfig.max, + timeWindow: rateLimitConfig.window }) // Authentication decorator diff --git a/backend/src/utils/validators.js b/backend/src/utils/validators.js new file mode 100644 index 0000000..be36e55 --- /dev/null +++ b/backend/src/utils/validators.js @@ -0,0 +1,165 @@ +/** + * Validation Helpers for EDH Stats + * Provides reusable validation functions for all routes + */ + +/** + * Validate date is within acceptable range + * @param {string} date - Date string to validate + * @returns {boolean} - True if date is valid + */ +export const validateDateRange = (date) => { + try { + const parsed = new Date(date) + const now = new Date() + + // Can't be in the future + if (parsed > now) return false + + // Can't be more than 1 year old + const oneYearAgo = new Date() + oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1) + + return parsed >= oneYearAgo + } catch { + return false + } +} + +/** + * Validate password strength + * @param {string} password - Password to validate + * @returns {object} - { valid: boolean, errors: string[] } + */ +export const validatePasswordStrength = (password) => { + const errors = [] + + if (password.length < 8) { + errors.push('Password must be at least 8 characters') + } + if (password.length > 100) { + errors.push('Password must be less than 100 characters') + } + if (!/(?=.*[a-z])/.test(password)) { + errors.push('Password must contain at least one lowercase letter') + } + if (!/(?=.*[A-Z])/.test(password)) { + errors.push('Password must contain at least one uppercase letter') + } + if (!/(?=.*\d)/.test(password)) { + errors.push('Password must contain at least one number') + } + + return { + valid: errors.length === 0, + errors + } +} + + + +/** + * Check if text contains spam patterns + * @param {string} text - Text to check + * @returns {boolean} - True if text is NOT spam + */ +export const isNotSpam = (text) => { + if (!text) return true + + // Reject if same character repeated 20+ times + if (/^(.)\1{20,}$/.test(text)) return false + + // Reject if mostly special characters + const specialCharCount = (text.match(/[!@#$%^&*()_+=\-\[\]{};:'",.<>?/]/g) || []).length + if (specialCharCount / text.length > 0.8) return false + + return true +} + +/** + * Sanitize string input (trim, collapse spaces, limit length) + * @param {string} input - Input to sanitize + * @returns {string} - Sanitized string + */ +export const sanitizeString = (input) => { + if (!input) return null + + return input + .trim() + .replace(/\s+/g, ' ') // Collapse multiple spaces into one + .substring(0, 1000) // Limit to 1000 chars +} + +/** + * Check if string contains invalid characters + * @param {string} str - String to check + * @param {string} allowedPattern - Regex pattern of allowed characters + * @returns {boolean} - True if valid + */ +export const isValidFormat = (str, allowedPattern) => { + return allowedPattern.test(str) +} + +/** + * Check if username is reserved + * @param {string} username - Username to check + * @returns {boolean} - True if not reserved + */ +export const isNotReservedUsername = (username) => { + const reserved = ['admin', 'root', 'system', 'test', 'api', 'support'] + return !reserved.includes(username.toLowerCase()) +} + +/** + * Check if email is disposable + * @param {string} email - Email to check + * @returns {boolean} - True if not disposable + */ +export const isNotDisposableEmail = (email) => { + const disposableDomains = [ + 'tempmail.com', + '10minutemail.com', + 'guerrillamail.com', + 'mailinator.com', + 'temp-mail.org', + 'throwaway.email' + ] + + const domain = email.split('@')[1]?.toLowerCase() + return !disposableDomains.includes(domain) +} + +/** + * Validate color array has no duplicates + * @param {string[]} colors - Array of color codes + * @returns {boolean} - True if no duplicates + */ +export const hasNoDuplicateColors = (colors) => { + return new Set(colors).size === colors.length +} + +/** + * Format validation errors for API response + * @param {object} zodError - Zod error object + * @returns {object[]} - Formatted errors + */ +export const formatValidationErrors = (zodError) => { + return zodError.errors.map((error) => ({ + field: error.path.join('.') || 'root', + message: error.message + })) +} + +/** + * Create a detailed error response + * @param {string} message - Error message + * @param {string[]} details - Additional details + * @returns {object} - Error response object + */ +export const createErrorResponse = (message, details = []) => { + return { + error: 'Validation Error', + message, + details: details.length > 0 ? details : undefined + } +} diff --git a/deploy.sh b/deploy.sh index 259341f..e4bc939 100755 --- a/deploy.sh +++ b/deploy.sh @@ -255,32 +255,83 @@ generate_deployment_config() { # Generated: $(date -u +'%Y-%m-%dT%H:%M:%SZ') # GitHub User: ${GITHUB_USER} # -# IMPORTANT: Create a .env file with these variables: -# JWT_SECRET=\$(openssl rand -base64 32) -# CORS_ORIGIN=https://yourdomain.com -# ALLOW_REGISTRATION=false + # IMPORTANT: Create a .env file with these variables: + # DB_NAME=edh_stats + # DB_USER=postgres + # DB_PASSWORD=\$(openssl rand -base64 32) + # JWT_SECRET=\$(openssl rand -base64 32) + # CORS_ORIGIN=https://yourdomain.com + # LOG_LEVEL=warn + # ALLOW_REGISTRATION=false + # DB_SEED=false # # FIRST TIME SETUP: # 1. Create .env file with above variables -# 2. Run: docker-compose up -d -# 3. If database error occurs, run: docker volume inspect ${PROJECT_NAME}_sqlite_data -# 4. Note the Mountpoint path and ensure it's writable by Docker +# 2. Run: docker-compose -f docker-compose.prod.deployed.yml up -d +# 3. Database migrations will run automatically via db-migrate service +# 4. Monitor logs: docker-compose logs -f db-migrate services: - backend: - image: ${BACKEND_IMAGE} - environment: - - NODE_ENV=production - - DATABASE_PATH=/app/database/data/edh-stats.db - - JWT_SECRET=\${JWT_SECRET} - - CORS_ORIGIN=\${CORS_ORIGIN:-https://yourdomain.com} - - LOG_LEVEL=warn - - RATE_LIMIT_WINDOW=15 - - RATE_LIMIT_MAX=100 - - ALLOW_REGISTRATION=\${ALLOW_REGISTRATION:-false} - volumes: - - sqlite_data:/app/database/data - - app_logs:/app/logs + # PostgreSQL database service + postgres: + image: postgres:16-alpine + environment: + - POSTGRES_USER=\${DB_USER:-postgres} + - POSTGRES_PASSWORD=\${DB_PASSWORD} + - POSTGRES_DB=\${DB_NAME} + volumes: + - postgres_data:/var/lib/postgresql/data + healthcheck: + test: ['CMD-SHELL', 'PGPASSWORD=\${DB_PASSWORD} pg_isready -U postgres -h localhost'] + interval: 10s + timeout: 5s + retries: 5 + networks: + - edh-stats-network + restart: unless-stopped + deploy: + resources: + limits: + memory: 512M + cpus: '0.5' + reservations: + memory: 256M + cpus: '0.25' + + # Database migration service - runs once on startup + db-migrate: + image: ${BACKEND_IMAGE} + depends_on: + postgres: + condition: service_healthy + environment: + - NODE_ENV=production + - DB_HOST=\${DB_HOST:-postgres} + - DB_NAME=\${DB_NAME} + - DB_USER=\${DB_USER:-postgres} + - DB_PASSWORD=\${DB_PASSWORD} + command: node src/database/migrate.js migrate + networks: + - edh-stats-network + restart: 'no' + + backend: + image: ${BACKEND_IMAGE} + ports: + - '3002:3000' + depends_on: + db-migrate: + condition: service_completed_successfully + environment: + - NODE_ENV=production + - DB_HOST=\${DB_HOST:-postgres} + - DB_NAME=\${DB_NAME} + - DB_USER=\${DB_USER:-postgres} + - DB_PASSWORD=\${DB_PASSWORD} + - JWT_SECRET=\${JWT_SECRET} + - CORS_ORIGIN=\${CORS_ORIGIN:-https://yourdomain.com} + - LOG_LEVEL=\${LOG_LEVEL:-warn} + - ALLOW_REGISTRATION=\${ALLOW_REGISTRATION:-false} restart: unless-stopped deploy: resources: @@ -299,40 +350,36 @@ services: networks: - edh-stats-network stop_grace_period: 30s - depends_on: - - init-db - init-db: - image: alpine:latest - volumes: - - sqlite_data:/app/database/data - - app_logs:/app/logs - command: sh -c "mkdir -p /app/database/data /app/logs && chmod 777 /app/database/data /app/logs && touch /app/database/data/.initialized && echo 'Database directories initialized'" - networks: - - edh-stats-network - # Don't restart, it should exit after initialization - restart: "no" - - frontend: - image: ${FRONTEND_IMAGE} - ports: - - '38080:80' - - '30443:443' - restart: unless-stopped - healthcheck: - test: ['CMD', 'curl', 'http://localhost:80/health'] - interval: 10s - timeout: 5s - retries: 5 - networks: - - edh-stats-network - depends_on: - - backend + frontend: + image: ${FRONTEND_IMAGE} + ports: + - '38080:80' + - '30443:443' + restart: unless-stopped + healthcheck: + test: + - CMD + - curl + - http://localhost:80/health + interval: 10s + timeout: 5s + retries: 5 + networks: + - edh-stats-network + depends_on: + - backend + deploy: + resources: + limits: + memory: 256M + cpus: '0.25' + reservations: + memory: 128M + cpus: '0.125' volumes: - sqlite_data: - driver: local - app_logs: + postgres_data: driver: local networks: @@ -385,9 +432,14 @@ print_summary() { echo " git add frontend/public/version.txt" echo " git commit -m \"Bump version to ${VERSION#v}\"" echo " 2. Pull images: docker pull ${BACKEND_IMAGE}" - echo " 3. Configure production secrets (JWT_SECRET)" - echo " 4. Set environment variables (CORS_ORIGIN, ALLOW_REGISTRATION)" + echo " 3. Create .env file with PostgreSQL credentials:" + echo " DB_PASSWORD=\$(openssl rand -base64 32)" + echo " JWT_SECRET=\$(openssl rand -base64 32)" + echo " 4. Set production secrets:" + echo " - CORS_ORIGIN=https://yourdomain.com" + echo " - ALLOW_REGISTRATION=false" echo " 5. Deploy: docker-compose -f docker-compose.prod.deployed.yml up -d" + echo " 6. Monitor migrations: docker-compose logs -f db-migrate" echo "" } diff --git a/docker-compose.yml b/docker-compose.yml index c1346ae..b6c718f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,21 +1,75 @@ # Docker Compose configuration for EDH Stats Tracker services: + # PostgreSQL database service + postgres: + image: postgres:16-alpine + container_name: edh-stats-postgres + environment: + - POSTGRES_USER=${DB_USER:-postgres} + - POSTGRES_PASSWORD=${DB_PASSWORD:-edh_password} + - POSTGRES_DB=${DB_NAME:-edh_stats} + volumes: + - ./postgres_data:/var/lib/postgresql/data + - ./backend/init-db:/docker-entrypoint-initdb.d + healthcheck: + test: + [ + 'CMD-SHELL', + 'PGPASSWORD=${DB_PASSWORD:-edh_password} pg_isready -U postgres -h localhost' + ] + interval: 10s + timeout: 5s + retries: 5 + networks: + - edh-stats-network + restart: unless-stopped + + # Database migration service - runs once on startup + db-migrate: + build: + context: ./backend + dockerfile: Dockerfile + target: production + container_name: edh-stats-db-migrate + depends_on: + postgres: + condition: service_healthy + environment: + - NODE_ENV=${NODE_ENV:-development} + - DB_HOST=${DB_HOST:-postgres} + - DB_NAME=${DB_NAME:-edh_stats} + - DB_USER=${DB_USER:-postgres} + - DB_PASSWORD=${DB_PASSWORD:-edh_password} + # Set DB_SEED=true to automatically seed database with sample data after migrations + - DB_SEED=${DB_SEED:-false} + command: node src/database/migrate.js migrate + networks: + - edh-stats-network + restart: 'no' + + # Backend API service backend: build: context: ./backend dockerfile: Dockerfile target: production + container_name: edh-stats-backend ports: - '3002:3000' + depends_on: + db-migrate: + condition: service_completed_successfully environment: - - NODE_ENV=development - - DATABASE_PATH=/app/database/data/edh-stats.db - - JWT_SECRET=dev-jwt-secret-key-change-in-production - - CORS_ORIGIN=http://localhost - - LOG_LEVEL=info - - ALLOW_REGISTRATION=false + - NODE_ENV=${NODE_ENV:-development} + - DB_HOST=${DB_HOST:-postgres} + - DB_NAME=${DB_NAME:-edh_stats} + - DB_USER=${DB_USER:-postgres} + - DB_PASSWORD=${DB_PASSWORD:-edh_password} + - JWT_SECRET=${JWT_SECRET:-dev-jwt-secret-key-change-in-production} + - CORS_ORIGIN=${CORS_ORIGIN:-http://localhost} + - LOG_LEVEL=${LOG_LEVEL:-info} + - ALLOW_REGISTRATION=${ALLOW_REGISTRATION:-true} volumes: - - sqlite_data:/app/database/data - ./backend/src:/app/src restart: unless-stopped healthcheck: @@ -35,26 +89,32 @@ services: networks: - edh-stats-network + # Frontend web server frontend: image: nginx:alpine + container_name: edh-stats-frontend ports: - '8081:80' + depends_on: + - backend volumes: - ./frontend/nginx.conf:/etc/nginx/nginx.conf:ro - ./frontend/public:/usr/share/nginx/html:ro - depends_on: - - backend restart: unless-stopped + healthcheck: + test: + - CMD + - curl + - http://localhost:80/health + interval: 10s + timeout: 5s + retries: 5 networks: - edh-stats-network volumes: - sqlite_data: + postgres_data: driver: local - driver_opts: - type: none - o: bind - device: ${PWD}/database/data networks: edh-stats-network: diff --git a/frontend/public/commanders.html b/frontend/public/commanders.html index 921cc2a..6722ed6 100644 --- a/frontend/public/commanders.html +++ b/frontend/public/commanders.html @@ -9,7 +9,7 @@ content="Manage your Magic: The Gathering EDH/Commander decks" /> - + @@ -322,11 +322,11 @@

Avg Rounds

+

Added

-

Added

@@ -505,13 +505,14 @@ - - - - - + + + + + + diff --git a/frontend/public/dashboard.html b/frontend/public/dashboard.html index d0b9df9..5ae1c0d 100755 --- a/frontend/public/dashboard.html +++ b/frontend/public/dashboard.html @@ -28,7 +28,7 @@ Log GameGame Log

-