2
0
forked from jmug/cactoide

feat: add docker and readme

This commit is contained in:
Levente Orban
2025-08-27 08:47:15 +02:00
parent 3d133a6539
commit 9feb3bf64d
19 changed files with 509 additions and 787 deletions

30
.dockerignore Normal file
View File

@@ -0,0 +1,30 @@
node_modules
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.git
.gitignore
README.md
.env
.env.*
!.env.example
.nyc_output
coverage
.nyc_output
.coverage
.cache
.parcel-cache
.next
.nuxt
dist
build
.output
.svelte-kit
.DS_Store
Thumbs.db
*.log
.vscode
.idea
*.swp
*.swo
*~

218
DOCKER_README.md Normal file
View File

@@ -0,0 +1,218 @@
# Docker Setup for Cactoide
This document explains how to run the Cactoide application using Docker containers.
## Prerequisites
- Docker installed on your system
- Docker Compose installed
- Make (optional, but recommended for easier management)
## Quick Start
### 1. Build and Start Everything
```bash
# Build images and start all services
make build-and-up
# Or manually:
docker-compose up -d --build
```
### 2. Access the Application
- **Application**: http://localhost:3000
- **Database**: localhost:5432
## Available Commands
### Using Make (Recommended)
```bash
# Show all available commands
make help
# Start all services
make up
# Stop all services
make down
# Restart all services
make restart
# View logs
make logs
# Check status
make status
# Clean up everything
make clean
```
### Using Docker Compose Directly
```bash
# Start services
docker-compose up -d
# Stop services
docker-compose down
# View logs
docker-compose logs -f
# Check status
docker-compose ps
```
## Development Mode
For development with hot reloading:
```bash
# Start development environment
make dev
# Or manually:
docker-compose -f docker-compose.dev.yml up
```
Development mode runs on port **5173** with hot reloading enabled.
## Individual Services
### Start Only Database
```bash
make db-only
```
### Start Only Application (requires database to be running)
```bash
make app-only
```
## Database Management
### Access Database Shell
```bash
make db-shell
```
### Database Initialization
The database is automatically initialized with the schema from `database/init.sql` when the container starts for the first time.
### Persistent Data
Database data is stored in a Docker volume (`postgres_data`) and persists between container restarts.
## Environment Variables
The following environment variables are automatically set in the containers:
- `DATABASE_URL`: PostgreSQL connection string
- `NODE_ENV`: Environment mode (production/development)
- `POSTGRES_DB`: Database name
- `POSTGRES_USER`: Database user
- `POSTGRES_PASSWORD`: Database password
## Ports
- **3000**: Production application
- **5173**: Development application (with hot reloading)
- **5432**: PostgreSQL database
## Troubleshooting
### Check Service Status
```bash
make status
```
### View Logs
```bash
# All services
make logs
# Specific service
make logs-app
make logs-db
```
### Restart Services
```bash
make restart
```
### Clean Start
```bash
make clean
make build-and-up
```
### Database Connection Issues
1. Ensure the database container is healthy:
```bash
docker-compose ps postgres
```
2. Check database logs:
```bash
make logs-db
```
3. Verify environment variables:
```bash
docker-compose exec app env | grep DATABASE
```
## File Structure
```
.
├── Dockerfile # Production application image
├── Dockerfile.dev # Development application image
├── docker-compose.yml # Production services
├── docker-compose.dev.yml # Development services
├── Makefile # Management commands
├── .dockerignore # Docker build exclusions
└── database/
└── init.sql # Database initialization script
```
## Production Deployment
For production deployment, use the production compose file:
```bash
docker-compose up -d
```
The production setup:
- Runs the built SvelteKit application
- Uses optimized Node.js production image
- Includes health checks and restart policies
- Runs on port 3000
## Development Workflow
1. **Start development environment**: `make dev`
2. **Make code changes** - the app will automatically reload them
3. **Test your changes** in the browser
4. **Stop development**: `Ctrl+C` or `make down`
5. **Build for production**: `make build`
6. **Deploy**: `make up`

47
Dockerfile Normal file
View File

@@ -0,0 +1,47 @@
# Use Node.js 20 Alpine for smaller image size
FROM node:20-alpine AS base
# Install dependencies only when needed
FROM base AS deps
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
WORKDIR /app
# Install dependencies based on the preferred package manager
COPY package.json package-lock.json* ./
RUN npm ci
# Rebuild the source code only when needed
FROM base AS builder
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
COPY . .
# Build the application
RUN npm run build
# Production image, copy all the files and run the app
FROM base AS runner
WORKDIR /app
ENV NODE_ENV production
# Uncomment the following line in case you want to disable telemetry during runtime.
# ENV NEXT_TELEMETRY_DISABLED 1
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 sveltekit
# Copy the built application
COPY --from=builder --chown=sveltekit:nodejs /app/build ./build
COPY --from=builder --chown=sveltekit:nodejs /app/package.json ./package.json
COPY --from=builder --chown=sveltekit:nodejs /app/node_modules ./node_modules
USER sveltekit
EXPOSE 3000
ENV PORT 3000
ENV HOSTNAME "0.0.0.0"
# Start the application
CMD ["node", "build"]

44
Makefile Normal file
View File

@@ -0,0 +1,44 @@
.PHONY: help build up db-only logs clean
# Default target
help:
@echo "Cactoide Commands"
@echo ""
@echo "Main commands:"
@echo " make build - Build the Docker images"
@echo " make up - Start all services (database + app)"
@echo ""
@echo "Individual services:"
@echo " make db-only - Start only the database"
@echo ""
@echo "Utility commands:"
@echo " make logs - Show logs from all services"
@echo " make clean - Remove all containers, images, and volumes"
@echo " make help - Show this help message"
# Build the Docker images
build:
@echo "Building Docker images..."
docker-compose build
# Start all services
up:
@echo "Starting all services..."
docker-compose up -d
# Start only the database
db-only:
@echo "Starting only the database..."
docker-compose up -d postgres
# Show logs from all services
logs:
@echo "Showing logs from all services..."
docker-compose logs -f
# Clean up everything (containers, images, volumes)
clean:
@echo "Cleaning up all Docker resources..."
docker-compose down -v --rmi all

View File

@@ -1,5 +1,49 @@
# Cactoide 🌵
# Cactoide(ae) 🌵
A mobile-first event RSVP SaaS application. Create events, share unique URLs, and collect RSVPs without any registration required.
Events that thrive anywhere.
Built with ❤️ using SvelteKit, Tailwind CSS, and Supabase
Like the cactus, great events bloom under any condition when managed with care. Cactoide(ae) helps you streamline RSVPs, simplify coordination, and keep every detail efficient—so your gatherings are resilient, vibrant, and unforgettable.
#### What is it?
A mobile-first event RSVP platform that lets you create events, share unique URLs, and collect RSVPs without any registration required.
### ✨ Features
- **🎯 Instant Event Creation** - Create events in seconds with our streamlined form. No accounts, no waiting, just pure efficiency.
- **🔗 One-Click Sharing** - Each event gets a unique, memorable URL. Share instantly via any platform or messaging app.
- **🔍 All-in-One Clarity** - No more scrolling through endless chats and reactions. See everyone's availability and responses neatly in one place.
- **👤 No Hassle, No Sign-Ups** - Skip registrations and endless forms. Unlike other event platforms, you create and share instantly — no accounts, no barriers.
- **🛡️ Smart Limits** - Choose between unlimited RSVPs or set a limited capacity. Perfect for any event size.
- **✨ Effortless Simplicity** - Designed to be instantly clear and easy. No learning curve — just open, create, and go.
### 🏗️ Technology
- **SvelteKit** - Full-stack web framework
- **Tailwind CSS** - Utility-first CSS framework
- **TypeScript** - Type-safe development
- **PostgreSQL** - Robust relational database
- **Drizzle ORM** - Type-safe database queries
### 🚀 Quick Start
```bash
git clone <your-repo-url>
cd cactoide
npm install
cp env.example .env
make db-only
npm run dev -- --open
```
Your app will be available at `http://localhost:5173`
### 🚀 Self-Host
WIP
### 📄 License
This project is licensed under the MIT License - see the LICENSE file for details.
**Made with ❤️ by @polaroi8d**

View File

@@ -1,34 +0,0 @@
-- Migration: Add user_id column to RSVPs table
-- Run this against your existing Supabase database
-- Add user_id column to existing rsvps table
ALTER TABLE rsvps
ADD COLUMN user_id VARCHAR(100);
-- Set a default value for existing records (you can modify this if needed)
-- This assigns a unique user ID to each existing RSVP
UPDATE rsvps
SET user_id = 'legacy_user_' || id::text
WHERE user_id IS NULL;
-- Make the column NOT NULL after setting default values
ALTER TABLE rsvps
ALTER COLUMN user_id SET NOT NULL;
-- Add index for better performance
CREATE INDEX IF NOT EXISTS idx_rsvps_user_id ON rsvps(user_id);
-- Verify the migration
SELECT
column_name,
data_type,
is_nullable,
column_default
FROM information_schema.columns
WHERE table_name = 'rsvps'
AND column_name = 'user_id';
-- Show sample of updated data
SELECT id, name, user_id, created_at
FROM rsvps
LIMIT 5;

View File

@@ -1,34 +0,0 @@
-- Migration: Add user_id column to Events table
-- Run this against your existing Supabase database
-- Add user_id column to existing events table
ALTER TABLE events
ADD COLUMN user_id VARCHAR(100);
-- Set a default value for existing records (you can modify this if needed)
-- This assigns a unique user ID to each existing event
UPDATE events
SET user_id = 'legacy_user_' || id::text
WHERE user_id IS NULL;
-- Make the column NOT NULL after setting default values
ALTER TABLE events
ALTER COLUMN user_id SET NOT NULL;
-- Add index for better performance
CREATE INDEX IF NOT EXISTS idx_events_user_id ON events(user_id);
-- Verify the migration
SELECT
column_name,
data_type,
is_nullable,
column_default
FROM information_schema.columns
WHERE table_name = 'events'
AND column_name = 'user_id';
-- Show sample of updated data
SELECT id, name, user_id, created_at
FROM events
LIMIT 5;

View File

@@ -1,42 +0,0 @@
-- Migration: Add visibility column to events table
-- Date: 2025-08-19
-- Description: Add visibility field to distinguish between public and private events
-- First ensure user_id column exists (in case this migration runs before the user_id migration)
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'events' AND column_name = 'user_id') THEN
ALTER TABLE events ADD COLUMN user_id VARCHAR(100);
UPDATE events SET user_id = 'legacy_user_' || id::text WHERE user_id IS NULL;
ALTER TABLE events ALTER COLUMN user_id SET NOT NULL;
CREATE INDEX IF NOT EXISTS idx_events_user_id ON events(user_id);
END IF;
END $$;
-- Add visibility column with default value 'public'
ALTER TABLE events
ADD COLUMN visibility TEXT NOT NULL DEFAULT 'public' CHECK (visibility IN ('public', 'private'));
-- Update existing events to have 'public' visibility (since they were created before this field existed)
UPDATE events SET visibility = 'public' WHERE visibility IS NULL;
-- Create index on visibility for better query performance
CREATE INDEX IF NOT EXISTS idx_events_visibility ON events(visibility);
-- Add comment to document the column
COMMENT ON COLUMN events.visibility IS 'Event visibility: public (visible to everyone) or private (only visible to creator and people with link)';
-- Verify the migration
SELECT
column_name,
data_type,
is_nullable,
column_default
FROM information_schema.columns
WHERE table_name = 'events'
AND column_name = 'visibility';
-- Show sample of updated data
SELECT id, name, visibility, user_id, created_at
FROM events
LIMIT 5;

60
database/init.sql Normal file
View File

@@ -0,0 +1,60 @@
BEGIN;
-- Extensions
CREATE EXTENSION IF NOT EXISTS pgcrypto; -- for gen_random_uuid()
-- =======================================
-- Tables
-- =======================================
-- Events
CREATE TABLE IF NOT EXISTS events (
id VARCHAR(8) PRIMARY KEY,
name VARCHAR(100) NOT NULL,
date DATE NOT NULL,
time TIME NOT NULL,
location VARCHAR(200) NOT NULL,
type VARCHAR(20) NOT NULL CHECK (type IN ('limited','unlimited')),
attendee_limit INTEGER CHECK (attendee_limit > 0),
user_id VARCHAR(100) NOT NULL,
visibility VARCHAR(20) NOT NULL DEFAULT 'public' CHECK (visibility IN ('public','private')),
created_at TIMESTAMPTZ DEFAULT NOW(),
updated_at TIMESTAMPTZ DEFAULT NOW()
);
-- RSVPs
CREATE TABLE IF NOT EXISTS rsvps (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
event_id VARCHAR(8) NOT NULL REFERENCES events(id) ON DELETE CASCADE,
name VARCHAR(100) NOT NULL,
user_id VARCHAR(100) NOT NULL,
created_at TIMESTAMPTZ DEFAULT NOW(),
updated_at TIMESTAMPTZ DEFAULT NOW()
);
-- =======================================
-- Indexes
-- =======================================
CREATE INDEX IF NOT EXISTS idx_events_user_id ON events(user_id);
CREATE INDEX IF NOT EXISTS idx_events_date ON events(date);
CREATE INDEX IF NOT EXISTS idx_rsvps_event_id ON rsvps(event_id);
CREATE INDEX IF NOT EXISTS idx_rsvps_user_id ON rsvps(user_id);
-- =======================================
-- Triggers (updated_at maintenance)
-- =======================================
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
DROP TRIGGER IF EXISTS update_events_updated_at ON events;
CREATE TRIGGER update_events_updated_at
BEFORE UPDATE ON events
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();
COMMIT;

View File

@@ -1,63 +0,0 @@
-- Create events table
CREATE TABLE IF NOT EXISTS events (
id VARCHAR(8) PRIMARY KEY,
name VARCHAR(100) NOT NULL,
date DATE NOT NULL,
time TIME NOT NULL,
location VARCHAR(200) NOT NULL,
type VARCHAR(20) NOT NULL CHECK (type IN ('limited', 'unlimited')),
attendee_limit INTEGER CHECK (attendee_limit > 0),
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
-- Create RSVPs table
CREATE TABLE IF NOT EXISTS rsvps (
id UUID DEFAULT gen_random_uuid() PRIMARY KEY,
event_id VARCHAR(8) NOT NULL REFERENCES events(id) ON DELETE CASCADE,
name VARCHAR(50) NOT NULL,
user_id VARCHAR(100) NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
UNIQUE(event_id, name)
);
-- Create indexes for better performance
CREATE INDEX IF NOT EXISTS idx_events_id ON events(id);
CREATE INDEX IF NOT EXISTS idx_rsvps_event_id ON rsvps(event_id);
CREATE INDEX IF NOT EXISTS idx_rsvps_user_id ON rsvps(user_id);
CREATE INDEX IF NOT EXISTS idx_rsvps_created_at ON rsvps(created_at);
-- Enable Row Level Security (RLS)
ALTER TABLE events ENABLE ROW LEVEL SECURITY;
ALTER TABLE rsvps ENABLE ROW LEVEL SECURITY;
-- Create policies for public access (since this is a public RSVP app)
CREATE POLICY "Allow public read access to events" ON events
FOR SELECT USING (true);
CREATE POLICY "Allow public insert access to events" ON events
FOR INSERT WITH CHECK (true);
CREATE POLICY "Allow public read access to RSVPs" ON rsvps
FOR SELECT USING (true);
CREATE POLICY "Allow public insert access to RSVPs" ON rsvps
FOR INSERT WITH CHECK (true);
CREATE POLICY "Allow public delete access to RSVPs" ON rsvps
FOR DELETE USING (true);
-- Create function to update updated_at timestamp
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ language 'plpgsql';
-- Create trigger to automatically update updated_at
CREATE TRIGGER update_events_updated_at
BEFORE UPDATE ON events
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

48
docker-compose.yml Normal file
View File

@@ -0,0 +1,48 @@
version: '3.8'
services:
# PostgreSQL Database
postgres:
image: postgres:15-alpine
container_name: cactoide-db
environment:
POSTGRES_DB: cactoied_database
POSTGRES_USER: cactoide
POSTGRES_PASSWORD: cactoide_password
ports:
- '5432:5432'
volumes:
- postgres_data:/var/lib/postgresql/data
- ./database/init.sql:/docker-entrypoint-initdb.d/init.sql
healthcheck:
test: ['CMD-SHELL', 'pg_isready -U cactoide -d cactoied_database']
interval: 10s
timeout: 5s
retries: 5
networks:
- cactoide-network
# SvelteKit Application
app:
build:
context: .
dockerfile: Dockerfile
container_name: cactoide-app
ports:
- '3000:3000'
environment:
DATABASE_URL: postgres://cactoide:cactoide_password@postgres:5432/cactoied_database
NODE_ENV: production
depends_on:
postgres:
condition: service_healthy
networks:
- cactoide-network
restart: unless-stopped
volumes:
postgres_data:
networks:
cactoide-network:
driver: bridge

View File

@@ -1 +1,7 @@
VITE_SUPABASE_ANON_KEY=your_supabase_anon_key_here
# Docker Configuration
POSTGRES_DB=cactoied_database
POSTGRES_USER=cactoide
POSTGRES_PASSWORD=cactoide_password
# Development Settings
DATABASE_URL="postgres://cactoide:cactoide_password@localhost:5432/cactoied_database"

141
package-lock.json generated
View File

@@ -8,7 +8,6 @@
"name": "event-cactus",
"version": "0.0.1",
"dependencies": {
"@supabase/supabase-js": "^2.55.0",
"drizzle-orm": "^0.44.5",
"postgres": "^3.4.7"
},
@@ -1563,80 +1562,6 @@
"dev": true,
"license": "MIT"
},
"node_modules/@supabase/auth-js": {
"version": "2.71.1",
"resolved": "https://registry.npmjs.org/@supabase/auth-js/-/auth-js-2.71.1.tgz",
"integrity": "sha512-mMIQHBRc+SKpZFRB2qtupuzulaUhFYupNyxqDj5Jp/LyPvcWvjaJzZzObv6URtL/O6lPxkanASnotGtNpS3H2Q==",
"license": "MIT",
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
}
},
"node_modules/@supabase/functions-js": {
"version": "2.4.5",
"resolved": "https://registry.npmjs.org/@supabase/functions-js/-/functions-js-2.4.5.tgz",
"integrity": "sha512-v5GSqb9zbosquTo6gBwIiq7W9eQ7rE5QazsK/ezNiQXdCbY+bH8D9qEaBIkhVvX4ZRW5rP03gEfw5yw9tiq4EQ==",
"license": "MIT",
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
}
},
"node_modules/@supabase/node-fetch": {
"version": "2.6.15",
"resolved": "https://registry.npmjs.org/@supabase/node-fetch/-/node-fetch-2.6.15.tgz",
"integrity": "sha512-1ibVeYUacxWYi9i0cf5efil6adJ9WRyZBLivgjs+AUpewx1F3xPi7gLgaASI2SmIQxPoCEjAsLAzKPgMJVgOUQ==",
"license": "MIT",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
}
},
"node_modules/@supabase/postgrest-js": {
"version": "1.19.4",
"resolved": "https://registry.npmjs.org/@supabase/postgrest-js/-/postgrest-js-1.19.4.tgz",
"integrity": "sha512-O4soKqKtZIW3olqmbXXbKugUtByD2jPa8kL2m2c1oozAO11uCcGrRhkZL0kVxjBLrXHE0mdSkFsMj7jDSfyNpw==",
"license": "MIT",
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
}
},
"node_modules/@supabase/realtime-js": {
"version": "2.15.1",
"resolved": "https://registry.npmjs.org/@supabase/realtime-js/-/realtime-js-2.15.1.tgz",
"integrity": "sha512-edRFa2IrQw50kNntvUyS38hsL7t2d/psah6om6aNTLLcWem0R6bOUq7sk7DsGeSlNfuwEwWn57FdYSva6VddYw==",
"license": "MIT",
"dependencies": {
"@supabase/node-fetch": "^2.6.13",
"@types/phoenix": "^1.6.6",
"@types/ws": "^8.18.1",
"ws": "^8.18.2"
}
},
"node_modules/@supabase/storage-js": {
"version": "2.11.0",
"resolved": "https://registry.npmjs.org/@supabase/storage-js/-/storage-js-2.11.0.tgz",
"integrity": "sha512-Y+kx/wDgd4oasAgoAq0bsbQojwQ+ejIif8uczZ9qufRHWFLMU5cODT+ApHsSrDufqUcVKt+eyxtOXSkeh2v9ww==",
"license": "MIT",
"dependencies": {
"@supabase/node-fetch": "^2.6.14"
}
},
"node_modules/@supabase/supabase-js": {
"version": "2.55.0",
"resolved": "https://registry.npmjs.org/@supabase/supabase-js/-/supabase-js-2.55.0.tgz",
"integrity": "sha512-Y1uV4nEMjQV1x83DGn7+Z9LOisVVRlY1geSARrUHbXWgbyKLZ6/08dvc0Us1r6AJ4tcKpwpCZWG9yDQYo1JgHg==",
"license": "MIT",
"dependencies": {
"@supabase/auth-js": "2.71.1",
"@supabase/functions-js": "2.4.5",
"@supabase/node-fetch": "2.6.15",
"@supabase/postgrest-js": "1.19.4",
"@supabase/realtime-js": "2.15.1",
"@supabase/storage-js": "^2.10.4"
}
},
"node_modules/@sveltejs/acorn-typescript": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/@sveltejs/acorn-typescript/-/acorn-typescript-1.0.5.tgz",
@@ -2076,26 +2001,14 @@
"version": "24.3.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.3.0.tgz",
"integrity": "sha512-aPTXCrfwnDLj4VvXrm+UUCQjNEvJgNA8s5F1cvwQU+3KNltTOkBm1j30uNLyqqPNe7gE3KFzImYoZEfLhp4Yow==",
"dev": true,
"license": "MIT",
"optional": true,
"peer": true,
"dependencies": {
"undici-types": "~7.10.0"
}
},
"node_modules/@types/phoenix": {
"version": "1.6.6",
"resolved": "https://registry.npmjs.org/@types/phoenix/-/phoenix-1.6.6.tgz",
"integrity": "sha512-PIzZZlEppgrpoT2QgbnDU+MMzuR6BbCjllj0bM70lWoejMeNJAxCchxnv7J3XFkI8MpygtRpzXrIlmWUBclP5A==",
"license": "MIT"
},
"node_modules/@types/ws": {
"version": "8.18.1",
"resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz",
"integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==",
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "8.39.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.39.1.tgz",
@@ -4761,12 +4674,6 @@
"node": ">=6"
}
},
"node_modules/tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
"license": "MIT"
},
"node_modules/ts-api-utils": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz",
@@ -4835,7 +4742,10 @@
"version": "7.10.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz",
"integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==",
"license": "MIT"
"dev": true,
"license": "MIT",
"optional": true,
"peer": true
},
"node_modules/uri-js": {
"version": "4.4.1",
@@ -4949,22 +4859,6 @@
}
}
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
"license": "BSD-2-Clause"
},
"node_modules/whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"license": "MIT",
"dependencies": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
@@ -4991,27 +4885,6 @@
"node": ">=0.10.0"
}
},
"node_modules/ws": {
"version": "8.18.3",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz",
"integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
},
"node_modules/yallist": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",

View File

@@ -11,8 +11,7 @@
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
"format": "prettier --write .",
"lint": "prettier --check . && eslint .",
"build:netlify": "npm run build"
"lint": "prettier --check . && eslint ."
},
"devDependencies": {
"@eslint/compat": "^1.2.5",
@@ -40,7 +39,6 @@
"vite": "^7.0.4"
},
"dependencies": {
"@supabase/supabase-js": "^2.55.0",
"drizzle-orm": "^0.44.5",
"postgres": "^3.4.7"
}

View File

@@ -1,347 +0,0 @@
import { writable } from 'svelte/store';
import { supabase } from '$lib/supabase';
import type { Event, CreateEventData, RSVP, DatabaseEvent, DatabaseRSVP } from '$lib/types';
// Store for events
const events = writable<Map<string, Event>>(new Map());
// Store for RSVPs
const rsvps = writable<Map<string, RSVP[]>>(new Map());
// Generate a random URL-friendly ID
function generateEventId(): string {
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
let result = '';
for (let i = 0; i < 8; i++) {
result += chars.charAt(Math.floor(Math.random() * chars.length));
}
return result;
}
// Convert database event to app event
function convertDatabaseEvent(dbEvent: DatabaseEvent): Event {
return {
id: dbEvent.id,
name: dbEvent.name,
date: dbEvent.date,
time: dbEvent.time,
location: dbEvent.location,
type: dbEvent.type,
attendee_limit: dbEvent.attendee_limit,
visibility: dbEvent.visibility,
user_id: dbEvent.user_id,
created_at: dbEvent.created_at,
updated_at: dbEvent.updated_at
};
}
// Convert database RSVP to app RSVP
function convertDatabaseRSVP(dbRSVP: DatabaseRSVP): RSVP {
return {
id: dbRSVP.id,
event_id: dbRSVP.event_id,
name: dbRSVP.name,
user_id: dbRSVP.user_id,
created_at: dbRSVP.created_at
};
}
export const eventsStore = {
subscribe: events.subscribe,
subscribeRSVPs: rsvps.subscribe,
// Create a new event
createEvent: async (eventData: CreateEventData, userId: string): Promise<string> => {
const eventId = generateEventId();
const now = new Date().toISOString();
try {
const { error } = await supabase.from('events').insert({
id: eventId,
name: eventData.name,
date: eventData.date,
time: eventData.time,
location: eventData.location,
type: eventData.type,
attendee_limit: eventData.attendee_limit,
visibility: eventData.visibility,
user_id: userId,
created_at: now,
updated_at: now
});
if (error) throw error;
// Add to local store
const newEvent: Event = {
id: eventId,
...eventData,
user_id: userId,
created_at: now,
updated_at: now
};
events.update((currentEvents) => {
const newMap = new Map(currentEvents);
newMap.set(eventId, newEvent);
return newMap;
});
// Initialize empty RSVP list
rsvps.update((currentRSVPs) => {
const newMap = new Map(currentRSVPs);
newMap.set(eventId, []);
return newMap;
});
return eventId;
} catch (error) {
console.error('Error creating event:', error);
throw error;
}
},
// Get event by ID
getEvent: async (id: string): Promise<Event | undefined> => {
try {
const { data, error } = await supabase.from('events').select('*').eq('id', id).single();
if (error) throw error;
if (data) {
const event = convertDatabaseEvent(data);
// Update local store
events.update((currentEvents) => {
const newMap = new Map(currentEvents);
newMap.set(id, event);
return newMap;
});
return event;
}
return undefined;
} catch (error) {
console.error('Error fetching event:', error);
return undefined;
}
},
// Get RSVPs for an event
getRSVPs: async (eventId: string): Promise<RSVP[]> => {
try {
const { data, error } = await supabase
.from('rsvps')
.select('*')
.eq('event_id', eventId)
.order('created_at', { ascending: true });
if (error) throw error;
const rsvpList = data?.map(convertDatabaseRSVP) || [];
// Update local store
rsvps.update((currentRSVPs) => {
const newMap = new Map(currentRSVPs);
newMap.set(eventId, rsvpList);
return newMap;
});
return rsvpList;
} catch (error) {
console.error('Error fetching RSVPs:', error);
return [];
}
},
// Add RSVP to an event
addRSVP: async (eventId: string, name: string, userId: string): Promise<boolean> => {
try {
// First check if event exists and get its details
const event = await eventsStore.getEvent(eventId);
if (!event) return false;
// Check if event is full (for limited type events)
if (event.type === 'limited' && event.attendee_limit) {
const currentRSVPs = await eventsStore.getRSVPs(eventId);
if (currentRSVPs.length >= event.attendee_limit) {
return false; // Event is full
}
}
// Check if name is already in the list
const existingRSVPs = await eventsStore.getRSVPs(eventId);
if (existingRSVPs.some((rsvp) => rsvp.name.toLowerCase() === name.toLowerCase())) {
return false; // Name already exists
}
// Add RSVP to database
const { data, error } = await supabase
.from('rsvps')
.insert({
event_id: eventId,
name: name.trim(),
user_id: userId,
created_at: new Date().toISOString()
})
.select()
.single();
if (error) throw error;
// Update local store
const newRSVP = convertDatabaseRSVP(data);
rsvps.update((currentRSVPs) => {
const newMap = new Map(currentRSVPs);
const eventRSVPs = newMap.get(eventId) || [];
newMap.set(eventId, [...eventRSVPs, newRSVP]);
return newMap;
});
return true;
} catch (error) {
console.error('Error adding RSVP:', error);
return false;
}
},
// Remove RSVP from an event
removeRSVP: async (eventId: string, rsvpId: string): Promise<boolean> => {
try {
const { error } = await supabase
.from('rsvps')
.delete()
.eq('id', rsvpId)
.eq('event_id', eventId);
if (error) throw error;
// Update local store
rsvps.update((currentRSVPs) => {
const newMap = new Map(currentRSVPs);
const eventRSVPs = newMap.get(eventId) || [];
const updatedRSVPs = eventRSVPs.filter((rsvp) => rsvp.id !== rsvpId);
newMap.set(eventId, updatedRSVPs);
return newMap;
});
return true;
} catch (error) {
console.error('Error removing RSVP:', error);
return false;
}
},
// Get event with RSVPs
getEventWithRSVPs: async (
eventId: string
): Promise<{ event: Event; rsvps: RSVP[] } | undefined> => {
try {
const [event, rsvpList] = await Promise.all([
eventsStore.getEvent(eventId),
eventsStore.getRSVPs(eventId)
]);
if (!event) return undefined;
return { event, rsvps: rsvpList };
} catch (error) {
console.error('Error fetching event with RSVPs:', error);
return undefined;
}
},
// Get events by user ID
getEventsByUser: async (userId: string): Promise<Event[]> => {
try {
const { data, error } = await supabase
.from('events')
.select('*')
.eq('user_id', userId)
.order('created_at', { ascending: false });
if (error) throw error;
const userEvents = data?.map(convertDatabaseEvent) || [];
// Update local store
userEvents.forEach((event) => {
events.update((currentEvents) => {
const newMap = new Map(currentEvents);
newMap.set(event.id, event);
return newMap;
});
});
return userEvents;
} catch (error) {
console.error('Error fetching user events:', error);
return [];
}
},
// Get public events
getPublicEvents: async (): Promise<Event[]> => {
try {
const { data, error } = await supabase
.from('events')
.select('*')
.eq('visibility', 'public')
.order('created_at', { ascending: false });
if (error) throw error;
const publicEvents = data?.map(convertDatabaseEvent) || [];
// Update local store
publicEvents.forEach((event) => {
events.update((currentEvents) => {
const newMap = new Map(currentEvents);
newMap.set(event.id, event);
return newMap;
});
});
return publicEvents;
} catch (error) {
console.error('Error fetching public events:', error);
return [];
}
},
// Delete event (only by the user who created it)
deleteEvent: async (eventId: string, userId: string): Promise<boolean> => {
try {
// First verify the user owns this event
const event = await eventsStore.getEvent(eventId);
if (!event || event.user_id !== userId) {
return false; // User doesn't own this event
}
// Delete the event (RSVPs will be deleted automatically due to CASCADE)
const { error } = await supabase.from('events').delete().eq('id', eventId);
if (error) throw error;
// Remove from local store
events.update((currentEvents) => {
const newMap = new Map(currentEvents);
newMap.delete(eventId);
return newMap;
});
// Remove RSVPs from local store
rsvps.update((currentRSVPs) => {
const newMap = new Map(currentRSVPs);
newMap.delete(eventId);
return newMap;
});
return true;
} catch (error) {
console.error('Error deleting event:', error);
return false;
}
}
};

View File

@@ -1,113 +0,0 @@
import { writable } from 'svelte/store';
import type { Event, CreateEventData, RSVP } from '$lib/types';
// In-memory store for events (in a real app, this would be a database)
const events = writable<Map<string, Event>>(new Map());
// Generate a random URL-friendly ID
function generateEventId(): string {
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
let result = '';
for (let i = 0; i < 8; i++) {
result += chars.charAt(Math.floor(Math.random() * chars.length));
}
return result;
}
// Generate a random ID for RSVPs
function generateRSVPId(): string {
return Math.random().toString(36).substr(2, 9);
}
export const eventsStore = {
subscribe: events.subscribe,
createEvent: (eventData: CreateEventData): string => {
const eventId = generateEventId();
const newEvent: Event = {
id: eventId,
...eventData,
createdAt: new Date().toISOString(),
attendees: []
};
events.update((currentEvents) => {
const newMap = new Map(currentEvents);
newMap.set(eventId, newEvent);
return newMap;
});
return eventId;
},
getEvent: (id: string): Event | undefined => {
let event: Event | undefined;
events.update((currentEvents) => {
event = currentEvents.get(id);
return currentEvents;
});
return event;
},
addRSVP: (eventId: string, name: string): boolean => {
let success = false;
events.update((currentEvents) => {
const event = currentEvents.get(eventId);
if (!event) return currentEvents;
// Check if event is full (for limited type events)
if (
event.type === 'limited' &&
event.attendee_limit &&
event.attendees.length >= event.attendee_limit
) {
return currentEvents;
}
// Check if name is already in the list
if (event.attendees.some((attendee) => attendee.name.toLowerCase() === name.toLowerCase())) {
return currentEvents;
}
const newRSVP: RSVP = {
id: generateRSVPId(),
name,
timestamp: new Date().toISOString()
};
const updatedEvent = {
...event,
attendees: [...event.attendees, newRSVP]
};
const newMap = new Map(currentEvents);
newMap.set(eventId, updatedEvent);
success = true;
return newMap;
});
return success;
},
removeRSVP: (eventId: string, rsvpId: string): boolean => {
let success = false;
events.update((currentEvents) => {
const event = currentEvents.get(eventId);
if (!event) return currentEvents;
const updatedEvent = {
...event,
attendees: event.attendees.filter((attendee) => attendee.id !== rsvpId)
};
const newMap = new Map(currentEvents);
newMap.set(eventId, updatedEvent);
success = true;
return newMap;
});
return success;
}
};

View File

@@ -1,10 +0,0 @@
import { createClient } from '@supabase/supabase-js';
const supabaseUrl = 'https://jbposrybstrsgtjqzjxk.supabase.co';
const supabaseAnonKey = import.meta.env.VITE_SUPABASE_ANON_KEY;
if (!supabaseAnonKey) {
throw new Error('Missing VITE_SUPABASE_ANON_KEY environment variable');
}
export const supabase = createClient(supabaseUrl, supabaseAnonKey);

View File

@@ -1,8 +1,6 @@
<script lang="ts">
import type { CreateEventData, EventType } from '$lib/types';
import { goto } from '$app/navigation';
import { enhance } from '$app/forms';
import { onMount } from 'svelte';
export let form;

View File

@@ -1,5 +1,4 @@
<script lang="ts">
import { eventsStore } from '$lib/stores/events-supabase';
import type { Event } from '$lib/types';
import { goto } from '$app/navigation';
import type { PageData } from '../$types';