2
0
forked from jmug/cactoide

fix: small adjustments, renames for the /healthz and readme

This commit is contained in:
Levente Orban
2025-09-01 10:43:02 +02:00
parent 94fffc5695
commit 8a76421571
11 changed files with 46 additions and 40 deletions

View File

@@ -1,5 +1,5 @@
# Postgres configuration # Postgres configuration
POSTGRES_DB=cactoied_database POSTGRES_DB=cactoide_database
POSTGRES_USER=cactoide POSTGRES_USER=cactoide
POSTGRES_PASSWORD=cactoide_password POSTGRES_PASSWORD=cactoide_password
POSTGRES_PORT=5432 POSTGRES_PORT=5432

View File

@@ -1,4 +1,4 @@
.PHONY: help build up db-only logs clean .PHONY: help build up db-only logs db-clean prune
# Default target # Default target
help: help:
@@ -13,7 +13,8 @@ help:
@echo "" @echo ""
@echo "Utility commands:" @echo "Utility commands:"
@echo " make logs - Show logs from all services" @echo " make logs - Show logs from all services"
@echo " make clean - Remove all containers, images, and volumes" @echo " make db-clean - Stop & remove database container"
@echo " make prune - Remove all containers, images, and volumes"
@echo " make help - Show this help message" @echo " make help - Show this help message"
# Build the Docker images # Build the Docker images
@@ -36,8 +37,12 @@ logs:
@echo "Showing logs from all services..." @echo "Showing logs from all services..."
docker compose logs -f docker compose logs -f
db-clean:
@echo "Cleaning up all Docker resources..."
docker stop cactoide-db && docker rm cactoide-db && docker volume prune -f && docker network prune -f
# Clean up everything (containers, images, volumes) # Clean up everything (containers, images, volumes)
clean: prune:
@echo "Cleaning up all Docker resources..." @echo "Cleaning up all Docker resources..."
docker compose down -v --rmi all docker compose down -v --rmi all

View File

@@ -27,7 +27,10 @@ A mobile-first event RSVP platform that lets you create events, share unique URL
### Quick Start ### Quick Start
Requirements: git, docker, docker-compose #### Requirements
`git, docker, docker-compose, node at least suggested 20.19.0`
Uses the [`docker-compose.yml`](docker-compose.yml) file to setup the application with the database. You can define all ENV variables in the [`.env`](.env.example) file from the `.env.example`. Uses the [`docker-compose.yml`](docker-compose.yml) file to setup the application with the database. You can define all ENV variables in the [`.env`](.env.example) file from the `.env.example`.
```bash ```bash
@@ -39,8 +42,6 @@ docker compose up -d
### Development ### Development
Requirements: git, docker, docker-compose, node at least suggested 20.19.0
```bash ```bash
git clone https://github.com/polaroi8d/cactoide/ git clone https://github.com/polaroi8d/cactoide/
cd cactoide cd cactoide
@@ -51,6 +52,8 @@ npm run dev -- --open
Your app will be available at `http://localhost:5173`. You can use the Makefile commands to run the application or the database, eg.: `make db-only`. Your app will be available at `http://localhost:5173`. You can use the Makefile commands to run the application or the database, eg.: `make db-only`.
Use the `database/seed.sql` if you want to populate your database with dummy data.
### License ### License
This project is licensed under the MIT License - see the [LICENSE](./LICENSE) file for details. This project is licensed under the MIT License - see the [LICENSE](./LICENSE) file for details.

View File

@@ -8,7 +8,9 @@ services:
POSTGRES_USER: ${POSTGRES_USER:-cactoide} POSTGRES_USER: ${POSTGRES_USER:-cactoide}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-cactoide_password} POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-cactoide_password}
expose: expose:
- '${POSTGRES_PORT:-5437}' - '${POSTGRES_PORT:-5432}'
ports:
- '${POSTGRES_PORT:-5432}:5432'
volumes: volumes:
- postgres_data:/var/lib/postgresql/data - postgres_data:/var/lib/postgresql/data
- ./database/init.sql:/docker-entrypoint-initdb.d/init.sql - ./database/init.sql:/docker-entrypoint-initdb.d/init.sql

View File

@@ -3,6 +3,13 @@ import { env } from '$env/dynamic/private';
import * as schema from './schema'; import * as schema from './schema';
import postgres from 'postgres'; import postgres from 'postgres';
const client = postgres(env.DATABASE_URL, {}); // Database connection configuration
const connectionConfig = {
max: 10, // Maximum number of connections
idle_timeout: 20, // Close idle connections after 20 seconds
connect_timeout: 10 // Connection timeout in seconds
};
export const drizzleQuery = drizzle(client, { schema }); const client = postgres(env.DATABASE_URL, connectionConfig);
export const database = drizzle(client, { schema });

View File

@@ -1,4 +1,4 @@
import { drizzleQuery } from '$lib/database/db'; import { database } from '$lib/database/db';
import { events } from '$lib/database/schema'; import { events } from '$lib/database/schema';
import { fail, redirect } from '@sveltejs/kit'; import { fail, redirect } from '@sveltejs/kit';
import type { Actions } from './$types'; import type { Actions } from './$types';
@@ -66,7 +66,7 @@ export const actions: Actions = {
const eventId = generateEventId(); const eventId = generateEventId();
await drizzleQuery await database
.insert(events) .insert(events)
.values({ .values({
id: eventId, id: eventId,

View File

@@ -1,4 +1,4 @@
import { drizzleQuery } from '$lib/database/db'; import { database } from '$lib/database/db';
import { eq, desc } from 'drizzle-orm'; import { eq, desc } from 'drizzle-orm';
import type { PageServerLoad } from './$types'; import type { PageServerLoad } from './$types';
import { events } from '$lib/database/schema'; import { events } from '$lib/database/schema';
@@ -6,7 +6,7 @@ import { events } from '$lib/database/schema';
export const load: PageServerLoad = async () => { export const load: PageServerLoad = async () => {
try { try {
// Fetch all public events ordered by creation date (newest first) // Fetch all public events ordered by creation date (newest first)
const publicEvents = await drizzleQuery const publicEvents = await database
.select() .select()
.from(events) .from(events)
.where(eq(events.visibility, 'public')) .where(eq(events.visibility, 'public'))

View File

@@ -1,4 +1,4 @@
import { drizzleQuery } from '$lib/database/db'; import { database } from '$lib/database/db';
import { events } from '$lib/database/schema'; import { events } from '$lib/database/schema';
import { fail } from '@sveltejs/kit'; import { fail } from '@sveltejs/kit';
import { eq, desc } from 'drizzle-orm'; import { eq, desc } from 'drizzle-orm';
@@ -11,7 +11,7 @@ export const load = async ({ cookies }) => {
} }
try { try {
const userEvents = await drizzleQuery const userEvents = await database
.select() .select()
.from(events) .from(events)
.where(eq(events.userId, userId)) .where(eq(events.userId, userId))
@@ -50,7 +50,7 @@ export const actions: Actions = {
try { try {
// First verify the user owns this event // First verify the user owns this event
const [eventData] = await drizzleQuery.select().from(events).where(eq(events.id, eventId)); const [eventData] = await database.select().from(events).where(eq(events.id, eventId));
if (!eventData) { if (!eventData) {
return fail(404, { error: 'Event not found' }); return fail(404, { error: 'Event not found' });
@@ -61,7 +61,7 @@ export const actions: Actions = {
} }
// Delete the event (RSVPs will be deleted automatically due to CASCADE) // Delete the event (RSVPs will be deleted automatically due to CASCADE)
await drizzleQuery.delete(events).where(eq(events.id, eventId)); await database.delete(events).where(eq(events.id, eventId));
return { success: true }; return { success: true };
} catch (error) { } catch (error) {

View File

@@ -1,4 +1,4 @@
import { drizzleQuery } from '$lib/database/db'; import { database } from '$lib/database/db';
import { events, rsvps } from '$lib/database/schema'; import { events, rsvps } from '$lib/database/schema';
import { eq, asc } from 'drizzle-orm'; import { eq, asc } from 'drizzle-orm';
import { error, fail } from '@sveltejs/kit'; import { error, fail } from '@sveltejs/kit';
@@ -14,12 +14,8 @@ export const load: PageServerLoad = async ({ params, cookies }) => {
try { try {
// Fetch event and RSVPs in parallel // Fetch event and RSVPs in parallel
const [eventData, rsvpData] = await Promise.all([ const [eventData, rsvpData] = await Promise.all([
drizzleQuery.select().from(events).where(eq(events.id, eventId)).limit(1), database.select().from(events).where(eq(events.id, eventId)).limit(1),
drizzleQuery database.select().from(rsvps).where(eq(rsvps.eventId, eventId)).orderBy(asc(rsvps.createdAt))
.select()
.from(rsvps)
.where(eq(rsvps.eventId, eventId))
.orderBy(asc(rsvps.createdAt))
]); ]);
if (!eventData[0]) { if (!eventData[0]) {
@@ -81,33 +77,27 @@ export const actions: Actions = {
try { try {
// Check if event exists and get its details // Check if event exists and get its details
const [eventData] = await drizzleQuery.select().from(events).where(eq(events.id, eventId)); const [eventData] = await database.select().from(events).where(eq(events.id, eventId));
if (!eventData) { if (!eventData) {
return fail(404, { error: 'Event not found' }); return fail(404, { error: 'Event not found' });
} }
// Check if event is full (for limited type events) // Check if event is full (for limited type events)
if (eventData.type === 'limited' && eventData.attendeeLimit) { if (eventData.type === 'limited' && eventData.attendeeLimit) {
const currentRSVPs = await drizzleQuery const currentRSVPs = await database.select().from(rsvps).where(eq(rsvps.eventId, eventId));
.select()
.from(rsvps)
.where(eq(rsvps.eventId, eventId));
if (currentRSVPs.length >= eventData.attendeeLimit) { if (currentRSVPs.length >= eventData.attendeeLimit) {
return fail(400, { error: 'Event is full' }); return fail(400, { error: 'Event is full' });
} }
} }
// Check if name is already in the list // Check if name is already in the list
const existingRSVPs = await drizzleQuery const existingRSVPs = await database.select().from(rsvps).where(eq(rsvps.eventId, eventId));
.select()
.from(rsvps)
.where(eq(rsvps.eventId, eventId));
if (existingRSVPs.some((rsvp) => rsvp.name.toLowerCase() === name.toLowerCase())) { if (existingRSVPs.some((rsvp) => rsvp.name.toLowerCase() === name.toLowerCase())) {
return fail(400, { error: 'Name already exists for this event' }); return fail(400, { error: 'Name already exists for this event' });
} }
// Add RSVP to database // Add RSVP to database
await drizzleQuery.insert(rsvps).values({ await database.insert(rsvps).values({
eventId: eventId, eventId: eventId,
name: name.trim(), name: name.trim(),
userId: userId, userId: userId,
@@ -131,7 +121,7 @@ export const actions: Actions = {
} }
try { try {
await drizzleQuery.delete(rsvps).where(eq(rsvps.id, rsvpId)); await database.delete(rsvps).where(eq(rsvps.id, rsvpId));
return { success: true, type: 'remove' }; return { success: true, type: 'remove' };
} catch (err) { } catch (err) {
console.error('Error removing RSVP:', err); console.error('Error removing RSVP:', err);

View File

@@ -1,15 +1,15 @@
// src/routes/healthz/+server.ts // src/routes/healthz/+server.ts
import { json } from '@sveltejs/kit'; import { json } from '@sveltejs/kit';
import { drizzleQuery } from '$lib/database/db'; import { database } from '$lib/database/db';
import { sql } from 'drizzle-orm'; import { sql } from 'drizzle-orm';
export async function GET() { export async function GET() {
try { try {
await drizzleQuery.execute(sql`select 1`); await database.execute(sql`select 1`);
return json({ ok: true }, { headers: { 'cache-control': 'no-store' } }); return json({ ok: true }, { headers: { 'cache-control': 'no-store' } });
} catch (err) { } catch (err) {
return json( return json(
{ ok: false, error: (err as Error)?.message ?? 'DB unavailable' }, { ok: false, error: (err as Error)?.message, message: 'Database unreachable.' },
{ status: 503, headers: { 'cache-control': 'no-store' } } { status: 503, headers: { 'cache-control': 'no-store' } }
); );
} }

View File

@@ -8,7 +8,6 @@ const config = {
preprocess: vitePreprocess(), preprocess: vitePreprocess(),
kit: { kit: {
// Using Netlify adapter for deployment
adapter: adapter({ adapter: adapter({
// if you want to use 'split' mode, set this to 'split' // if you want to use 'split' mode, set this to 'split'
// and create a _redirects file with the redirects you want // and create a _redirects file with the redirects you want