Step 2 migration: add PostgreSQL schema and read endpoints

.gitignore
@@ -1,3 +1,4 @@
gitea_ed25519
gitea_ed25519.pub
.env
node_modules

README.md
@@ -23,6 +23,7 @@ Centralize your collection in an interface that is quick to use, scalable, and
- Delete a game
- Loan status (mark as loaned / returned)
- Local persistence via `localStorage`
- Backend with automatic PostgreSQL schema migration (step 2)

## Tech stack

@@ -84,6 +85,8 @@ docker compose up -d --build
### 3-bis) Check the API (migration step 1)

- [http://localhost:7002/health](http://localhost:7002/health)
- [http://localhost:7002/api/catalog/summary](http://localhost:7002/api/catalog/summary)
- [http://localhost:7002/api/catalog/tree](http://localhost:7002/api/catalog/tree)

### 4) Stop

@@ -118,6 +121,14 @@ git pull
- Export/import (CSV/JSON)
- Remote backup (API/backend)

## Database migration status

- Step 1: `app + api + db` architecture in place
- Step 2: SQL schema applied automatically at API startup
  - tables: `brands`, `consoles`, `games`
  - `updated_at` trigger on `games`
  - read endpoints for validation: `summary` and `tree`

## License

Private personal project.
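
For a quick end-to-end check of the read endpoints listed in the README above, a small Node script along these lines can be used (a sketch, not part of this commit; it assumes the compose stack is running with the API published on port 7002 as in the README, Node 18+ for the global fetch, and the file name check-api.js plus the API_BASE_URL variable are arbitrary):

// check-api.js - hit the three read endpoints and print status plus JSON payload.
const baseUrl = process.env.API_BASE_URL || "http://localhost:7002"; // API_BASE_URL is a hypothetical override

async function check(path) {
  const response = await fetch(`${baseUrl}${path}`);
  const body = await response.json();
  console.log(path, response.status, JSON.stringify(body));
}

(async () => {
  await check("/health");
  await check("/api/catalog/summary");
  await check("/api/catalog/tree");
})().catch((error) => {
  console.error("API check failed:", error);
  process.exit(1);
});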

@@ -1,6 +1,8 @@
FROM node:20-alpine

WORKDIR /app
COPY api/package.json ./package.json
RUN npm install --omit=dev
COPY api/server.js ./server.js

EXPOSE 3001

api/package.json (new file)
@@ -0,0 +1,13 @@
{
  "name": "video-games-api",
  "version": "0.1.0",
  "private": true,
  "description": "Minimal API for video game collection migration steps",
  "main": "server.js",
  "scripts": {
    "start": "node server.js"
  },
  "dependencies": {
    "pg": "^8.16.3"
  }
}

api/server.js
@@ -1,22 +1,167 @@
const http = require("http");
|
||||
const { Pool } = require("pg");
|
||||
|
||||
const port = Number(process.env.API_INTERNAL_PORT || 3001);
|
||||
const serviceName = process.env.SERVICE_NAME || "video-games-api";
|
||||
const databaseUrl =
|
||||
process.env.DATABASE_URL ||
|
||||
"postgres://video_games_user:change_me@video-games-db:5432/video_games";
|
||||
|
||||
const pool = new Pool({
|
||||
connectionString: databaseUrl,
|
||||
});
|
||||
|
||||
function sendJson(response, statusCode, payload) {
|
||||
response.writeHead(statusCode, { "Content-Type": "application/json; charset=utf-8" });
|
||||
response.end(JSON.stringify(payload));
|
||||
}
|
||||
|
||||

async function runMigrations() {
  await pool.query("CREATE EXTENSION IF NOT EXISTS pgcrypto;");

  await pool.query(`
    CREATE TABLE IF NOT EXISTS brands (
      id BIGSERIAL PRIMARY KEY,
      name TEXT NOT NULL UNIQUE,
      created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
    );
  `);

  await pool.query(`
    CREATE TABLE IF NOT EXISTS consoles (
      id BIGSERIAL PRIMARY KEY,
      brand_id BIGINT NOT NULL REFERENCES brands(id) ON DELETE CASCADE,
      name TEXT NOT NULL,
      created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
      UNIQUE (brand_id, name)
    );
  `);

  await pool.query(`
    CREATE TABLE IF NOT EXISTS games (
      id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
      console_id BIGINT NOT NULL REFERENCES consoles(id) ON DELETE CASCADE,
      title TEXT NOT NULL,
      genre TEXT,
      publisher TEXT,
      release_year INTEGER CHECK (release_year IS NULL OR (release_year >= 1970 AND release_year <= 2100)),
      estimated_value NUMERIC(10,2) CHECK (estimated_value IS NULL OR estimated_value >= 0),
      loaned_to TEXT,
      created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
      updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
    );
  `);

  await pool.query(`
    CREATE OR REPLACE FUNCTION set_updated_at()
    RETURNS TRIGGER AS $$
    BEGIN
      NEW.updated_at = NOW();
      RETURN NEW;
    END;
    $$ LANGUAGE plpgsql;
  `);

  await pool.query(`
    DROP TRIGGER IF EXISTS trg_games_updated_at ON games;
    CREATE TRIGGER trg_games_updated_at
    BEFORE UPDATE ON games
    FOR EACH ROW
    EXECUTE FUNCTION set_updated_at();
  `);
}

async function querySingleValue(sqlText) {
  const result = await pool.query(sqlText);
  return Number(result.rows[0].count || 0);
}

async function getCatalogSummary() {
  const [brands, consoles, games] = await Promise.all([
    querySingleValue("SELECT COUNT(*)::int AS count FROM brands;"),
    querySingleValue("SELECT COUNT(*)::int AS count FROM consoles;"),
    querySingleValue("SELECT COUNT(*)::int AS count FROM games;"),
  ]);

  return { brands, consoles, games };
}

async function getCatalogTree() {
  const result = await pool.query(`
    SELECT
      b.id::int AS brand_id,
      b.name AS brand_name,
      c.id::int AS console_id,
      c.name AS console_name,
      COUNT(g.id)::int AS games_count
    FROM brands b
    LEFT JOIN consoles c ON c.brand_id = b.id
    LEFT JOIN games g ON g.console_id = c.id
    GROUP BY b.id, b.name, c.id, c.name
    ORDER BY b.name ASC, c.name ASC;
  `);

  const brandMap = new Map();
  for (const row of result.rows) {
    if (!brandMap.has(row.brand_id)) {
      brandMap.set(row.brand_id, {
        id: row.brand_id,
        name: row.brand_name,
        consoles: [],
      });
    }

    if (row.console_id) {
      brandMap.get(row.brand_id).consoles.push({
        id: row.console_id,
        name: row.console_name,
        gamesCount: row.games_count,
      });
    }
  }

  return Array.from(brandMap.values());
}

async function handleRequest(request, response) {
  const url = new URL(request.url || "/", `http://${request.headers.host || "localhost"}`);

  if (request.method === "GET" && url.pathname === "/health") {
    try {
      await pool.query("SELECT 1;");
      sendJson(response, 200, {
        status: "ok",
        service: serviceName,
        db: "up",
        timestamp: new Date().toISOString(),
      });
    } catch (error) {
      sendJson(response, 503, {
        status: "degraded",
        service: serviceName,
        db: "down",
        error: error.message,
      });
    }
    return;
  }

  if (request.method === "GET" && url.pathname === "/api/catalog/summary") {
    try {
      const summary = await getCatalogSummary();
      sendJson(response, 200, summary);
    } catch (error) {
      sendJson(response, 500, { status: "error", message: error.message });
    }
    return;
  }

  if (request.method === "GET" && url.pathname === "/api/catalog/tree") {
    try {
      const tree = await getCatalogTree();
      sendJson(response, 200, { brands: tree });
    } catch (error) {
      sendJson(response, 500, { status: "error", message: error.message });
    }
    return;
  }

@@ -24,9 +169,23 @@ const server = http.createServer((request, response) => {
    status: "not_found",
    message: "Route not found",
  });
}

async function start() {
  await runMigrations();

  const server = http.createServer((request, response) => {
    handleRequest(request, response).catch((error) => {
      sendJson(response, 500, { status: "error", message: error.message });
    });
  });

  server.listen(port, "0.0.0.0", () => {
    // Keep startup logs minimal and readable in docker compose logs.
    console.log(`${serviceName} listening on port ${port}`);
  });
}

start().catch((error) => {
  console.error("Failed to start API:", error);
  process.exit(1);
});
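
To validate the `updated_at` trigger and the schema described in the README, a throwaway script like the one below can be run against the same database (a sketch, not part of this commit; it reuses the DATABASE_URL fallback from api/server.js, so it must run inside the compose network or with DATABASE_URL pointed at a reachable Postgres, and the file name check-trigger.js plus the sample rows are arbitrary):

// check-trigger.js - insert a brand/console/game, update the game,
// and confirm the trigger bumped updated_at.
const { Pool } = require("pg");

const pool = new Pool({
  connectionString:
    process.env.DATABASE_URL ||
    "postgres://video_games_user:change_me@video-games-db:5432/video_games",
});

async function main() {
  const brand = await pool.query(
    "INSERT INTO brands (name) VALUES ($1) ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name RETURNING id;",
    ["Sample Brand"]
  );
  const consoleRow = await pool.query(
    "INSERT INTO consoles (brand_id, name) VALUES ($1, $2) ON CONFLICT (brand_id, name) DO UPDATE SET name = EXCLUDED.name RETURNING id;",
    [brand.rows[0].id, "Sample Console"]
  );
  const game = await pool.query(
    "INSERT INTO games (console_id, title) VALUES ($1, $2) RETURNING id, updated_at;",
    [consoleRow.rows[0].id, "Sample Game"]
  );
  const updated = await pool.query(
    "UPDATE games SET genre = $2 WHERE id = $1 RETURNING updated_at;",
    [game.rows[0].id, "Platformer"]
  );
  console.log("updated_at advanced:", updated.rows[0].updated_at > game.rows[0].updated_at);
  await pool.end();
}

main().catch((error) => {
  console.error(error);
  process.exit(1);
});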

@@ -7,6 +7,11 @@ services:
      - POSTGRES_DB=${VG_DB_NAME:-video_games}
      - POSTGRES_USER=${VG_DB_USER:-video_games_user}
      - POSTGRES_PASSWORD=${VG_DB_PASSWORD:-change_me}
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${VG_DB_USER:-video_games_user} -d ${VG_DB_NAME:-video_games}"]
      interval: 5s
      timeout: 3s
      retries: 20
    volumes:
      - video_games_data:/var/lib/postgresql/data

@@ -17,7 +22,8 @@ services:
    container_name: video-games-api
    restart: unless-stopped
    depends_on:
      video-games-db:
        condition: service_healthy
    environment:
      - SERVICE_NAME=video-games-api
      - API_INTERNAL_PORT=3001
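
The `condition: service_healthy` gate above starts the API container only after `pg_isready` succeeds, so `runMigrations()` can assume the database is reachable at boot. An application-level retry is a common complementary safeguard in case Postgres accepts connections before it is fully ready; a minimal sketch, not part of this commit, wrapping the existing runMigrations from api/server.js (the helper name and retry counts are arbitrary):

// Hypothetical helper: retry runMigrations a few times before giving up.
async function runMigrationsWithRetry(attempts = 10, delayMs = 2000) {
  for (let attempt = 1; attempt <= attempts; attempt += 1) {
    try {
      await runMigrations();
      return;
    } catch (error) {
      if (attempt === attempts) throw error;
      console.warn(`Migration attempt ${attempt} failed, retrying in ${delayMs} ms:`, error.message);
      await new Promise((resolve) => setTimeout(resolve, delayMs));
    }
  }
}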