From 3a8cf56f8ce3eee64c7e7763acb0118da9abb287 Mon Sep 17 00:00:00 2001 From: 888i88 Date: Sat, 23 Aug 2025 12:45:24 +0200 Subject: [PATCH 01/11] prep exer w1 databases --- Week1/recipes.sql | 45 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 Week1/recipes.sql diff --git a/Week1/recipes.sql b/Week1/recipes.sql new file mode 100644 index 000000000..4f45c40bc --- /dev/null +++ b/Week1/recipes.sql @@ -0,0 +1,45 @@ +CREATE TABLE recipes ( + recipe_id INT PRIMARY KEY AUTO_INCREMENT, + name VARCHAR(255) NOT NULL +); + +CREATE TABLE categories ( + category_id INT PRIMARY KEY AUTO_INCREMENT, + name VARCHAR(100) NOT NULL UNIQUE +); + +CREATE TABLE ingredients ( + ingredient_id INT PRIMARY KEY AUTO_INCREMENT, + name VARCHAR(100) NOT NULL UNIQUE +); + +CREATE TABLE steps ( + step_id INT PRIMARY KEY AUTO_INCREMENT, + description TEXT NOT NULL +); + +CREATE TABLE recipe_category ( + recipe_id INT, + category_id INT, + PRIMARY KEY (recipe_id, category_id), + FOREIGN KEY (recipe_id) REFERENCES recipes(recipe_id) ON DELETE CASCADE, + FOREIGN KEY (category_id) REFERENCES categories(category_id) ON DELETE CASCADE +); + +CREATE TABLE recipe_ingredient ( + recipe_id INT, + ingredient_id INT, + quantity VARCHAR(50), + PRIMARY KEY (recipe_id, ingredient_id), + FOREIGN KEY (recipe_id) REFERENCES recipes(recipe_id) ON DELETE CASCADE, + FOREIGN KEY (ingredient_id) REFERENCES ingredients(ingredient_id) ON DELETE CASCADE +); + +CREATE TABLE recipe_step ( + recipe_id INT, + step_id INT, + step_order INT NOT NULL, + PRIMARY KEY (recipe_id, step_id), + FOREIGN KEY (recipe_id) REFERENCES recipes(recipe_id) ON DELETE CASCADE, + FOREIGN KEY (step_id) REFERENCES steps(step_id) ON DELETE CASCADE +); \ No newline at end of file From f72caca7a2858f48ccdde20015865613cf04b609 Mon Sep 17 00:00:00 2001 From: 888i88 Date: Tue, 26 Aug 2025 18:18:11 +0200 Subject: [PATCH 02/11] assignment w1 datadases --- Week1/databases/connectDatabases.js | 21 +++++++ 
Week1/databases/exercise2_world.js | 64 +++++++++++++++++++ Week1/databases/setupMeetup.js | 96 +++++++++++++++++++++++++++++ 3 files changed, 181 insertions(+) create mode 100644 Week1/databases/connectDatabases.js create mode 100644 Week1/databases/exercise2_world.js create mode 100644 Week1/databases/setupMeetup.js diff --git a/Week1/databases/connectDatabases.js b/Week1/databases/connectDatabases.js new file mode 100644 index 000000000..cde9b1fd0 --- /dev/null +++ b/Week1/databases/connectDatabases.js @@ -0,0 +1,21 @@ +import pkg from "pg"; +const { Client } = pkg; + +export async function connectDB(database = "world") { + const client = new Client({ + user: "hyfuser", + host: "localhost", + database: database, + password: "hyfpassword", + port: 5432, + }); + + try { + await client.connect(); + console.log(`Connected to database: ${client.database}`); + return client; + } catch (error) { + console.error("Connection error:", error); + throw error; + } +} diff --git a/Week1/databases/exercise2_world.js b/Week1/databases/exercise2_world.js new file mode 100644 index 000000000..5966d3edf --- /dev/null +++ b/Week1/databases/exercise2_world.js @@ -0,0 +1,64 @@ +import { connectDB } from "./connectDatabase.js"; + +async function runQueries() { + const client = await connectDB("world"); + + try { + const q1 = await client.query( + `SELECT name FROM country WHERE population > 8000000;` + ); + console.log("1. Countries > 8M:", q1.rows); + + const q2 = await client.query( + `SELECT name FROM country WHERE name ILIKE '%land%';` + ); + console.log("2. Countries with 'land':", q2.rows); + + const q3 = await client.query( + `SELECT name FROM city WHERE population BETWEEN 500000 AND 1000000;` + ); + console.log("3. Cities 500k–1M:", q3.rows); + + const q4 = await client.query( + `SELECT name FROM country WHERE continent = 'Europe';` + ); + console.log("4. 
European countries:", q4.rows); + + const q5 = await client.query( + `SELECT name FROM country ORDER BY surfacearea DESC;` + ); + console.log("5. Countries by surface area DESC:", q5.rows); + + const q6 = await client.query( + `SELECT name FROM city WHERE countrycode = 'NLD';` + ); + console.log("6. Cities in Netherlands:", q6.rows); + + const q7 = await client.query( + `SELECT population FROM city WHERE name = 'Rotterdam';` + ); + console.log("7. Population of Rotterdam:", q7.rows); + + const q8 = await client.query(` + SELECT name FROM country ORDER BY surfacearea DESC LIMIT 10; + `); + console.log("8. Top 10 countries by surface area:", q8.rows); + + const q9 = await client.query(` + SELECT name FROM city ORDER BY population DESC LIMIT 10; + `); + console.log("9. Top 10 most populated cities:", q9.rows); + + const q10 = await client.query( + `SELECT SUM(population) AS world_population FROM country;` + ); + console.log("10. World population:", q10.rows[0].world_population); + } catch (err) { + console.error("❌ Query error:", err); + } finally { + await client.end(); + console.log("🔌 Connection closed."); + } +} + +runQueries(); diff --git a/Week1/databases/setupMeetup.js b/Week1/databases/setupMeetup.js new file mode 100644 index 000000000..d845b4293 --- /dev/null +++ b/Week1/databases/setupMeetup.js @@ -0,0 +1,96 @@ +import pkg from "pg"; +const { Client } = pkg; + +const defaultClient = new Client({ + user: "hyfuser", + host: "localhost", + database: "postgres", + password: "hyfpassword", + port: 5432, +}); + +async function setupDatabase() { + try { + await defaultClient.connect(); + + await defaultClient.query(`DROP DATABASE IF EXISTS meetup;`); + await defaultClient.query(`CREATE DATABASE meetup;`); + console.log('Database "meetup" created.'); + + await defaultClient.end(); + + const client = new Client({ + user: "hyfuser", + host: "localhost", + database: "meetup", + password: "hyfpassword", + port: 5432, + }); + await client.connect(); + + await 
client.query(` + CREATE TABLE Invitee ( + invitee_no INT PRIMARY KEY, + invitee_name VARCHAR(100), + invited_by VARCHAR(100) + ); + `); + + await client.query(` + CREATE TABLE Room ( + room_no INT PRIMARY KEY, + room_name VARCHAR(64), + floor_number INT + ); + `); + + await client.query(` + CREATE TABLE Meeting ( + meeting_no INT PRIMARY KEY, + meeting_title VARCHAR(64), + starting_time TIMESTAMP, + ending_time TIMESTAMP, + room_no INT REFERENCES Room(room_no) + ); + `); + + console.log("Tables created."); + + await client.query(` + INSERT INTO Invitee VALUES + (1, 'Alice Johnson', 'Bob Smith'), + (2, 'Bob Smith', 'Carol White'), + (3, 'Carol White', 'David Lee'), + (4, 'David Lee', 'Alice Johnson'), + (5, 'Eve Brown', 'Bob Smith'); + `); + + await client.query(` + INSERT INTO Room VALUES + (101, 'Blue Room', 1), + (102, 'Green Room', 1), + (201, 'Yellow Room', 2), + (202, 'Red Room', 2), + (301, 'Conference Hall', 3); + `); + + await client.query(` + INSERT INTO Meeting VALUES + (1, 'Project Kickoff', '2025-09-01 09:00:00', '2025-09-01 10:00:00', 101), + (2, 'Design Review', '2025-09-02 11:00:00', '2025-09-02 12:30:00', 102), + (3, 'Team Standup', '2025-09-03 09:30:00', '2025-09-03 10:00:00', 201), + (4, 'Client Presentation', '2025-09-04 14:00:00', '2025-09-04 15:30:00', 202), + (5, 'Retrospective', '2025-09-05 16:00:00', '2025-09-05 17:00:00', 301); + `); + + console.log("Sample data inserted."); + + await client.end(); + console.log("Setup complete."); + } catch (err) { + console.error("Error:", err); + await defaultClient.end(); + } +} + +setupDatabase(); From 81b4544f7129e2f02a93165e382bb7f01998d726 Mon Sep 17 00:00:00 2001 From: 888i88 Date: Tue, 26 Aug 2025 18:29:23 +0200 Subject: [PATCH 03/11] update assignment --- Week1/databases/exercise2_world.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Week1/databases/exercise2_world.js b/Week1/databases/exercise2_world.js index 5966d3edf..8cb8fa169 100644 --- 
a/Week1/databases/exercise2_world.js +++ b/Week1/databases/exercise2_world.js @@ -54,10 +54,10 @@ async function runQueries() { ); console.log("10. World population:", q10.rows[0].world_population); } catch (err) { - console.error("❌ Query error:", err); + console.error("Query error:", err); } finally { await client.end(); - console.log("🔌 Connection closed."); + console.log("Connection closed."); } } From 2b244503149defa8a276477b6647970e4a9c7976 Mon Sep 17 00:00:00 2001 From: 888i88 Date: Sat, 30 Aug 2025 13:12:09 +0200 Subject: [PATCH 04/11] prep exercise week 2 done --- Week2/databases/connectDatabase.js | 22 ++ Week2/databases/queries.js | 40 ++++ Week2/databases/recipes.sql | 46 +++++ Week2/databases/setupRecipes.js | 309 +++++++++++++++++++++++++++++ Week2/package-lock.json | 162 +++++++++++++++ Week2/package.json | 16 ++ 6 files changed, 595 insertions(+) create mode 100644 Week2/databases/connectDatabase.js create mode 100644 Week2/databases/queries.js create mode 100644 Week2/databases/recipes.sql create mode 100644 Week2/databases/setupRecipes.js create mode 100644 Week2/package-lock.json create mode 100644 Week2/package.json diff --git a/Week2/databases/connectDatabase.js b/Week2/databases/connectDatabase.js new file mode 100644 index 000000000..ac65e1c17 --- /dev/null +++ b/Week2/databases/connectDatabase.js @@ -0,0 +1,22 @@ +import pkg from 'pg'; +const { Client } = pkg; + + +export async function connectDB(database = 'postgres') { + const client = new Client({ + user: 'hyfuser', + host: 'localhost', + database, + password: 'hyfpassword', + port: 5432, + }); + + try { + await client.connect(); + console.log(`Connected to database: ${database}`); + return client; + } catch (error) { + console.error('Connection error:', error); + throw error; + } +} diff --git a/Week2/databases/queries.js b/Week2/databases/queries.js new file mode 100644 index 000000000..b1323a743 --- /dev/null +++ b/Week2/databases/queries.js @@ -0,0 +1,40 @@ +import { connectDB } from 
"./connectDatabase.js"; + +async function runQueries() { + const client = await connectDB("recipes"); + + try { + const res1 = await client.query(` + SELECT r.name + FROM recipes r + JOIN recipe_category rc ON r.recipe_id = rc.recipe_id + JOIN categories c ON rc.category_id = c.category_id + WHERE c.name = 'Vegetarian' + `); + console.log("Vegetarian recipes:", res1.rows); + + const res2 = await client.query(` + SELECT r.name + FROM recipes r + JOIN recipe_category rc ON r.recipe_id = rc.recipe_id + JOIN categories c ON rc.category_id = c.category_id + WHERE c.name = 'No-Bake' + `); + console.log("No-Bake Cakes:", res2.rows); + + const res3 = await client.query(` + SELECT r.name + FROM recipes r + JOIN recipe_category rc ON r.recipe_id = rc.recipe_id + JOIN categories c ON rc.category_id = c.category_id + WHERE c.name IN ('Vegan','Japanese') + `); + console.log("Vegan or Japanese recipes:", res3.rows); + } catch (err) { + console.error("Error running queries:", err); + } finally { + await client.end(); + } +} + +runQueries(); diff --git a/Week2/databases/recipes.sql b/Week2/databases/recipes.sql new file mode 100644 index 000000000..312f9cda6 --- /dev/null +++ b/Week2/databases/recipes.sql @@ -0,0 +1,46 @@ + +CREATE TABLE recipes ( + recipe_id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL +); + +CREATE TABLE categories ( + category_id SERIAL PRIMARY KEY, + name VARCHAR(100) NOT NULL UNIQUE +); + +CREATE TABLE ingredients ( + ingredient_id SERIAL PRIMARY KEY, + name VARCHAR(100) NOT NULL UNIQUE +); + +CREATE TABLE steps ( + step_id SERIAL PRIMARY KEY, + description TEXT NOT NULL +); + +CREATE TABLE recipe_category ( + recipe_id INT, + category_id INT, + PRIMARY KEY (recipe_id, category_id), + FOREIGN KEY (recipe_id) REFERENCES recipes(recipe_id) ON DELETE CASCADE, + FOREIGN KEY (category_id) REFERENCES categories(category_id) ON DELETE CASCADE +); + +CREATE TABLE recipe_ingredient ( + recipe_id INT, + ingredient_id INT, + quantity VARCHAR(50), + PRIMARY KEY 
(recipe_id, ingredient_id), + FOREIGN KEY (recipe_id) REFERENCES recipes(recipe_id) ON DELETE CASCADE, + FOREIGN KEY (ingredient_id) REFERENCES ingredients(ingredient_id) ON DELETE CASCADE +); + +CREATE TABLE recipe_step ( + recipe_id INT, + step_id INT, + step_order INT NOT NULL, + PRIMARY KEY (recipe_id, step_id), + FOREIGN KEY (recipe_id) REFERENCES recipes(recipe_id) ON DELETE CASCADE, + FOREIGN KEY (step_id) REFERENCES steps(step_id) ON DELETE CASCADE +); diff --git a/Week2/databases/setupRecipes.js b/Week2/databases/setupRecipes.js new file mode 100644 index 000000000..617ba383d --- /dev/null +++ b/Week2/databases/setupRecipes.js @@ -0,0 +1,309 @@ +import pkg from "pg"; +const { Client } = pkg; + +const defaultClient = new Client({ + user: "hyfuser", + host: "localhost", + database: "postgres", + password: "hyfpassword", + port: 5432, +}); + +async function setupDatabase() { + try { + await defaultClient.connect(); + + await defaultClient.query(`DROP DATABASE IF EXISTS recipes;`); + await defaultClient.query(`CREATE DATABASE recipes;`); + console.log('Database "recipes" created.'); + await defaultClient.end(); + + const client = new Client({ + user: "hyfuser", + host: "localhost", + database: "recipes", + password: "hyfpassword", + port: 5432, + }); + + await client.connect(); + console.log("Connected to database: recipes"); + + await client.query(` + CREATE TABLE IF NOT EXISTS recipes ( + recipe_id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL + ); + `); + + await client.query(` + CREATE TABLE IF NOT EXISTS categories ( + category_id SERIAL PRIMARY KEY, + name VARCHAR(100) NOT NULL UNIQUE + ); + `); + + await client.query(` + CREATE TABLE IF NOT EXISTS ingredients ( + ingredient_id SERIAL PRIMARY KEY, + name VARCHAR(100) NOT NULL UNIQUE + ); + `); + + await client.query(` + CREATE TABLE IF NOT EXISTS steps ( + step_id SERIAL PRIMARY KEY, + description TEXT NOT NULL + ); + `); + + await client.query(` + CREATE TABLE IF NOT EXISTS recipe_category ( + 
recipe_id INT, + category_id INT, + PRIMARY KEY (recipe_id, category_id), + FOREIGN KEY (recipe_id) REFERENCES recipes(recipe_id) ON DELETE CASCADE, + FOREIGN KEY (category_id) REFERENCES categories(category_id) ON DELETE CASCADE + ); + `); + + await client.query(` + CREATE TABLE IF NOT EXISTS recipe_ingredient ( + recipe_id INT, + ingredient_id INT, + quantity VARCHAR(50), + PRIMARY KEY (recipe_id, ingredient_id), + FOREIGN KEY (recipe_id) REFERENCES recipes(recipe_id) ON DELETE CASCADE, + FOREIGN KEY (ingredient_id) REFERENCES ingredients(ingredient_id) ON DELETE CASCADE + ); + `); + + await client.query(` + CREATE TABLE IF NOT EXISTS recipe_step ( + recipe_id INT, + step_id INT, + step_order INT NOT NULL, + PRIMARY KEY (recipe_id, step_id), + FOREIGN KEY (recipe_id) REFERENCES recipes(recipe_id) ON DELETE CASCADE, + FOREIGN KEY (step_id) REFERENCES steps(step_id) ON DELETE CASCADE + ); + `); + + const categories = [ + "Cake", + "No-Bake", + "Vegetarian", + "Vegan", + "Gluten-Free", + "Japanese", + ]; + for (let cat of categories) { + await client.query( + `INSERT INTO categories (name) VALUES ($1) ON CONFLICT DO NOTHING`, + [cat] + ); + } + + const ingredients = [ + "Condensed milk", + "Cream Cheese", + "Lemon Juice", + "Pie Crust", + "Cherry Jam", + "Brussels Sprouts", + "Sesame seeds", + "Pepper", + "Salt", + "Olive oil", + "Macaroni", + "Butter", + "Flour", + "Milk", + "Shredded Cheddar cheese", + "Eggs", + "Soy sauce", + "Sugar", + ]; + for (let ing of ingredients) { + await client.query( + `INSERT INTO ingredients (name) VALUES ($1) ON CONFLICT DO NOTHING`, + [ing] + ); + } + + const allSteps = [ + "Beat Cream Cheese", + "Add condensed Milk and blend", + "Add Lemon Juice and blend", + "Add the mix to the pie crust", + "Spread the Cherry Jam", + "Place in refrigerator for 3h", + "Preheat the oven", + "Mix the ingredients in a bowl", + "Spread the mix on baking sheet", + "Bake for 30'", + "Cook Macaroni for 8'", + "Melt butter in a saucepan", + "Add flour, 
salt, pepper and mix", + "Add Milk and mix", + "Cook until mix is smooth", + "Add cheddar cheese", + "Add the macaroni", + "Beat the eggs", + "Add soya sauce, sugar and salt", + "Add oil to a sauce pan", + "Bring to medium heat", + "Add some mix to the sauce pan", + "Let is cook for 1'", + "Remove pan from fire", + ]; + for (let step of allSteps) { + await client.query( + `INSERT INTO steps (description) VALUES ($1) ON CONFLICT DO NOTHING`, + [step] + ); + } + + const recipes = [ + "No-Bake Cheesecake", + "Roasted Brussels Sprouts", + "Mac & Cheese", + "Tamagoyaki Japanese Omelette", + ]; + for (let rec of recipes) { + await client.query( + `INSERT INTO recipes (name) VALUES ($1) ON CONFLICT DO NOTHING`, + [rec] + ); + } + + const recipeCategories = { + "No-Bake Cheesecake": ["Cake", "No-Bake", "Vegetarian"], + "Roasted Brussels Sprouts": ["Vegan", "Gluten-Free"], + "Mac & Cheese": ["Vegetarian"], + "Tamagoyaki Japanese Omelette": ["Vegetarian", "Japanese"], + }; + + for (let [rec, cats] of Object.entries(recipeCategories)) { + for (let cat of cats) { + await client.query( + ` + INSERT INTO recipe_category (recipe_id, category_id) + SELECT r.recipe_id, c.category_id + FROM recipes r, categories c + WHERE r.name = $1 AND c.name = $2 + ON CONFLICT DO NOTHING + `, + [rec, cat] + ); + } + } + + const recipeIngredients = { + "No-Bake Cheesecake": [ + "Condensed milk", + "Cream Cheese", + "Lemon Juice", + "Pie Crust", + "Cherry Jam", + ], + "Roasted Brussels Sprouts": [ + "Brussels Sprouts", + "Lemon Juice", + "Sesame seeds", + "Pepper", + "Salt", + "Olive oil", + ], + "Mac & Cheese": [ + "Macaroni", + "Butter", + "Flour", + "Salt", + "Pepper", + "Milk", + "Shredded Cheddar cheese", + ], + "Tamagoyaki Japanese Omelette": [ + "Eggs", + "Soy sauce", + "Sugar", + "Salt", + "Olive oil", + ], + }; + + for (let [rec, ings] of Object.entries(recipeIngredients)) { + for (let ing of ings) { + await client.query( + ` + INSERT INTO recipe_ingredient (recipe_id, ingredient_id, 
quantity) + SELECT r.recipe_id, i.ingredient_id, 'to taste' + FROM recipes r, ingredients i + WHERE r.name = $1 AND i.name = $2 + ON CONFLICT DO NOTHING + `, + [rec, ing] + ); + } + } + + const recipeSteps = { + "No-Bake Cheesecake": [ + "Beat Cream Cheese", + "Add condensed Milk and blend", + "Add Lemon Juice and blend", + "Add the mix to the pie crust", + "Spread the Cherry Jam", + "Place in refrigerator for 3h", + ], + "Roasted Brussels Sprouts": [ + "Preheat the oven", + "Mix the ingredients in a bowl", + "Spread the mix on baking sheet", + "Bake for 30'", + ], + "Mac & Cheese": [ + "Cook Macaroni for 8'", + "Melt butter in a saucepan", + "Add flour, salt, pepper and mix", + "Add Milk and mix", + "Cook until mix is smooth", + "Add cheddar cheese", + "Add the macaroni", + ], + "Tamagoyaki Japanese Omelette": [ + "Beat the eggs", + "Add soya sauce, sugar and salt", + "Add oil to a sauce pan", + "Bring to medium heat", + "Add some mix to the sauce pan", + "Let is cook for 1'", + "Remove pan from fire", + ], + }; + + for (let [rec, steps] of Object.entries(recipeSteps)) { + for (let i = 0; i < steps.length; i++) { + await client.query( + ` + INSERT INTO recipe_step (recipe_id, step_id, step_order) + SELECT r.recipe_id, s.step_id, $1 + FROM recipes r, steps s + WHERE r.name = $2 AND s.description = $3 + ON CONFLICT DO NOTHING + `, + [i + 1, rec, steps[i]] + ); + } + } + + console.log("Database setup complete!"); + await client.end(); + } catch (err) { + console.error("Connection error:", err); + await defaultClient.end(); + } +} + +setupDatabase(); diff --git a/Week2/package-lock.json b/Week2/package-lock.json new file mode 100644 index 000000000..4d4583c48 --- /dev/null +++ b/Week2/package-lock.json @@ -0,0 +1,162 @@ +{ + "name": "week2", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "week2", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "pg": "^8.16.3" + } + }, + "node_modules/pg": { + 
"version": "8.16.3", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.16.3.tgz", + "integrity": "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==", + "license": "MIT", + "dependencies": { + "pg-connection-string": "^2.9.1", + "pg-pool": "^3.10.1", + "pg-protocol": "^1.10.3", + "pg-types": "2.2.0", + "pgpass": "1.0.5" + }, + "engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.2.7" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.2.7.tgz", + "integrity": "sha512-YgCtzMH0ptvZJslLM1ffsY4EuGaU0cx4XSdXLRFae8bPP4dS5xL1tNB3k2o/N64cHJpwU7dxKli/nZ2lUa5fLg==", + "license": "MIT", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.9.1", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.9.1.tgz", + "integrity": "sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==", + "license": "MIT" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "license": "ISC", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.10.1.tgz", + "integrity": "sha512-Tu8jMlcX+9d8+QVzKIvM/uJtp07PKr82IUOYEphaWcoBhIYkoHpLXN3qO59nAI11ripznDsEzEv8nUxBVWajGg==", + "license": "MIT", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.10.3.tgz", + "integrity": 
"sha512-6DIBgBQaTKDJyxnXaLiLR8wBpQQcGWuAESkRBX/t6OwA8YsqP+iVSiond2EDy6Y/dsGk8rh/jtax3js5NeV7JQ==", + "license": "MIT" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "license": "MIT", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "license": "MIT", + "dependencies": { + "split2": "^4.1.0" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "license": "MIT", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "license": "MIT", + "engines": { + "node": ">=0.4" + } + } + } +} diff --git a/Week2/package.json b/Week2/package.json new file mode 100644 index 000000000..795720253 --- /dev/null +++ b/Week2/package.json @@ -0,0 +1,16 @@ +{ + "name": "week2", + "version": "1.0.0", + "type": "module", + "description": "## Agenda", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "pg": "^8.16.3" + } +} From 43d04b5940175c7ec0bba6264797f0b71c42a3c8 Mon Sep 17 00:00:00 2001 From: 888i88 Date: Sun, 31 Aug 2025 10:27:02 +0200 Subject: [PATCH 05/11] add some improvments after mentor comments --- Week1/databases/setupMeetup.js | 57 +++++++++++++++++----------------- 1 file changed, 29 insertions(+), 28 deletions(-) diff --git a/Week1/databases/setupMeetup.js b/Week1/databases/setupMeetup.js index d845b4293..e9bcae8f4 100644 --- a/Week1/databases/setupMeetup.js +++ b/Week1/databases/setupMeetup.js @@ -10,6 +10,7 @@ const defaultClient = new Client({ }); async function setupDatabase() { + let client; try { await defaultClient.connect(); @@ -19,7 +20,7 
@@ async function setupDatabase() { await defaultClient.end(); - const client = new Client({ + client = new Client({ user: "hyfuser", host: "localhost", database: "meetup", @@ -30,7 +31,7 @@ async function setupDatabase() { await client.query(` CREATE TABLE Invitee ( - invitee_no INT PRIMARY KEY, + invitee_no INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, invitee_name VARCHAR(100), invited_by VARCHAR(100) ); @@ -40,13 +41,13 @@ async function setupDatabase() { CREATE TABLE Room ( room_no INT PRIMARY KEY, room_name VARCHAR(64), - floor_number INT + floor_number SMALLINT ); `); await client.query(` CREATE TABLE Meeting ( - meeting_no INT PRIMARY KEY, + meeting_no INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, meeting_title VARCHAR(64), starting_time TIMESTAMP, ending_time TIMESTAMP, @@ -57,39 +58,39 @@ async function setupDatabase() { console.log("Tables created."); await client.query(` - INSERT INTO Invitee VALUES - (1, 'Alice Johnson', 'Bob Smith'), - (2, 'Bob Smith', 'Carol White'), - (3, 'Carol White', 'David Lee'), - (4, 'David Lee', 'Alice Johnson'), - (5, 'Eve Brown', 'Bob Smith'); - `); + INSERT INTO Invitee (invitee_name, invited_by) VALUES + ('Alice Johnson', 'Bob Smith'), + ('Bob Smith', 'Carol White'), + ('Carol White', 'David Lee'), + ('David Lee', 'Alice Johnson'), + ('Eve Brown', 'Bob Smith'); +`); await client.query(` - INSERT INTO Room VALUES - (101, 'Blue Room', 1), - (102, 'Green Room', 1), - (201, 'Yellow Room', 2), - (202, 'Red Room', 2), - (301, 'Conference Hall', 3); + INSERT INTO Room (room_name, floor_number) VALUES + ('Blue Room', 1), + ('Green Room', 1), + ('Yellow Room', 2), + ('Red Room', 2), + ('Conference Hall', 3); `); await client.query(` - INSERT INTO Meeting VALUES - (1, 'Project Kickoff', '2025-09-01 09:00:00', '2025-09-01 10:00:00', 101), - (2, 'Design Review', '2025-09-02 11:00:00', '2025-09-02 12:30:00', 102), - (3, 'Team Standup', '2025-09-03 09:30:00', '2025-09-03 10:00:00', 201), - (4, 'Client Presentation', 
'2025-09-04 14:00:00', '2025-09-04 15:30:00', 202), - (5, 'Retrospective', '2025-09-05 16:00:00', '2025-09-05 17:00:00', 301); - `); + INSERT INTO Meeting (meeting_title, starting_time, ending_time, room_no) VALUES + ('Project Kickoff', '2025-09-01 09:00:00', '2025-09-01 10:00:00', 1), + ('Design Review', '2025-09-02 11:00:00', '2025-09-02 12:30:00', 2), + ('Team Standup', '2025-09-03 09:30:00', '2025-09-03 10:00:00', 3), + ('Client Presentation', '2025-09-04 14:00:00', '2025-09-04 15:30:00', 4), + ('Retrospective', '2025-09-05 16:00:00', '2025-09-05 17:00:00', 5); +`); console.log("Sample data inserted."); - - await client.end(); - console.log("Setup complete."); } catch (err) { console.error("Error:", err); - await defaultClient.end(); + } finally { + if (client) await client.end().catch(() => {}); + await defaultClient.end().catch(() => {}); + console.log("Setup complete (connections closed)."); } } From 386ef79280f1343d8f8d56b8b80db9e4ddcddd3c Mon Sep 17 00:00:00 2001 From: 888i88 Date: Wed, 3 Sep 2025 13:11:47 +0200 Subject: [PATCH 06/11] assignment week 2 databases --- Week2/databases/exercise 1 Keys.js | 36 +++++++++++ Week2/databases/exercise 2 Relationships.js | 61 +++++++++++++++++++ Week2/databases/exercise 3 Joins.js | 33 ++++++++++ .../exercise 4 Aggregate functions.js | 59 ++++++++++++++++++ 4 files changed, 189 insertions(+) create mode 100644 Week2/databases/exercise 1 Keys.js create mode 100644 Week2/databases/exercise 2 Relationships.js create mode 100644 Week2/databases/exercise 3 Joins.js create mode 100644 Week2/databases/exercise 4 Aggregate functions.js diff --git a/Week2/databases/exercise 1 Keys.js b/Week2/databases/exercise 1 Keys.js new file mode 100644 index 000000000..979d1708a --- /dev/null +++ b/Week2/databases/exercise 1 Keys.js @@ -0,0 +1,36 @@ +import pkg from "pg"; +const { Client } = pkg; + +const client = new Client({ + user: "postgres", + host: "localhost", + database: "hyf_db", + password: "your_password", + port: 5432, +}); + 
+async function main() { + await client.connect(); + + await client.query(` + CREATE TABLE IF NOT EXISTS authors ( + author_id SERIAL PRIMARY KEY, + author_name VARCHAR(100) NOT NULL, + university VARCHAR(100), + date_of_birth DATE, + h_index INT, + gender VARCHAR(10) + ); + `); + + await client.query(` + ALTER TABLE authors + ADD COLUMN IF NOT EXISTS mentor INT, + ADD CONSTRAINT fk_mentor FOREIGN KEY (mentor) REFERENCES authors(author_id); + `); + + console.log("Authors table created with self-referencing mentor key"); + await client.end(); +} + +main().catch(console.error); diff --git a/Week2/databases/exercise 2 Relationships.js b/Week2/databases/exercise 2 Relationships.js new file mode 100644 index 000000000..32dd9281f --- /dev/null +++ b/Week2/databases/exercise 2 Relationships.js @@ -0,0 +1,61 @@ +import pkg from "pg"; +const { Client } = pkg; + +const client = new Client({ + user: "postgres", + host: "localhost", + database: "hyf_db", + password: "your_password", + port: 5432, +}); + +async function main() { + await client.connect(); + + await client.query(` + CREATE TABLE IF NOT EXISTS research_papers ( + paper_id SERIAL PRIMARY KEY, + paper_title VARCHAR(255) NOT NULL, + conference VARCHAR(100), + publish_date DATE + ); + `); + + await client.query(` + CREATE TABLE IF NOT EXISTS author_papers ( + author_id INT REFERENCES authors(author_id), + paper_id INT REFERENCES research_papers(paper_id), + PRIMARY KEY (author_id, paper_id) + ); + `); + + console.log("Research papers and author_papers tables created"); + await client.query(` + INSERT INTO authors (author_name, university, date_of_birth, h_index, gender) + VALUES + ('Alice Smith', 'MIT', '1980-05-10', 42, 'Female'), + ('Bob Johnson', 'Stanford', '1975-09-20', 55, 'Male'), + ('Carol Lee', 'Harvard', '1985-01-15', 38, 'Female') + ON CONFLICT DO NOTHING; + `); + + await client.query(` + INSERT INTO research_papers (paper_title, conference, publish_date) + VALUES + ('AI in Healthcare', 'NeurIPS', 
'2020-12-01'), + ('Quantum Computing Advances', 'QCon', '2021-06-15'), + ('Deep Learning Optimization', 'ICML', '2019-07-07') + ON CONFLICT DO NOTHING; + `); + + await client.query(` + INSERT INTO author_papers (author_id, paper_id) + VALUES (1,1), (2,1), (1,2), (3,3) + ON CONFLICT DO NOTHING; + `); + + console.log("Sample authors and papers inserted"); + await client.end(); +} + +main().catch(console.error); diff --git a/Week2/databases/exercise 3 Joins.js b/Week2/databases/exercise 3 Joins.js new file mode 100644 index 000000000..dbbfc13e8 --- /dev/null +++ b/Week2/databases/exercise 3 Joins.js @@ -0,0 +1,33 @@ +import pkg from "pg"; +const { Client } = pkg; + +const client = new Client({ + user: "postgres", + host: "localhost", + database: "hyf_db", + password: "your_password", + port: 5432, +}); + +async function main() { + await client.connect(); + + const mentors = await client.query(` + SELECT a.author_name AS author, m.author_name AS mentor + FROM authors a + LEFT JOIN authors m ON a.mentor = m.author_id; + `); + console.table(mentors.rows); + + const papers = await client.query(` + SELECT a.author_name, rp.paper_title + FROM authors a + LEFT JOIN author_papers ap ON a.author_id = ap.author_id + LEFT JOIN research_papers rp ON ap.paper_id = rp.paper_id; + `); + console.table(papers.rows); + + await client.end(); +} + +main().catch(console.error); diff --git a/Week2/databases/exercise 4 Aggregate functions.js b/Week2/databases/exercise 4 Aggregate functions.js new file mode 100644 index 000000000..e8f937e04 --- /dev/null +++ b/Week2/databases/exercise 4 Aggregate functions.js @@ -0,0 +1,59 @@ +import pkg from "pg"; +const { Client } = pkg; + +const client = new Client({ + user: "postgres", + host: "localhost", + database: "hyf_db", + password: "your_password", + port: 5432, +}); + +async function main() { + await client.connect(); + + const q1 = await client.query(` + SELECT rp.paper_title, COUNT(ap.author_id) AS author_count + FROM research_papers rp + LEFT 
/**
 * Keys exercise: creates the `authors` table with
 *  - a SERIAL primary key,
 *  - a `gender` column backed by a custom ENUM type, and
 *  - a self-referencing foreign key `mentor` -> authors.author_id
 *    (set to NULL when the referenced mentor row is deleted).
 * Uses the module-level `client` defined above and always closes it.
 */
async function main() {
  try {
    await client.connect();
    console.log("Connected to the database");

    await ensureGenderType();
    await createAuthorsTable();

    console.log(
      "Authors table created with self-referencing mentor key and gender ENUM"
    );
  } catch (error) {
    console.error("Error occurred:", error.message);
  } finally {
    await client.end();
    console.log("Database connection closed");
  }
}

// CREATE TYPE has no IF NOT EXISTS clause, so existence is checked against
// pg_type inside a DO block to keep the script re-runnable.
function ensureGenderType() {
  return client.query(`
      DO $$
      BEGIN
        IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'gender_type') THEN
          CREATE TYPE gender_type AS ENUM ('Male', 'Female', 'Other');
        END IF;
      END $$;
    `);
}

// The mentor FK is declared inline so table creation is a single statement.
function createAuthorsTable() {
  return client.query(`
      CREATE TABLE IF NOT EXISTS authors (
        author_id SERIAL PRIMARY KEY,
        author_name VARCHAR(100) NOT NULL,
        university VARCHAR(100),
        date_of_birth DATE,
        h_index INT,
        gender gender_type,
        mentor INT,
        CONSTRAINT fk_mentor FOREIGN KEY (mentor) REFERENCES authors(author_id) ON DELETE SET NULL
      );
    `);
}

main().catch((error) => {
  console.error("Main function error:", error.message);
});
/**
 * Relationships exercise: builds the many-to-many link between authors and
 * research papers (via the `author_papers` junction table) and seeds a few
 * sample rows. Re-runnable: tables use IF NOT EXISTS and inserts use
 * ON CONFLICT DO NOTHING. Uses the module-level `client` defined above.
 */
async function main() {
  try {
    await client.connect();
    console.log("Connected to the database");

    await createTables();
    console.log("Research papers and author_papers tables created");

    await seedSampleData();
    console.log("Sample authors and papers inserted");
  } catch (error) {
    console.error("An error occurred:", error);
    throw error; // Re-throw to allow caller to handle if needed
  } finally {
    await client.end();
    console.log("Database connection closed");
  }
}

// Creates the papers table plus the junction table whose composite primary
// key (author_id, paper_id) prevents duplicate author/paper pairs.
async function createTables() {
  await client.query(`
      CREATE TABLE IF NOT EXISTS research_papers (
        paper_id SERIAL PRIMARY KEY,
        paper_title VARCHAR(255) NOT NULL,
        conference VARCHAR(100),
        publish_date DATE
      );
    `);

  await client.query(`
      CREATE TABLE IF NOT EXISTS author_papers (
        author_id INT REFERENCES authors(author_id),
        paper_id INT REFERENCES research_papers(paper_id),
        PRIMARY KEY (author_id, paper_id)
      );
    `);
}

// Inserts demo authors, papers, and author/paper links.
async function seedSampleData() {
  await client.query(`
      INSERT INTO authors (author_name, university, date_of_birth, h_index, gender)
      VALUES
        ('Alice Smith', 'MIT', '1980-05-10', 42, 'Female'),
        ('Bob Johnson', 'Stanford', '1975-09-20', 55, 'Male'),
        ('Carol Lee', 'Harvard', '1985-01-15', 38, 'Female')
      ON CONFLICT DO NOTHING;
    `);

  await client.query(`
      INSERT INTO research_papers (paper_title, conference, publish_date)
      VALUES
        ('AI in Healthcare', 'NeurIPS', '2020-12-01'),
        ('Quantum Computing Advances', 'QCon', '2021-06-15'),
        ('Deep Learning Optimization', 'ICML', '2019-07-07')
      ON CONFLICT DO NOTHING;
    `);

  await client.query(`
      INSERT INTO author_papers (author_id, paper_id)
      VALUES (1,1), (2,1), (1,2), (3,3)
      ON CONFLICT DO NOTHING;
    `);
}

main().catch((error) => {
  console.error("Failed to execute main function:", error);
  process.exit(1); // Exit with error code
});
/**
 * Aggregate-function exercises. Runs five GROUP BY / COUNT / AVG / MIN / MAX
 * queries against the authors + author_papers + research_papers schema and
 * prints each result set as a table, in the original order:
 *  1. authors per paper, 2. papers with a female author, 3. avg h-index per
 *  university, 4. papers per university, 5. min/max h-index per university.
 * Uses the module-level `client` defined above.
 */
async function main() {
  try {
    await client.connect();
    console.log("Connected to the database");

    // Statements are executed sequentially; each result is printed as soon
    // as it arrives so the output order matches the list below.
    const statements = [
      `
      SELECT rp.paper_title, COUNT(ap.author_id) AS author_count
      FROM research_papers rp
      LEFT JOIN author_papers ap ON rp.paper_id = ap.paper_id
      GROUP BY rp.paper_title;
    `,
      `
      SELECT COUNT(DISTINCT ap.paper_id) AS female_paper_count
      FROM authors a
      JOIN author_papers ap ON a.author_id = ap.author_id
      WHERE a.gender = 'Female';
    `,
      `
      SELECT university, AVG(h_index) AS avg_hindex
      FROM authors
      GROUP BY university;
    `,
      `
      SELECT a.university, COUNT(DISTINCT ap.paper_id) AS paper_count
      FROM authors a
      LEFT JOIN author_papers ap ON a.author_id = ap.author_id
      GROUP BY a.university;
    `,
      `
      SELECT university, MIN(h_index) AS min_hindex, MAX(h_index) AS max_hindex
      FROM authors
      GROUP BY university;
    `,
    ];

    for (const sql of statements) {
      const result = await client.query(sql);
      console.table(result.rows);
    }
  } catch (error) {
    console.error("An error occurred:", error);
    throw error; // Re-throw to allow caller to handle if needed
  } finally {
    await client.end();
    console.log("Database connection closed");
  }
}

main().catch((error) => {
  console.error("Failed to execute main function:", error);
  process.exit(1); // Exit with error code
});
// All Week-3 exercises operate on the same collection; resolve it once here
// instead of repeating the db/collection lookup in every exercise function.
function getBobRossCollection(client) {
  return client.db("databaseWeek3").collection("bob_ross_episodes");
}

/**
 * Adds the missing last episode of season 9 (S09E13 "MOUNTAIN HIDE-AWAY")
 * and reports the id MongoDB generated for the new document.
 * @param {object} client - connected MongoClient
 */
async function createEpisodeExercise(client) {
  const bobRossCollection = getBobRossCollection(client);

  const result = await bobRossCollection.insertOne({
    episode: "S09E13",
    title: "MOUNTAIN HIDE-AWAY",
    elements: [
      "CIRRUS",
      "CLOUDS",
      "CONIFER",
      "DECIDIOUS",
      "GRASS",
      "MOUNTAIN",
      "MOUNTAINS",
      "RIVER",
      "SNOWY_MOUNTAIN",
      "TREE",
      "TREES",
    ],
  });

  console.log(
    `Created season 9 episode 13 and the document got the id ${result.insertedId}`
  );
}

/**
 * Read queries: exact-field findOne, array-contains matching on `elements`,
 * and $all for episodes that contain several elements at once.
 * @param {object} client - connected MongoClient
 */
async function findEpisodesExercises(client) {
  const bobRossCollection = getBobRossCollection(client);

  // Find the title of episode 2 in season 2 [Should be: WINTER SUN]
  const episodeS02E02 = await bobRossCollection.findOne({ episode: "S02E02" });
  console.log(`The title of episode 2 in season 2 is ${episodeS02E02.title}`);

  // Find the season and episode number of "BLACK RIVER" [Should be: S02E06]
  const blackRiverEpisode = await bobRossCollection.findOne({
    title: "BLACK RIVER",
  });
  console.log(
    `The season and episode number of the "BLACK RIVER" episode is ${blackRiverEpisode.episode}`
  );

  // Matching a scalar against an array field matches any element of it.
  const cliffEpisodes = await bobRossCollection
    .find({ elements: "CLIFF" })
    .project({ title: 1, _id: 0 })
    .toArray();
  const cliffTitles = cliffEpisodes.map((ep) => ep.title);
  console.log(
    `The episodes that Bob Ross painted a CLIFF are ${cliffTitles.join(", ")}`
  );

  // $all requires every listed element to be present in the array.
  const cliffAndLighthouseEpisodes = await bobRossCollection
    .find({ elements: { $all: ["CLIFF", "LIGHTHOUSE"] } })
    .project({ title: 1, _id: 0 })
    .toArray();
  const cliffAndLighthouseTitles = cliffAndLighthouseEpisodes.map(
    (ep) => ep.title
  );
  console.log(
    `The episodes that Bob Ross painted a CLIFF and a LIGHTHOUSE are ${cliffAndLighthouseTitles.join(", ")}`
  );
}

/**
 * Fixes the S30E13 title and renames every 'BUSHES' element to 'BUSH'.
 * @param {object} client - connected MongoClient
 */
async function updateEpisodeExercises(client) {
  const bobRossCollection = getBobRossCollection(client);

  const updateTitleResult = await bobRossCollection.updateOne(
    { episode: "S30E13" },
    { $set: { title: "BLUE RIDGE FALLS" } }
  );
  console.log(
    `Ran a command to update episode 13 in season 30 and it updated ${updateTitleResult.modifiedCount} episodes`
  );

  // arrayFilters + $[elem] rewrites every matching array element (not just
  // the first), so documents holding several BUSHES entries are fully fixed.
  const updateBushesResult = await bobRossCollection.updateMany(
    { elements: "BUSHES" },
    { $set: { "elements.$[elem]": "BUSH" } },
    { arrayFilters: [{ elem: "BUSHES" }] }
  );
  console.log(
    `Ran a command to update all the BUSHES to BUSH and it updated ${updateBushesResult.modifiedCount} episodes`
  );
}
/**
 * Removes the erroneous episode S31E14 from the collection and reports how
 * many documents were deleted.
 * @param {object} client - connected MongoClient
 */
async function deleteEpisodeExercise(client) {
  const episodes = client.db("databaseWeek3").collection("bob_ross_episodes");

  const { deletedCount } = await episodes.deleteOne({ episode: "S31E14" });

  console.log(
    `Ran a command to delete episode and it deleted ${deletedCount} episodes`
  );
}
AND code = ?", + [Country, name, code], + function (err, result) { + if (err) return cb(err); + if (result.length === 0) return cb(new Error("Not found")); + cb(null, result[0].Population); + } + ); +} diff --git a/Week3/homework/Exercise1-Normalization.md b/Week3/homework/Exercise1-Normalization.md new file mode 100644 index 000000000..d5cbaeb82 --- /dev/null +++ b/Week3/homework/Exercise1-Normalization.md @@ -0,0 +1,50 @@ +# Exercise 1: SQL Normalization + +## 1. What columns violate 1NF? + +- `food_code` and `food_description` violate **1NF** because they contain multiple values in a single cell (comma-separated lists). + +## 2. What entities can be extracted? + +- **Members** (member_id, member_name, member_address) +- **Dinners** (dinner_id, dinner_date, venue_code) +- **Venues** (venue_code, venue_description) +- **Foods** (food_code, food_description) +- **Dinner_Food** (relation between dinner and foods) +- **Dinner_Members** (relation between members and dinners) + +## 3. Tables in 3NF + +### Members + +- member_id (PK) +- member_name +- member_address + +### Venues + +- venue_code (PK) +- venue_description + +### Dinners + +- dinner_id (PK) +- dinner_date +- venue_code (FK → Venues.venue_code) + +### Foods + +- food_code (PK) +- food_description + +### Dinner_Foods + +- dinner_id (FK → Dinners.dinner_id) +- food_code (FK → Foods.food_code) +- **Primary Key: (dinner_id, food_code)** + +### Dinner_Members + +- dinner_id (FK → Dinners.dinner_id) +- member_id (FK → Members.member_id) +- **Primary Key: (dinner_id, member_id)** diff --git a/Week3/homework/Exercize2 transactions-create-tables.js b/Week3/homework/Exercize2 transactions-create-tables.js new file mode 100644 index 000000000..364e79193 --- /dev/null +++ b/Week3/homework/Exercize2 transactions-create-tables.js @@ -0,0 +1,31 @@ +const mysql = require("mysql2/promise"); + +async function createTables() { + const conn = await mysql.createConnection({ + user: "root", + database: "week3", + }); + + await 
const mysql = require("mysql2/promise");

/**
 * Moves `amount` from one account to another as a single transaction:
 * both balance updates and both audit rows in `account_changes` either all
 * commit or all roll back. The defaults reproduce the original behaviour
 * (transfer 1000 from account 101 to account 102).
 *
 * Improvements over the original: the amount and account numbers are bound
 * as prepared-statement parameters instead of being interpolated into the
 * SQL text, and the function is reusable for any transfer.
 *
 * @param {number} fromAccount - account debited (default 101)
 * @param {number} toAccount   - account credited (default 102)
 * @param {number} amount      - positive amount to move (default 1000)
 */
async function transferFunds(fromAccount = 101, toAccount = 102, amount = 1000) {
  const conn = await mysql.createConnection({ user: "root", database: "week3" });

  try {
    await conn.beginTransaction();

    // Deduct from the source account and record the change.
    await conn.execute(
      `UPDATE account SET balance = balance - ? WHERE account_number = ?`,
      [amount, fromAccount]
    );
    await conn.execute(
      `INSERT INTO account_changes (account_number, amount, remark) VALUES (?, ?, ?)`,
      [fromAccount, -amount, `Transfer to ${toAccount}`]
    );

    // Credit the destination account and record the change.
    await conn.execute(
      `UPDATE account SET balance = balance + ? WHERE account_number = ?`,
      [amount, toAccount]
    );
    await conn.execute(
      `INSERT INTO account_changes (account_number, amount, remark) VALUES (?, ?, ?)`,
      [toAccount, amount, `Transfer from ${fromAccount}`]
    );

    await conn.commit();
    console.log("Transaction successful!");
  } catch (err) {
    // Any failure above undoes every statement in the transaction.
    await conn.rollback();
    console.error("Transaction failed:", err);
  } finally {
    await conn.end();
  }
}

transferFunds();
function insertValues() { + const conn = await mysql.createConnection({ user: "root", database: "week3" }); + + await conn.execute(`INSERT INTO account (account_number, balance) VALUES + (101, 5000.00), + (102, 3000.00) + ON DUPLICATE KEY UPDATE balance=VALUES(balance) + `); + + console.log("Sample data inserted!"); + await conn.end(); +} + +insertValues().catch(console.error); diff --git a/Week3/scripts/sql-injection.js b/Week3/scripts/sql-injection.js index 124fb497a..28feb3f58 100644 --- a/Week3/scripts/sql-injection.js +++ b/Week3/scripts/sql-injection.js @@ -1,12 +1,12 @@ -import prompt from 'prompt'; -import { Client } from 'pg'; +import prompt from "prompt"; +import { Client } from "pg"; // Database connection configuration const config = { - host: 'localhost', - user: 'hyfuser', - password: 'hyfpassword', - database: 'company', + host: "localhost", + user: "hyfuser", + password: "hyfpassword", + database: "company", port: 5432, }; @@ -24,8 +24,8 @@ const getInput = (schema) => { async function queryDatabase() { try { await client.connect(); - console.log('Connected to PostgreSQL database!'); - + console.log("Connected to PostgreSQL database!"); + // Ensure employees table exists for demonstration await client.query(` CREATE TABLE IF NOT EXISTS employees ( @@ -35,9 +35,9 @@ async function queryDatabase() { department VARCHAR(50) ) `); - + // Insert some sample data if not exists - const checkData = await client.query('SELECT COUNT(*) FROM employees'); + const checkData = await client.query("SELECT COUNT(*) FROM employees"); if (parseInt(checkData.rows[0].count) === 0) { await client.query(` INSERT INTO employees (employee_id, name, salary, department) VALUES @@ -45,50 +45,53 @@ async function queryDatabase() { (102, 'Jane Smith', 60000, 'Marketing'), (103, 'Bob Johnson', 55000, 'Engineering') `); - console.log('Sample data inserted'); + console.log("Sample data inserted"); } prompt.start(); - - const result = await getInput(['employee_number']); + + const 
result = await getInput(["employee_number"]); const inputNumber = result.employee_number; - console.log('\n=== SQL Injection Demonstration ===\n'); + console.log("\n=== SQL Injection Demonstration ===\n"); // 1. VULNERABLE: Direct string concatenation (DON'T DO THIS!) - console.log('1. VULNERABLE APPROACH (demonstrates the problem):'); + console.log("1. VULNERABLE APPROACH (demonstrates the problem):"); const vulnerableQuery = `SELECT * FROM employees WHERE employee_id = ${inputNumber}`; - console.log('Query:', vulnerableQuery); - console.log('⚠️ This is vulnerable to SQL injection!\n'); - + console.log("Query:", vulnerableQuery); + console.log(" This is vulnerable to SQL injection!\n"); + // Don't actually execute the vulnerable query in production // await client.query(vulnerableQuery); // 2. SAFE: Using parameterized queries (DO THIS!) - console.log('2. SAFE APPROACH (using parameterized queries):'); - const safeQuery = 'SELECT * FROM employees WHERE employee_id = $1'; - console.log('Query template:', safeQuery); - console.log('Parameter:', inputNumber); - + console.log("2. 
SAFE APPROACH (using parameterized queries):"); + const safeQuery = "SELECT * FROM employees WHERE employee_id = $1"; + console.log("Query template:", safeQuery); + console.log("Parameter:", inputNumber); + const queryResult = await client.query(safeQuery, [inputNumber]); - + if (queryResult.rows.length > 0) { - console.log('\nResults:'); - queryResult.rows.forEach(row => { - console.log(`ID: ${row.employee_id}, Name: ${row.name}, Salary: ${row.salary}, Dept: ${row.department}`); + console.log("\nResults:"); + queryResult.rows.forEach((row) => { + console.log( + `ID: ${row.employee_id}, Name: ${row.name}, Salary: ${row.salary}, Dept: ${row.department}` + ); }); } else { - console.log('No employee found with that ID.'); + console.log("No employee found with that ID."); } - console.log('\n=== Key Points ==='); - console.log('✅ Always use parameterized queries ($1, $2, etc.)'); - console.log('✅ Never directly concatenate user input into SQL strings'); - console.log('✅ PostgreSQL automatically escapes parameters'); - console.log('\nTry entering: 101 OR 1=1 -- to see how parameterized queries prevent injection'); - + console.log("\n=== Key Points ==="); + console.log("Always use parameterized queries ($1, $2, etc.)"); + console.log(" Never directly concatenate user input into SQL strings"); + console.log("PostgreSQL automatically escapes parameters"); + console.log( + "\nTry entering: 101 OR 1=1 -- to see how parameterized queries prevent injection" + ); } catch (error) { - console.error('Database error:', error); + console.error("Database error:", error); } finally { await client.end(); } From 52bac2c0e420fb6d4340dacc17c48898bdcb55bc Mon Sep 17 00:00:00 2001 From: 888i88 Date: Sun, 14 Sep 2025 12:00:49 +0200 Subject: [PATCH 10/11] prep exerc week 4 --- Week4/prep exerc NoSQL Conversion.md | 123 +++++++++++++++++++++++++++ 1 file changed, 123 insertions(+) create mode 100644 Week4/prep exerc NoSQL Conversion.md diff --git a/Week4/prep exerc NoSQL Conversion.md 
b/Week4/prep exerc NoSQL Conversion.md new file mode 100644 index 000000000..d72dc2d81 --- /dev/null +++ b/Week4/prep exerc NoSQL Conversion.md @@ -0,0 +1,123 @@ +# Dinner Club Database: NoSQL Conversion Exercise + +This document outlines the conversion of a normalized SQL dinner club database into a document-based MongoDB database. +It includes collections, embedding and normalization decisions, example queries, reasoning, assumptions, and a discussion on database choice. + +--- + +## Collections + +In MongoDB, collections replace multiple normalized SQL tables. +The following table maps SQL tables to MongoDB collections or fields: + +| SQL Table | MongoDB Collection / Field | Notes | +|-----------------|----------------------------------|----------------------------------------------------------------------| +| `members` | `members` (separate collection) | Stores global member info (e.g., name, contact details) for reuse across dinners. | +| `venues` | Embedded in `dinners` as object | Each dinner has one venue; embedding simplifies queries. | +| `dinners` | `dinners` (main collection) | Central collection for dinner events, containing most related data. | +| `foods` | Embedded in `dinners` array | Menu items are specific to each dinner, not reused globally. | +| `dinner_members`| Embedded in `dinners` array | Lists attendees for each dinner, referencing member IDs. | +| `dinner_foods` | Not needed | Handled by embedding foods array in `dinners`. | + +**Main Collection**: `dinners` – contains all dinner event details, with embedded venues, foods, and member references. +**Separate Collection**: `members` – for managing member profiles independently. 
+ +--- + +## Example Dinner Document + +```json +{ + "_id": "D00001003", + "date": "2020-03-20", + "venue": { + "code": "B03", + "description": "Goat Farm", + "address": "123 Farm Lane, Countryside" + }, + "members": [ + { "member_id": 1, "name": "Amit" }, + { "member_id": 4, "name": "Dan" }, + { "member_id": 6, "name": "Hema" } + ], + "foods": [ + { "code": "P1", "description": "Vegetarian Pie", "type": "Main" }, + { "code": "T1", "description": "Herbal Tea", "type": "Beverage" }, + { "code": "M1", "description": "Chocolate Mousse", "type": "Dessert" } + ] +} +``` +--- +## Embedding vs. Normalizing Decisions + +| Data | Approach | Reason | +|----------------|--------------------------|------------------------------------------------------------------------| +| Venue | Embedded object | Each dinner has one venue; embedding avoids joins and simplifies queries. | +| Foods | Embedded array of objects| Menu items are unique to each dinner and always fetched together. | +| Members | Embedded array with IDs | References member IDs to link to `members` collection; balances embedding and normalization. | +| Dinner_Members | Embedded array in dinners| Attendee list is specific to each dinner; embedding simplifies retrieval. | +| Dinner_Foods | Not needed | Food relationships are handled by embedding foods in dinners. | + +--- + +### Embedding Decisions + +- **Venue and Foods**: Embedded because they are tightly coupled to a specific dinner event and are typically accessed together. +- **Members**: Embedded as an array of objects with `member_id` and `name` for quick access within the dinner document, but normalized into a separate `members` collection for profile management. + +--- + +### Normalization Decisions + +- **Members**: Stored in a separate collection to allow independent updates (e.g., address changes) without modifying multiple dinner documents. 
+- **Junction tables** (`dinner_members`, `dinner_foods`) are eliminated, as their relationships are handled by embedding arrays in the `dinners` collection. + +--- + +## Assumptions + +- Dinners are **read-heavy**, with queries fetching entire dinner details (venue, foods, attendees) in one go. +- Venue and food data are unique to each dinner and do not require reuse across multiple dinners. +- Member data (e.g., contact info) may need independent updates, justifying a separate `members` collection. +- The system prioritizes **query simplicity and performance** over strict relational integrity. + +--- + +## PostgreSQL vs. MongoDB + +| Feature | PostgreSQL (SQL) | MongoDB (NoSQL) | +|---------------|---------------------------------|--------------------------------------| +| Schema | Fixed, normalized tables | Flexible, schema-less documents | +| Relationships | Strong with foreign keys/joins | Embedding or manual referencing | +| Scaling | Vertical scaling | Horizontal scaling | +| Best For | Complex relationships, data integrity | Flexible data, quick development | + +--- + +## Choice + +**MongoDB is preferred** for the dinner club database because: + +- Dinner events are naturally document-like, with venues, foods, and attendees tightly coupled. +- Flexible schema accommodates changes (e.g., adding new food types or event details). +- Read-heavy queries benefit from embedding, reducing the need for joins. +- JSON-like structure aligns with modern API designs for easy integration. + +**PostgreSQL would be better if**: + +- Strict data integrity is critical (e.g., preventing duplicate members). +- Complex relational queries are needed (e.g., analyzing attendance patterns). +- Transaction-heavy operations are common (e.g., simultaneous updates across multiple tables). + +--- + +## Discussion + +### Embedding vs. 
Normalization + +- **Embedding**: Venues, foods, and attendee lists are embedded in the `dinners` collection because they are specific to each dinner and typically accessed together. This optimizes read performance and eliminates the need for joins. +- **Normalization**: Member data is normalized into a separate `members` collection to allow independent updates without duplicating info across dinners. + +### Omitted Tables + +- Junction tables like `dinner_members` and `dinner_foods` are unnecessary in MongoDB, as relationships are managed through embedding arrays or referencing member IDs. From 0a0063dcde299233c0826bc6e87d4ae4f7f0baf9 Mon Sep 17 00:00:00 2001 From: 888i88 Date: Wed, 17 Sep 2025 13:38:45 +0200 Subject: [PATCH 11/11] assignment week 4 --- Week4/ex1-aggregation/aggregation.js | 101 +++++++++++ Week4/ex1-aggregation/import.js | 45 +++++ .../population_pyramid_1950-2022.csv | 0 Week4/ex2-transactions/setup.js | 58 ++++++ Week4/ex2-transactions/transfer.js | 93 ++++++++++ Week4/package-lock.json | 170 ++++++++++++++++++ Week4/package.json | 16 ++ 7 files changed, 483 insertions(+) create mode 100644 Week4/ex1-aggregation/aggregation.js create mode 100644 Week4/ex1-aggregation/import.js rename Week4/{homework => }/ex1-aggregation/population_pyramid_1950-2022.csv (100%) create mode 100644 Week4/ex2-transactions/setup.js create mode 100644 Week4/ex2-transactions/transfer.js create mode 100644 Week4/package-lock.json create mode 100644 Week4/package.json diff --git a/Week4/ex1-aggregation/aggregation.js b/Week4/ex1-aggregation/aggregation.js new file mode 100644 index 000000000..755f6d44d --- /dev/null +++ b/Week4/ex1-aggregation/aggregation.js @@ -0,0 +1,101 @@ +const { MongoClient } = require('mongodb'); + +const url = 'mongodb://localhost:27017'; +const dbName = 'databaseWeek4'; +const collectionName = 'population'; + +// Function to get total population (M + F) for a given country per year +async function getTotalPopulationByCountry(country) { + const 
client = new MongoClient(url); + try { + await client.connect(); + const db = client.db(dbName); + const collection = db.collection(collectionName); + + const result = await collection + .aggregate([ + { $match: { Country: country } }, + { + $group: { + _id: '$Year', + countPopulation: { $sum: { $add: ['$M', '$F'] } }, + }, + }, + { $sort: { _id: 1 } }, + ]) + .toArray(); + + return result; + } catch (err) { + console.error('Error executing getTotalPopulationByCountry:', err); + throw err; + } finally { + await client.close(); + } +} + +// Function to get continent data for a given year and age with TotalPopulation +async function getContinentDataByYearAndAge(year, age) { + const client = new MongoClient(url); + try { + await client.connect(); + const db = client.db(dbName); + const collection = db.collection(collectionName); + + const result = await collection + .aggregate([ + { + $match: { + Year: year, + Age: age, + Country: { + $in: [ + 'AFRICA', + 'ASIA', + 'EUROPE', + 'LATIN AMERICA AND THE CARIBBEAN', + 'NORTHERN AMERICA', + 'OCEANIA', + ], + }, + }, + }, + { + $project: { + Country: 1, + Year: 1, + Age: 1, + M: 1, + F: 1, + TotalPopulation: { $add: ['$M', '$F'] }, + }, + }, + ]) + .toArray(); + + return result; + } catch (err) { + console.error('Error executing getContinentDataByYearAndAge:', err); + throw err; + } finally { + await client.close(); + } +} + +async function main() { + try { + // Test total population for Netherlands + const countryResult = await getTotalPopulationByCountry('Netherlands'); + console.log('Total Population for Netherlands:'); + console.log(JSON.stringify(countryResult, null, 2)); + + // Test continent data for Year 2020, Age 100+ + const continentResult = await getContinentDataByYearAndAge(2020, '100+'); + console.log('\nContinent Data for Year 2020, Age 100+:'); + console.log(JSON.stringify(continentResult, null, 2)); + } catch (err) { + console.error('Error in main:', err); + } +} + +main(); \ No newline at end of file diff 
--git a/Week4/ex1-aggregation/import.js b/Week4/ex1-aggregation/import.js new file mode 100644 index 000000000..116fdda1e --- /dev/null +++ b/Week4/ex1-aggregation/import.js @@ -0,0 +1,45 @@ +const { MongoClient } = require("mongodb"); +const fs = require("fs"); +const { parse } = require("csv-parse"); + +const url = "mongodb://localhost:27017"; +const dbName = "databaseWeek4"; +const collectionName = "population"; +const csvFilePath = "./population_pyramid_1950-2022.csv"; + +async function importCSV() { + const client = new MongoClient(url); + try { + await client.connect(); + const db = client.db(dbName); + const collection = db.collection(collectionName); + + await collection.deleteMany({}); + + const records = []; + fs.createReadStream(csvFilePath) + .pipe(parse({ columns: true, trim: true })) + .on("data", (row) => { + records.push({ + Country: row.Country, + Year: parseInt(row.Year), + Age: row.Age, + M: parseInt(row.M), + F: parseInt(row.F), + }); + }) + .on("end", async () => { + await collection.insertMany(records); + console.log(`Imported ${records.length} records successfully`); + await client.close(); + }) + .on("error", (err) => { + console.error("Error reading CSV:", err); + }); + } catch (err) { + console.error("Error connecting to MongoDB:", err); + throw err; + } +} + +importCSV(); diff --git a/Week4/homework/ex1-aggregation/population_pyramid_1950-2022.csv b/Week4/ex1-aggregation/population_pyramid_1950-2022.csv similarity index 100% rename from Week4/homework/ex1-aggregation/population_pyramid_1950-2022.csv rename to Week4/ex1-aggregation/population_pyramid_1950-2022.csv diff --git a/Week4/ex2-transactions/setup.js b/Week4/ex2-transactions/setup.js new file mode 100644 index 000000000..b60bfdc11 --- /dev/null +++ b/Week4/ex2-transactions/setup.js @@ -0,0 +1,58 @@ +const { MongoClient } = require("mongodb"); + +const url = "mongodb://localhost:27017"; +const dbName = "databaseWeek4"; +const collectionName = "accounts"; + +async function 
setupAccounts() { + const client = new MongoClient(url); + try { + await client.connect(); + const db = client.db(dbName); + const collection = db.collection(collectionName); + + // Clear existing data + await collection.deleteMany({}); + + // Insert sample accounts + const sampleAccounts = [ + { + account_number: 101, + balance: 5000, + account_changes: [ + { + change_number: 1, + amount: 5000, + changed_date: new Date(), + remark: "Initial deposit", + }, + ], + }, + { + account_number: 102, + balance: 2000, + account_changes: [ + { + change_number: 1, + amount: 2000, + changed_date: new Date(), + remark: "Initial deposit", + }, + ], + }, + ]; + + await collection.insertMany(sampleAccounts); + console.log("Accounts set up successfully"); + } catch (err) { + console.error("Error setting up accounts:", err); + throw err; + } finally { + await client.close(); + } +} + +module.exports = { setupAccounts }; + +// Run setup +setupAccounts(); diff --git a/Week4/ex2-transactions/transfer.js b/Week4/ex2-transactions/transfer.js new file mode 100644 index 000000000..885f99f00 --- /dev/null +++ b/Week4/ex2-transactions/transfer.js @@ -0,0 +1,93 @@ +const { MongoClient } = require("mongodb"); + +const url = "mongodb://localhost:27017"; +const dbName = "databaseWeek4"; +const collectionName = "accounts"; + +async function transfer(fromAccount, toAccount, amount, remark) { + const client = new MongoClient(url); + const session = client.startSession(); + + try { + await session.withTransaction(async () => { + const db = client.db(dbName); + const collection = db.collection(collectionName); + + // Get the accounts + const fromAcc = await collection.findOne( + { account_number: fromAccount }, + { session } + ); + const toAcc = await collection.findOne( + { account_number: toAccount }, + { session } + ); + + if (!fromAcc || !toAcc) { + throw new Error("One or both accounts not found"); + } + if (fromAcc.balance < amount) { + throw new Error("Insufficient balance"); + } + + // Get 
the latest change_number + const fromChangeNumber = fromAcc.account_changes.length + ? Math.max(...fromAcc.account_changes.map((c) => c.change_number)) + 1 + : 1; + const toChangeNumber = toAcc.account_changes.length + ? Math.max(...toAcc.account_changes.map((c) => c.change_number)) + 1 + : 1; + + // Update from account + await collection.updateOne( + { account_number: fromAccount }, + { + $inc: { balance: -amount }, + $push: { + account_changes: { + change_number: fromChangeNumber, + amount: -amount, + changed_date: new Date(), + remark, + }, + }, + }, + { session } + ); + + // Update to account + await collection.updateOne( + { account_number: toAccount }, + { + $inc: { balance: amount }, + $push: { + account_changes: { + change_number: toChangeNumber, + amount: amount, + changed_date: new Date(), + remark, + }, + }, + }, + { session } + ); + + console.log(`Transferred ${amount} from ${fromAccount} to ${toAccount}`); + }); + } catch (err) { + console.error("Error in transaction:", err); + throw err; + } finally { + await session.endSession(); + await client.close(); + } +} + +module.exports = { transfer }; + +// Test the transfer +async function main() { + await transfer(101, 102, 1000, "Test transfer"); +} + +main(); diff --git a/Week4/package-lock.json b/Week4/package-lock.json new file mode 100644 index 000000000..deafc38cd --- /dev/null +++ b/Week4/package-lock.json @@ -0,0 +1,170 @@ +{ + "name": "week4", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "week4", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "csv-parse": "^6.1.0", + "mongodb": "^6.19.0" + } + }, + "node_modules/@mongodb-js/saslprep": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.3.0.tgz", + "integrity": "sha512-zlayKCsIjYb7/IdfqxorK5+xUMyi4vOKcFy10wKJYc63NSdKI8mNME+uJqfatkPmOSMMUiojrL58IePKBm3gvQ==", + "license": "MIT", + "dependencies": { + "sparse-bitfield": "^3.0.3" + } 
+ }, + "node_modules/@types/webidl-conversions": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.3.tgz", + "integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==", + "license": "MIT" + }, + "node_modules/@types/whatwg-url": { + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-11.0.5.tgz", + "integrity": "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==", + "license": "MIT", + "dependencies": { + "@types/webidl-conversions": "*" + } + }, + "node_modules/bson": { + "version": "6.10.4", + "resolved": "https://registry.npmjs.org/bson/-/bson-6.10.4.tgz", + "integrity": "sha512-WIsKqkSC0ABoBJuT1LEX+2HEvNmNKKgnTAyd0fL8qzK4SH2i9NXg+t08YtdZp/V9IZ33cxe3iV4yM0qg8lMQng==", + "license": "Apache-2.0", + "engines": { + "node": ">=16.20.1" + } + }, + "node_modules/csv-parse": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-6.1.0.tgz", + "integrity": "sha512-CEE+jwpgLn+MmtCpVcPtiCZpVtB6Z2OKPTr34pycYYoL7sxdOkXDdQ4lRiw6ioC0q6BLqhc6cKweCVvral8yhw==", + "license": "MIT" + }, + "node_modules/memory-pager": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", + "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==", + "license": "MIT" + }, + "node_modules/mongodb": { + "version": "6.19.0", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.19.0.tgz", + "integrity": "sha512-H3GtYujOJdeKIMLKBT9PwlDhGrQfplABNF1G904w6r5ZXKWyv77aB0X9B+rhmaAwjtllHzaEkvi9mkGVZxs2Bw==", + "license": "Apache-2.0", + "dependencies": { + "@mongodb-js/saslprep": "^1.1.9", + "bson": "^6.10.4", + "mongodb-connection-string-url": "^3.0.0" + }, + "engines": { + "node": ">=16.20.1" + }, + "peerDependencies": { + "@aws-sdk/credential-providers": 
"^3.188.0", + "@mongodb-js/zstd": "^1.1.0 || ^2.0.0", + "gcp-metadata": "^5.2.0", + "kerberos": "^2.0.1", + "mongodb-client-encryption": ">=6.0.0 <7", + "snappy": "^7.3.2", + "socks": "^2.7.1" + }, + "peerDependenciesMeta": { + "@aws-sdk/credential-providers": { + "optional": true + }, + "@mongodb-js/zstd": { + "optional": true + }, + "gcp-metadata": { + "optional": true + }, + "kerberos": { + "optional": true + }, + "mongodb-client-encryption": { + "optional": true + }, + "snappy": { + "optional": true + }, + "socks": { + "optional": true + } + } + }, + "node_modules/mongodb-connection-string-url": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-3.0.2.tgz", + "integrity": "sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA==", + "license": "Apache-2.0", + "dependencies": { + "@types/whatwg-url": "^11.0.2", + "whatwg-url": "^14.1.0 || ^13.0.0" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/sparse-bitfield": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", + "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", + "license": "MIT", + "dependencies": { + "memory-pager": "^1.0.2" + } + }, + "node_modules/tr46": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz", + "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==", + "license": "MIT", + "dependencies": { + "punycode": "^2.3.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/webidl-conversions": { + 
"version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/whatwg-url": { + "version": "14.2.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz", + "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==", + "license": "MIT", + "dependencies": { + "tr46": "^5.1.0", + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=18" + } + } + } +} diff --git a/Week4/package.json b/Week4/package.json new file mode 100644 index 000000000..2a5ddf504 --- /dev/null +++ b/Week4/package.json @@ -0,0 +1,16 @@ +{ + "name": "week4", + "version": "1.0.0", + "description": "## Agenda", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "csv-parse": "^6.1.0", + "mongodb": "^6.19.0" + } +}