From b93c840ee29b782cfc5d487a854241cee863f7f9 Mon Sep 17 00:00:00 2001 From: Yasindu20 Date: Wed, 21 Jan 2026 10:16:54 +0530 Subject: [PATCH 1/5] Add optional S3-based storage --- src/db/db.ts | 21 ++++++++---- src/pages/download.tsx | 51 ++++++++++-------------------- src/pages/upload.tsx | 25 +++++++++++---- src/storage/LocalStorageAdapter.ts | 28 ++++++++++++++++ src/storage/S3StorageAdapter.ts | 36 +++++++++++++++++++++ src/storage/index.ts | 20 ++++++++++++ 6 files changed, 135 insertions(+), 46 deletions(-) create mode 100644 src/storage/LocalStorageAdapter.ts create mode 100644 src/storage/S3StorageAdapter.ts create mode 100644 src/storage/index.ts diff --git a/src/db/db.ts b/src/db/db.ts index de572685..a8478eea 100644 --- a/src/db/db.ts +++ b/src/db/db.ts @@ -27,14 +27,23 @@ CREATE TABLE IF NOT EXISTS jobs ( num_files INTEGER DEFAULT 0, FOREIGN KEY (user_id) REFERENCES users(id) ); -PRAGMA user_version = 1;`); +CREATE TABLE IF NOT EXISTS storage_metadata ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + user_id INTEGER NOT NULL, + job_id INTEGER NOT NULL, + file_name TEXT NOT NULL, + storage_key TEXT NOT NULL, + FOREIGN KEY (job_id) REFERENCES jobs(id), + FOREIGN KEY (user_id) REFERENCES users(id) +); +PRAGMA user_version = 2;`); } -const dbVersion = (db.query("PRAGMA user_version").get() as { user_version?: number }).user_version; -if (dbVersion === 0) { - db.exec("ALTER TABLE file_names ADD COLUMN status TEXT DEFAULT 'not started';"); - db.exec("PRAGMA user_version = 1;"); - console.log("Updated database to version 1."); +const dbVersion = (db.query("PRAGMA user_version").get() as { user_version?: number }).user_version!; +if (dbVersion < 2) { + db.exec("ALTER TABLE file_names ADD COLUMN storage_key TEXT;"); + db.exec("PRAGMA user_version = 2;"); + console.log("Updated database to version 2."); } // enable WAL mode diff --git a/src/pages/download.tsx b/src/pages/download.tsx index f10fec46..470f5e16 100644 --- a/src/pages/download.tsx +++ 
b/src/pages/download.tsx @@ -1,18 +1,15 @@ -import path from "node:path"; import { Elysia } from "elysia"; import sanitize from "sanitize-filename"; -import * as tar from "tar"; -import { outputDir } from ".."; import db from "../db/db"; import { WEBROOT } from "../helpers/env"; import { userService } from "./user"; +import { getStorage } from "../storage/index"; export const download = new Elysia() .use(userService) .get( "/download/:userId/:jobId/:fileName", async ({ params, redirect, user }) => { - const userId = user.id; const job = await db .query("SELECT * FROM jobs WHERE user_id = ? AND id = ?") .get(user.id, params.jobId); @@ -20,44 +17,30 @@ export const download = new Elysia() if (!job) { return redirect(`${WEBROOT}/results`, 302); } - // parse from URL encoded string - const jobId = decodeURIComponent(params.jobId); + const fileName = sanitize(decodeURIComponent(params.fileName)); - const filePath = `${outputDir}${userId}/${jobId}/${fileName}`; - return Bun.file(filePath); - }, - { - auth: true, - }, - ) - .get( - "/archive/:jobId", - async ({ params, redirect, user }) => { - const userId = user.id; - const job = await db - .query("SELECT * FROM jobs WHERE user_id = ? AND id = ?") - .get(user.id, params.jobId); + const fileRow = db + .query(` + SELECT storage_key FROM file_names + WHERE job_id = ? AND file_name = ? 
+ `, + ) + .get(params.jobId, fileName) as { storage_key: string } | undefined; - if (!job) { + if (!fileRow) { return redirect(`${WEBROOT}/results`, 302); } - const jobId = decodeURIComponent(params.jobId); - const outputPath = `${outputDir}${userId}/${jobId}`; - const outputTar = path.join(outputPath, `converted_files_${jobId}.tar`); + const storage = getStorage(); + const fileBuffer = await storage.get(fileRow.storage_key); - await tar.create( - { - file: outputTar, - cwd: outputPath, - filter: (path) => { - return !path.match(".*\\.tar"); - }, + return new Response(fileBuffer, { + headers: { + "Content-Type": "application/octet-stream", + "Content-Disposition": `attachment; filename="${fileName}"`, }, - ["."], - ); - return Bun.file(outputTar); + }); }, { auth: true, diff --git a/src/pages/upload.tsx b/src/pages/upload.tsx index 0c000091..bb488f1b 100644 --- a/src/pages/upload.tsx +++ b/src/pages/upload.tsx @@ -1,9 +1,10 @@ import { Elysia, t } from "elysia"; import db from "../db/db"; import { WEBROOT } from "../helpers/env"; -import { uploadsDir } from "../index"; import { userService } from "./user"; import sanitize from "sanitize-filename"; +import { getStorage } from "../storage"; +import crypto from "node:crypto"; export const upload = new Elysia().use(userService).post( "/upload", @@ -12,6 +13,8 @@ export const upload = new Elysia().use(userService).post( return redirect(`${WEBROOT}/`, 302); } + const jobIdValue = jobId.value; + const existingJob = await db .query("SELECT * FROM jobs WHERE id = ? 
AND user_id = ?") .get(jobId.value, user.id); @@ -20,17 +23,27 @@ export const upload = new Elysia().use(userService).post( return redirect(`${WEBROOT}/`, 302); } - const userUploadsDir = `${uploadsDir}${user.id}/${jobId.value}/`; + const storage = getStorage(); + + const saveFile = async (file: File) => { + const sanitizedFileName = sanitize(file.name); + const storageKey = `${user.id}/${jobId.value}/${crypto.randomUUID()}`; + const buffer = Buffer.from(await file.arrayBuffer()); + await storage.save(storageKey, buffer); + + db.query(` + INSERT INTO file_names (job_id, file_name, storage_key) + VALUES (?, ?, ?) + `).run(jobIdValue, sanitizedFileName, storageKey); + }; if (body?.file) { if (Array.isArray(body.file)) { for (const file of body.file) { - const santizedFileName = sanitize(file.name); - await Bun.write(`${userUploadsDir}${santizedFileName}`, file); + await saveFile(file); } } else { - const santizedFileName = sanitize(body.file["name"]); - await Bun.write(`${userUploadsDir}${santizedFileName}`, body.file); + await saveFile(body.file);; } } diff --git a/src/storage/LocalStorageAdapter.ts b/src/storage/LocalStorageAdapter.ts new file mode 100644 index 00000000..a640e6a1 --- /dev/null +++ b/src/storage/LocalStorageAdapter.ts @@ -0,0 +1,28 @@ +import { IStorageAdapter } from "./index"; +import { promises as fs } from "fs"; +import path from "path"; + +export class LocalStorageAdapter implements IStorageAdapter { + baseDir: string; + + constructor(baseDir: string) { + this.baseDir = baseDir; + } + + async save(key: string, data: Buffer): Promise { + const fullPath = path.join(this.baseDir, key); + await fs.mkdir(path.dirname(fullPath), { recursive: true }); + await fs.writeFile(fullPath, data); + return key; + } + + async get(key: string): Promise { + const fullPath = path.join(this.baseDir, key); + return fs.readFile(fullPath); + } + + async delete(key: string): Promise { + const fullPath = path.join(this.baseDir, key); + await fs.unlink(fullPath); + } +} \ 
No newline at end of file diff --git a/src/storage/S3StorageAdapter.ts b/src/storage/S3StorageAdapter.ts new file mode 100644 index 00000000..5e66b97d --- /dev/null +++ b/src/storage/S3StorageAdapter.ts @@ -0,0 +1,36 @@ +import { s3, S3File } from "bun"; +import { IStorageAdapter } from "./index"; + +export class S3StorageAdapter implements IStorageAdapter { + private bucket: string; + + constructor(bucket: string) { + this.bucket = bucket; + } + + async save(key: string, data: Buffer): Promise { + const file: S3File = s3.file(key, { + bucket: this.bucket, + acl: "private", + }); + + await file.write(data); + return key; + } + + async get(key: string): Promise { + const file: S3File = s3.file(key, { + bucket: this.bucket, + }); + + return Buffer.from(await file.bytes()); + } + + async delete(key: string): Promise { + const file: S3File = s3.file(key, { + bucket: this.bucket, + }) + + await file.delete(); + } +} \ No newline at end of file diff --git a/src/storage/index.ts b/src/storage/index.ts new file mode 100644 index 00000000..e8221841 --- /dev/null +++ b/src/storage/index.ts @@ -0,0 +1,20 @@ +import { LocalStorageAdapter } from "./LocalStorageAdapter"; +import { S3StorageAdapter } from "./S3StorageAdapter"; + +export interface IStorageAdapter { + save(key: string, data: Buffer): Promise; + get(key: string): Promise; + delete(key: string): Promise; +} + +export function getStorage(): IStorageAdapter { + if (process.env.STORAGE_BACKEND === "s3") { + if (!process.env.S3_BUCKET) { + throw new Error("S3_BUCKET must be set when STORAGE_BACKEND=s3"); + } + + return new S3StorageAdapter(process.env.S3_BUCKET); + } + + return new LocalStorageAdapter("./data"); +} \ No newline at end of file From 631314dea8e472c50fc02ac2a42599cb4b5bfe88 Mon Sep 17 00:00:00 2001 From: Yasindu20 Date: Thu, 22 Jan 2026 12:25:21 +0530 Subject: [PATCH 2/5] change the buffer to stream --- src/db/db.ts | 51 +++++++++++++++++----- src/pages/download.tsx | 68 +++++++++++++++++++++++++++--- 
src/storage/LocalStorageAdapter.ts | 12 +++++- src/storage/S3StorageAdapter.ts | 10 ++++- src/storage/index.ts | 17 +++++--- 5 files changed, 134 insertions(+), 24 deletions(-) diff --git a/src/db/db.ts b/src/db/db.ts index a8478eea..bc70be33 100644 --- a/src/db/db.ts +++ b/src/db/db.ts @@ -4,8 +4,21 @@ import { Database } from "bun:sqlite"; mkdirSync("./data", { recursive: true }); const db = new Database("./data/mydb.sqlite", { create: true }); -if (!db.query("SELECT * FROM sqlite_master WHERE type='table'").get()) { - db.exec(` +function getTableInfo(tableName: string) { + try { + return db.query(`PRAGMA table_info('${tableName}')`).all() as Array<{ name: string}>; + } catch (error) { + console.error(`Error getting table info for ${tableName}:`, error) + return []; + } +} + +function hasColumn(tableName: string, columnName: string) { + const info = getTableInfo(tableName); + return info.some((c) => c.name === columnName); +} + +db.exec(` CREATE TABLE IF NOT EXISTS users ( id INTEGER PRIMARY KEY AUTOINCREMENT, email TEXT NOT NULL, @@ -17,6 +30,7 @@ CREATE TABLE IF NOT EXISTS file_names ( file_name TEXT NOT NULL, output_file_name TEXT NOT NULL, status TEXT DEFAULT 'not started', + storage_key TEXT, -- v2 column FOREIGN KEY (job_id) REFERENCES jobs(id) ); CREATE TABLE IF NOT EXISTS jobs ( @@ -27,6 +41,7 @@ CREATE TABLE IF NOT EXISTS jobs ( num_files INTEGER DEFAULT 0, FOREIGN KEY (user_id) REFERENCES users(id) ); +-- Ensure storage_metadata exists for legacy DBs CREATE TABLE IF NOT EXISTS storage_metadata ( id INTEGER PRIMARY KEY AUTOINCREMENT, user_id INTEGER NOT NULL, @@ -36,17 +51,33 @@ CREATE TABLE IF NOT EXISTS storage_metadata ( FOREIGN KEY (job_id) REFERENCES jobs(id), FOREIGN KEY (user_id) REFERENCES users(id) ); -PRAGMA user_version = 2;`); -} +`); + +try { + if (!hasColumn("file_names", "status")) { + db.exec("ALTER TABLE file_names ADD COLUMN status TEXT DEFAULT 'not started';"); + console.log("Added column file_names.status"); + } -const dbVersion = 
(db.query("PRAGMA user_version").get() as { user_version?: number }).user_version!; -if (dbVersion < 2) { - db.exec("ALTER TABLE file_names ADD COLUMN storage_key TEXT;"); - db.exec("PRAGMA user_version = 2;"); - console.log("Updated database to version 2."); + if (!hasColumn("file_names", "storage_key")) { + db.exec("ALTER TABLE file_names ADD COLUMN storage_key TEXT;"); + console.log("Added column file_names.storage_key"); + } + + const currentVersion = (db.query("PRAGMA user_version").get() as { user_version?: number}).user_version ?? 0; + if (currentVersion < 2) { + db.exec("PRAGMA user_version = 2;"); + console.log(`Updated database to version 2 (was ${currentVersion}).`); + } +} catch (error) { + console.error("Error running migrations: ", error); } // enable WAL mode -db.exec("PRAGMA journal_mode = WAL;"); +try { + db.exec("PRAGMA journal_mode = WAL;"); +} catch (error) { + console.warn("Could not enable WAL mode: ", error); +} export default db; diff --git a/src/pages/download.tsx b/src/pages/download.tsx index 470f5e16..2d513a80 100644 --- a/src/pages/download.tsx +++ b/src/pages/download.tsx @@ -3,7 +3,10 @@ import sanitize from "sanitize-filename"; import db from "../db/db"; import { WEBROOT } from "../helpers/env"; import { userService } from "./user"; -import { getStorage } from "../storage/index"; +import { getStorage, getStorageType } from "../storage/index"; +import { outputDir } from ".."; +import path from "path"; +import * as tar from "tar"; export const download = new Elysia() .use(userService) @@ -22,27 +25,78 @@ export const download = new Elysia() const fileRow = db .query(` - SELECT storage_key FROM file_names + SELECT storage_key, file_name + FROM file_names WHERE job_id = ? AND file_name = ? 
`, ) - .get(params.jobId, fileName) as { storage_key: string } | undefined; - + .get(params.jobId, fileName) as { storage_key?: string; file_name?: string } | undefined; if (!fileRow) { return redirect(`${WEBROOT}/results`, 302); } const storage = getStorage(); - const fileBuffer = await storage.get(fileRow.storage_key); + const stream = await storage.getStream(fileRow.storage_key!); - return new Response(fileBuffer, { + return new Response(stream, { headers: { "Content-Type": "application/octet-stream", - "Content-Disposition": `attachment; filename="${fileName}"`, + "Content-Disposition": `attachment; filename="${fileRow.file_name ?? fileName}"`, }, }); }, { auth: true, }, + ) + + .get( + "/archive/:jobId", + async ({ params, redirect, user }) => { + const job = await db + .query("SELECT * FROM jobs WHERE user_id = ? AND id = ?") + .get(user.id, params.jobId); + + if(!job) { + return redirect(`${WEBROOT}/results`, 302); + } + + const storageType = getStorageType(); + if (storageType === "local") { + const userId = user.id; + const jobId = decodeURIComponent(params.jobId); + const outputPath = `${outputDir}${userId}/${jobId}`; + const outputTar = path.join(outputPath, `converted_files_${jobId}.tar`); + + await tar.create( + { + file: outputTar, + cwd: outputPath, + filter: (path) => { + return !path.match(".*\\.tar"); + }, + }, + ["."], + ); + + return Bun.file(outputTar); + } + + return new Response( + JSON.stringify({ + ok: false, + message: + "Archive download is not supported when object storage is enabled. 
This is intentional to avoid 404s - please use per-file downloads or request a follow-up for server-side archiving", + }), + { + status: 501, + headers: { + "Content-Type": "application/json", + }, + }, + ); + }, + { + auth: true, + }, ); diff --git a/src/storage/LocalStorageAdapter.ts b/src/storage/LocalStorageAdapter.ts index a640e6a1..f849fa14 100644 --- a/src/storage/LocalStorageAdapter.ts +++ b/src/storage/LocalStorageAdapter.ts @@ -23,6 +23,16 @@ export class LocalStorageAdapter implements IStorageAdapter { async delete(key: string): Promise { const fullPath = path.join(this.baseDir, key); - await fs.unlink(fullPath); + try { + await fs.unlink(fullPath); + } catch { + //ignore error if file does not exist + } + } + + getStream(key: string): ReadableStream { + const fullPath = path.join(this.baseDir, key); + const file = Bun.file(fullPath); + return file.stream(); + } } \ No newline at end of file diff --git a/src/storage/S3StorageAdapter.ts b/src/storage/S3StorageAdapter.ts index 5e66b97d..a3fd916b 100644 --- a/src/storage/S3StorageAdapter.ts +++ b/src/storage/S3StorageAdapter.ts @@ -23,7 +23,8 @@ export class S3StorageAdapter implements IStorageAdapter { bucket: this.bucket, }); - return Buffer.from(await file.bytes()); + const buf = await file.bytes(); + return Buffer.from(buf); } async delete(key: string): Promise { @@ -33,4 +34,11 @@ await file.delete(); } + + getStream(key: string): ReadableStream { + const file: S3File = s3.file(key, { + bucket: this.bucket + }); + return file.stream(); + } } \ No newline at end of file diff --git a/src/storage/index.ts b/src/storage/index.ts index e8221841..98037682 100644 --- a/src/storage/index.ts +++ b/src/storage/index.ts @@ -5,16 +5,23 @@ export interface IStorageAdapter { save(key: string, data: Buffer): Promise; get(key: string): Promise; delete(key: string): Promise; + getStream(key: string): ReadableStream; +} + +export function getStorageType(): "local" 
| "s3" { + return process.env.STORAGE_TYPE === "s3" ? "s3" : "local"; } export function getStorage(): IStorageAdapter { - if (process.env.STORAGE_BACKEND === "s3") { - if (!process.env.S3_BUCKET) { - throw new Error("S3_BUCKET must be set when STORAGE_BACKEND=s3"); + if (getStorageType() === "s3") { + const bucket = process.env.S3_BUCKET_NAME; + if (!bucket) { + throw new Error("S3_BUCKET_NAME must be set when STORAGE_TYPE=s3"); } - return new S3StorageAdapter(process.env.S3_BUCKET); + return new S3StorageAdapter(bucket); } - return new LocalStorageAdapter("./data"); + const baseDir = process.env.LOCAL_STORAGE_PATH || "./data/storage"; + return new LocalStorageAdapter(baseDir); } \ No newline at end of file From 0c72d5a37c11a60d11e23fa3c9367470e31c1855 Mon Sep 17 00:00:00 2001 From: Yasindu20 Date: Thu, 22 Jan 2026 16:08:45 +0530 Subject: [PATCH 3/5] LocalStorageAdapter delete catch block solved --- src/storage/LocalStorageAdapter.ts | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/storage/LocalStorageAdapter.ts b/src/storage/LocalStorageAdapter.ts index f849fa14..86fc733e 100644 --- a/src/storage/LocalStorageAdapter.ts +++ b/src/storage/LocalStorageAdapter.ts @@ -3,7 +3,7 @@ import { promises as fs } from "fs"; import path from "path"; export class LocalStorageAdapter implements IStorageAdapter { - baseDir: string; + private baseDir: string; constructor(baseDir: string) { this.baseDir = baseDir; @@ -25,8 +25,14 @@ export class LocalStorageAdapter implements IStorageAdapter { const fullPath = path.join(this.baseDir, key); try { await fs.unlink(fullPath); - } catch { - //ignore error if file does not exist + } catch (error) { + const err = error as NodeJS.ErrnoException; + if (err?.code === "ENOENT" || err?.code === "ENOTDIR") { + return; + } + + console.error(`Failed to delete file at ${fullPath}: `, error); + throw error } } From 1d82c4ffb945def7d82ec69ab03cf2eafd5d78cc Mon Sep 17 00:00:00 2001 From: Yasindu20 Date: Tue, 27 Jan 
2026 12:54:24 +0530 Subject: [PATCH 4/5] Lint errors --- src/db/db.ts | 8 +-- src/pages/download.tsx | 13 ++--- src/pages/upload.tsx | 8 +-- src/storage/LocalStorageAdapter.ts | 64 ++++++++++++------------ src/storage/S3StorageAdapter.ts | 80 +++++++++++++++--------------- src/storage/index.ts | 32 ++++++------ 6 files changed, 105 insertions(+), 100 deletions(-) diff --git a/src/db/db.ts b/src/db/db.ts index bc70be33..5eda969f 100644 --- a/src/db/db.ts +++ b/src/db/db.ts @@ -6,9 +6,9 @@ const db = new Database("./data/mydb.sqlite", { create: true }); function getTableInfo(tableName: string) { try { - return db.query(`PRAGMA table_info('${tableName}')`).all() as Array<{ name: string}>; + return db.query(`PRAGMA table_info('${tableName}')`).all() as Array<{ name: string }>; } catch (error) { - console.error(`Error getting table info for ${tableName}:`, error) + console.error(`Error getting table info for ${tableName}:`, error); return []; } } @@ -64,7 +64,9 @@ try { console.log("Added column file_names.storage_key"); } - const currentVersion = (db.query("PRAGMA user_version").get() as { user_version?: number}).user_version ?? 0; + const currentVersion = + (db.query("PRAGMA user_version").get() as { user_version?: number }).user_version ?? 0; + if (currentVersion < 2) { db.exec("PRAGMA user_version = 2;"); console.log(`Updated database to version 2 (was ${currentVersion}).`); diff --git a/src/pages/download.tsx b/src/pages/download.tsx index 2d513a80..f3289e28 100644 --- a/src/pages/download.tsx +++ b/src/pages/download.tsx @@ -20,23 +20,24 @@ export const download = new Elysia() if (!job) { return redirect(`${WEBROOT}/results`, 302); } - + const fileName = sanitize(decodeURIComponent(params.fileName)); const fileRow = db - .query(` + .query( + ` SELECT storage_key, file_name FROM file_names WHERE job_id = ? AND file_name = ? 
`, - ) - .get(params.jobId, fileName) as { storage_key?: string; file_name?: string } | undefined; + ) + .get(params.jobId, fileName) as { storage_key?: string; file_name?: string } | undefined; if (!fileRow) { return redirect(`${WEBROOT}/results`, 302); } const storage = getStorage(); - const stream = await storage.getStream(fileRow.storage_key!); + const stream = storage.getStream(fileRow.storage_key!); return new Response(stream, { headers: { @@ -57,7 +58,7 @@ export const download = new Elysia() .query("SELECT * FROM jobs WHERE user_id = ? AND id = ?") .get(user.id, params.jobId); - if(!job) { + if (!job) { return redirect(`${WEBROOT}/results`, 302); } diff --git a/src/pages/upload.tsx b/src/pages/upload.tsx index bb488f1b..73745253 100644 --- a/src/pages/upload.tsx +++ b/src/pages/upload.tsx @@ -31,10 +31,12 @@ export const upload = new Elysia().use(userService).post( const buffer = Buffer.from(await file.arrayBuffer()); await storage.save(storageKey, buffer); - db.query(` + db.query( + ` INSERT INTO file_names (job_id, file_name, storage_key) VALUES (?, ?, ?) 
- `).run(jobIdValue, sanitizedFileName, storageKey); + `, + ).run(jobIdValue, sanitizedFileName, storageKey); }; if (body?.file) { @@ -43,7 +45,7 @@ export const upload = new Elysia().use(userService).post( await saveFile(file); } } else { - await saveFile(body.file);; + await saveFile(body.file); } } diff --git a/src/storage/LocalStorageAdapter.ts b/src/storage/LocalStorageAdapter.ts index 86fc733e..687ebd90 100644 --- a/src/storage/LocalStorageAdapter.ts +++ b/src/storage/LocalStorageAdapter.ts @@ -3,42 +3,42 @@ import { promises as fs } from "fs"; import path from "path"; export class LocalStorageAdapter implements IStorageAdapter { - private baseDir: string; + private baseDir: string; - constructor(baseDir: string) { - this.baseDir = baseDir; - } + constructor(baseDir: string) { + this.baseDir = baseDir; + } - async save(key: string, data: Buffer): Promise { - const fullPath = path.join(this.baseDir, key); - await fs.mkdir(path.dirname(fullPath), { recursive: true }); - await fs.writeFile(fullPath, data); - return key; - } + async save(key: string, data: Buffer): Promise { + const fullPath = path.join(this.baseDir, key); + await fs.mkdir(path.dirname(fullPath), { recursive: true }); + await fs.writeFile(fullPath, data); + return key; + } - async get(key: string): Promise { - const fullPath = path.join(this.baseDir, key); - return fs.readFile(fullPath); - } + async get(key: string): Promise { + const fullPath = path.join(this.baseDir, key); + return fs.readFile(fullPath); + } - async delete(key: string): Promise { - const fullPath = path.join(this.baseDir, key); - try { - await fs.unlink(fullPath); - } catch (error) { - const err = error as NodeJS.ErrnoException; - if (err?.code === "ENOENT" || err?.code === "ENOTDIR") { - return; - } + async delete(key: string): Promise { + const fullPath = path.join(this.baseDir, key); + try { + await fs.unlink(fullPath); + } catch (error) { + const err = error as NodeJS.ErrnoException; + if (err?.code === "ENOENT" || 
err?.code === "ENOTDIR") { + return; + } - console.error(`Failed to delete file at ${fullPath}: `, error); - throw error - } + console.error(`Failed to delete file at ${fullPath}: `, error); + throw error; } + } - getStream(key: string): ReadableStream { - const fullPath = path.join(this.baseDir, key); - const file = Bun.file(fullPath); - return file.stream(); - } -} \ No newline at end of file + getStream(key: string): ReadableStream { + const fullPath = path.join(this.baseDir, key); + const file = Bun.file(fullPath); + return file.stream(); + } +} diff --git a/src/storage/S3StorageAdapter.ts b/src/storage/S3StorageAdapter.ts index a3fd916b..8caaff80 100644 --- a/src/storage/S3StorageAdapter.ts +++ b/src/storage/S3StorageAdapter.ts @@ -2,43 +2,43 @@ import { s3, S3File } from "bun"; import { IStorageAdapter } from "./index"; export class S3StorageAdapter implements IStorageAdapter { - private bucket: string; - - constructor(bucket: string) { - this.bucket = bucket; - } - - async save(key: string, data: Buffer): Promise { - const file: S3File = s3.file(key, { - bucket: this.bucket, - acl: "private", - }); - - await file.write(data); - return key; - } - - async get(key: string): Promise { - const file: S3File = s3.file(key, { - bucket: this.bucket, - }); - - const buf = await file.bytes(); - return Buffer.from(buf); - } - - async delete(key: string): Promise { - const file: S3File = s3.file(key, { - bucket: this.bucket, - }) - - await file.delete(); - } - - getStream(key: string): ReadableStream { - const file: S3File = s3.file(key, { - bucket: this.bucket - }); - return file.stream(); - } -} \ No newline at end of file + private bucket: string; + + constructor(bucket: string) { + this.bucket = bucket; + } + + async save(key: string, data: Buffer): Promise { + const file: S3File = s3.file(key, { + bucket: this.bucket, + acl: "private", + }); + + await file.write(data); + return key; + } + + async get(key: string): Promise { + const file: S3File = s3.file(key, { + 
bucket: this.bucket, + }); + + const buf = await file.bytes(); + return Buffer.from(buf); + } + + async delete(key: string): Promise { + const file: S3File = s3.file(key, { + bucket: this.bucket, + }); + + await file.delete(); + } + + getStream(key: string): ReadableStream { + const file: S3File = s3.file(key, { + bucket: this.bucket, + }); + return file.stream(); + } +} diff --git a/src/storage/index.ts b/src/storage/index.ts index 98037682..7c796122 100644 --- a/src/storage/index.ts +++ b/src/storage/index.ts @@ -2,26 +2,26 @@ import { LocalStorageAdapter } from "./LocalStorageAdapter"; import { S3StorageAdapter } from "./S3StorageAdapter"; export interface IStorageAdapter { - save(key: string, data: Buffer): Promise; - get(key: string): Promise; - delete(key: string): Promise; - getStream(key: string): ReadableStream; + save(key: string, data: Buffer): Promise; + get(key: string): Promise; + delete(key: string): Promise; + getStream(key: string): ReadableStream; } export function getStorageType(): "local" | "s3" { - return process.env.STORAGE_TYPE === "s3" ? "s3" : "local"; + return process.env.STORAGE_TYPE === "s3" ? 
"s3" : "local"; } export function getStorage(): IStorageAdapter { - if (getStorageType() === "s3") { - const bucket = process.env.S3_BUCKET_NAME; - if (!bucket) { - throw new Error("S3_BUCKET_NAME must be set when STORAGE_TYPE=s3"); - } - - return new S3StorageAdapter(bucket); + if (getStorageType() === "s3") { + const bucket = process.env.S3_BUCKET_NAME; + if (!bucket) { + throw new Error("S3_BUCKET_NAME must be set when STORAGE_TYPE=s3"); } - - const baseDir = process.env.LOCAL_STORAGE_PATH || "./data/storage"; - return new LocalStorageAdapter(baseDir); -} \ No newline at end of file + + return new S3StorageAdapter(bucket); + } + + const baseDir = process.env.LOCAL_STORAGE_PATH || "./data/storage"; + return new LocalStorageAdapter(baseDir); +} From a814aac0693f6df32190ac0868c10dbf98f89620 Mon Sep 17 00:00:00 2001 From: Yasindu20 Date: Tue, 27 Jan 2026 13:18:09 +0530 Subject: [PATCH 5/5] AWS acl error fixed --- src/storage/S3StorageAdapter.ts | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/storage/S3StorageAdapter.ts b/src/storage/S3StorageAdapter.ts index 8caaff80..20dcea6a 100644 --- a/src/storage/S3StorageAdapter.ts +++ b/src/storage/S3StorageAdapter.ts @@ -9,10 +9,16 @@ export class S3StorageAdapter implements IStorageAdapter { } async save(key: string, data: Buffer): Promise { - const file: S3File = s3.file(key, { + const opts: Record = { bucket: this.bucket, - acl: "private", - }); + }; + + if (process.env.S3_USE_ACL === "true") { + const aclValue = process.env.S3_ACL_VALUE || "private"; + opts.acl = aclValue; + } + + const file: S3File = s3.file(key, opts); await file.write(data); return key;