From 5d39b580474f175e450b862063ae00b5ac9f75e5 Mon Sep 17 00:00:00 2001 From: jessevz Date: Tue, 11 Feb 2025 07:11:54 +0100 Subject: [PATCH 01/16] Added more input validation in POST to the importfile helper --- src/inc/apiv2/helper/importFile.routes.php | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/inc/apiv2/helper/importFile.routes.php b/src/inc/apiv2/helper/importFile.routes.php index ce6eb4b83..0331f7bdf 100644 --- a/src/inc/apiv2/helper/importFile.routes.php +++ b/src/inc/apiv2/helper/importFile.routes.php @@ -89,6 +89,10 @@ function updateStorage(string $id, array $update): void { $list = explode(",", $update["upload_metadata_raw"]); foreach ($list as $item) { list($key, $b64val) = explode(" ", $item); + if (!isset($b64val)) { + $response->getBody()->write("Error Upload-Metadata, should be a key value pair that is seperated by a space, no value has been provided"); + return $response->withStatus(400); + } if (($val = base64_decode($b64val, true)) === false) { $response->getBody()->write("Error Upload-Metadata '$key' invalid base64 encoding"); return $response->withStatus(400); @@ -97,7 +101,7 @@ function updateStorage(string $id, array $update): void { } } // TODO: Should filename be mandatory? - if (array_key_exists('filename', $update_metadata)) { + if (isset($update_metadata) && array_key_exists('filename', $update_metadata)) { $filename = $update_metadata['filename']; /* Generate unique upload identifier */ $id = date("YmdHis") . "-" . md5($filename); @@ -111,6 +115,10 @@ function updateStorage(string $id, array $update): void { } $update["upload_metadata"] = $update_metadata; + if ($request->hasHeader('Upload-Defer-Length') && $request->hasHeader('Upload-Length')) { + $response->getBody()->write('Error: Cannot provide both Upload-Length and Upload-Defer-Length'); + return $response->withStatus(400); + } if ($request->hasHeader('Upload-Defer-Length')) { if ($request->getHeader('Upload-Defer-Length')[0] == "1") { $update["upload_defer_length"] = true; From 576e316853001474ca8c182ab5dedae2dfadee56 Mon Sep 17 00:00:00 2001 From: jessevz Date: Wed, 12 Feb 2025 16:14:50 +0100 Subject: [PATCH 02/16] Added automatic removing of outdated import files --- Dockerfile | 62 ++++++++++++++-------- cleanup_cron_script.sh | 44 +++++++++++++++ docker-entrypoint.sh | 30 +++++------ src/inc/apiv2/helper/importFile.routes.php | 41 +++++++------- src/inc/confv2.php | 5 +- 5 files changed, 124 insertions(+), 58 deletions(-) create mode 100644 cleanup_cron_script.sh diff --git a/Dockerfile b/Dockerfile index bcea9c17c..9e22803b8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -24,6 +24,9 @@ ENV HASHTOPOLIS_IMPORT_PATH=${HASHTOPOLIS_PATH}/import ENV HASHTOPOLIS_LOG_PATH=${HASHTOPOLIS_PATH}/log ENV HASHTOPOLIS_CONFIG_PATH=${HASHTOPOLIS_PATH}/config ENV HASHTOPOLIS_BINARIES_PATH=${HASHTOPOLIS_PATH}/binaries +ENV HASHTOPOLIS_TUS_PATH=/var/tmp/tus/ +ENV HASHTOPOLIS_TEMP_UPLOADS_PATH=${HASHTOPOLIS_TUS_PATH}/uploads +ENV HASHTOPOLIS_TEMP_META_PATH=${HASHTOPOLIS_TUS_PATH}/meta # Add support for TLS inspection corporate setups, see .env.sample for details ENV NODE_EXTRA_CA_CERTS=/etc/ssl/certs/ca-certificates.crt @@ -33,7 +36,7 @@ RUN if [ -n "${CONTAINER_USER_CMD_PRE}" ]; then echo "${CONTAINER_USER_CMD_PRE}" # Configure apt and install packages RUN apt-get update \ - && apt-get -y install --no-install-recommends apt-utils zip unzip nano ncdu gettext-base 2>&1 \ + && apt-get -y install --no-install-recommends apt-utils cron zip unzip nano ncdu gettext-base 2>&1 \ # # Install git, 
procps, lsb-release (useful for CLI installs) && apt-get -y install git iproute2 procps lsb-release \ @@ -51,29 +54,46 @@ RUN apt-get update \ RUN sed -i 's/KeepAliveTimeout 5/KeepAliveTimeout 10/' /etc/apache2/apache2.conf -RUN mkdir -p ${HASHTOPOLIS_DOCUMENT_ROOT} \ - && mkdir ${HASHTOPOLIS_DOCUMENT_ROOT}/../../.git/ \ - && mkdir -p ${HASHTOPOLIS_PATH} \ - && chown www-data:www-data ${HASHTOPOLIS_PATH} \ - && chmod g+w ${HASHTOPOLIS_PATH} \ - && mkdir -p ${HASHTOPOLIS_FILES_PATH} \ - && chown www-data:www-data ${HASHTOPOLIS_FILES_PATH} \ - && chmod g+w ${HASHTOPOLIS_FILES_PATH} \ - && mkdir -p ${HASHTOPOLIS_IMPORT_PATH} \ - && chown www-data:www-data ${HASHTOPOLIS_IMPORT_PATH} \ - && chmod g+w ${HASHTOPOLIS_IMPORT_PATH} \ - && mkdir -p ${HASHTOPOLIS_LOG_PATH} \ - && chown www-data:www-data ${HASHTOPOLIS_LOG_PATH} \ - && chmod g+w ${HASHTOPOLIS_LOG_PATH} \ - && mkdir -p ${HASHTOPOLIS_CONFIG_PATH} \ - && chown www-data:www-data ${HASHTOPOLIS_CONFIG_PATH} \ - && chmod g+w ${HASHTOPOLIS_CONFIG_PATH} \ - && mkdir -p ${HASHTOPOLIS_BINARIES_PATH} \ - && chown www-data:www-data ${HASHTOPOLIS_BINARIES_PATH} \ - && chmod g+w ${HASHTOPOLIS_BINARIES_PATH} +RUN mkdir -p \ + ${HASHTOPOLIS_DOCUMENT_ROOT} \ + ${HASHTOPOLIS_DOCUMENT_ROOT}/../../.git/ \ + ${HASHTOPOLIS_PATH} \ + ${HASHTOPOLIS_FILES_PATH} \ + ${HASHTOPOLIS_IMPORT_PATH} \ + ${HASHTOPOLIS_LOG_PATH} \ + ${HASHTOPOLIS_CONFIG_PATH} \ + ${HASHTOPOLIS_BINARIES_PATH} \ + ${HASHTOPOLIS_TUS_PATH} \ + ${HASHTOPOLIS_TEMP_UPLOADS_PATH} \ + ${HASHTOPOLIS_TEMP_META_PATH} \ + && chown -R www-data:www-data \ + ${HASHTOPOLIS_PATH} \ + ${HASHTOPOLIS_FILES_PATH} \ + ${HASHTOPOLIS_IMPORT_PATH} \ + ${HASHTOPOLIS_LOG_PATH} \ + ${HASHTOPOLIS_CONFIG_PATH} \ + ${HASHTOPOLIS_BINARIES_PATH} \ + ${HASHTOPOLIS_TUS_PATH} \ + ${HASHTOPOLIS_TEMP_UPLOADS_PATH} \ + ${HASHTOPOLIS_TEMP_META_PATH} \ + && chmod -R g+w \ + ${HASHTOPOLIS_PATH} \ + ${HASHTOPOLIS_FILES_PATH} \ + ${HASHTOPOLIS_IMPORT_PATH} \ + ${HASHTOPOLIS_LOG_PATH} \ + ${HASHTOPOLIS_CONFIG_PATH} \ + ${HASHTOPOLIS_BINARIES_PATH} \ + ${HASHTOPOLIS_TUS_PATH} \ + ${HASHTOPOLIS_TEMP_UPLOADS_PATH} \ + ${HASHTOPOLIS_TEMP_META_PATH} COPY --from=preprocess /HEA[D] ${HASHTOPOLIS_DOCUMENT_ROOT}/../.git/ +COPY cleanup_cron_script.sh /usr/local/bin/cleanup_cron_script.sh +RUN chmod +x /usr/local/bin/cleanup_cron_script.sh \ + && echo '0 * * * * www-data /usr/local/bin/cleanup_cron_script.sh >> /var/log/cleanup_cron.log 2>&1' > /etc/cron.d/cleanup_cron \ + && crontab /etc/cron.d/cleanup_cron + COPY composer.json ${HASHTOPOLIS_DOCUMENT_ROOT}/../ RUN composer install --working-dir=${HASHTOPOLIS_DOCUMENT_ROOT}/.. 
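
The Upload-Metadata validation added in PATCH 01 above expects the header to be comma-separated "key base64value" pairs, with the key and the base64-encoded value separated by a single space, and rejects entries with no value or invalid base64. A minimal, self-contained sketch of that parsing; the function name, the exceptions and the sample header are illustrative and not part of the patch:

<?php
// Illustrative sketch (not part of the patch): parse a tus Upload-Metadata
// header into key/value pairs the way the validation in PATCH 01 expects it.
function parseUploadMetadata(string $header): array {
    $metadata = [];
    foreach (explode(',', $header) as $item) {
        $parts = explode(' ', trim($item), 2);
        if (count($parts) !== 2) {
            // no value supplied for this key
            throw new InvalidArgumentException(
                "Upload-Metadata entries must be 'key base64value' pairs separated by a space"
            );
        }
        [$key, $b64val] = $parts;
        $val = base64_decode($b64val, true); // strict mode: false on invalid input
        if ($val === false) {
            throw new InvalidArgumentException("Upload-Metadata '$key' is not valid base64");
        }
        $metadata[$key] = $val;
    }
    return $metadata;
}

// Example: a client creating an upload for "hashes.txt" would send
// Upload-Metadata: filename aGFzaGVzLnR4dA==
var_dump(parseUploadMetadata('filename aGFzaGVzLnR4dA=='));
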
diff --git a/cleanup_cron_script.sh b/cleanup_cron_script.sh new file mode 100644 index 000000000..76acb2d79 --- /dev/null +++ b/cleanup_cron_script.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +# Directories where uploads and metadata are stored +UPLOAD_DIR="/var/tmp/tus/uploads/" +META_DIR="/var/tmp/tus/meta/" + +# 1 hour expiration time +EXPIRATION_TIME=3600 + +# Current timestamp +CURRENT_TIME=$(date +%s) + +# Function to remove expired files +cleanup_expired_uploads() { + for upload_file in "$UPLOAD_DIR"/*.part; do + # Check if it's a regular file + if [ -f "$upload_file" ]; then + # Get the last modification time of the file + MOD_TIME=$(stat -c %Y "$upload_file") + + # Calculate the age of the file + FILE_AGE=$((CURRENT_TIME - MOD_TIME)) + + # If the file is older than the expiration time, delete it + if [ "$FILE_AGE" -ge "$EXPIRATION_TIME" ]; then + FILE_NAME=$(basename "$upload_file") + META_FILE="$META_DIR/$FILE_NAME.meta" + + + echo "Removing expired upload: $FILE_NAME" + rm -f "$upload_file" + + # Remove the associated metadata file if it exists + if [ -f "$META_FILE" ]; then + rm -f "$META_FILE" + echo "Removed associated metadata: $META_FILE" + fi + fi + fi + done +} + +# Run the cleanup function +cleanup_expired_uploads diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh index 7bed554f6..085dbffa1 100755 --- a/docker-entrypoint.sh +++ b/docker-entrypoint.sh @@ -25,22 +25,22 @@ done echo "Database ready." +directories=( + "${HASHTOPOLIS_FILES_PATH}" + "${HASHTOPOLIS_CONFIG_PATH}" + "${HASHTOPOLIS_LOG_PATH}" + "${HASHTOPOLIS_IMPORT_PATH}" + "${HASHTOPOLIS_BINARIES_PATH}" + "${HASHTOPOLIS_TUS_PATH}" + "${HASHTOPOLIS_TEMP_UPLOADS_PATH}" + "${HASHTOPOLIS_TEMP_META_PATH}" +) + echo "Setting up folders" -if [ ! -d ${HASHTOPOLIS_FILES_PATH} ];then - mkdir -p ${HASHTOPOLIS_FILES_PATH} && chown www-data:www-data ${HASHTOPOLIS_FILES_PATH} -fi -if [ ! -d ${HASHTOPOLIS_CONFIG_PATH} ];then - mkdir -p ${HASHTOPOLIS_CONFIG_PATH} && chown www-data:www-data ${HASHTOPOLIS_CONFIG_PATH} -fi -if [ ! -d ${HASHTOPOLIS_LOG_PATH} ];then - mkdir -p ${HASHTOPOLIS_LOG_PATH} && chown www-data:www-data ${HASHTOPOLIS_LOG_PATH} -fi -if [ ! -d ${HASHTOPOLIS_IMPORT_PATH} ];then - mkdir -p ${HASHTOPOLIS_IMPORT_PATH} && chown www-data:www-data ${HASHTOPOLIS_IMPORT_PATH} -fi -if [ ! -d ${HASHTOPOLIS_BINARIES_PATH} ];then - mkdir -p ${HASHTOPOLIS_BINARIES_PATH} && chown www-data:www-data ${HASHTOPOLIS_BINARIES_PATH} -fi +for dir in "${directories[@]}"; do + mkdir -p "$dir" + chown www-data:www-data "$dir" +done # required to trigger the initialization echo "Start initialization process..." diff --git a/src/inc/apiv2/helper/importFile.routes.php b/src/inc/apiv2/helper/importFile.routes.php index 0331f7bdf..c76f448c8 100644 --- a/src/inc/apiv2/helper/importFile.routes.php +++ b/src/inc/apiv2/helper/importFile.routes.php @@ -27,12 +27,12 @@ require_once(dirname(__FILE__) . "/../../load.php"); function getUploadPath(string $id): string { - $filename = "/tmp/" . $id . '.part'; + $filename = "/var/tmp/tus/uploads/" . $id . '.part'; return $filename; }; function getMetaPath(string $id): string { - $filename = "/tmp/" . $id . '.meta'; + $filename = "/var/tmp/tus/meta/" . $id . '.meta'; return $filename; }; @@ -69,7 +69,8 @@ function updateStorage(string $id, array $update): void { ->withHeader('Tus-Version', '1.0.0') ->withHeader('Tus-Resumable', '1.0.0') ->withHeader('Tus-Checksum-Algorithm', join(',', getChecksumAlgorithm())) - //TODO: Maybe add Upload-Expires support. 
Return in PATCH with RFC 7231 + //TODO: Maybe add Upload-Expires support. Return in PATCH with RFC 7231. + //TODO: Maybe add creation-with-upload support ->withHeader('Tus-Extension', 'checksum,creation,creation-defer-length,expiration,termination') ->withHeader('Access-Control-Expose-Headers', 'Tus-Version, Tus-Resumable, Tus-Checksum-Algorithm, Tus-Extension'); //TODO: Option for Tus-Max-Size: 1073741824 @@ -190,9 +191,6 @@ function updateStorage(string $id, array $update): void { } }); - - - $group->patch('', function (Request $request, Response $response, array $args): Response { // Check for Content-Type: application/offset+octet-stream or return 415 if (($request->hasHeader('Content-Type') == false) || @@ -203,7 +201,7 @@ function updateStorage(string $id, array $update): void { /* Return 404 if entry is not found */ $filename = getUploadPath($args['id']); - if (file_exists($filename) === false) { + if (!file_exists($filename)) { // TODO: Maybe 410 if actual file still exists and meta file also exists? $response->getBody()->write('Upload ID does not exists'); return $response->withStatus(404); @@ -232,17 +230,6 @@ function updateStorage(string $id, array $update): void { return $response->withStatus(400); } - $ds = getMetaStorage($args['id']); - - /* Validate if upload time is still valid */ - $now = new DateTimeImmutable(); - $dt = (new DateTime())->setTimeStamp($ds['upload_expires']); - if (($dt->getTimestamp() - $now->getTimestamp()) <= 0) { - // TODO: Remove expired uploads - $response->getBody()->write('Upload token expired'); - return $response->withStatus(410); - } - /* Validate checksum */ if ($request->hasHeader('Upload-Checksum')) { $uploadChecksum = $request->getHeader('Upload-Checksum')[0]; @@ -278,6 +265,7 @@ function updateStorage(string $id, array $update): void { } } + $ds = getMetaStorage($args['id']); if ($ds["upload_defer_length"] === true) { if ($request->hasHeader('Upload-Length')) { $update["upload_length"] = intval($request->getHeader('Upload-Length')[0]); @@ -286,7 +274,10 @@ function updateStorage(string $id, array $update): void { } } - file_put_contents($filename, $chunk, FILE_APPEND); + if (file_put_contents($filename, $chunk, FILE_APPEND) == false) { + $response->getBody()->write('Failed to write to file'); + return $response->withStatus(400); + } clearstatcache(); $newSize = filesize($filename); @@ -315,6 +306,7 @@ function updateStorage(string $id, array $update): void { $statusMsg = "Next chunk please"; } + $dt = (new DateTime())->setTimeStamp($ds['upload_expires']); $response->getBody()->write($statusMsg); return $response->withStatus(204) ->withHeader("Tus-Resumable", "1.0.0") @@ -325,9 +317,16 @@ function updateStorage(string $id, array $update): void { }); $group->delete('', function (Request $request, Response $response, array $args): Response { - // TODO delete file + /* Return 404 if entry is not found */ + $filename_upload = getUploadPath($args['id']); + $filename_meta = getMetaPath($args['id']); + if (!file_exists($filename_upload) && !file_exists($filename_meta)) { + $response->getBody()->write('Upload ID does not exists'); + return $response->withStatus(404); + } + unlink($filename_upload); + unlink($filename_meta); - // TODO return 404 or 410 if entry is not found return $response->withStatus(204) ->withHeader("Tus-Resumable", "1.0.0") ->WithHeader("Access-Control-Expose-Headers", "Tus-Resumable"); diff --git a/src/inc/confv2.php b/src/inc/confv2.php index dd63271d5..998319979 100644 --- a/src/inc/confv2.php +++ b/src/inc/confv2.php @@ 
-30,7 +30,10 @@ "files" => "/usr/local/share/hashtopolis/files", "import" => "/usr/local/share/hashtopolis/import", "log" => "/usr/local/share/hashtopolis/log", - "config" => "/usr/local/share/hashtopolis/config" + "config" => "/usr/local/share/hashtopolis/config", + "tus" => "/var/tmp/tus/", + "temp_uploads" => "/var/tmp/tus/uploads/", + "temp_meta" => "/var/tmp/tus/meta/" ]; // update from env if set From 22ffbf31ddc36f28ccdb9efe5341eda46ff7324a Mon Sep 17 00:00:00 2001 From: jessevz Date: Mon, 19 Jan 2026 16:15:51 +0100 Subject: [PATCH 03/16] Fixed merge conflicts --- src/inc/apiv2/helper/importFile.routes.php | 28 ++++++++++++---------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/src/inc/apiv2/helper/importFile.routes.php b/src/inc/apiv2/helper/importFile.routes.php index 23b4992c0..b770ec73b 100644 --- a/src/inc/apiv2/helper/importFile.routes.php +++ b/src/inc/apiv2/helper/importFile.routes.php @@ -37,14 +37,21 @@ public function getRequiredPermissions(string $method): array { return []; } - static function getUploadPath(string $id): string { - return "/tmp/" . $id . '.part'; - } - - static function getMetaPath(string $id): string { - return "/tmp/" . $id . '.meta'; - } - +function getUploadPath(string $id): string { + $filename = "/var/tmp/tus/uploads/" . $id . '.part'; + return $filename; +} + +function getMetaPath(string $id): string { + $filename = "/var/tmp/tus/meta/" . $id . '.meta'; + return $filename; +} + +function getImportPath(string $id): string { + $filename = Factory::getStoredValueFactory()->get(DDirectories::IMPORT)->getVal() . "/" . $id; + return $filename; +} + /** * Import file has no POST parameters */ @@ -52,11 +59,6 @@ public function getFormFields(): array { return []; } - - static function getImportPath(string $id): string { - return Factory::getStoredValueFactory()->get(DDirectories::IMPORT)->getVal() . "/" . $id; - } - static function getChecksumAlgorithm(): array { return ['md5', 'sha1', 'crc32']; } From 1087aff1be3e32b5dfe7168c3ff31ae9a050362c Mon Sep 17 00:00:00 2001 From: jessevz Date: Mon, 19 Jan 2026 16:16:16 +0100 Subject: [PATCH 04/16] Fixed TUS path --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 39c2347f0..289afb9d7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -30,7 +30,7 @@ ENV HASHTOPOLIS_IMPORT_PATH=${HASHTOPOLIS_PATH}/import ENV HASHTOPOLIS_LOG_PATH=${HASHTOPOLIS_PATH}/log ENV HASHTOPOLIS_CONFIG_PATH=${HASHTOPOLIS_PATH}/config ENV HASHTOPOLIS_BINARIES_PATH=${HASHTOPOLIS_PATH}/binaries -ENV HASHTOPOLIS_TUS_PATH=/var/tmp/tus/ +ENV HASHTOPOLIS_TUS_PATH=/var/tmp/tus ENV HASHTOPOLIS_TEMP_UPLOADS_PATH=${HASHTOPOLIS_TUS_PATH}/uploads ENV HASHTOPOLIS_TEMP_META_PATH=${HASHTOPOLIS_TUS_PATH}/meta From ea876b9fb8235f8538ec24cdb5a5c7a8320587ed Mon Sep 17 00:00:00 2001 From: jessevz Date: Mon, 19 Jan 2026 17:29:35 +0100 Subject: [PATCH 05/16] Fixed warnings --- docker-entrypoint.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh index f98e9ecd6..bb2520afe 100755 --- a/docker-entrypoint.sh +++ b/docker-entrypoint.sh @@ -62,8 +62,9 @@ directories=( echo "Setting up folders" for dir in "${directories[@]}"; do - mkdir -p "$dir" - chown www-data:www-data "$dir" + if [ ! 
-d $dir ];then + mkdir -p $dir && chown www-data:www-data $dir + fi done # required to trigger the initialization From 108e7e50bfc0bb465f7a8202263d317603a42611 Mon Sep 17 00:00:00 2001 From: jessevz Date: Tue, 20 Jan 2026 13:14:35 +0100 Subject: [PATCH 06/16] Fixed git merge issues and made tus file locations configurable --- src/inc/apiv2/helper/importFile.routes.php | 10 +++++----- src/inc/confv2.php | 8 +++++--- src/inc/defines/global.php | 1 + src/inc/startup/setup.php | 1 + 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/src/inc/apiv2/helper/importFile.routes.php b/src/inc/apiv2/helper/importFile.routes.php index b770ec73b..0a0c07ec3 100644 --- a/src/inc/apiv2/helper/importFile.routes.php +++ b/src/inc/apiv2/helper/importFile.routes.php @@ -37,17 +37,17 @@ public function getRequiredPermissions(string $method): array { return []; } -function getUploadPath(string $id): string { - $filename = "/var/tmp/tus/uploads/" . $id . '.part'; +static function getUploadPath(string $id): string { + $filename = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal() . '/uploads/' . $id . '.part'; return $filename; } -function getMetaPath(string $id): string { - $filename = "/var/tmp/tus/meta/" . $id . '.meta'; +static function getMetaPath(string $id): string { + $filename = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal() . '/uploads/' . $id . '.meta'; return $filename; } -function getImportPath(string $id): string { +static function getImportPath(string $id): string { $filename = Factory::getStoredValueFactory()->get(DDirectories::IMPORT)->getVal() . "/" . $id; return $filename; } diff --git a/src/inc/confv2.php b/src/inc/confv2.php index 52066785d..fe94080c1 100644 --- a/src/inc/confv2.php +++ b/src/inc/confv2.php @@ -10,7 +10,8 @@ "files" => dirname(__FILE__) . "/../files/", "import" => dirname(__FILE__) . "/../import/", "log" => dirname(__FILE__) . "/../log/", - "config" => dirname(__FILE__) . "/../config/" + "config" => dirname(__FILE__) . "/../config/", + "tus" => "/var/tmp/tus/", ]; } @@ -50,8 +51,6 @@ "log" => "/usr/local/share/hashtopolis/log", "config" => "/usr/local/share/hashtopolis/config", "tus" => "/var/tmp/tus/", - "temp_uploads" => "/var/tmp/tus/uploads/", - "temp_meta" => "/var/tmp/tus/meta/" ]; // update from env if set @@ -64,6 +63,9 @@ if (getenv('HASHTOPOLIS_LOG_PATH') !== false) { $DIRECTORIES["log"] = getenv('HASHTOPOLIS_LOG_PATH'); } + if (getenv('HASHTOPOLIS_TUS_PATH') !== false) { + $DIRECTORIES["tus"] = getenv('HASHTOPOLIS_TUS_PATH'); + } } // load data // test if config file exists diff --git a/src/inc/defines/global.php b/src/inc/defines/global.php index 9323d88ac..79a14b710 100644 --- a/src/inc/defines/global.php +++ b/src/inc/defines/global.php @@ -13,6 +13,7 @@ class DDirectories { const IMPORT = "directory_import"; const LOG = "directory_log"; const CONFIG = "directory_config"; + const TUS = "directory_tus"; } // log entry types diff --git a/src/inc/startup/setup.php b/src/inc/startup/setup.php index ee3992a58..2f75153bf 100755 --- a/src/inc/startup/setup.php +++ b/src/inc/startup/setup.php @@ -114,5 +114,6 @@ Util::checkDataDirectory(DDirectories::IMPORT, $DIRECTORIES['import']); Util::checkDataDirectory(DDirectories::LOG, $DIRECTORIES['log']); Util::checkDataDirectory(DDirectories::CONFIG, $DIRECTORIES['config']); +Util::checkDataDirectory(DDirectories::TUS, $DIRECTORIES['tus']); define("APP_NAME", (SConfig::getInstance()->getVal(DConfig::S_NAME) == 1) ? 
"Hashtopussy" : "Hashtopolis"); From 6203afd46486884edf088ae22e23aaf5a4f63d3f Mon Sep 17 00:00:00 2001 From: jessevz Date: Tue, 20 Jan 2026 13:19:12 +0100 Subject: [PATCH 07/16] Fixed another merge mistake --- Dockerfile | 2 -- 1 file changed, 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 289afb9d7..26e96ed44 100644 --- a/Dockerfile +++ b/Dockerfile @@ -105,8 +105,6 @@ RUN mkdir -p \ COPY --from=prebuild /usr/local/cargo/bin/sqlx /usr/bin/ -COPY --from=prebuild /usr/local/cargo/bin/sqlx /usr/bin/ - COPY --from=preprocess /HEA[D] ${HASHTOPOLIS_DOCUMENT_ROOT}/../.git/ COPY cleanup_cron_script.sh /usr/local/bin/cleanup_cron_script.sh From e5c29b737733263827baa2b7a986f11ac2d3befc Mon Sep 17 00:00:00 2001 From: jessevz Date: Tue, 20 Jan 2026 15:40:42 +0100 Subject: [PATCH 08/16] Improved cleaning of outdated uploaded files --- Dockerfile | 5 -- cleanup_cron_script.sh | 44 ----------- src/inc/Util.class.php | 91 +++++++++++++--------- src/inc/api/APISendProgress.class.php | 3 +- src/inc/apiv2/helper/importFile.routes.php | 2 +- src/inc/defines/global.php | 6 +- 6 files changed, 60 insertions(+), 91 deletions(-) delete mode 100644 cleanup_cron_script.sh diff --git a/Dockerfile b/Dockerfile index 26e96ed44..9dec1c167 100644 --- a/Dockerfile +++ b/Dockerfile @@ -107,11 +107,6 @@ COPY --from=prebuild /usr/local/cargo/bin/sqlx /usr/bin/ COPY --from=preprocess /HEA[D] ${HASHTOPOLIS_DOCUMENT_ROOT}/../.git/ -COPY cleanup_cron_script.sh /usr/local/bin/cleanup_cron_script.sh -RUN chmod +x /usr/local/bin/cleanup_cron_script.sh \ - && echo '0 * * * * www-data /usr/local/bin/cleanup_cron_script.sh >> /var/log/cleanup_cron.log 2>&1' > /etc/cron.d/cleanup_cron \ - && crontab /etc/cron.d/cleanup_cron - # Install composer COPY composer.json ${HASHTOPOLIS_DOCUMENT_ROOT}/../ RUN composer install --working-dir=${HASHTOPOLIS_DOCUMENT_ROOT}/.. 
diff --git a/cleanup_cron_script.sh b/cleanup_cron_script.sh deleted file mode 100644 index 76acb2d79..000000000 --- a/cleanup_cron_script.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash - -# Directories where uploads and metadata are stored -UPLOAD_DIR="/var/tmp/tus/uploads/" -META_DIR="/var/tmp/tus/meta/" - -# 1 hour expiration time -EXPIRATION_TIME=3600 - -# Current timestamp -CURRENT_TIME=$(date +%s) - -# Function to remove expired files -cleanup_expired_uploads() { - for upload_file in "$UPLOAD_DIR"/*.part; do - # Check if it's a regular file - if [ -f "$upload_file" ]; then - # Get the last modification time of the file - MOD_TIME=$(stat -c %Y "$upload_file") - - # Calculate the age of the file - FILE_AGE=$((CURRENT_TIME - MOD_TIME)) - - # If the file is older than the expiration time, delete it - if [ "$FILE_AGE" -ge "$EXPIRATION_TIME" ]; then - FILE_NAME=$(basename "$upload_file") - META_FILE="$META_DIR/$FILE_NAME.meta" - - - echo "Removing expired upload: $FILE_NAME" - rm -f "$upload_file" - - # Remove the associated metadata file if it exists - if [ -f "$META_FILE" ]; then - rm -f "$META_FILE" - echo "Removed associated metadata: $META_FILE" - fi - fi - fi - done -} - -# Run the cleanup function -cleanup_expired_uploads diff --git a/src/inc/Util.class.php b/src/inc/Util.class.php index a8a31ece9..6baad40dc 100755 --- a/src/inc/Util.class.php +++ b/src/inc/Util.class.php @@ -621,6 +621,21 @@ public static function checkTaskWrapperCompleted($taskWrapper) { } return true; } + + public static function cleaning() { + $entry = Factory::getStoredValueFactory()->get(DStats::LAST_CLEANING); + if ($entry == null) { + $entry = new StoredValue(DStats::LAST_CLEANING, 0); + Factory::getStoredValueFactory()->save($entry); + } + $time = time(); + if ($time - $entry->getVal() > 600) { + self::agentStatCleaning(); + self::zapCleaning(); + self::tusFileCleaning(); + } + Factory::getStoredValueFactory()->set($entry, StoredValue::VAL, $time); + } /** * Checks if it is longer than 10 mins since the last time it was checked if there are @@ -628,48 +643,54 @@ public static function checkTaskWrapperCompleted($taskWrapper) { * and old entries are deleted. */ public static function agentStatCleaning() { - $entry = Factory::getStoredValueFactory()->get(DStats::LAST_STAT_CLEANING); - if ($entry == null) { - $entry = new StoredValue(DStats::LAST_STAT_CLEANING, 0); - Factory::getStoredValueFactory()->save($entry); - } - if (time() - $entry->getVal() > 600) { - $lifetime = intval(SConfig::getInstance()->getVal(DConfig::AGENT_DATA_LIFETIME)); - if ($lifetime <= 0) { - $lifetime = 3600; - } - $qF = new QueryFilter(AgentStat::TIME, time() - $lifetime, "<="); - Factory::getAgentStatFactory()->massDeletion([Factory::FILTER => $qF]); - - $qF = new QueryFilter(Speed::TIME, time() - $lifetime, "<="); - Factory::getSpeedFactory()->massDeletion([Factory::FILTER => $qF]); - - Factory::getStoredValueFactory()->set($entry, StoredValue::VAL, time()); + $lifetime = intval(SConfig::getInstance()->getVal(DConfig::AGENT_DATA_LIFETIME)); + if ($lifetime <= 0) { + $lifetime = 3600; } + $qF = new QueryFilter(AgentStat::TIME, time() - $lifetime, "<="); + Factory::getAgentStatFactory()->massDeletion([Factory::FILTER => $qF]); + + $qF = new QueryFilter(Speed::TIME, time() - $lifetime, "<="); + Factory::getSpeedFactory()->massDeletion([Factory::FILTER => $qF]); + } /** * Used by the solver. 
Cleans the zap-queue */ public static function zapCleaning() { - $entry = Factory::getStoredValueFactory()->get(DZaps::LAST_ZAP_CLEANING); - if ($entry == null) { - $entry = new StoredValue(DZaps::LAST_ZAP_CLEANING, 0); - Factory::getStoredValueFactory()->save($entry); - } - if (time() - $entry->getVal() > 600) { - $zapFilter = new QueryFilter(Zap::SOLVE_TIME, time() - 600, "<="); - - // delete dependencies on AgentZap - $zaps = Factory::getZapFactory()->filter([Factory::FILTER => $zapFilter]); - $zapIds = Util::arrayOfIds($zaps); - $uS = new UpdateSet(AgentZap::LAST_ZAP_ID, null); - $qF = new ContainFilter(AgentZap::LAST_ZAP_ID, $zapIds); - Factory::getAgentZapFactory()->massUpdate([Factory::FILTER => $qF, Factory::UPDATE => $uS]); - - Factory::getZapFactory()->massDeletion([Factory::FILTER => $zapFilter]); - - Factory::getStoredValueFactory()->set($entry, StoredValue::VAL, time()); + $zapFilter = new QueryFilter(Zap::SOLVE_TIME, time() - 600, "<="); + + // delete dependencies on AgentZap + $zaps = Factory::getZapFactory()->filter([Factory::FILTER => $zapFilter]); + $zapIds = Util::arrayOfIds($zaps); + $uS = new UpdateSet(AgentZap::LAST_ZAP_ID, null); + $qF = new ContainFilter(AgentZap::LAST_ZAP_ID, $zapIds); + Factory::getAgentZapFactory()->massUpdate([Factory::FILTER => $qF, Factory::UPDATE => $uS]); + + Factory::getZapFactory()->massDeletion([Factory::FILTER => $zapFilter]); + } + + public static function tusFileCleaning() { + $tusDirectory = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal(); + $uploadDirectory = $tusDirectory . "/uploads/"; + $metaDirectory = $tusDirectory . "/meta/"; + $expiration_time = 3600; + if (file_exists($uploadDirectory) && is_dir($uploadDirectory)) { + if ($uploadDirectoryHandler = opendir($uploadDirectory)){ + while ($file = readdir($uploadDirectoryHandler)) { + if (str_ends_with($file, ".part")) { + $mod_time = filemtime($file); + $file_age = time() - $mod_time; + if ($file_age > $expiration_time) { + $metaFile = $metaDirectory . pathinfo($file, PATHINFO_FILENAME); + unlink($metaFile); + unlink($uploadDirectory . $file); + } + } + } + closedir($uploadDirectoryHandler); + } } } diff --git a/src/inc/api/APISendProgress.class.php b/src/inc/api/APISendProgress.class.php index c069e4faf..c2d9a243e 100644 --- a/src/inc/api/APISendProgress.class.php +++ b/src/inc/api/APISendProgress.class.php @@ -537,8 +537,7 @@ public function execute($QUERY = array()) { DServerLog::log(DServerLog::TRACE, "Checked zaps and sending new ones to agent", [$this->agent, $zaps]); break; } - Util::zapCleaning(); - Util::agentStatCleaning(); + Util::cleaning(); $this->sendResponse(array( PResponseSendProgress::ACTION => PActions::SEND_PROGRESS, PResponseSendProgress::RESPONSE => PValues::SUCCESS, diff --git a/src/inc/apiv2/helper/importFile.routes.php b/src/inc/apiv2/helper/importFile.routes.php index 0a0c07ec3..b05214d14 100644 --- a/src/inc/apiv2/helper/importFile.routes.php +++ b/src/inc/apiv2/helper/importFile.routes.php @@ -43,7 +43,7 @@ static function getUploadPath(string $id): string { } static function getMetaPath(string $id): string { - $filename = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal() . '/uploads/' . $id . '.meta'; + $filename = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal() . '/meta/' . $id . 
'.meta'; return $filename; } diff --git a/src/inc/defines/global.php b/src/inc/defines/global.php index 79a14b710..662aa3bb8 100644 --- a/src/inc/defines/global.php +++ b/src/inc/defines/global.php @@ -4,8 +4,8 @@ class DLimits { const ACCESS_GROUP_MAX_LENGTH = 50; } -class DZaps { - const LAST_ZAP_CLEANING = "lastZapCleaning"; +class DCleaning { + const LAST_CLEANING = "lastCleaning"; } class DDirectories { @@ -37,8 +37,6 @@ class DStats { const TASKS_FINISHED = "tasksFinished"; const TASKS_RUNNING = "tasksRunning"; const TASKS_QUEUED = "tasksQueued"; - - const LAST_STAT_CLEANING = "lastStatCleaning"; } class DPrince { From 38421222ab81533d837b3cf3b3ba077828667887 Mon Sep 17 00:00:00 2001 From: jessevz Date: Wed, 21 Jan 2026 13:46:40 +0100 Subject: [PATCH 09/16] Added checking for empty file for HEAD request for importfile --- src/inc/apiv2/helper/importFile.routes.php | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/inc/apiv2/helper/importFile.routes.php b/src/inc/apiv2/helper/importFile.routes.php index b05214d14..1bb999635 100644 --- a/src/inc/apiv2/helper/importFile.routes.php +++ b/src/inc/apiv2/helper/importFile.routes.php @@ -88,8 +88,10 @@ function actionPost(array $data): object|array|null { * And to retrieve the upload status. */ function processHead(Request $request, Response $response, array $args): Response { - // TODO return 404 or 410 if entry is not found $filename = self::getUploadPath($args['id']); + if (!is_file($filename)) { + return $response->withStatus(404); + } $currentSize = filesize($filename); $ds = self::getMetaStorage($args['id']); From 336d963226343e0ab8bf713bdf451d9fe0ad9634 Mon Sep 17 00:00:00 2001 From: jessevz Date: Wed, 21 Jan 2026 13:51:02 +0100 Subject: [PATCH 10/16] Fixed bug in cleaning --- src/inc/Util.class.php | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/inc/Util.class.php b/src/inc/Util.class.php index 6baad40dc..622003374 100755 --- a/src/inc/Util.class.php +++ b/src/inc/Util.class.php @@ -623,9 +623,9 @@ public static function checkTaskWrapperCompleted($taskWrapper) { } public static function cleaning() { - $entry = Factory::getStoredValueFactory()->get(DStats::LAST_CLEANING); + $entry = Factory::getStoredValueFactory()->get(DCleaning::LAST_CLEANING); if ($entry == null) { - $entry = new StoredValue(DStats::LAST_CLEANING, 0); + $entry = new StoredValue(DCleaning::LAST_CLEANING, 0); Factory::getStoredValueFactory()->save($entry); } $time = time(); From b2ec8d300c45877223435d4e208f10b1eeae97ee Mon Sep 17 00:00:00 2001 From: jessevz Date: Wed, 21 Jan 2026 14:42:02 +0100 Subject: [PATCH 11/16] Improved expired file handling --- src/inc/Util.class.php | 20 ++++++++++---------- src/inc/apiv2/helper/importFile.routes.php | 2 +- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/inc/Util.class.php b/src/inc/Util.class.php index 622003374..6d3328f7e 100755 --- a/src/inc/Util.class.php +++ b/src/inc/Util.class.php @@ -676,20 +676,20 @@ public static function tusFileCleaning() { $uploadDirectory = $tusDirectory . "/uploads/"; $metaDirectory = $tusDirectory . "/meta/"; $expiration_time = 3600; - if (file_exists($uploadDirectory) && is_dir($uploadDirectory)) { - if ($uploadDirectoryHandler = opendir($uploadDirectory)){ - while ($file = readdir($uploadDirectoryHandler)) { - if (str_ends_with($file, ".part")) { - $mod_time = filemtime($file); - $file_age = time() - $mod_time; - if ($file_age > $expiration_time) { - $metaFile = $metaDirectory . 
pathinfo($file, PATHINFO_FILENAME); + if (file_exists($metaDirectory) && is_dir($metaDirectory)) { + if ($metaDirectoryHandler = opendir($metaDirectory)){ + while ($file = readdir($metaDirectoryHandler)) { + if (str_ends_with($file, ".meta")) { + $metaFile = $metaDirectory . $file; + $metadata = (array)json_decode(file_get_contents($metaFile), true) ; + if ($metadata['upload_expires'] > $expiration_time) { + $uploadFile = $uploadDirectory . pathinfo($file, PATHINFO_FILENAME) . ".part"; unlink($metaFile); - unlink($uploadDirectory . $file); + unlink($uploadFile); } } } - closedir($uploadDirectoryHandler); + closedir($metaDirectoryHandler); } } } diff --git a/src/inc/apiv2/helper/importFile.routes.php b/src/inc/apiv2/helper/importFile.routes.php index 1bb999635..d3311f60e 100644 --- a/src/inc/apiv2/helper/importFile.routes.php +++ b/src/inc/apiv2/helper/importFile.routes.php @@ -266,7 +266,7 @@ function processPatch(Request $request, Response $response, array $args): Respon $now = new DateTimeImmutable(); $dt = (new DateTime())->setTimeStamp($ds['upload_expires']); if (($dt->getTimestamp() - $now->getTimestamp()) <= 0) { - // TODO: Remove expired uploads + Util::tusFileCleaning(); $response->getBody()->write('Upload token expired'); return $response->withStatus(410); } From 947b2dd125532dd1fe48d55cd5b565aacc174d6a Mon Sep 17 00:00:00 2001 From: jessevz Date: Wed, 21 Jan 2026 14:47:09 +0100 Subject: [PATCH 12/16] Removed cron since that is not needed anymore --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 9dec1c167..ed8512914 100644 --- a/Dockerfile +++ b/Dockerfile @@ -42,7 +42,7 @@ RUN if [ -n "${CONTAINER_USER_CMD_PRE}" ]; then echo "${CONTAINER_USER_CMD_PRE}" # Configure apt and install packages RUN apt-get update \ - && apt-get -y install --no-install-recommends apt-utils cron zip unzip nano ncdu gettext-base 2>&1 \ + && apt-get -y install --no-install-recommends apt-utils zip unzip nano ncdu gettext-base 2>&1 \ # # Install git, procps, lsb-release (useful for CLI installs) && apt-get -y install git iproute2 procps lsb-release \ From 5a3ba47482a4a8f7cda612a80414a8c6d901fd30 Mon Sep 17 00:00:00 2001 From: jessevz Date: Wed, 21 Jan 2026 15:39:04 +0100 Subject: [PATCH 13/16] Resolved copilot review --- docker-entrypoint.sh | 4 +- src/inc/Util.class.php | 9 ++-- src/inc/apiv2/helper/importFile.routes.php | 48 +++++++++++++++++----- src/inc/startup/setup.php | 4 +- 4 files changed, 47 insertions(+), 18 deletions(-) diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh index bb2520afe..367142e97 100755 --- a/docker-entrypoint.sh +++ b/docker-entrypoint.sh @@ -62,8 +62,8 @@ directories=( echo "Setting up folders" for dir in "${directories[@]}"; do - if [ ! -d $dir ];then - mkdir -p $dir && chown www-data:www-data $dir + if [ ! 
-d "$dir" ];then + mkdir -p "$dir" && chown www-data:www-data "$dir" fi done diff --git a/src/inc/Util.class.php b/src/inc/Util.class.php index 6d3328f7e..c3c91eca4 100755 --- a/src/inc/Util.class.php +++ b/src/inc/Util.class.php @@ -633,8 +633,8 @@ public static function cleaning() { self::agentStatCleaning(); self::zapCleaning(); self::tusFileCleaning(); + Factory::getStoredValueFactory()->set($entry, StoredValue::VAL, $time); } - Factory::getStoredValueFactory()->set($entry, StoredValue::VAL, $time); } /** @@ -673,8 +673,8 @@ public static function zapCleaning() { public static function tusFileCleaning() { $tusDirectory = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal(); - $uploadDirectory = $tusDirectory . "/uploads/"; - $metaDirectory = $tusDirectory . "/meta/"; + $uploadDirectory = $tusDirectory . PATH_SEPARATOR . "uploads" . PATH_SEPARATOR; + $metaDirectory = $tusDirectory . PATH_SEPARATOR . "meta" . PATH_SEPARATOR; $expiration_time = 3600; if (file_exists($metaDirectory) && is_dir($metaDirectory)) { if ($metaDirectoryHandler = opendir($metaDirectory)){ @@ -682,6 +682,9 @@ public static function tusFileCleaning() { if (str_ends_with($file, ".meta")) { $metaFile = $metaDirectory . $file; $metadata = (array)json_decode(file_get_contents($metaFile), true) ; + if (!isset($metadata['upload_expires'])) { + continue; + } if ($metadata['upload_expires'] > $expiration_time) { $uploadFile = $uploadDirectory . pathinfo($file, PATHINFO_FILENAME) . ".part"; unlink($metaFile); diff --git a/src/inc/apiv2/helper/importFile.routes.php b/src/inc/apiv2/helper/importFile.routes.php index d3311f60e..618e3ccdd 100644 --- a/src/inc/apiv2/helper/importFile.routes.php +++ b/src/inc/apiv2/helper/importFile.routes.php @@ -38,18 +38,39 @@ public function getRequiredPermissions(string $method): array { } static function getUploadPath(string $id): string { - $filename = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal() . '/uploads/' . $id . '.part'; - return $filename; + $baseDir = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal() . DIRECTORY_SEPARATOR . 'uploads' . + DIRECTORY_SEPARATOR; + $fullPath = realpath($baseDir . $id . ".part"); + // path traversal prevention + if (!$fullPath || !str_starts_with($fullPath, $baseDir)) { + throw new HttpForbidden("Invalid path"); + } + return $fullPath; } static function getMetaPath(string $id): string { - $filename = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal() . '/meta/' . $id . '.meta'; - return $filename; + $baseDir = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal() . DIRECTORY_SEPARATOR . 'meta' + . DIRECTORY_SEPARATOR; + $fullPath = realpath($baseDir . $id . ".meta"); + + // path traversal prevention + if (!$fullPath || !str_starts_with($fullPath, $baseDir)) { + throw new HttpForbidden("Invalid path"); + } + + return $fullPath; } static function getImportPath(string $id): string { - $filename = Factory::getStoredValueFactory()->get(DDirectories::IMPORT)->getVal() . "/" . $id; - return $filename; + $baseDir = Factory::getStoredValueFactory()->get(DDirectories::IMPORT)->getVal() . DIRECTORY_SEPARATOR; + $fullPath = realpath($baseDir . 
$id); + + // path traversal prevention + if (!$fullPath || !str_starts_with($fullPath, $baseDir)) { + throw new HttpForbidden("Invalid path"); + } + + return $fullPath; } /** @@ -160,7 +181,7 @@ function processPost(Request $request, Response $response, array $args): Respons foreach ($list as $item) { list($key, $b64val) = explode(" ", $item); if (!isset($b64val)) { - $response->getBody()->write("Error Upload-Metadata, should be a key value pair that is seperated by a space, no value has been provided"); + $response->getBody()->write("Error Upload-Metadata, should be a key value pair that is separated by a space, no value has been provided"); return $response->withStatus(400); } if (($val = base64_decode($b64val, true)) === false) { @@ -264,6 +285,9 @@ function processPatch(Request $request, Response $response, array $args): Respon /* Validate if upload time is still valid */ $now = new DateTimeImmutable(); + if (!isset($ds['upload_expires'])) { + throw new HttpError("The meta file of this upload is incorrect"); + } $dt = (new DateTime())->setTimeStamp($ds['upload_expires']); if (($dt->getTimestamp() - $now->getTimestamp()) <= 0) { Util::tusFileCleaning(); @@ -364,11 +388,15 @@ function processDelete(Request $request, Response $response, array $args): Respo $filename_upload = self::getUploadPath($args['id']); $filename_meta = self::getMetaPath($args['id']); if (!file_exists($filename_upload) && !file_exists($filename_meta)) { - $response->getBody()->write('Upload ID does not exists'); + $response->getBody()->write('Upload ID does not exist'); return $response->withStatus(404); } - unlink($filename_upload); - unlink($filename_meta); + $isDeletedUpload = unlink($filename_upload); + $isDeletedMeta = unlink($filename_meta); + + if (!$isDeletedMeta || !$isDeletedUpload) { + throw new HttpError("Something went wrong while deleting the files"); + } return $response->withStatus(204) ->withHeader("Tus-Resumable", "1.0.0") diff --git a/src/inc/startup/setup.php b/src/inc/startup/setup.php index 2f75153bf..233cb8558 100755 --- a/src/inc/startup/setup.php +++ b/src/inc/startup/setup.php @@ -114,6 +114,4 @@ Util::checkDataDirectory(DDirectories::IMPORT, $DIRECTORIES['import']); Util::checkDataDirectory(DDirectories::LOG, $DIRECTORIES['log']); Util::checkDataDirectory(DDirectories::CONFIG, $DIRECTORIES['config']); -Util::checkDataDirectory(DDirectories::TUS, $DIRECTORIES['tus']); - -define("APP_NAME", (SConfig::getInstance()->getVal(DConfig::S_NAME) == 1) ? "Hashtopussy" : "Hashtopolis"); +Util::checkDataDirectory(DDirectories::TUS, $DIRECTORIES['tus']); \ No newline at end of file From c8b1c0a2644a664748f77411ecbf4b747b5a0563 Mon Sep 17 00:00:00 2001 From: jessevz Date: Wed, 21 Jan 2026 16:23:32 +0100 Subject: [PATCH 14/16] Fixed path traversal prevention --- src/inc/apiv2/helper/importFile.routes.php | 32 ++++------------------ 1 file changed, 5 insertions(+), 27 deletions(-) diff --git a/src/inc/apiv2/helper/importFile.routes.php b/src/inc/apiv2/helper/importFile.routes.php index 618e3ccdd..dc06ba870 100644 --- a/src/inc/apiv2/helper/importFile.routes.php +++ b/src/inc/apiv2/helper/importFile.routes.php @@ -38,39 +38,17 @@ public function getRequiredPermissions(string $method): array { } static function getUploadPath(string $id): string { - $baseDir = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal() . DIRECTORY_SEPARATOR . 'uploads' . - DIRECTORY_SEPARATOR; - $fullPath = realpath($baseDir . $id . 
".part"); - // path traversal prevention - if (!$fullPath || !str_starts_with($fullPath, $baseDir)) { - throw new HttpForbidden("Invalid path"); - } - return $fullPath; + return Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal() . DIRECTORY_SEPARATOR . 'uploads' . + DIRECTORY_SEPARATOR . basename($id) . ".part"; } static function getMetaPath(string $id): string { - $baseDir = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal() . DIRECTORY_SEPARATOR . 'meta' - . DIRECTORY_SEPARATOR; - $fullPath = realpath($baseDir . $id . ".meta"); - - // path traversal prevention - if (!$fullPath || !str_starts_with($fullPath, $baseDir)) { - throw new HttpForbidden("Invalid path"); - } - - return $fullPath; + return Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal() . DIRECTORY_SEPARATOR . 'meta' + . DIRECTORY_SEPARATOR . basename($id) . ".meta"; } static function getImportPath(string $id): string { - $baseDir = Factory::getStoredValueFactory()->get(DDirectories::IMPORT)->getVal() . DIRECTORY_SEPARATOR; - $fullPath = realpath($baseDir . $id); - - // path traversal prevention - if (!$fullPath || !str_starts_with($fullPath, $baseDir)) { - throw new HttpForbidden("Invalid path"); - } - - return $fullPath; + return Factory::getStoredValueFactory()->get(DDirectories::IMPORT)->getVal() . DIRECTORY_SEPARATOR . basename($id); } /** From 517431200a66c1d16e37d201a90d4e78450d0f6b Mon Sep 17 00:00:00 2001 From: jessevz Date: Wed, 21 Jan 2026 16:42:53 +0100 Subject: [PATCH 15/16] Removed unnesesary actions from Dockerfile --- Dockerfile | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/Dockerfile b/Dockerfile index ed8512914..b87a3e8ec 100644 --- a/Dockerfile +++ b/Dockerfile @@ -84,24 +84,10 @@ RUN mkdir -p \ ${HASHTOPOLIS_TEMP_META_PATH} \ && chown -R www-data:www-data \ ${HASHTOPOLIS_PATH} \ - ${HASHTOPOLIS_FILES_PATH} \ - ${HASHTOPOLIS_IMPORT_PATH} \ - ${HASHTOPOLIS_LOG_PATH} \ - ${HASHTOPOLIS_CONFIG_PATH} \ - ${HASHTOPOLIS_BINARIES_PATH} \ ${HASHTOPOLIS_TUS_PATH} \ - ${HASHTOPOLIS_TEMP_UPLOADS_PATH} \ - ${HASHTOPOLIS_TEMP_META_PATH} \ && chmod -R g+w \ ${HASHTOPOLIS_PATH} \ - ${HASHTOPOLIS_FILES_PATH} \ - ${HASHTOPOLIS_IMPORT_PATH} \ - ${HASHTOPOLIS_LOG_PATH} \ - ${HASHTOPOLIS_CONFIG_PATH} \ - ${HASHTOPOLIS_BINARIES_PATH} \ - ${HASHTOPOLIS_TUS_PATH} \ - ${HASHTOPOLIS_TEMP_UPLOADS_PATH} \ - ${HASHTOPOLIS_TEMP_META_PATH} + ${HASHTOPOLIS_TUS_PATH} COPY --from=prebuild /usr/local/cargo/bin/sqlx /usr/bin/ From e745582a826966e1fa17e231c417f74a4aacb3d7 Mon Sep 17 00:00:00 2001 From: jessevz Date: Wed, 21 Jan 2026 18:12:17 +0100 Subject: [PATCH 16/16] Fixed copilot second review --- src/inc/Util.class.php | 22 +++++++++++++++++----- src/inc/apiv2/helper/importFile.routes.php | 17 +++++++++++------ 2 files changed, 28 insertions(+), 11 deletions(-) diff --git a/src/inc/Util.class.php b/src/inc/Util.class.php index c3c91eca4..273eb7d85 100755 --- a/src/inc/Util.class.php +++ b/src/inc/Util.class.php @@ -671,11 +671,19 @@ public static function zapCleaning() { Factory::getZapFactory()->massDeletion([Factory::FILTER => $zapFilter]); } + /** + * Cleans up stale TUS upload files. + * + * This method scans the TUS metadata directory for .meta files, reads their + * metadata to determine upload expiration, and removes expired metadata files + * together with their corresponding upload (.part) files. It performs file + * system operations and may delete files on disk. 
+ */ public static function tusFileCleaning() { $tusDirectory = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal(); - $uploadDirectory = $tusDirectory . PATH_SEPARATOR . "uploads" . PATH_SEPARATOR; - $metaDirectory = $tusDirectory . PATH_SEPARATOR . "meta" . PATH_SEPARATOR; - $expiration_time = 3600; + $uploadDirectory = $tusDirectory . DIRECTORY_SEPARATOR . "uploads" . DIRECTORY_SEPARATOR; + $metaDirectory = $tusDirectory . DIRECTORY_SEPARATOR . "meta" . DIRECTORY_SEPARATOR; + $expiration_time = time() + 3600; if (file_exists($metaDirectory) && is_dir($metaDirectory)) { if ($metaDirectoryHandler = opendir($metaDirectory)){ while ($file = readdir($metaDirectoryHandler)) { @@ -687,8 +695,12 @@ public static function tusFileCleaning() { } if ($metadata['upload_expires'] > $expiration_time) { $uploadFile = $uploadDirectory . pathinfo($file, PATHINFO_FILENAME) . ".part"; - unlink($metaFile); - unlink($uploadFile); + if (file_exists($metaFile)) { + unlink($metaFile); + } + if (file_exists($uploadFile)){ + unlink($uploadFile); + } } } } diff --git a/src/inc/apiv2/helper/importFile.routes.php b/src/inc/apiv2/helper/importFile.routes.php index dc06ba870..fec46de03 100644 --- a/src/inc/apiv2/helper/importFile.routes.php +++ b/src/inc/apiv2/helper/importFile.routes.php @@ -317,7 +317,7 @@ function processPatch(Request $request, Response $response, array $args): Respon } } - if (file_put_contents($filename, $chunk, FILE_APPEND) == false) { + if (file_put_contents($filename, $chunk, FILE_APPEND) === false) { $response->getBody()->write('Failed to write to file'); return $response->withStatus(400); } @@ -365,12 +365,17 @@ function processDelete(Request $request, Response $response, array $args): Respo /* Return 404 if entry is not found */ $filename_upload = self::getUploadPath($args['id']); $filename_meta = self::getMetaPath($args['id']); - if (!file_exists($filename_upload) && !file_exists($filename_meta)) { - $response->getBody()->write('Upload ID does not exist'); - return $response->withStatus(404); + $uploadExists = file_exists($filename_upload); + $metaExists = file_exists($filename_meta); + if (!$uploadExists && !$metaExists) { + throw new HttpError("Upload ID doesnt exists"); + } + if ($uploadExists) { + $isDeletedUpload = unlink($filename_upload); + } + if ($metaExists) { + $isDeletedMeta = unlink($filename_meta); } - $isDeletedUpload = unlink($filename_upload); - $isDeletedMeta = unlink($filename_meta); if (!$isDeletedMeta || !$isDeletedUpload) { throw new HttpError("Something went wrong while deleting the files");
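
Throughout the series the PATCH handler keeps the tus checksum extension: an optional Upload-Checksum header of the form "algorithm base64digest", restricted to the algorithms advertised via Tus-Checksum-Algorithm (md5, sha1, crc32). A self-contained sketch of that comparison; it assumes the digest is the base64 of the raw hash (the visible diff does not show which form the server expects), and the function name is illustrative:

<?php
// Illustrative sketch (not part of the patch): verify a tus Upload-Checksum
// header against a received chunk, using only the advertised algorithms.
function chunkChecksumMatches(string $header, string $chunk): bool {
    $parts = explode(' ', trim($header), 2);
    if (count($parts) !== 2) {
        return false; // malformed header
    }
    [$algo, $b64digest] = $parts;
    if (!in_array($algo, ['md5', 'sha1', 'crc32'], true)) {
        return false; // unsupported algorithm; the endpoint answers 400
    }
    $expected = base64_decode($b64digest, true); // strict decode of the client digest
    if ($expected === false) {
        return false;
    }
    // hash() supports md5, sha1 and crc32; raw binary output is compared
    // against the decoded digest in constant time.
    $actual = hash($algo, $chunk, true);
    return hash_equals($expected, $actual);
}

var_dump(chunkChecksumMatches('sha1 ' . base64_encode(sha1('hello', true)), 'hello'));
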