Dockerfile (21 additions, 20 deletions)
@@ -30,6 +30,9 @@ ENV HASHTOPOLIS_IMPORT_PATH=${HASHTOPOLIS_PATH}/import
ENV HASHTOPOLIS_LOG_PATH=${HASHTOPOLIS_PATH}/log
ENV HASHTOPOLIS_CONFIG_PATH=${HASHTOPOLIS_PATH}/config
ENV HASHTOPOLIS_BINARIES_PATH=${HASHTOPOLIS_PATH}/binaries
+ENV HASHTOPOLIS_TUS_PATH=/var/tmp/tus
+ENV HASHTOPOLIS_TEMP_UPLOADS_PATH=${HASHTOPOLIS_TUS_PATH}/uploads
+ENV HASHTOPOLIS_TEMP_META_PATH=${HASHTOPOLIS_TUS_PATH}/meta
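These two subdirectories mirror the uploads/ and meta/ split that tusFileCleaning() in src/inc/Util.class.php derives from the tus base path, so the image and the PHP code agree on the on-disk layout.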

# Add support for TLS inspection corporate setups, see .env.sample for details
ENV NODE_EXTRA_CA_CERTS=/etc/ssl/certs/ca-certificates.crt
@@ -67,26 +70,24 @@ RUN echo "ServerTokens Prod" >> /etc/apache2/apache2.conf \
&& echo "ServerSignature Off" >> /etc/apache2/apache2.conf


-RUN mkdir -p ${HASHTOPOLIS_DOCUMENT_ROOT} \
-  && mkdir ${HASHTOPOLIS_DOCUMENT_ROOT}/../../.git/ \
-  && mkdir -p ${HASHTOPOLIS_PATH} \
-  && chown www-data:www-data ${HASHTOPOLIS_PATH} \
-  && chmod g+w ${HASHTOPOLIS_PATH} \
-  && mkdir -p ${HASHTOPOLIS_FILES_PATH} \
-  && chown www-data:www-data ${HASHTOPOLIS_FILES_PATH} \
-  && chmod g+w ${HASHTOPOLIS_FILES_PATH} \
-  && mkdir -p ${HASHTOPOLIS_IMPORT_PATH} \
-  && chown www-data:www-data ${HASHTOPOLIS_IMPORT_PATH} \
-  && chmod g+w ${HASHTOPOLIS_IMPORT_PATH} \
-  && mkdir -p ${HASHTOPOLIS_LOG_PATH} \
-  && chown www-data:www-data ${HASHTOPOLIS_LOG_PATH} \
-  && chmod g+w ${HASHTOPOLIS_LOG_PATH} \
-  && mkdir -p ${HASHTOPOLIS_CONFIG_PATH} \
-  && chown www-data:www-data ${HASHTOPOLIS_CONFIG_PATH} \
-  && chmod g+w ${HASHTOPOLIS_CONFIG_PATH} \
-  && mkdir -p ${HASHTOPOLIS_BINARIES_PATH} \
-  && chown www-data:www-data ${HASHTOPOLIS_BINARIES_PATH} \
-  && chmod g+w ${HASHTOPOLIS_BINARIES_PATH}
+RUN mkdir -p \
+  ${HASHTOPOLIS_DOCUMENT_ROOT} \
+  ${HASHTOPOLIS_DOCUMENT_ROOT}/../../.git/ \
+  ${HASHTOPOLIS_PATH} \
+  ${HASHTOPOLIS_FILES_PATH} \
+  ${HASHTOPOLIS_IMPORT_PATH} \
+  ${HASHTOPOLIS_LOG_PATH} \
+  ${HASHTOPOLIS_CONFIG_PATH} \
+  ${HASHTOPOLIS_BINARIES_PATH} \
+  ${HASHTOPOLIS_TUS_PATH} \
+  ${HASHTOPOLIS_TEMP_UPLOADS_PATH} \
+  ${HASHTOPOLIS_TEMP_META_PATH} \
+  && chown -R www-data:www-data \
+    ${HASHTOPOLIS_PATH} \
+    ${HASHTOPOLIS_TUS_PATH} \
+  && chmod -R g+w \
+    ${HASHTOPOLIS_PATH} \
+    ${HASHTOPOLIS_TUS_PATH}
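Since ${HASHTOPOLIS_TUS_PATH} (/var/tmp/tus) now holds in-flight upload state, a container restart discards partial uploads unless the path is persisted. A minimal sketch with a named volume; the volume and image names are illustrative, not taken from this PR:

    # Keep partial tus uploads across container restarts.
    # "hashtopolis-tus" and the image tag are hypothetical placeholders.
    docker run -d \
      -v hashtopolis-tus:/var/tmp/tus \
      my-hashtopolis-server:latest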

COPY --from=prebuild /usr/local/cargo/bin/sqlx /usr/bin/

docker-entrypoint.sh (16 additions, 15 deletions)
@@ -49,22 +49,23 @@ while :; do
done
echo "Database ready!"

+directories=(
+    "${HASHTOPOLIS_FILES_PATH}"
+    "${HASHTOPOLIS_CONFIG_PATH}"
+    "${HASHTOPOLIS_LOG_PATH}"
+    "${HASHTOPOLIS_IMPORT_PATH}"
+    "${HASHTOPOLIS_BINARIES_PATH}"
+    "${HASHTOPOLIS_TUS_PATH}"
+    "${HASHTOPOLIS_TEMP_UPLOADS_PATH}"
+    "${HASHTOPOLIS_TEMP_META_PATH}"
+)
+
echo "Setting up folders"
-if [ ! -d ${HASHTOPOLIS_FILES_PATH} ];then
-    mkdir -p ${HASHTOPOLIS_FILES_PATH} && chown www-data:www-data ${HASHTOPOLIS_FILES_PATH}
-fi
-if [ ! -d ${HASHTOPOLIS_CONFIG_PATH} ];then
-    mkdir -p ${HASHTOPOLIS_CONFIG_PATH} && chown www-data:www-data ${HASHTOPOLIS_CONFIG_PATH}
-fi
-if [ ! -d ${HASHTOPOLIS_LOG_PATH} ];then
-    mkdir -p ${HASHTOPOLIS_LOG_PATH} && chown www-data:www-data ${HASHTOPOLIS_LOG_PATH}
-fi
-if [ ! -d ${HASHTOPOLIS_IMPORT_PATH} ];then
-    mkdir -p ${HASHTOPOLIS_IMPORT_PATH} && chown www-data:www-data ${HASHTOPOLIS_IMPORT_PATH}
-fi
-if [ ! -d ${HASHTOPOLIS_BINARIES_PATH} ];then
-    mkdir -p ${HASHTOPOLIS_BINARIES_PATH} && chown www-data:www-data ${HASHTOPOLIS_BINARIES_PATH}
-fi
+for dir in "${directories[@]}"; do
+    if [ ! -d "$dir" ];then
+        mkdir -p "$dir" && chown www-data:www-data "$dir"
+    fi
+done

# required to trigger the initialization
echo "Start initialization process..."
src/inc/Util.class.php (71 additions, 35 deletions)
@@ -621,55 +621,91 @@ public static function checkTaskWrapperCompleted($taskWrapper) {
}
return true;
}

+public static function cleaning() {
+  $entry = Factory::getStoredValueFactory()->get(DCleaning::LAST_CLEANING);
+  if ($entry == null) {
+    $entry = new StoredValue(DCleaning::LAST_CLEANING, 0);
+    Factory::getStoredValueFactory()->save($entry);
+  }
+  $time = time();
+  if ($time - $entry->getVal() > 600) {
+    self::agentStatCleaning();
+    self::zapCleaning();
+    self::tusFileCleaning();
+    Factory::getStoredValueFactory()->set($entry, StoredValue::VAL, $time);
+  }
+}
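The new cleaning() entry point reuses the project's StoredValue throttle: one LAST_CLEANING timestamp now gates all three cleaners, replacing the per-cleaner 600-second checks that the hunks below delete. A minimal sketch of that pattern generalized into a helper one could add to Util; runThrottled itself is hypothetical, while the Factory and StoredValue calls mirror the code above:

    // Hypothetical helper: a StoredValue row keyed by $key stores the
    // last-run timestamp, and $job only executes once per $window seconds.
    public static function runThrottled($key, $window, callable $job) {
      $entry = Factory::getStoredValueFactory()->get($key);
      if ($entry == null) {
        $entry = new StoredValue($key, 0); // first run: seed with epoch 0
        Factory::getStoredValueFactory()->save($entry);
      }
      if (time() - $entry->getVal() > $window) {
        $job();
        Factory::getStoredValueFactory()->set($entry, StoredValue::VAL, time());
      }
    }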

/**
* Checks if it is longer than 10 mins since the last time it was checked if there are
* any old agent statistic entries which can be deleted. If necessary, check is executed
* and old entries are deleted.
*/
public static function agentStatCleaning() {
-  $entry = Factory::getStoredValueFactory()->get(DStats::LAST_STAT_CLEANING);
-  if ($entry == null) {
-    $entry = new StoredValue(DStats::LAST_STAT_CLEANING, 0);
-    Factory::getStoredValueFactory()->save($entry);
-  }
-  if (time() - $entry->getVal() > 600) {
-    $lifetime = intval(SConfig::getInstance()->getVal(DConfig::AGENT_DATA_LIFETIME));
-    if ($lifetime <= 0) {
-      $lifetime = 3600;
-    }
-    $qF = new QueryFilter(AgentStat::TIME, time() - $lifetime, "<=");
-    Factory::getAgentStatFactory()->massDeletion([Factory::FILTER => $qF]);
-
-    $qF = new QueryFilter(Speed::TIME, time() - $lifetime, "<=");
-    Factory::getSpeedFactory()->massDeletion([Factory::FILTER => $qF]);
-
-    Factory::getStoredValueFactory()->set($entry, StoredValue::VAL, time());
+  $lifetime = intval(SConfig::getInstance()->getVal(DConfig::AGENT_DATA_LIFETIME));
+  if ($lifetime <= 0) {
+    $lifetime = 3600;
+  }
+  $qF = new QueryFilter(AgentStat::TIME, time() - $lifetime, "<=");
+  Factory::getAgentStatFactory()->massDeletion([Factory::FILTER => $qF]);
+
+  $qF = new QueryFilter(Speed::TIME, time() - $lifetime, "<=");
+  Factory::getSpeedFactory()->massDeletion([Factory::FILTER => $qF]);

}

/**
* Used by the solver. Cleans the zap-queue
*/
public static function zapCleaning() {
-  $entry = Factory::getStoredValueFactory()->get(DZaps::LAST_ZAP_CLEANING);
-  if ($entry == null) {
-    $entry = new StoredValue(DZaps::LAST_ZAP_CLEANING, 0);
-    Factory::getStoredValueFactory()->save($entry);
-  }
-  if (time() - $entry->getVal() > 600) {
-    $zapFilter = new QueryFilter(Zap::SOLVE_TIME, time() - 600, "<=");
-
-    // delete dependencies on AgentZap
-    $zaps = Factory::getZapFactory()->filter([Factory::FILTER => $zapFilter]);
-    $zapIds = Util::arrayOfIds($zaps);
-    $uS = new UpdateSet(AgentZap::LAST_ZAP_ID, null);
-    $qF = new ContainFilter(AgentZap::LAST_ZAP_ID, $zapIds);
-    Factory::getAgentZapFactory()->massUpdate([Factory::FILTER => $qF, Factory::UPDATE => $uS]);
-
-    Factory::getZapFactory()->massDeletion([Factory::FILTER => $zapFilter]);
-
-    Factory::getStoredValueFactory()->set($entry, StoredValue::VAL, time());
+  $zapFilter = new QueryFilter(Zap::SOLVE_TIME, time() - 600, "<=");
+
+  // delete dependencies on AgentZap
+  $zaps = Factory::getZapFactory()->filter([Factory::FILTER => $zapFilter]);
+  $zapIds = Util::arrayOfIds($zaps);
+  $uS = new UpdateSet(AgentZap::LAST_ZAP_ID, null);
+  $qF = new ContainFilter(AgentZap::LAST_ZAP_ID, $zapIds);
+  Factory::getAgentZapFactory()->massUpdate([Factory::FILTER => $qF, Factory::UPDATE => $uS]);
+
+  Factory::getZapFactory()->massDeletion([Factory::FILTER => $zapFilter]);
}
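As before, the ContainFilter/massUpdate pass clears AgentZap.LAST_ZAP_ID for any agent that references a zap about to be deleted, so the subsequent massDeletion leaves no dangling references behind.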

+/**
+ * Cleans up stale TUS upload files.
+ *
+ * This method scans the TUS metadata directory for .meta files, reads their
+ * metadata to determine upload expiration, and removes expired metadata files
+ * together with their corresponding upload (.part) files. It performs file
+ * system operations and may delete files on disk.
+ */
+public static function tusFileCleaning() {
+  $tusDirectory = Factory::getStoredValueFactory()->get(DDirectories::TUS)->getVal();
+  $uploadDirectory = $tusDirectory . DIRECTORY_SEPARATOR . "uploads" . DIRECTORY_SEPARATOR;
+  $metaDirectory = $tusDirectory . DIRECTORY_SEPARATOR . "meta" . DIRECTORY_SEPARATOR;
+  $expiration_time = time() + 3600;
+  if (file_exists($metaDirectory) && is_dir($metaDirectory)) {
+    if ($metaDirectoryHandler = opendir($metaDirectory)){
+      while ($file = readdir($metaDirectoryHandler)) {
+        if (str_ends_with($file, ".meta")) {
+          $metaFile = $metaDirectory . $file;
+          $metadata = (array)json_decode(file_get_contents($metaFile), true) ;
Review comment from Copilot AI (Jan 21, 2026) on the line above:

Missing error handling for potential JSON decode failure. If the metadata file is corrupted or not valid JSON, json_decode will return null, and accessing $metadata['upload_expires'] will generate a warning. Consider adding error handling to check if json_decode succeeded.

Suggested change:
-          $metadata = (array)json_decode(file_get_contents($metaFile), true) ;
+          $metaContent = file_get_contents($metaFile);
+          $metadata = json_decode($metaContent, true);
+          if (!is_array($metadata) || json_last_error() !== JSON_ERROR_NONE) {
+            // Invalid or unreadable metadata JSON, skip this file
+            continue;
+          }
+          if (!isset($metadata['upload_expires']) || !is_numeric($metadata['upload_expires'])) {
+            // Missing or invalid upload_expires field, skip this file
+            continue;
+          }
+          if (!isset($metadata['upload_expires'])) {
+            continue;
+          }
+          if ($metadata['upload_expires'] > $expiration_time) {
+            $uploadFile = $uploadDirectory . pathinfo($file, PATHINFO_FILENAME) . ".part";
+            if (file_exists($metaFile)) {
+              unlink($metaFile);
+            }
+            if (file_exists($uploadFile)){
+              unlink($uploadFile);
+            }
+          }
+        }
+      }
+      closedir($metaDirectoryHandler);
+    }
+  }
+}
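For reference, tusFileCleaning() only reads the upload_expires key from each .meta file and derives the matching .part path from the file name. A sketch of a metadata file it would process; every field except upload_expires is hypothetical:

    // Writes meta/example.meta next to uploads/example.part; the cleaner above
    // only consults 'upload_expires' (a Unix timestamp). Assumes the meta
    // directory from HASHTOPOLIS_TEMP_META_PATH already exists.
    $meta = [
      'upload_expires' => time() + 7200, // compared against time() + 3600
      'upload_length' => 1048576,        // hypothetical field, ignored by the cleaner
    ];
    file_put_contents('/var/tmp/tus/meta/example.meta', json_encode($meta));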

src/inc/api/APISendProgress.class.php (1 addition, 2 deletions)
@@ -537,8 +537,7 @@ public function execute($QUERY = array()) {
DServerLog::log(DServerLog::TRACE, "Checked zaps and sending new ones to agent", [$this->agent, $zaps]);
break;
}
-Util::zapCleaning();
-Util::agentStatCleaning();
+Util::cleaning();
$this->sendResponse(array(
PResponseSendProgress::ACTION => PActions::SEND_PROGRESS,
PResponseSendProgress::RESPONSE => PValues::SUCCESS,
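Note that Util::cleaning() gates all three cleanup routines behind the single LAST_CLEANING timestamp, so this call site stays as cheap as the two calls it replaces even though it now also triggers the TUS file cleanup.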