Compare commits
12 Commits
| SHA1 |
|---|
| a9fe342175 |
| 7669f5a10b |
| 34a4e06a23 |
| d00faf5fe7 |
| ad8cbc601a |
| 40e000b5bc |
| eee25a4dc6 |
| d66f4d93cb |
| f4f7f8ef38 |
| 0ccba45c40 |
| 620c916eb3 |
| f809cc09d2 |
.github/workflows/release-on-version.yml (164 changes, vendored)
@@ -9,52 +9,157 @@ on:
workflow_run:
workflows: ["Bump version and sync Changelog to Docker Repo"]
types: [completed]
workflow_dispatch:
inputs:
ref:
description: "Ref (branch or SHA) to build from (default: origin/master)"
required: false
version:
description: "Explicit version tag to release (e.g., v1.8.6). If empty, auto-detect."
required: false

permissions:
contents: write

jobs:
release:
delay:
runs-on: ubuntu-latest
steps:
- name: Delay 2 minutes
run: sleep 120

release:
needs: delay
runs-on: ubuntu-latest

# Guard: Only run on trusted workflow_run events (pushes from this repo)
if: >
github.event_name == 'push' ||
github.event_name == 'workflow_dispatch' ||
(github.event_name == 'workflow_run' &&
github.event.workflow_run.event == 'push' &&
github.event.workflow_run.head_repository.full_name == github.repository)

# Use run_id for a stable, unique key
concurrency:
# Cancel older runs for the same branch/ref so only the latest proceeds
group: release-${{ github.ref }}
cancel-in-progress: true
group: release-${{ github.run_id }}
cancel-in-progress: false

steps:
- name: Checkout correct ref
- name: Checkout (fetch all)
uses: actions/checkout@v4
with:
fetch-depth: 0
# For workflow_run, use the triggering workflow's head_sha; else use the current SHA
ref: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }}

- name: Ensure tags available
- name: Ensure tags + master available
shell: bash
run: |
git fetch --tags --force --prune --quiet
git fetch origin master --quiet

- name: Show recent tags (debug)
run: git tag --list "v*" --sort=-v:refname | head -n 20
- name: Resolve source ref + (maybe) version
id: pickref
shell: bash
run: |
set -euo pipefail

- name: Read version from version.js
# Defaults
REF=""
VER=""
SRC=""

if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
# manual run
REF_IN="${{ github.event.inputs.ref }}"
VER_IN="${{ github.event.inputs.version }}"
if [[ -n "$REF_IN" ]]; then
# Try branch/sha; fetch branch if needed
git fetch origin "$REF_IN" --quiet || true
if REF_SHA="$(git rev-parse --verify --quiet "$REF_IN")"; then
REF="$REF_SHA"
else
echo "Provided ref '$REF_IN' not found" >&2
exit 1
fi
else
REF="$(git rev-parse origin/master)"
fi
if [[ -n "$VER_IN" ]]; then
VER="$VER_IN"
SRC="manual-version"
fi
elif [[ "${{ github.event_name }}" == "workflow_run" ]]; then
REF="${{ github.event.workflow_run.head_sha }}"
else
REF="${{ github.sha }}"
fi

# If no explicit version, try to find the latest bot bump reachable from REF
if [[ -z "$VER" ]]; then
# Search recent history reachable from REF
BOT_SHA="$(git log "$REF" -n 200 --author='github-actions[bot]' --grep='set APP_VERSION to v' --pretty=%H | head -n1 || true)"
if [[ -n "$BOT_SHA" ]]; then
SUBJ="$(git log -n1 --pretty=%s "$BOT_SHA")"
BOT_VER="$(sed -n 's/.*set APP_VERSION to \(v[^ ]*\).*/\1/p' <<<"${SUBJ}")"
if [[ -n "$BOT_VER" ]]; then
VER="$BOT_VER"
REF="$BOT_SHA" # build/tag from the bump commit
SRC="bot-commit"
fi
fi
fi

# Output
REF_SHA="$(git rev-parse "$REF")"
echo "ref=$REF_SHA" >> "$GITHUB_OUTPUT"
echo "source=${SRC:-event-ref}" >> "$GITHUB_OUTPUT"
echo "preversion=${VER}" >> "$GITHUB_OUTPUT"
echo "Using source=${SRC:-event-ref} ref=$REF_SHA"
if [[ -n "$VER" ]]; then echo "Pre-resolved version=$VER"; fi

- name: Checkout chosen ref
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ steps.pickref.outputs.ref }}

- name: Assert ref is on master
shell: bash
run: |
set -euo pipefail
REF="${{ steps.pickref.outputs.ref }}"
git fetch origin master --quiet
if ! git merge-base --is-ancestor "$REF" origin/master; then
echo "Ref $REF is not on master; refusing to release."
exit 78
fi

- name: Debug version.js provenance
shell: bash
run: |
echo "version.js last-change commit: $(git log -n1 --pretty='%h %s' -- public/js/version.js || echo 'none')"
sed -n '1,20p' public/js/version.js || true

- name: Determine version
id: ver
shell: bash
run: |
set -euo pipefail
echo "version.js at commit: $(git rev-parse --short HEAD)"
sed -n '1,80p' public/js/version.js || true

VER=$(
grep -Eo "APP_VERSION[^\\n]*['\"]v[0-9][^'\"]+['\"]" public/js/version.js \
| sed -E "s/.*['\"](v[^'\"]+)['\"].*/\1/" \
| tail -n1
)
if [[ -z "${VER:-}" ]]; then
# Prefer pre-resolved version (manual input or bot commit)
if [[ -n "${{ steps.pickref.outputs.preversion }}" ]]; then
VER="${{ steps.pickref.outputs.preversion }}"
echo "version=$VER" >> "$GITHUB_OUTPUT"
echo "Parsed version (pre-resolved): $VER"
exit 0
fi
# Fallback to version.js
VER="$(grep -Eo "APP_VERSION\s*=\s*['\"]v[^'\"]+['\"]" public/js/version.js | sed -E "s/.*['\"](v[^'\"]+)['\"].*/\1/")"
if [[ -z "$VER" ]]; then
echo "Could not parse APP_VERSION from version.js" >&2
exit 1
fi
echo "version=$VER" >> "$GITHUB_OUTPUT"
echo "Parsed version: $VER"
echo "Parsed version (file): $VER"

- name: Skip if tag already exists
id: tagcheck

@@ -68,7 +173,6 @@ jobs:
echo "exists=false" >> "$GITHUB_OUTPUT"
fi

# Ensure the stamper is executable and has LF endings (helps if edited on Windows)
- name: Prep stamper script
if: steps.tagcheck.outputs.exists == 'false'
shell: bash

@@ -82,18 +186,13 @@ jobs:
shell: bash
run: |
set -euo pipefail
VER="${{ steps.ver.outputs.version }}" # e.g. v1.8.2
ZIP="FileRise-${VER}.zip"

# Clean staging copy (exclude dotfiles you don’t want)
VER="${{ steps.ver.outputs.version }}"
rm -rf staging
rsync -a \
--exclude '.git' --exclude '.github' \
--exclude 'resources' \
--exclude '.dockerignore' --exclude '.gitattributes' --exclude '.gitignore' \
./ staging/

# Stamp IN THE STAGING COPY (invoke via bash to avoid exec-bit issues)
bash ./scripts/stamp-assets.sh "${VER}" "$(pwd)/staging"

- name: Verify placeholders are gone (staging)

@@ -122,8 +221,7 @@ jobs:
run: |
set -euo pipefail
VER="${{ steps.ver.outputs.version }}"
ZIP="FileRise-${VER}.zip"
(cd staging && zip -r "../$ZIP" . >/dev/null)
(cd staging && zip -r "../FileRise-${VER}.zip" . >/dev/null)

- name: Compute SHA-256 checksum
if: steps.tagcheck.outputs.exists == 'false'

@@ -183,7 +281,6 @@ jobs:
COMPARE_URL="https://github.com/${REPO}/compare/${PREV}...${VER}"
ZIP="FileRise-${VER}.zip"
SHA="${{ steps.sum.outputs.sha }}"

{
echo
if [[ -s CHANGELOG_SNIPPET.md ]]; then

@@ -199,8 +296,6 @@ jobs:
echo "${SHA} ${ZIP}"
echo '```'
} > RELEASE_BODY.md

echo "Release body:"
sed -n '1,200p' RELEASE_BODY.md

- name: Create GitHub Release

@@ -208,8 +303,7 @@ jobs:
uses: softprops/action-gh-release@v2
with:
tag_name: ${{ steps.ver.outputs.version }}
# Point the tag at the same commit we checked out (handles workflow_run case)
target_commitish: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }}
target_commitish: ${{ steps.pickref.outputs.ref }}
name: ${{ steps.ver.outputs.version }}
body_path: RELEASE_BODY.md
generate_release_notes: false
CHANGELOG.md (61 changes)
@@ -1,6 +1,65 @@
# Changelog

## Changees 11/4/2025 (v1.8.3)
## Changes 11/5/2025 (v1.8.7)

release(v1.8.7): fix(zip-download): stream clean ZIP response and purge stale temp archives

- FileController::downloadZip
  - Remove _jsonStart/_jsonEnd and JSON wrappers; send a pure binary ZIP
  - Close session locks, disable gzip/output buffering, set Content-Length when known
  - Stream in 1MiB chunks; proper HTTP codes/messages on errors
  - Unlink the temp ZIP after successful send
  - Preserves all auth/ACL/ownership checks

- FileModel::createZipArchive
  - Purge META_DIR/ziptmp/download-*.zip older than 6h before creating a new ZIP

Result: fixes “failed to fetch / load failed” with fetch>blob flow and reduces leftover tmp ZIPs.
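
For illustration, a minimal sketch of the streaming pattern this entry describes; `$zipPath` is assumed to point at a finished archive, and the surrounding auth/CSRF checks are omitted (the real implementation is in the FileController diff further down):

```php
<?php
// Sketch only: stream an existing ZIP as a clean binary response, then delete it.
// $zipPath is assumed to be a finished archive created earlier in the request.
session_write_close();                         // release the session lock
while (ob_get_level() > 0) { ob_end_clean(); } // drop output buffering / gzip layers

$size = filesize($zipPath);
header('Content-Type: application/zip');
header('Content-Disposition: attachment; filename="files.zip"');
if ($size !== false) { header('Content-Length: ' . $size); }
header('Cache-Control: no-store, no-cache, must-revalidate');

$fp = fopen($zipPath, 'rb');
while (!feof($fp)) {
    $buf = fread($fp, 1048576);                // 1 MiB chunks
    if ($buf === false) { break; }
    echo $buf;
    flush();
}
fclose($fp);
unlink($zipPath);                              // purge the temp archive after sending
```
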
---

## Changes 11/4/2025 (v1.8.6)

release(v1.8.6): fix large ZIP downloads + safer extract; close #60

- Zip creation
  - Write archives to META_DIR/ziptmp (on large/writable disk) instead of system tmp.
  - Auto-create ziptmp (0775) and verify writability.
  - Free-space sanity check (~files total +5% +20MB); clearer error on low space.
  - Normalize/validate folder segments; include only regular files.
  - set_time_limit(0); use CREATE|OVERWRITE; improved error handling.

- Zip extraction
  - New: stamp metadata for files in nested subfolders (per-folder metadata.json).
  - Skip hidden “dot” paths (files/dirs with any segment starting with “.”) by default
    via SKIP_DOTFILES_ON_EXTRACT=true; only extract allow-listed entries.
  - Hardenings: zip-slip guard, reject symlinks (external_attributes), zip-bomb limits
    (MAX_UNZIP_BYTES default 200GiB, MAX_UNZIP_FILES default 20k).
  - Persist metadata for all touched folders; keep extractedFiles list for top-level names.

Ops note: ensure /var/www/metadata/ziptmp exists & is writable (or mount META_DIR to a large volume).

Closes #60.
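
A minimal sketch of the free-space sanity check mentioned above; the variable names (`$filesToZip`, `$workDir`) are illustrative, and the actual code is in the FileModel diff below:

```php
<?php
// Sketch only: refuse to build an archive when the workspace lacks headroom.
// $filesToZip: absolute paths selected for the archive; $workDir: the META_DIR/ziptmp workspace.
$totalSize = 0;
foreach ($filesToZip as $path) {
    $sz = filesize($path);
    if ($sz !== false) { $totalSize += (int)$sz; }
}

$free = disk_free_space($workDir);
$needed = (int)ceil($totalSize * 1.05) + 20 * 1024 * 1024;   // +5% cushion, +20 MB overhead
if ($free !== false && $totalSize > 0 && $free < $needed) {
    exit("Insufficient free space in ZIP workspace.\n");
}
```
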
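And a sketch of the extraction pre-scan idea (dot-path skip, symlink rejection, zip-bomb limits). Paths and limit values here are placeholders, and the sketch reads the Unix mode bits via `ZipArchive::getExternalAttributesIndex()`, whereas the diff below takes them from `statIndex()` output; treat it as an outline, not the exact implementation:

```php
<?php
// Sketch only: pre-scan a ZIP and build an allow-list before extracting anything.
$zip = new ZipArchive();
if ($zip->open('/tmp/upload.zip') !== true) {        // placeholder path
    exit("cannot open archive\n");
}

$maxBytes = 200 * 1024 ** 3;   // zip-bomb guard: total uncompressed bytes (200 GiB)
$maxFiles = 20000;             // zip-bomb guard: number of file entries
$allowed  = [];
$bytes = 0;
$count = 0;

for ($i = 0; $i < $zip->numFiles; $i++) {
    $name = $zip->getNameIndex($i);
    $stat = $zip->statIndex($i);
    if ($name === false || $stat === false) { exit("unreadable entry\n"); }

    // Skip hidden entries: any path segment starting with '.'
    $hidden = false;
    foreach (explode('/', trim(str_replace('\\', '/', $name), '/')) as $seg) {
        if ($seg !== '' && $seg[0] === '.') { $hidden = true; break; }
    }
    if ($hidden) { continue; }

    // Reject symlink entries (S_IFLNK in the Unix mode bits of the external attributes)
    if ($zip->getExternalAttributesIndex($i, $opsys, $attr)
        && $opsys === ZipArchive::OPSYS_UNIX
        && ((($attr >> 16) & 0xF000) === 0120000)) {
        exit("symlink entry rejected\n");
    }

    if (!str_ends_with($name, '/')) {                // regular file entry
        $count++;
        $bytes += (int)$stat['size'];
        if ($count > $maxFiles || $bytes > $maxBytes) { exit("zip-bomb limits exceeded\n"); }
    }
    $allowed[] = $name;
}

// Extract only the vetted entries.
$zip->extractTo('/srv/uploads/destination', $allowed);   // placeholder destination
$zip->close();
```
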
---

## Changes 11/4/2025 (v1.8.5)

release(v1.8.5): ci: reduce pre-run delay to 2-min and add missing `needs: delay`, final test

- No change release just testing

---

## Changes 11/4/2025 (v1.8.4)

release(v1.8.4): ci: add 3-min pre-run delay to avoid workflow_run races

- No change release just testing

---

## Changes 11/4/2025 (v1.8.3)

release(v1.8.3): feat(mobile+ci): harden Capacitor switcher & make release-on-version robust

@@ -1,2 +1,2 @@
// generated by CI
window.APP_VERSION = 'v1.8.2';
window.APP_VERSION = 'v1.8.7';

@@ -667,75 +667,96 @@ public function deleteFiles()

public function downloadZip()
{
$this->_jsonStart();
try {
if (!$this->_checkCsrf()) return;
if (!$this->_requireAuth()) return;

if (!$this->_checkCsrf()) { http_response_code(400); echo "Bad CSRF"; return; }
if (!$this->_requireAuth()) { http_response_code(401); echo "Unauthorized"; return; }

$data = $this->_readJsonBody();
if (!is_array($data) || !isset($data['folder'], $data['files']) || !is_array($data['files'])) {
$this->_jsonOut(["error" => "Invalid input."], 400); return;
http_response_code(400); echo "Invalid input."; return;
}

$folder = $this->_normalizeFolder($data['folder']);
$files = $data['files'];
if (!$this->_validFolder($folder)) { $this->_jsonOut(["error"=>"Invalid folder name."], 400); return; }

if (!$this->_validFolder($folder)) { http_response_code(400); echo "Invalid folder name."; return; }

$username = $_SESSION['username'] ?? '';
$perms = $this->loadPerms($username);

// Optional zip gate by account flag
if (!$this->isAdmin($perms) && !empty($perms['disableZip'])) {
$this->_jsonOut(["error" => "ZIP downloads are not allowed for your account."], 403); return;
http_response_code(403); echo "ZIP downloads are not allowed for your account."; return;
}

$ignoreOwnership = $this->isAdmin($perms)
|| ($perms['bypassOwnership'] ?? (defined('DEFAULT_BYPASS_OWNERSHIP') ? DEFAULT_BYPASS_OWNERSHIP : false));

// Ancestor-owner counts as full view
$fullView = $ignoreOwnership
|| ACL::canRead($username, $perms, $folder)
|| $this->ownsFolderOrAncestor($folder, $username, $perms);
$ownOnly = !$fullView && ACL::hasGrant($username, $folder, 'read_own');

if (!$fullView && !$ownOnly) {
$this->_jsonOut(["error" => "Forbidden: no view access to this folder."], 403); return;
}

// If own-only, ensure all files are owned by the user

if (!$fullView && !$ownOnly) { http_response_code(403); echo "Forbidden: no view access to this folder."; return; }

if ($ownOnly) {
$meta = $this->loadFolderMetadata($folder);
foreach ($files as $f) {
$bn = basename((string)$f);
if (!isset($meta[$bn]['uploader']) || strcasecmp((string)$meta[$bn]['uploader'], $username) !== 0) {
$this->_jsonOut(["error" => "Forbidden: you are not the owner of '{$bn}'."], 403); return;
http_response_code(403); echo "Forbidden: you are not the owner of '{$bn}'."; return;
}
}
}

$result = FileModel::createZipArchive($folder, $files);
if (isset($result['error'])) {
$this->_jsonOut(["error" => $result['error']], 400); return;
}

if (isset($result['error'])) { http_response_code(400); echo $result['error']; return; }

$zipPath = $result['zipPath'] ?? null;
if (!$zipPath || !file_exists($zipPath)) { $this->_jsonOut(["error"=>"ZIP archive not found."], 500); return; }

// switch to file streaming
if (!$zipPath || !is_file($zipPath)) { http_response_code(500); echo "ZIP archive not found."; return; }

// ---- Clean binary stream setup ----
@session_write_close();
@set_time_limit(0);
@ignore_user_abort(true);
if (function_exists('apache_setenv')) { @apache_setenv('no-gzip', '1'); }
@ini_set('zlib.output_compression', '0');
@ini_set('output_buffering', 'off');
while (ob_get_level() > 0) { @ob_end_clean(); }

@clearstatcache(true, $zipPath);
$size = (int)@filesize($zipPath);

header('X-Accel-Buffering: no');
header_remove('Content-Type');
header('Content-Type: application/zip');
// Client sets the final name via a.download in your JS; server can be generic
header('Content-Disposition: attachment; filename="files.zip"');
header('Content-Length: ' . filesize($zipPath));
if ($size > 0) header('Content-Length: ' . $size);
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Pragma: no-cache');

readfile($zipPath);

$fp = fopen($zipPath, 'rb');
if ($fp === false) { http_response_code(500); echo "Failed to open ZIP."; return; }

$chunk = 1048576; // 1 MiB
while (!feof($fp)) {
$buf = fread($fp, $chunk);
if ($buf === false) break;
echo $buf;
flush();
}
fclose($fp);
@unlink($zipPath);
exit;

} catch (Throwable $e) {
error_log('FileController::downloadZip error: '.$e->getMessage().' @ '.$e->getFile().':'.$e->getLine());
$this->_jsonOut(['error' => 'Internal server error while preparing ZIP.'], 500);
} finally { $this->_jsonEnd(); }
if (!headers_sent()) http_response_code(500);
echo "Internal server error while preparing ZIP.";
}

}

public function extractZip()

@@ -557,59 +557,103 @@ class FileModel {
* @return array An associative array with either an "error" key or a "zipPath" key.
*/
public static function createZipArchive($folder, $files) {
// Validate and build folder path.
$folder = trim($folder) ?: 'root';

// (optional) purge old temp zips > 6h
$zipRoot = rtrim((string)META_DIR, '/\\') . DIRECTORY_SEPARATOR . 'ziptmp';
$now = time();
foreach (glob($zipRoot . DIRECTORY_SEPARATOR . 'download-*.zip') ?: [] as $zp) {
if (is_file($zp) && ($now - @filemtime($zp)) > 21600) { @unlink($zp); }
}
// Normalize and validate target folder
$folder = trim((string)$folder) ?: 'root';
$baseDir = realpath(UPLOAD_DIR);
if ($baseDir === false) {
return ["error" => "Uploads directory not configured correctly."];
}

if (strtolower($folder) === 'root' || $folder === "") {
$folderPathReal = $baseDir;
} else {
// Prevent path traversal.
// Prevent traversal and validate each segment against folder regex
if (strpos($folder, '..') !== false) {
return ["error" => "Invalid folder name."];
}
$folderPath = rtrim(UPLOAD_DIR, '/\\') . DIRECTORY_SEPARATOR . trim($folder, "/\\ ");
$parts = explode('/', trim($folder, "/\\ "));
foreach ($parts as $part) {
if ($part === '' || !preg_match(REGEX_FOLDER_NAME, $part)) {
return ["error" => "Invalid folder name."];
}
}
$folderPath = rtrim(UPLOAD_DIR, '/\\') . DIRECTORY_SEPARATOR . implode(DIRECTORY_SEPARATOR, $parts);
$folderPathReal = realpath($folderPath);
if ($folderPathReal === false || strpos($folderPathReal, $baseDir) !== 0) {
return ["error" => "Folder not found."];
}
}

// Validate each file and build an array of files to zip.

// Collect files to zip (only regular files in the chosen folder)
$filesToZip = [];
foreach ($files as $fileName) {
// Validate file name using REGEX_FILE_NAME.
$fileName = basename(trim($fileName));
$fileName = basename(trim((string)$fileName));
if (!preg_match(REGEX_FILE_NAME, $fileName)) {
continue;
}
$fullPath = $folderPathReal . DIRECTORY_SEPARATOR . $fileName;
if (file_exists($fullPath)) {
if (is_file($fullPath)) {
$filesToZip[] = $fullPath;
}
}
if (empty($filesToZip)) {
return ["error" => "No valid files found to zip."];
}

// Create a temporary ZIP file.
$tempZip = tempnam(sys_get_temp_dir(), 'zip');
unlink($tempZip); // Remove the temp file so that ZipArchive can create a new file.
$tempZip .= '.zip';

$zip = new ZipArchive();
if ($zip->open($tempZip, ZipArchive::CREATE) !== TRUE) {

// Workspace on the big disk: META_DIR/ziptmp
$work = rtrim((string)META_DIR, '/\\') . DIRECTORY_SEPARATOR . 'ziptmp';
if (!is_dir($work)) {
@mkdir($work, 0775, true);
}
if (!is_dir($work) || !is_writable($work)) {
return ["error" => "ZIP temp dir not writable: " . $work];
}

// Optional sanity: ensure there is roughly enough free space
$totalSize = 0;
foreach ($filesToZip as $fp) {
$sz = @filesize($fp);
if ($sz !== false) $totalSize += (int)$sz;
}
$free = @disk_free_space($work);
// Add ~20MB overhead and a 5% cushion
if ($free !== false && $totalSize > 0) {
$needed = (int)ceil($totalSize * 1.05) + (20 * 1024 * 1024);
if ($free < $needed) {
return ["error" => "Insufficient free space in ZIP workspace."];
}
}

@set_time_limit(0);

// Create the ZIP path inside META_DIR/ziptmp
$zipName = 'download-' . date('Ymd-His') . '-' . bin2hex(random_bytes(4)) . '.zip';
$zipPath = $work . DIRECTORY_SEPARATOR . $zipName;

$zip = new \ZipArchive();
if ($zip->open($zipPath, \ZipArchive::CREATE | \ZipArchive::OVERWRITE) !== true) {
return ["error" => "Could not create zip archive."];
}
// Add each file using its base name.

foreach ($filesToZip as $filePath) {
// Add using basename at the root of the zip (matches your current behavior)
$zip->addFile($filePath, basename($filePath));
}
$zip->close();

return ["zipPath" => $tempZip];

if (!$zip->close()) {
// Commonly indicates disk full at finalize
return ["error" => "Failed to finalize ZIP (disk full?)."];
}

// Success: controller will readfile() and unlink()
return ["zipPath" => $zipPath];
}

/**

@@ -623,15 +667,23 @@ class FileModel {
$errors = [];
$allSuccess = true;
$extractedFiles = [];

// Config toggles
$SKIP_DOTFILES = defined('SKIP_DOTFILES_ON_EXTRACT') ? (bool)SKIP_DOTFILES_ON_EXTRACT : true;

// Hard limits to mitigate zip-bombs (tweak via defines if you like)
$MAX_UNZIP_BYTES = defined('MAX_UNZIP_BYTES') ? (int)MAX_UNZIP_BYTES : (200 * 1024 * 1024 * 1024); // 200 GiB
$MAX_UNZIP_FILES = defined('MAX_UNZIP_FILES') ? (int)MAX_UNZIP_FILES : 20000;

$baseDir = realpath(UPLOAD_DIR);
if ($baseDir === false) {
return ["error" => "Uploads directory not configured correctly."];
}

// Build target dir
if (strtolower(trim($folder) ?: '') === "root") {
$relativePath = "";
$folderNorm = "root";
} else {
$parts = explode('/', trim($folder, "/\\"));
foreach ($parts as $part) {

@@ -640,9 +692,10 @@ class FileModel {
}
}
$relativePath = implode(DIRECTORY_SEPARATOR, $parts) . DIRECTORY_SEPARATOR;
$folderNorm = implode('/', $parts); // normalized with forward slashes for metadata helpers
}

$folderPath = $baseDir . DIRECTORY_SEPARATOR . $relativePath;

$folderPath = $baseDir . DIRECTORY_SEPARATOR . $relativePath;
if (!is_dir($folderPath) && !mkdir($folderPath, 0775, true)) {
return ["error" => "Folder not found and cannot be created."];
}

@@ -650,17 +703,74 @@ class FileModel {
if ($folderPathReal === false || strpos($folderPathReal, $baseDir) !== 0) {
return ["error" => "Folder not found."];
}

// Prepare metadata container
$metadataFile = self::getMetadataFilePath($folder);
$destMetadata = file_exists($metadataFile) ? (json_decode(file_get_contents($metadataFile), true) ?: []) : [];

// Metadata cache per folder to avoid many reads/writes
$metaCache = [];
$getMeta = function(string $folderStr) use (&$metaCache) {
if (!isset($metaCache[$folderStr])) {
$mf = self::getMetadataFilePath($folderStr);
$metaCache[$folderStr] = file_exists($mf) ? (json_decode(file_get_contents($mf), true) ?: []) : [];
}
return $metaCache[$folderStr];
};
$putMeta = function(string $folderStr, array $meta) use (&$metaCache) {
$metaCache[$folderStr] = $meta;
};

$safeFileNamePattern = REGEX_FILE_NAME;
$actor = $_SESSION['username'] ?? 'Unknown';
$now = date(DATE_TIME_FORMAT);

// --- Helpers ---

// Reject absolute paths, traversal, drive letters
$isUnsafeEntryPath = function(string $entry) : bool {
$e = str_replace('\\', '/', $entry);
if ($e === '' || str_contains($e, "\0")) return true;
if (str_starts_with($e, '/')) return true; // absolute nix path
if (preg_match('/^[A-Za-z]:[\\/]/', $e)) return true; // Windows drive
if (str_contains($e, '../') || str_contains($e, '..\\')) return true;
return false;
};

// Validate each subfolder name in the path using REGEX_FOLDER_NAME
$validEntrySubdirs = function(string $entry) : bool {
$e = trim(str_replace('\\', '/', $entry), '/');
if ($e === '') return true;
$dirs = explode('/', $e);
array_pop($dirs); // remove basename; we only validate directories here
foreach ($dirs as $d) {
if ($d === '' || !preg_match(REGEX_FOLDER_NAME, $d)) return false;
}
return true;
};

// NEW: hidden path detector — true if ANY segment starts with '.'
$isHiddenDotPath = function(string $entry) : bool {
$e = trim(str_replace('\\', '/', $entry), '/');
if ($e === '') return false;
foreach (explode('/', $e) as $seg) {
if ($seg !== '' && $seg[0] === '.') return true;
}
return false;
};

// Generalized metadata stamper: writes to the specified folder's metadata.json
$stampMeta = function(string $folderStr, string $basename) use (&$getMeta, &$putMeta, $actor, $now) {
$meta = $getMeta($folderStr);
$meta[$basename] = [
'uploaded' => $now,
'modified' => $now,
'uploader' => $actor,
];
$putMeta($folderStr, $meta);
};

// No PHP execution time limit during heavy work
@set_time_limit(0);

foreach ($files as $zipFileName) {
$zipBase = basename(trim($zipFileName));
$zipBase = basename(trim((string)$zipFileName));
if (strtolower(substr($zipBase, -4)) !== '.zip') {
continue;
}

@@ -669,76 +779,135 @@ class FileModel {
$allSuccess = false;
continue;
}

$zipFilePath = $folderPathReal . DIRECTORY_SEPARATOR . $zipBase;
if (!file_exists($zipFilePath)) {
$errors[] = "$zipBase does not exist in folder.";
$allSuccess = false;
continue;
}

$zip = new ZipArchive();
if ($zip->open($zipFilePath) !== TRUE) {

$zip = new \ZipArchive();
if ($zip->open($zipFilePath) !== true) {
$errors[] = "Could not open $zipBase as a zip file.";
$allSuccess = false;
continue;
}

// Minimal Zip Slip guard: fail if any entry looks unsafe

// ---- Pre-scan: safety and size limits + build allow-list (skip dotfiles) ----
$unsafe = false;
$totalUncompressed = 0;
$fileCount = 0;
$allowedEntries = []; // names to extract (files and/or directories)
$allowedFiles = []; // only files (for metadata stamping)

for ($i = 0; $i < $zip->numFiles; $i++) {
$entryName = $zip->getNameIndex($i);
if ($entryName === false) { $unsafe = true; break; }
// Absolute paths, parent traversal, or Windows drive paths
if (strpos($entryName, '../') !== false || strpos($entryName, '..\\') !== false ||
str_starts_with($entryName, '/') || preg_match('/^[A-Za-z]:[\\\\\\/]/', $entryName)) {
$stat = $zip->statIndex($i);
$name = $zip->getNameIndex($i);
if ($name === false || !$stat) { $unsafe = true; break; }

$isDir = str_ends_with($name, '/');

// Basic path checks
if ($isUnsafeEntryPath($name) || !$validEntrySubdirs($name)) { $unsafe = true; break; }

// Skip hidden entries (any segment starts with '.')
if ($SKIP_DOTFILES && $isHiddenDotPath($name)) {
continue; // just ignore; do not treat as unsafe
}

// Detect symlinks via external attributes (best-effort)
$mode = (isset($stat['external_attributes']) ? (($stat['external_attributes'] >> 16) & 0xF000) : 0);
if ($mode === 0120000) { // S_IFLNK
$unsafe = true; break;
}

// Track limits only for files we're going to extract
if (!$isDir) {
$fileCount++;
$sz = isset($stat['size']) ? (int)$stat['size'] : 0;
$totalUncompressed += $sz;
if ($fileCount > $MAX_UNZIP_FILES || $totalUncompressed > $MAX_UNZIP_BYTES) {
$unsafe = true; break;
}
$allowedFiles[] = $name;
}

$allowedEntries[] = $name;
}

if ($unsafe) {
$zip->close();
$errors[] = "$zipBase contains unsafe paths; extraction aborted.";
$errors[] = "$zipBase contains unsafe or oversized contents; extraction aborted.";
$allSuccess = false;
continue;
}

// Extract safely (whole archive) after precheck
if (!$zip->extractTo($folderPathReal)) {

// Nothing to extract after filtering?
if (empty($allowedEntries)) {
$zip->close();
// Treat as success (nothing visible to extract), but informatively note it
$errors[] = "$zipBase contained only hidden or unsupported entries.";
$allSuccess = false; // or keep true if you'd rather not mark as failure
continue;
}

// ---- Extract ONLY the allowed entries ----
if (!$zip->extractTo($folderPathReal, $allowedEntries)) {
$errors[] = "Failed to extract $zipBase.";
$allSuccess = false;
$zip->close();
continue;
}

// Stamp metadata for extracted regular files
for ($i = 0; $i < $zip->numFiles; $i++) {
$entryName = $zip->getNameIndex($i);
if ($entryName === false) continue;

$basename = basename($entryName);

// ---- Stamp metadata for files in the target folder AND nested subfolders (allowed files only) ----
foreach ($allowedFiles as $entryName) {
// Normalize entry path for filesystem checks
$entryFsRel = str_replace(['\\'], '/', $entryName);
$entryFsRel = ltrim($entryFsRel, '/'); // ensure relative

// Skip any directories (shouldn't be listed here, but defend anyway)
if ($entryFsRel === '' || str_ends_with($entryFsRel, '/')) continue;

$basename = basename($entryFsRel);
if ($basename === '' || !preg_match($safeFileNamePattern, $basename)) continue;

// Only stamp files that actually exist after extraction
$target = $folderPathReal . DIRECTORY_SEPARATOR . $entryName;
$isDir = str_ends_with($entryName, '/') || is_dir($target);
if ($isDir) continue;

$extractedFiles[] = $basename;
$destMetadata[$basename] = [
'uploaded' => $now,
'modified' => $now,
'uploader' => $actor,
// no tags by default
];

// Decide which folder's metadata to update:
// - top-level files -> $folderNorm
// - nested files -> corresponding "<folderNorm>/<sub/dir>" (or "sub/dir" if folderNorm is 'root')
$relDir = str_replace('\\', '/', trim(dirname($entryFsRel), '.'));
$relDir = ($relDir === '.' ? '' : trim($relDir, '/'));

$targetFolderNorm = ($relDir === '' || $relDir === '.')
? $folderNorm
: (($folderNorm === 'root') ? $relDir : ($folderNorm . '/' . $relDir));

// Only stamp if the file actually exists on disk after extraction
$targetAbs = $folderPathReal . DIRECTORY_SEPARATOR . str_replace('/', DIRECTORY_SEPARATOR, $entryFsRel);
if (is_file($targetAbs)) {
// Preserve list behavior: only include top-level extracted names
if ($relDir === '' || $relDir === '.') {
$extractedFiles[] = $basename;
}
$stampMeta($targetFolderNorm, $basename);
}
}

$zip->close();
}

if (file_put_contents($metadataFile, json_encode($destMetadata, JSON_PRETTY_PRINT), LOCK_EX) === false) {
$errors[] = "Failed to update metadata.";
$allSuccess = false;

// Persist metadata for any touched folder(s)
foreach ($metaCache as $folderStr => $meta) {
$metadataFile = self::getMetadataFilePath($folderStr);
if (!is_dir(dirname($metadataFile))) {
@mkdir(dirname($metadataFile), 0775, true);
}
if (file_put_contents($metadataFile, json_encode($meta, JSON_PRETTY_PRINT), LOCK_EX) === false) {
$errors[] = "Failed to update metadata for {$folderStr}.";
$allSuccess = false;
}
}

return $allSuccess
? ["success" => true, "extractedFiles" => $extractedFiles]
: ["success" => false, "error" => implode(" ", $errors)];