Compare commits

10 Commits

| SHA1 |
|---|
| a9fe342175 |
| 7669f5a10b |
| 34a4e06a23 |
| d00faf5fe7 |
| ad8cbc601a |
| 40e000b5bc |
| eee25a4dc6 |
| d66f4d93cb |
| f4f7f8ef38 |
| 0ccba45c40 |
.github/workflows/release-on-version.yml (vendored, 160 changed lines)
@@ -9,6 +9,14 @@ on:
   workflow_run:
     workflows: ["Bump version and sync Changelog to Docker Repo"]
     types: [completed]
+  workflow_dispatch:
+    inputs:
+      ref:
+        description: "Ref (branch or SHA) to build from (default: origin/master)"
+        required: false
+      version:
+        description: "Explicit version tag to release (e.g., v1.8.6). If empty, auto-detect."
+        required: false
 
 permissions:
   contents: write

@@ -17,50 +25,141 @@ jobs:
   delay:
     runs-on: ubuntu-latest
     steps:
-      - name: Delay 3 minutes
-        run: sleep 180
+      - name: Delay 2 minutes
+        run: sleep 120
 
   release:
+    needs: delay
     runs-on: ubuntu-latest
 
+    # Guard: Only run on trusted workflow_run events (pushes from this repo)
+    if: >
+      github.event_name == 'push' ||
+      github.event_name == 'workflow_dispatch' ||
+      (github.event_name == 'workflow_run' &&
+       github.event.workflow_run.event == 'push' &&
+       github.event.workflow_run.head_repository.full_name == github.repository)
+
+    # Use run_id for a stable, unique key
     concurrency:
-      # Cancel older runs for the same branch/ref so only the latest proceeds
-      group: release-${{ github.ref }}
-      cancel-in-progress: true
+      group: release-${{ github.run_id }}
+      cancel-in-progress: false
 
     steps:
-      - name: Checkout correct ref
+      - name: Checkout (fetch all)
         uses: actions/checkout@v4
         with:
           fetch-depth: 0
-          # For workflow_run, use the triggering workflow's head_sha; else use the current SHA
-          ref: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }}
 
-      - name: Ensure tags available
+      - name: Ensure tags + master available
+        shell: bash
         run: |
           git fetch --tags --force --prune --quiet
+          git fetch origin master --quiet
 
-      - name: Show recent tags (debug)
-        run: git tag --list "v*" --sort=-v:refname | head -n 20
+      - name: Resolve source ref + (maybe) version
+        id: pickref
+        shell: bash
+        run: |
+          set -euo pipefail
+
+          # Defaults
+          REF=""
+          VER=""
+          SRC=""
+
+          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
+            # manual run
+            REF_IN="${{ github.event.inputs.ref }}"
+            VER_IN="${{ github.event.inputs.version }}"
+            if [[ -n "$REF_IN" ]]; then
+              # Try branch/sha; fetch branch if needed
+              git fetch origin "$REF_IN" --quiet || true
+              if REF_SHA="$(git rev-parse --verify --quiet "$REF_IN")"; then
+                REF="$REF_SHA"
+              else
+                echo "Provided ref '$REF_IN' not found" >&2
+                exit 1
+              fi
+            else
+              REF="$(git rev-parse origin/master)"
+            fi
+            if [[ -n "$VER_IN" ]]; then
+              VER="$VER_IN"
+              SRC="manual-version"
+            fi
+          elif [[ "${{ github.event_name }}" == "workflow_run" ]]; then
+            REF="${{ github.event.workflow_run.head_sha }}"
+          else
+            REF="${{ github.sha }}"
+          fi
+
+          # If no explicit version, try to find the latest bot bump reachable from REF
+          if [[ -z "$VER" ]]; then
+            # Search recent history reachable from REF
+            BOT_SHA="$(git log "$REF" -n 200 --author='github-actions[bot]' --grep='set APP_VERSION to v' --pretty=%H | head -n1 || true)"
+            if [[ -n "$BOT_SHA" ]]; then
+              SUBJ="$(git log -n1 --pretty=%s "$BOT_SHA")"
+              BOT_VER="$(sed -n 's/.*set APP_VERSION to \(v[^ ]*\).*/\1/p' <<<"${SUBJ}")"
+              if [[ -n "$BOT_VER" ]]; then
+                VER="$BOT_VER"
+                REF="$BOT_SHA"  # build/tag from the bump commit
+                SRC="bot-commit"
+              fi
+            fi
+          fi
+
+          # Output
+          REF_SHA="$(git rev-parse "$REF")"
+          echo "ref=$REF_SHA" >> "$GITHUB_OUTPUT"
+          echo "source=${SRC:-event-ref}" >> "$GITHUB_OUTPUT"
+          echo "preversion=${VER}" >> "$GITHUB_OUTPUT"
+          echo "Using source=${SRC:-event-ref} ref=$REF_SHA"
+          if [[ -n "$VER" ]]; then echo "Pre-resolved version=$VER"; fi
+
+      - name: Checkout chosen ref
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          ref: ${{ steps.pickref.outputs.ref }}
+
+      - name: Assert ref is on master
+        shell: bash
+        run: |
+          set -euo pipefail
+          REF="${{ steps.pickref.outputs.ref }}"
+          git fetch origin master --quiet
+          if ! git merge-base --is-ancestor "$REF" origin/master; then
+            echo "Ref $REF is not on master; refusing to release."
+            exit 78
+          fi
+
+      - name: Debug version.js provenance
+        shell: bash
+        run: |
+          echo "version.js last-change commit: $(git log -n1 --pretty='%h %s' -- public/js/version.js || echo 'none')"
+          sed -n '1,20p' public/js/version.js || true
 
-      - name: Read version from version.js
+      - name: Determine version
         id: ver
         shell: bash
         run: |
           set -euo pipefail
-          echo "version.js at commit: $(git rev-parse --short HEAD)"
-          sed -n '1,80p' public/js/version.js || true
-
-          VER=$(
-            grep -Eo "APP_VERSION[^\\n]*['\"]v[0-9][^'\"]+['\"]" public/js/version.js \
-              | sed -E "s/.*['\"](v[^'\"]+)['\"].*/\1/" \
-              | tail -n1
-          )
-          if [[ -z "${VER:-}" ]]; then
+          # Prefer pre-resolved version (manual input or bot commit)
+          if [[ -n "${{ steps.pickref.outputs.preversion }}" ]]; then
+            VER="${{ steps.pickref.outputs.preversion }}"
+            echo "version=$VER" >> "$GITHUB_OUTPUT"
+            echo "Parsed version (pre-resolved): $VER"
+            exit 0
+          fi
+          # Fallback to version.js
+          VER="$(grep -Eo "APP_VERSION\s*=\s*['\"]v[^'\"]+['\"]" public/js/version.js | sed -E "s/.*['\"](v[^'\"]+)['\"].*/\1/")"
+          if [[ -z "$VER" ]]; then
            echo "Could not parse APP_VERSION from version.js" >&2
            exit 1
           fi
           echo "version=$VER" >> "$GITHUB_OUTPUT"
-          echo "Parsed version: $VER"
+          echo "Parsed version (file): $VER"
 
       - name: Skip if tag already exists
         id: tagcheck

@@ -74,7 +173,6 @@ jobs:
             echo "exists=false" >> "$GITHUB_OUTPUT"
           fi
 
-      # Ensure the stamper is executable and has LF endings (helps if edited on Windows)
       - name: Prep stamper script
         if: steps.tagcheck.outputs.exists == 'false'
         shell: bash

@@ -88,18 +186,13 @@
         shell: bash
         run: |
           set -euo pipefail
-          VER="${{ steps.ver.outputs.version }}"  # e.g. v1.8.2
-          ZIP="FileRise-${VER}.zip"
+          VER="${{ steps.ver.outputs.version }}"
 
-          # Clean staging copy (exclude dotfiles you don’t want)
           rm -rf staging
           rsync -a \
             --exclude '.git' --exclude '.github' \
             --exclude 'resources' \
             --exclude '.dockerignore' --exclude '.gitattributes' --exclude '.gitignore' \
             ./ staging/
 
-          # Stamp IN THE STAGING COPY (invoke via bash to avoid exec-bit issues)
           bash ./scripts/stamp-assets.sh "${VER}" "$(pwd)/staging"
 
       - name: Verify placeholders are gone (staging)

@@ -128,8 +221,7 @@
         run: |
           set -euo pipefail
           VER="${{ steps.ver.outputs.version }}"
-          ZIP="FileRise-${VER}.zip"
-          (cd staging && zip -r "../$ZIP" . >/dev/null)
+          (cd staging && zip -r "../FileRise-${VER}.zip" . >/dev/null)
 
       - name: Compute SHA-256 checksum
         if: steps.tagcheck.outputs.exists == 'false'

@@ -189,7 +281,6 @@
           COMPARE_URL="https://github.com/${REPO}/compare/${PREV}...${VER}"
           ZIP="FileRise-${VER}.zip"
           SHA="${{ steps.sum.outputs.sha }}"
-
           {
             echo
             if [[ -s CHANGELOG_SNIPPET.md ]]; then

@@ -205,8 +296,6 @@
             echo "${SHA} ${ZIP}"
             echo '```'
           } > RELEASE_BODY.md
 
-          echo "Release body:"
           sed -n '1,200p' RELEASE_BODY.md
 
       - name: Create GitHub Release

@@ -214,8 +303,7 @@
         uses: softprops/action-gh-release@v2
         with:
           tag_name: ${{ steps.ver.outputs.version }}
-          # Point the tag at the same commit we checked out (handles workflow_run case)
-          target_commitish: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }}
+          target_commitish: ${{ steps.pickref.outputs.ref }}
           name: ${{ steps.ver.outputs.version }}
           body_path: RELEASE_BODY.md
           generate_release_notes: false
CHANGELOG.md (51 changed lines)

@@ -1,5 +1,56 @@
 # Changelog
 
+## Changes 11/5/2025 (v1.8.7)
+
+release(v1.8.7): fix(zip-download): stream clean ZIP response and purge stale temp archives
+
+- FileController::downloadZip
+  - Remove _jsonStart/_jsonEnd and JSON wrappers; send a pure binary ZIP
+  - Close session locks, disable gzip/output buffering, set Content-Length when known
+  - Stream in 1MiB chunks; proper HTTP codes/messages on errors
+  - Unlink the temp ZIP after successful send
+  - Preserves all auth/ACL/ownership checks
+
+- FileModel::createZipArchive
+  - Purge META_DIR/ziptmp/download-*.zip older than 6h before creating a new ZIP
+
+Result: fixes “failed to fetch / load failed” with fetch>blob flow and reduces leftover tmp ZIPs.
+
+---
+
+## Changes 11/4/2025 (v1.8.6)
+
+release(v1.8.6): fix large ZIP downloads + safer extract; close #60
+
+- Zip creation
+  - Write archives to META_DIR/ziptmp (on large/writable disk) instead of system tmp.
+  - Auto-create ziptmp (0775) and verify writability.
+  - Free-space sanity check (~files total +5% +20MB); clearer error on low space.
+  - Normalize/validate folder segments; include only regular files.
+  - set_time_limit(0); use CREATE|OVERWRITE; improved error handling.
+
+- Zip extraction
+  - New: stamp metadata for files in nested subfolders (per-folder metadata.json).
+  - Skip hidden “dot” paths (files/dirs with any segment starting with “.”) by default
+    via SKIP_DOTFILES_ON_EXTRACT=true; only extract allow-listed entries.
+  - Hardenings: zip-slip guard, reject symlinks (external_attributes), zip-bomb limits
+    (MAX_UNZIP_BYTES default 200GiB, MAX_UNZIP_FILES default 20k).
+  - Persist metadata for all touched folders; keep extractedFiles list for top-level names.
+
+Ops note: ensure /var/www/metadata/ziptmp exists & is writable (or mount META_DIR to a large volume).
+
+Closes #60.
+
+---
+
+## Changes 11/4/2025 (v1.8.5)
+
+release(v1.8.5): ci: reduce pre-run delay to 2-min and add missing `needs: delay`, final test
+
+- No change release just testing
+
+---
+
 ## Changes 11/4/2025 (v1.8.4)
 
 release(v1.8.4): ci: add 3-min pre-run delay to avoid workflow_run races
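As an illustrative aside to the v1.8.7 entry above (purging stale temp archives before each new ZIP is built), here is a minimal standalone sketch of that cleanup rule. The function name `purgeStaleZips` is hypothetical; the workspace path and the 6-hour threshold follow the changelog and the FileModel diff further down.

```php
<?php
// Sketch: delete download-*.zip files older than 6 hours from a ziptmp workspace.
// Assumes $zipRoot points at META_DIR/ziptmp (see FileModel::createZipArchive below).
function purgeStaleZips(string $zipRoot, int $maxAgeSeconds = 21600): int
{
    $removed = 0;
    $now = time();
    foreach (glob($zipRoot . DIRECTORY_SEPARATOR . 'download-*.zip') ?: [] as $zipFile) {
        $mtime = @filemtime($zipFile);
        if (is_file($zipFile) && $mtime !== false && ($now - $mtime) > $maxAgeSeconds) {
            if (@unlink($zipFile)) {
                $removed++;
            }
        }
    }
    return $removed;
}

// Example (path taken from the Ops note above):
// purgeStaleZips('/var/www/metadata/ziptmp');
```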
public/js/version.js

@@ -1,2 +1,2 @@
 // generated by CI
-window.APP_VERSION = 'v1.8.3';
+window.APP_VERSION = 'v1.8.7';
FileController.php

@@ -667,26 +667,26 @@ public function deleteFiles()
 
     public function downloadZip()
     {
-        $this->_jsonStart();
         try {
-            if (!$this->_checkCsrf()) return;
-            if (!$this->_requireAuth()) return;
+            if (!$this->_checkCsrf()) { http_response_code(400); echo "Bad CSRF"; return; }
+            if (!$this->_requireAuth()) { http_response_code(401); echo "Unauthorized"; return; }
 
             $data = $this->_readJsonBody();
             if (!is_array($data) || !isset($data['folder'], $data['files']) || !is_array($data['files'])) {
-                $this->_jsonOut(["error" => "Invalid input."], 400); return;
+                http_response_code(400); echo "Invalid input."; return;
             }
 
             $folder = $this->_normalizeFolder($data['folder']);
             $files = $data['files'];
-            if (!$this->_validFolder($folder)) { $this->_jsonOut(["error"=>"Invalid folder name."], 400); return; }
+            if (!$this->_validFolder($folder)) { http_response_code(400); echo "Invalid folder name."; return; }
 
             $username = $_SESSION['username'] ?? '';
             $perms = $this->loadPerms($username);
 
             // Optional zip gate by account flag
             if (!$this->isAdmin($perms) && !empty($perms['disableZip'])) {
-                $this->_jsonOut(["error" => "ZIP downloads are not allowed for your account."], 403); return;
+                http_response_code(403); echo "ZIP downloads are not allowed for your account."; return;
             }
 
             $ignoreOwnership = $this->isAdmin($perms)

@@ -698,44 +698,65 @@ public function deleteFiles()
                 || $this->ownsFolderOrAncestor($folder, $username, $perms);
             $ownOnly = !$fullView && ACL::hasGrant($username, $folder, 'read_own');
 
-            if (!$fullView && !$ownOnly) {
-                $this->_jsonOut(["error" => "Forbidden: no view access to this folder."], 403); return;
-            }
+            if (!$fullView && !$ownOnly) { http_response_code(403); echo "Forbidden: no view access to this folder."; return; }
 
-            // If own-only, ensure all files are owned by the user
             if ($ownOnly) {
                 $meta = $this->loadFolderMetadata($folder);
                 foreach ($files as $f) {
                     $bn = basename((string)$f);
                     if (!isset($meta[$bn]['uploader']) || strcasecmp((string)$meta[$bn]['uploader'], $username) !== 0) {
-                        $this->_jsonOut(["error" => "Forbidden: you are not the owner of '{$bn}'."], 403); return;
+                        http_response_code(403); echo "Forbidden: you are not the owner of '{$bn}'."; return;
                     }
                 }
             }
 
             $result = FileModel::createZipArchive($folder, $files);
-            if (isset($result['error'])) {
-                $this->_jsonOut(["error" => $result['error']], 400); return;
-            }
+            if (isset($result['error'])) { http_response_code(400); echo $result['error']; return; }
 
             $zipPath = $result['zipPath'] ?? null;
-            if (!$zipPath || !file_exists($zipPath)) { $this->_jsonOut(["error"=>"ZIP archive not found."], 500); return; }
+            if (!$zipPath || !is_file($zipPath)) { http_response_code(500); echo "ZIP archive not found."; return; }
 
-            // switch to file streaming
+            // ---- Clean binary stream setup ----
+            @session_write_close();
+            @set_time_limit(0);
+            @ignore_user_abort(true);
+            if (function_exists('apache_setenv')) { @apache_setenv('no-gzip', '1'); }
+            @ini_set('zlib.output_compression', '0');
+            @ini_set('output_buffering', 'off');
+            while (ob_get_level() > 0) { @ob_end_clean(); }
+
+            @clearstatcache(true, $zipPath);
+            $size = (int)@filesize($zipPath);
+
+            header('X-Accel-Buffering: no');
             header_remove('Content-Type');
             header('Content-Type: application/zip');
+            // Client sets the final name via a.download in your JS; server can be generic
             header('Content-Disposition: attachment; filename="files.zip"');
-            header('Content-Length: ' . filesize($zipPath));
+            if ($size > 0) header('Content-Length: ' . $size);
             header('Cache-Control: no-store, no-cache, must-revalidate');
             header('Pragma: no-cache');
 
-            readfile($zipPath);
+            $fp = fopen($zipPath, 'rb');
+            if ($fp === false) { http_response_code(500); echo "Failed to open ZIP."; return; }
+
+            $chunk = 1048576; // 1 MiB
+            while (!feof($fp)) {
+                $buf = fread($fp, $chunk);
+                if ($buf === false) break;
+                echo $buf;
+                flush();
+            }
+            fclose($fp);
             @unlink($zipPath);
             exit;
 
         } catch (Throwable $e) {
             error_log('FileController::downloadZip error: '.$e->getMessage().' @ '.$e->getFile().':'.$e->getLine());
-            $this->_jsonOut(['error' => 'Internal server error while preparing ZIP.'], 500);
-        } finally { $this->_jsonEnd(); }
+            if (!headers_sent()) http_response_code(500);
+            echo "Internal server error while preparing ZIP.";
+        }
     }
 
     public function extractZip()
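The streaming pattern in the downloadZip change above can be boiled down to a small helper: close the session, drop any output buffers so nothing can wrap or corrupt the binary payload, send plain download headers, then copy the file to the client in fixed-size chunks. This is a simplified sketch, not the project's actual code; the function name `streamFileAsDownload` is hypothetical.

```php
<?php
// Sketch: stream a file as a clean binary download in 1 MiB chunks.
// Simplified from the downloadZip() diff above; error handling is trimmed.
function streamFileAsDownload(string $path, string $downloadName, int $chunkBytes = 1048576): void
{
    @session_write_close();          // release the session lock for long downloads
    while (ob_get_level() > 0) {     // drop any output buffers so only raw bytes go out
        @ob_end_clean();
    }

    $size = (int) @filesize($path);
    header('Content-Type: application/zip');
    header('Content-Disposition: attachment; filename="' . $downloadName . '"');
    if ($size > 0) {
        header('Content-Length: ' . $size);
    }
    header('Cache-Control: no-store, no-cache, must-revalidate');

    $fp = fopen($path, 'rb');
    if ($fp === false) {
        http_response_code(500);
        return;
    }
    while (!feof($fp)) {
        $buf = fread($fp, $chunkBytes);
        if ($buf === false) {
            break;
        }
        echo $buf;
        flush();                     // push each chunk to the client immediately
    }
    fclose($fp);
}
```

Sending Content-Length when the size is known is what lets a browser (or a fetch-then-blob flow, as mentioned in the changelog) know when the response is complete instead of reporting "failed to fetch".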
FileModel.php

@@ -557,36 +557,49 @@ class FileModel {
      * @return array An associative array with either an "error" key or a "zipPath" key.
      */
     public static function createZipArchive($folder, $files) {
-        // Validate and build folder path.
-        $folder = trim($folder) ?: 'root';
+        // (optional) purge old temp zips > 6h
+        $zipRoot = rtrim((string)META_DIR, '/\\') . DIRECTORY_SEPARATOR . 'ziptmp';
+        $now = time();
+        foreach (glob($zipRoot . DIRECTORY_SEPARATOR . 'download-*.zip') ?: [] as $zp) {
+            if (is_file($zp) && ($now - @filemtime($zp)) > 21600) { @unlink($zp); }
+        }
+        // Normalize and validate target folder
+        $folder = trim((string)$folder) ?: 'root';
         $baseDir = realpath(UPLOAD_DIR);
         if ($baseDir === false) {
             return ["error" => "Uploads directory not configured correctly."];
         }
 
         if (strtolower($folder) === 'root' || $folder === "") {
             $folderPathReal = $baseDir;
         } else {
-            // Prevent path traversal.
+            // Prevent traversal and validate each segment against folder regex
             if (strpos($folder, '..') !== false) {
                 return ["error" => "Invalid folder name."];
             }
-            $folderPath = rtrim(UPLOAD_DIR, '/\\') . DIRECTORY_SEPARATOR . trim($folder, "/\\ ");
+            $parts = explode('/', trim($folder, "/\\ "));
+            foreach ($parts as $part) {
+                if ($part === '' || !preg_match(REGEX_FOLDER_NAME, $part)) {
+                    return ["error" => "Invalid folder name."];
+                }
+            }
+            $folderPath = rtrim(UPLOAD_DIR, '/\\') . DIRECTORY_SEPARATOR . implode(DIRECTORY_SEPARATOR, $parts);
             $folderPathReal = realpath($folderPath);
             if ($folderPathReal === false || strpos($folderPathReal, $baseDir) !== 0) {
                 return ["error" => "Folder not found."];
             }
         }
 
-        // Validate each file and build an array of files to zip.
+        // Collect files to zip (only regular files in the chosen folder)
         $filesToZip = [];
         foreach ($files as $fileName) {
-            // Validate file name using REGEX_FILE_NAME.
-            $fileName = basename(trim($fileName));
+            $fileName = basename(trim((string)$fileName));
             if (!preg_match(REGEX_FILE_NAME, $fileName)) {
                 continue;
             }
             $fullPath = $folderPathReal . DIRECTORY_SEPARATOR . $fileName;
-            if (file_exists($fullPath)) {
+            if (is_file($fullPath)) {
                 $filesToZip[] = $fullPath;
             }
         }

@@ -594,22 +607,53 @@ class FileModel {
             return ["error" => "No valid files found to zip."];
         }
 
-        // Create a temporary ZIP file.
-        $tempZip = tempnam(sys_get_temp_dir(), 'zip');
-        unlink($tempZip); // Remove the temp file so that ZipArchive can create a new file.
-        $tempZip .= '.zip';
+        // Workspace on the big disk: META_DIR/ziptmp
+        $work = rtrim((string)META_DIR, '/\\') . DIRECTORY_SEPARATOR . 'ziptmp';
+        if (!is_dir($work)) {
+            @mkdir($work, 0775, true);
+        }
+        if (!is_dir($work) || !is_writable($work)) {
+            return ["error" => "ZIP temp dir not writable: " . $work];
+        }
 
-        $zip = new ZipArchive();
-        if ($zip->open($tempZip, ZipArchive::CREATE) !== TRUE) {
+        // Optional sanity: ensure there is roughly enough free space
+        $totalSize = 0;
+        foreach ($filesToZip as $fp) {
+            $sz = @filesize($fp);
+            if ($sz !== false) $totalSize += (int)$sz;
+        }
+        $free = @disk_free_space($work);
+        // Add ~20MB overhead and a 5% cushion
+        if ($free !== false && $totalSize > 0) {
+            $needed = (int)ceil($totalSize * 1.05) + (20 * 1024 * 1024);
+            if ($free < $needed) {
+                return ["error" => "Insufficient free space in ZIP workspace."];
+            }
+        }
+
+        @set_time_limit(0);
+
+        // Create the ZIP path inside META_DIR/ziptmp
+        $zipName = 'download-' . date('Ymd-His') . '-' . bin2hex(random_bytes(4)) . '.zip';
+        $zipPath = $work . DIRECTORY_SEPARATOR . $zipName;
+
+        $zip = new \ZipArchive();
+        if ($zip->open($zipPath, \ZipArchive::CREATE | \ZipArchive::OVERWRITE) !== true) {
             return ["error" => "Could not create zip archive."];
         }
-        // Add each file using its base name.
         foreach ($filesToZip as $filePath) {
+            // Add using basename at the root of the zip (matches your current behavior)
             $zip->addFile($filePath, basename($filePath));
         }
-        $zip->close();
 
-        return ["zipPath" => $tempZip];
+        if (!$zip->close()) {
+            // Commonly indicates disk full at finalize
+            return ["error" => "Failed to finalize ZIP (disk full?)."];
+        }
+
+        // Success: controller will readfile() and unlink()
+        return ["zipPath" => $zipPath];
     }
 
     /**

@@ -624,6 +668,13 @@ class FileModel {
         $allSuccess = true;
         $extractedFiles = [];
 
+        // Config toggles
+        $SKIP_DOTFILES = defined('SKIP_DOTFILES_ON_EXTRACT') ? (bool)SKIP_DOTFILES_ON_EXTRACT : true;
+
+        // Hard limits to mitigate zip-bombs (tweak via defines if you like)
+        $MAX_UNZIP_BYTES = defined('MAX_UNZIP_BYTES') ? (int)MAX_UNZIP_BYTES : (200 * 1024 * 1024 * 1024); // 200 GiB
+        $MAX_UNZIP_FILES = defined('MAX_UNZIP_FILES') ? (int)MAX_UNZIP_FILES : 20000;
+
         $baseDir = realpath(UPLOAD_DIR);
         if ($baseDir === false) {
             return ["error" => "Uploads directory not configured correctly."];

@@ -632,6 +683,7 @@ class FileModel {
         // Build target dir
         if (strtolower(trim($folder) ?: '') === "root") {
             $relativePath = "";
+            $folderNorm = "root";
         } else {
             $parts = explode('/', trim($folder, "/\\"));
             foreach ($parts as $part) {

@@ -640,6 +692,7 @@ class FileModel {
                 }
             }
             $relativePath = implode(DIRECTORY_SEPARATOR, $parts) . DIRECTORY_SEPARATOR;
+            $folderNorm = implode('/', $parts); // normalized with forward slashes for metadata helpers
         }
 
         $folderPath = $baseDir . DIRECTORY_SEPARATOR . $relativePath;

@@ -651,16 +704,73 @@ class FileModel {
             return ["error" => "Folder not found."];
         }
 
-        // Prepare metadata container
-        $metadataFile = self::getMetadataFilePath($folder);
-        $destMetadata = file_exists($metadataFile) ? (json_decode(file_get_contents($metadataFile), true) ?: []) : [];
+        // Metadata cache per folder to avoid many reads/writes
+        $metaCache = [];
+        $getMeta = function(string $folderStr) use (&$metaCache) {
+            if (!isset($metaCache[$folderStr])) {
+                $mf = self::getMetadataFilePath($folderStr);
+                $metaCache[$folderStr] = file_exists($mf) ? (json_decode(file_get_contents($mf), true) ?: []) : [];
+            }
+            return $metaCache[$folderStr];
+        };
+        $putMeta = function(string $folderStr, array $meta) use (&$metaCache) {
+            $metaCache[$folderStr] = $meta;
+        };
 
         $safeFileNamePattern = REGEX_FILE_NAME;
         $actor = $_SESSION['username'] ?? 'Unknown';
         $now = date(DATE_TIME_FORMAT);
 
+        // --- Helpers ---
+
+        // Reject absolute paths, traversal, drive letters
+        $isUnsafeEntryPath = function(string $entry) : bool {
+            $e = str_replace('\\', '/', $entry);
+            if ($e === '' || str_contains($e, "\0")) return true;
+            if (str_starts_with($e, '/')) return true; // absolute nix path
+            if (preg_match('/^[A-Za-z]:[\\/]/', $e)) return true; // Windows drive
+            if (str_contains($e, '../') || str_contains($e, '..\\')) return true;
+            return false;
+        };
+
+        // Validate each subfolder name in the path using REGEX_FOLDER_NAME
+        $validEntrySubdirs = function(string $entry) : bool {
+            $e = trim(str_replace('\\', '/', $entry), '/');
+            if ($e === '') return true;
+            $dirs = explode('/', $e);
+            array_pop($dirs); // remove basename; we only validate directories here
+            foreach ($dirs as $d) {
+                if ($d === '' || !preg_match(REGEX_FOLDER_NAME, $d)) return false;
+            }
+            return true;
+        };
+
+        // NEW: hidden path detector — true if ANY segment starts with '.'
+        $isHiddenDotPath = function(string $entry) : bool {
+            $e = trim(str_replace('\\', '/', $entry), '/');
+            if ($e === '') return false;
+            foreach (explode('/', $e) as $seg) {
+                if ($seg !== '' && $seg[0] === '.') return true;
+            }
+            return false;
+        };
+
+        // Generalized metadata stamper: writes to the specified folder's metadata.json
+        $stampMeta = function(string $folderStr, string $basename) use (&$getMeta, &$putMeta, $actor, $now) {
+            $meta = $getMeta($folderStr);
+            $meta[$basename] = [
+                'uploaded' => $now,
+                'modified' => $now,
+                'uploader' => $actor,
+            ];
+            $putMeta($folderStr, $meta);
+        };
+
+        // No PHP execution time limit during heavy work
+        @set_time_limit(0);
+
         foreach ($files as $zipFileName) {
-            $zipBase = basename(trim($zipFileName));
+            $zipBase = basename(trim((string)$zipFileName));
             if (strtolower(substr($zipBase, -4)) !== '.zip') {
                 continue;
             }

@@ -677,67 +787,126 @@ class FileModel {
                 continue;
             }
 
-            $zip = new ZipArchive();
-            if ($zip->open($zipFilePath) !== TRUE) {
+            $zip = new \ZipArchive();
+            if ($zip->open($zipFilePath) !== true) {
                 $errors[] = "Could not open $zipBase as a zip file.";
                 $allSuccess = false;
                 continue;
             }
 
-            // Minimal Zip Slip guard: fail if any entry looks unsafe
+            // ---- Pre-scan: safety and size limits + build allow-list (skip dotfiles) ----
             $unsafe = false;
+            $totalUncompressed = 0;
+            $fileCount = 0;
+            $allowedEntries = []; // names to extract (files and/or directories)
+            $allowedFiles = []; // only files (for metadata stamping)
+
             for ($i = 0; $i < $zip->numFiles; $i++) {
-                $entryName = $zip->getNameIndex($i);
-                if ($entryName === false) { $unsafe = true; break; }
-                // Absolute paths, parent traversal, or Windows drive paths
-                if (strpos($entryName, '../') !== false || strpos($entryName, '..\\') !== false ||
-                    str_starts_with($entryName, '/') || preg_match('/^[A-Za-z]:[\\\\\\/]/', $entryName)) {
+                $stat = $zip->statIndex($i);
+                $name = $zip->getNameIndex($i);
+                if ($name === false || !$stat) { $unsafe = true; break; }
+
+                $isDir = str_ends_with($name, '/');
+
+                // Basic path checks
+                if ($isUnsafeEntryPath($name) || !$validEntrySubdirs($name)) { $unsafe = true; break; }
+
+                // Skip hidden entries (any segment starts with '.')
+                if ($SKIP_DOTFILES && $isHiddenDotPath($name)) {
+                    continue; // just ignore; do not treat as unsafe
+                }
+
+                // Detect symlinks via external attributes (best-effort)
+                $mode = (isset($stat['external_attributes']) ? (($stat['external_attributes'] >> 16) & 0xF000) : 0);
+                if ($mode === 0120000) { // S_IFLNK
                     $unsafe = true; break;
                 }
+
+                // Track limits only for files we're going to extract
+                if (!$isDir) {
+                    $fileCount++;
+                    $sz = isset($stat['size']) ? (int)$stat['size'] : 0;
+                    $totalUncompressed += $sz;
+                    if ($fileCount > $MAX_UNZIP_FILES || $totalUncompressed > $MAX_UNZIP_BYTES) {
+                        $unsafe = true; break;
+                    }
+                    $allowedFiles[] = $name;
+                }
+
+                $allowedEntries[] = $name;
             }
 
             if ($unsafe) {
                 $zip->close();
-                $errors[] = "$zipBase contains unsafe paths; extraction aborted.";
+                $errors[] = "$zipBase contains unsafe or oversized contents; extraction aborted.";
                 $allSuccess = false;
                 continue;
             }
 
-            // Extract safely (whole archive) after precheck
-            if (!$zip->extractTo($folderPathReal)) {
+            // Nothing to extract after filtering?
+            if (empty($allowedEntries)) {
+                $zip->close();
+                // Treat as success (nothing visible to extract), but informatively note it
+                $errors[] = "$zipBase contained only hidden or unsupported entries.";
+                $allSuccess = false; // or keep true if you'd rather not mark as failure
+                continue;
+            }
+
+            // ---- Extract ONLY the allowed entries ----
+            if (!$zip->extractTo($folderPathReal, $allowedEntries)) {
                 $errors[] = "Failed to extract $zipBase.";
                 $allSuccess = false;
                 $zip->close();
                 continue;
             }
 
-            // Stamp metadata for extracted regular files
-            for ($i = 0; $i < $zip->numFiles; $i++) {
-                $entryName = $zip->getNameIndex($i);
-                if ($entryName === false) continue;
+            // ---- Stamp metadata for files in the target folder AND nested subfolders (allowed files only) ----
+            foreach ($allowedFiles as $entryName) {
+                // Normalize entry path for filesystem checks
+                $entryFsRel = str_replace(['\\'], '/', $entryName);
+                $entryFsRel = ltrim($entryFsRel, '/'); // ensure relative
 
-                $basename = basename($entryName);
+                // Skip any directories (shouldn't be listed here, but defend anyway)
+                if ($entryFsRel === '' || str_ends_with($entryFsRel, '/')) continue;
+
+                $basename = basename($entryFsRel);
                 if ($basename === '' || !preg_match($safeFileNamePattern, $basename)) continue;
 
-                // Only stamp files that actually exist after extraction
-                $target = $folderPathReal . DIRECTORY_SEPARATOR . $entryName;
-                $isDir = str_ends_with($entryName, '/') || is_dir($target);
-                if ($isDir) continue;
+                // Decide which folder's metadata to update:
+                // - top-level files -> $folderNorm
+                // - nested files -> corresponding "<folderNorm>/<sub/dir>" (or "sub/dir" if folderNorm is 'root')
+                $relDir = str_replace('\\', '/', trim(dirname($entryFsRel), '.'));
+                $relDir = ($relDir === '.' ? '' : trim($relDir, '/'));
 
-                $extractedFiles[] = $basename;
-                $destMetadata[$basename] = [
-                    'uploaded' => $now,
-                    'modified' => $now,
-                    'uploader' => $actor,
-                    // no tags by default
-                ];
+                $targetFolderNorm = ($relDir === '' || $relDir === '.')
+                    ? $folderNorm
+                    : (($folderNorm === 'root') ? $relDir : ($folderNorm . '/' . $relDir));
+
+                // Only stamp if the file actually exists on disk after extraction
+                $targetAbs = $folderPathReal . DIRECTORY_SEPARATOR . str_replace('/', DIRECTORY_SEPARATOR, $entryFsRel);
+                if (is_file($targetAbs)) {
+                    // Preserve list behavior: only include top-level extracted names
+                    if ($relDir === '' || $relDir === '.') {
+                        $extractedFiles[] = $basename;
+                    }
+                    $stampMeta($targetFolderNorm, $basename);
+                }
             }
 
             $zip->close();
         }
 
-        if (file_put_contents($metadataFile, json_encode($destMetadata, JSON_PRETTY_PRINT), LOCK_EX) === false) {
-            $errors[] = "Failed to update metadata.";
-            $allSuccess = false;
+        // Persist metadata for any touched folder(s)
+        foreach ($metaCache as $folderStr => $meta) {
+            $metadataFile = self::getMetadataFilePath($folderStr);
+            if (!is_dir(dirname($metadataFile))) {
+                @mkdir(dirname($metadataFile), 0775, true);
+            }
+            if (file_put_contents($metadataFile, json_encode($meta, JSON_PRETTY_PRINT), LOCK_EX) === false) {
+                $errors[] = "Failed to update metadata for {$folderStr}.";
+                $allSuccess = false;
+            }
        }
 
         return $allSuccess
             ? ["success" => true, "extractedFiles" => $extractedFiles]
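The extraction pre-scan above hinges on a couple of small path predicates. Here is a minimal standalone sketch of the hidden-dot-path and unsafe-entry checks, written as plain functions with example inputs; they mirror the `$isHiddenDotPath` and `$isUnsafeEntryPath` closures in the diff but the top-level function names are illustrative only.

```php
<?php
// Sketch: standalone versions of the entry checks used in the pre-scan above.

function isHiddenDotPath(string $entry): bool
{
    $e = trim(str_replace('\\', '/', $entry), '/');
    if ($e === '') {
        return false;
    }
    foreach (explode('/', $e) as $segment) {
        if ($segment !== '' && $segment[0] === '.') {
            return true; // any segment starting with '.' marks the whole path hidden
        }
    }
    return false;
}

function isUnsafeEntryPath(string $entry): bool
{
    $e = str_replace('\\', '/', $entry);
    if ($e === '' || str_contains($e, "\0")) return true;                 // empty / NUL byte
    if (str_starts_with($e, '/')) return true;                            // absolute path
    if (preg_match('/^[A-Za-z]:[\\/]/', $e)) return true;                 // Windows drive letter
    if (str_contains($e, '../') || str_contains($e, '..\\')) return true; // traversal
    return false;
}

// Examples:
// isHiddenDotPath('docs/.git/config')  -> true  (skipped when SKIP_DOTFILES_ON_EXTRACT is on)
// isHiddenDotPath('photos/trip.jpg')   -> false
// isUnsafeEntryPath('../etc/passwd')   -> true  (zip-slip attempt, extraction aborted)
// isUnsafeEntryPath('folder/file.txt') -> false
```

These checks only cover path shape; the symlink detection and the file-count/uncompressed-size limits in the pre-scan handle the remaining zip-bomb and link-escape cases.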