Compare commits — 15 commits

| SHA1 |
|---|
| 429cd0314a |
| ba29cc4822 |
| e2cd304158 |
| ca8788a694 |
| dc45fed886 |
| a9fe342175 |
| 7669f5a10b |
| 34a4e06a23 |
| d00faf5fe7 |
| ad8cbc601a |
| 40e000b5bc |
| eee25a4dc6 |
| d66f4d93cb |
| f4f7f8ef38 |
| 0ccba45c40 |
.github/workflows/release-on-version.yml (160 changes, vendored)

@@ -9,6 +9,14 @@ on:
   workflow_run:
     workflows: ["Bump version and sync Changelog to Docker Repo"]
     types: [completed]
+  workflow_dispatch:
+    inputs:
+      ref:
+        description: "Ref (branch or SHA) to build from (default: origin/master)"
+        required: false
+      version:
+        description: "Explicit version tag to release (e.g., v1.8.6). If empty, auto-detect."
+        required: false

 permissions:
   contents: write
@@ -17,50 +25,141 @@ jobs:
   delay:
     runs-on: ubuntu-latest
     steps:
-      - name: Delay 3 minutes
-        run: sleep 180
+      - name: Delay 10 minutes
+        run: sleep 600

   release:
+    needs: delay
     runs-on: ubuntu-latest

+    # Guard: Only run on trusted workflow_run events (pushes from this repo)
+    if: >
+      github.event_name == 'push' ||
+      github.event_name == 'workflow_dispatch' ||
+      (github.event_name == 'workflow_run' &&
+       github.event.workflow_run.event == 'push' &&
+       github.event.workflow_run.head_repository.full_name == github.repository)
+
+    # Use run_id for a stable, unique key
     concurrency:
-      # Cancel older runs for the same branch/ref so only the latest proceeds
-      group: release-${{ github.ref }}
-      cancel-in-progress: true
+      group: release-${{ github.run_id }}
+      cancel-in-progress: false

     steps:
-      - name: Checkout correct ref
+      - name: Checkout (fetch all)
         uses: actions/checkout@v4
         with:
           fetch-depth: 0
-          # For workflow_run, use the triggering workflow's head_sha; else use the current SHA
-          ref: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }}

-      - name: Ensure tags available
+      - name: Ensure tags + master available
+        shell: bash
         run: |
           git fetch --tags --force --prune --quiet
+          git fetch origin master --quiet

-      - name: Show recent tags (debug)
-        run: git tag --list "v*" --sort=-v:refname | head -n 20
-
-      - name: Read version from version.js
+      - name: Resolve source ref + (maybe) version
+        id: pickref
+        shell: bash
+        run: |
+          set -euo pipefail
+
+          # Defaults
+          REF=""
+          VER=""
+          SRC=""
+
+          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
+            # manual run
+            REF_IN="${{ github.event.inputs.ref }}"
+            VER_IN="${{ github.event.inputs.version }}"
+            if [[ -n "$REF_IN" ]]; then
+              # Try branch/sha; fetch branch if needed
+              git fetch origin "$REF_IN" --quiet || true
+              if REF_SHA="$(git rev-parse --verify --quiet "$REF_IN")"; then
+                REF="$REF_SHA"
+              else
+                echo "Provided ref '$REF_IN' not found" >&2
+                exit 1
+              fi
+            else
+              REF="$(git rev-parse origin/master)"
+            fi
+            if [[ -n "$VER_IN" ]]; then
+              VER="$VER_IN"
+              SRC="manual-version"
+            fi
+          elif [[ "${{ github.event_name }}" == "workflow_run" ]]; then
+            REF="${{ github.event.workflow_run.head_sha }}"
+          else
+            REF="${{ github.sha }}"
+          fi
+
+          # If no explicit version, try to find the latest bot bump reachable from REF
+          if [[ -z "$VER" ]]; then
+            # Search recent history reachable from REF
+            BOT_SHA="$(git log "$REF" -n 200 --author='github-actions[bot]' --grep='set APP_VERSION to v' --pretty=%H | head -n1 || true)"
+            if [[ -n "$BOT_SHA" ]]; then
+              SUBJ="$(git log -n1 --pretty=%s "$BOT_SHA")"
+              BOT_VER="$(sed -n 's/.*set APP_VERSION to \(v[^ ]*\).*/\1/p' <<<"${SUBJ}")"
+              if [[ -n "$BOT_VER" ]]; then
+                VER="$BOT_VER"
+                REF="$BOT_SHA"  # build/tag from the bump commit
+                SRC="bot-commit"
+              fi
+            fi
+          fi
+
+          # Output
+          REF_SHA="$(git rev-parse "$REF")"
+          echo "ref=$REF_SHA" >> "$GITHUB_OUTPUT"
+          echo "source=${SRC:-event-ref}" >> "$GITHUB_OUTPUT"
+          echo "preversion=${VER}" >> "$GITHUB_OUTPUT"
+          echo "Using source=${SRC:-event-ref} ref=$REF_SHA"
+          if [[ -n "$VER" ]]; then echo "Pre-resolved version=$VER"; fi
+
+      - name: Checkout chosen ref
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          ref: ${{ steps.pickref.outputs.ref }}
+
+      - name: Assert ref is on master
+        shell: bash
+        run: |
+          set -euo pipefail
+          REF="${{ steps.pickref.outputs.ref }}"
+          git fetch origin master --quiet
+          if ! git merge-base --is-ancestor "$REF" origin/master; then
+            echo "Ref $REF is not on master; refusing to release."
+            exit 78
+          fi
+
+      - name: Debug version.js provenance
+        shell: bash
+        run: |
+          echo "version.js last-change commit: $(git log -n1 --pretty='%h %s' -- public/js/version.js || echo 'none')"
+          sed -n '1,20p' public/js/version.js || true
+
+      - name: Determine version
         id: ver
         shell: bash
         run: |
           set -euo pipefail
-          echo "version.js at commit: $(git rev-parse --short HEAD)"
-          sed -n '1,80p' public/js/version.js || true
-
-          VER=$(
-            grep -Eo "APP_VERSION[^\\n]*['\"]v[0-9][^'\"]+['\"]" public/js/version.js \
-              | sed -E "s/.*['\"](v[^'\"]+)['\"].*/\1/" \
-              | tail -n1
-          )
-          if [[ -z "${VER:-}" ]]; then
+          # Prefer pre-resolved version (manual input or bot commit)
+          if [[ -n "${{ steps.pickref.outputs.preversion }}" ]]; then
+            VER="${{ steps.pickref.outputs.preversion }}"
+            echo "version=$VER" >> "$GITHUB_OUTPUT"
+            echo "Parsed version (pre-resolved): $VER"
+            exit 0
+          fi
+          # Fallback to version.js
+          VER="$(grep -Eo "APP_VERSION\s*=\s*['\"]v[^'\"]+['\"]" public/js/version.js | sed -E "s/.*['\"](v[^'\"]+)['\"].*/\1/")"
+          if [[ -z "$VER" ]]; then
             echo "Could not parse APP_VERSION from version.js" >&2
             exit 1
           fi
           echo "version=$VER" >> "$GITHUB_OUTPUT"
-          echo "Parsed version: $VER"
+          echo "Parsed version (file): $VER"

       - name: Skip if tag already exists
         id: tagcheck
@@ -74,7 +173,6 @@ jobs:
             echo "exists=false" >> "$GITHUB_OUTPUT"
           fi

-      # Ensure the stamper is executable and has LF endings (helps if edited on Windows)
       - name: Prep stamper script
         if: steps.tagcheck.outputs.exists == 'false'
         shell: bash
@@ -88,18 +186,13 @@ jobs:
         shell: bash
         run: |
           set -euo pipefail
-          VER="${{ steps.ver.outputs.version }}"   # e.g. v1.8.2
-          ZIP="FileRise-${VER}.zip"
+          VER="${{ steps.ver.outputs.version }}"

-          # Clean staging copy (exclude dotfiles you don’t want)
           rm -rf staging
           rsync -a \
             --exclude '.git' --exclude '.github' \
             --exclude 'resources' \
             --exclude '.dockerignore' --exclude '.gitattributes' --exclude '.gitignore' \
             ./ staging/

-          # Stamp IN THE STAGING COPY (invoke via bash to avoid exec-bit issues)
           bash ./scripts/stamp-assets.sh "${VER}" "$(pwd)/staging"

       - name: Verify placeholders are gone (staging)
@@ -128,8 +221,7 @@ jobs:
         run: |
           set -euo pipefail
           VER="${{ steps.ver.outputs.version }}"
-          ZIP="FileRise-${VER}.zip"
-          (cd staging && zip -r "../$ZIP" . >/dev/null)
+          (cd staging && zip -r "../FileRise-${VER}.zip" . >/dev/null)

       - name: Compute SHA-256 checksum
         if: steps.tagcheck.outputs.exists == 'false'
@@ -189,7 +281,6 @@ jobs:
           COMPARE_URL="https://github.com/${REPO}/compare/${PREV}...${VER}"
           ZIP="FileRise-${VER}.zip"
           SHA="${{ steps.sum.outputs.sha }}"

           {
             echo
             if [[ -s CHANGELOG_SNIPPET.md ]]; then
@@ -205,8 +296,6 @@ jobs:
             echo "${SHA} ${ZIP}"
             echo '```'
           } > RELEASE_BODY.md

-          echo "Release body:"
           sed -n '1,200p' RELEASE_BODY.md

       - name: Create GitHub Release
@@ -214,8 +303,7 @@ jobs:
         uses: softprops/action-gh-release@v2
         with:
           tag_name: ${{ steps.ver.outputs.version }}
-          # Point the tag at the same commit we checked out (handles workflow_run case)
-          target_commitish: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }}
+          target_commitish: ${{ steps.pickref.outputs.ref }}
           name: ${{ steps.ver.outputs.version }}
           body_path: RELEASE_BODY.md
           generate_release_notes: false
CHANGELOG.md (111 lines added)

# Changelog

## Changes 11/7/2025 (v1.8.9)

release(v1.8.9): fix(oidc, admin): first-save Client ID/Secret (closes #64)

- adminPanel.js:
  - Masked inputs without a saved value now start with data-replace="1".
  - handleSave() now sends oidc.clientId / oidc.clientSecret on first save (no longer requires clicking “Replace” first).

---

## Changes 11/7/2025 (v1.8.8)

release(v1.8.8): background ZIP jobs w/ tokenized download + in‑modal progress bar; robust finalize; janitor cleanup — closes #60

**Summary**
This release moves ZIP creation off the request thread into a **background worker** and switches the client to a **queue → poll → tokenized GET** download flow. It fixes large multi‑GB ZIP failures caused by request timeouts or cross‑device renames, and provides a resilient in‑modal progress experience. It also adds a 6‑hour janitor for temporary tokens/logs.

**Backend** changes:

- Add a **zip status** endpoint that returns progress and readiness, and a **tokenized download** endpoint for one‑shot downloads.
- Update `FileController::downloadZip()` to enqueue a job and return `{ token, statusUrl, downloadUrl }` instead of streaming a blob in the POST response.
- Implement `spawnZipWorker()` to find a working PHP CLI, set `TMPDIR` on the same filesystem as the final ZIP, spawn with `nohup`, and persist PID/log metadata for diagnostics.
- Serve finished ZIPs via `downloadZipFile()` with strict token/user checks and streaming headers; unlink the ZIP after a successful read.

New **Worker**:

- New `src/cli/zip_worker.php` builds the archive in the background.
- Writes progress fields (`pct`, `filesDone`, `filesTotal`, `bytesDone`, `bytesTotal`, `current`, `phase`, `startedAt`, `finalizeAt`) to the per‑token JSON.
- During **finalizing**, publishes `selectedFiles`/`selectedBytes` and clears incremental counters to avoid the confusing “N/N files” display before `close()` returns.
- Adds a **janitor**: purge `.tokens/*.json` and `.logs/WORKER-*.log` older than **6 hours** on each run (sketched below).
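A minimal sketch of that janitor pass, assuming the `META_DIR/ziptmp` layout described above (this condenses what the worker does; the root path is an example, not the exact implementation):

```php
<?php
// Delete files matching $pattern whose mtime is older than $maxAge seconds.
// 21600 s = 6 hours, per the entry above.
function purgeOld(string $pattern, int $maxAge = 21600): void
{
    $now = time();
    foreach (glob($pattern) ?: [] as $path) {
        if (is_file($path) && ($now - (int)@filemtime($path)) > $maxAge) {
            @unlink($path);
        }
    }
}

// Example usage against an assumed ziptmp root:
$root = '/var/www/metadata/ziptmp';
purgeOld($root . '/.tokens/*.json');
purgeOld($root . '/.logs/WORKER-*.log');
```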

New **API/Status Payload**:

- `zipStatus()` exposes `ready` (derived from `status=done` + an existing `zipPath`), and includes `startedAt`/`finalizeAt` for UI timers (see the sketch below).
- Returns a prebuilt `downloadUrl` for a direct handoff once the ZIP is ready.
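A minimal sketch of how `ready` can be derived from the per-token job file (the token path is hypothetical; the real endpoint also enforces auth, token format, and ownership checks):

```php
<?php
// Read the job file written by the background worker.
$tokFile = '/var/www/metadata/ziptmp/.tokens/0123abcd.json'; // hypothetical token
$job = json_decode((string)@file_get_contents($tokFile), true) ?: [];

// "ready" means the worker marked the job done AND the ZIP still exists on disk.
$ready = (($job['status'] ?? '') === 'done')
    && !empty($job['zipPath'])
    && is_file($job['zipPath']);

$out = [
    'status'     => $job['status'] ?? 'unknown',
    'ready'      => $ready,
    'pct'        => $job['pct'] ?? null,
    'startedAt'  => $job['startedAt'] ?? null,   // lets the UI show elapsed time
    'finalizeAt' => $job['finalizeAt'] ?? null,  // drives the "Finalizing… mm:ss" timer
];
if ($ready) {
    // Prebuilt URL for the tokenized GET handoff.
    $out['downloadUrl'] = '/api/file/downloadZipFile.php?k=' . urlencode('0123abcd');
}

header('Content-Type: application/json');
echo json_encode($out);
```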

**Frontend (UX)** changes:

- Replace the blob POST download with an **enqueue → poll → tokenized GET** flow.
- The native `<progress>` bar now renders **inside the modal** (no overflow/jitter).
- Shows a determinate **0–98%** during enumeration, then **locks at 100%** with **“Finalizing… mm:ss — N files, ~Size”** until the download starts.
- The modal closes just before the download; the UI resets for the next operation.

Added **CSS**:

- Ensure the progress modal has a minimum height and hidden overflow; ellipsize the status line to prevent scrollbars.

**Why this closes #60**:

- ZIP creation no longer depends on the request lifetime (avoids proxy/Apache timeouts).
- Temporary files and the final ZIP are created on the **same filesystem** (prevents “rename temp file failed” during `ZipArchive::close()`).
- Users get continuous, truthful feedback for large multi‑GB archives.

Additional **Notes**:

- Download tokens are **one‑shot** and are deleted after the GET completes (sketched below).
- Temporary artifacts (`META_DIR/ziptmp/.tokens`, `.logs`, and old ZIPs) are cleaned up automatically (≥6h).
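A minimal sketch of the one-shot handoff described in the notes above: consume the token before streaming, and keep the ZIP path confined to the `ziptmp` root (paths are examples; the real handler also checks the session user):

```php
<?php
$root    = realpath('/var/www/metadata/ziptmp');             // example ziptmp root
$tokFile = '/var/www/metadata/ziptmp/.tokens/0123abcd.json'; // example token file

$job = json_decode((string)@file_get_contents($tokFile), true) ?: [];
@unlink($tokFile); // one-shot: the token is consumed on first use

$zipPath = (string)($job['zipPath'] ?? '');
$zipReal = $zipPath !== '' ? realpath($zipPath) : false;
// Refuse anything that resolved outside the ziptmp root or no longer exists.
if (!$zipReal || !$root || strpos($zipReal, $root) !== 0 || !is_file($zipReal)) {
    http_response_code(404);
    exit('Not found');
}

header('Content-Type: application/zip');
header('Content-Length: ' . (string)filesize($zipReal));
readfile($zipReal);
@unlink($zipReal); // the temporary artifact is removed after a successful read
```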

---

## Changes 11/5/2025 (v1.8.7)

release(v1.8.7): fix(zip-download): stream clean ZIP response and purge stale temp archives

- FileController::downloadZip
  - Remove `_jsonStart`/`_jsonEnd` and JSON wrappers; send a pure binary ZIP.
  - Close session locks, disable gzip/output buffering, set Content-Length when known.
  - Stream in 1 MiB chunks (sketched below); proper HTTP codes/messages on errors.
  - Unlink the temp ZIP after a successful send.
  - Preserves all auth/ACL/ownership checks.

- FileModel::createZipArchive
  - Purge `META_DIR/ziptmp/download-*.zip` older than 6 h before creating a new ZIP.

Result: fixes “failed to fetch / load failed” with the fetch→blob flow and reduces leftover tmp ZIPs.
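A minimal sketch of the chunked streaming described in this entry — plain binary response, Content-Length when known, 1 MiB reads, temp ZIP removed after a successful send (the path is a placeholder and error handling is reduced to the essentials):

```php
<?php
$zipPath = '/var/www/metadata/ziptmp/download-example.zip'; // placeholder path

// Drop any output buffering / compression wrappers so the bytes go out untouched.
while (ob_get_level() > 0) { @ob_end_clean(); }

$fh = @fopen($zipPath, 'rb');
if ($fh === false) {
    http_response_code(404);
    exit('Not found');
}

header('Content-Type: application/zip');
header('Content-Disposition: attachment; filename="files.zip"');
$size = @filesize($zipPath);
if ($size !== false) {
    header('Content-Length: ' . $size);
}

while (!feof($fh)) {
    echo fread($fh, 1024 * 1024); // 1 MiB per chunk
    flush();
}
fclose($fh);
@unlink($zipPath); // clean up the temp archive after a successful send
```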

---

## Changes 11/4/2025 (v1.8.6)

release(v1.8.6): fix large ZIP downloads + safer extract; close #60

- Zip creation
  - Write archives to `META_DIR/ziptmp` (on a large/writable disk) instead of the system tmp.
  - Auto-create `ziptmp` (0775) and verify writability.
  - Free-space sanity check (~files total +5% +20 MB, sketched below); clearer error on low space.
  - Normalize/validate folder segments; include only regular files.
  - `set_time_limit(0)`; use `CREATE|OVERWRITE`; improved error handling.
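A minimal sketch of the free-space sanity check described in the bullet above (the +5% / +20 MB margin comes from the entry; the directory and file names are examples):

```php
<?php
// Refuse to build the archive when the target filesystem is too low on space.
function assertEnoughSpace(array $files, string $zipDir): void
{
    $total = 0;
    foreach ($files as $path) {
        $sz = @filesize($path);
        if ($sz !== false) {
            $total += (int)$sz;
        }
    }
    // Estimated need: total input size + 5% overhead + 20 MB headroom.
    $needed = (int)ceil($total * 1.05) + 20 * 1024 * 1024;
    $free   = @disk_free_space($zipDir);
    if ($free !== false && $free < $needed) {
        throw new RuntimeException(sprintf(
            'Not enough free space in %s: need ~%d bytes, have %d.',
            $zipDir, $needed, (int)$free
        ));
    }
}

// Example usage against an assumed ziptmp directory:
assertEnoughSpace(['/uploads/a.bin', '/uploads/b.bin'], '/var/www/metadata/ziptmp');
```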
- Zip extraction
  - New: stamp metadata for files in nested subfolders (per-folder `metadata.json`).
  - Skip hidden “dot” paths (files/dirs with any segment starting with “.”) by default via `SKIP_DOTFILES_ON_EXTRACT=true`; only extract allow-listed entries.
  - Hardenings: zip-slip guard, reject symlinks (external_attributes), zip-bomb limits (`MAX_UNZIP_BYTES` default 200 GiB, `MAX_UNZIP_FILES` default 20k) — see the sketch below.
  - Persist metadata for all touched folders; keep the `extractedFiles` list for top-level names.
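A minimal sketch of the allow-listing idea behind those hardenings — skip dot-segments, reject absolute paths and `..` traversal before extraction (the symlink and zip-bomb checks mentioned above are omitted here; paths are illustrative):

```php
<?php
// Decide whether a single archive entry is safe to extract.
function isSafeZipEntry(string $entryName, bool $skipDotfiles = true): bool
{
    $name = str_replace('\\', '/', $entryName);
    if ($name === '' || $name[0] === '/' || preg_match('/^[A-Za-z]:/', $name)) {
        return false; // absolute paths are never allowed
    }
    foreach (explode('/', $name) as $segment) {
        if ($segment === '..') {
            return false; // zip-slip traversal
        }
        if ($skipDotfiles && $segment !== '' && $segment[0] === '.') {
            return false; // hidden "dot" path segment
        }
    }
    return true;
}

// Example: build the allow-list before extracting.
$zip = new ZipArchive();
if ($zip->open('/tmp/upload.zip') === true) {
    $allowed = [];
    for ($i = 0; $i < $zip->numFiles; $i++) {
        $entry = (string)$zip->getNameIndex($i);
        if (isSafeZipEntry($entry)) {
            $allowed[] = $entry;
        }
    }
    if ($allowed) {
        $zip->extractTo('/var/www/uploads/extracted', $allowed);
    }
    $zip->close();
}
```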

Ops note: ensure `/var/www/metadata/ziptmp` exists and is writable (or mount `META_DIR` to a large volume).

Closes #60.

---

## Changes 11/4/2025 (v1.8.5)

release(v1.8.5): ci: reduce pre-run delay to 2 min and add the missing `needs: delay`; final test

- No-change release, just testing.

---

## Changes 11/4/2025 (v1.8.4)

release(v1.8.4): ci: add 3-min pre-run delay to avoid workflow_run races
.htaccess

@@ -1,12 +1,13 @@
 # --------------------------------
 # FileRise portable .htaccess
 # --------------------------------
-Options -Indexes
+Options -Indexes -Multiviews
 DirectoryIndex index.html

+# ---------------- Security: dotfiles ----------------
 <IfModule mod_authz_core.c>
-  # Block dotfiles like .env, .git, etc., but allow ACME under .well-known
-  <FilesMatch "^\.(?!well-known(?:/|$))">
+  # Block direct access to dotfiles like .env, .gitignore, etc.
+  <FilesMatch "^\..*">
     Require all denied
   </FilesMatch>
 </IfModule>
@@ -15,15 +16,24 @@ DirectoryIndex index.html
 <IfModule mod_rewrite.c>
 RewriteEngine On

-# Never redirect local/dev hosts
-RewriteCond %{HTTP_HOST} ^(localhost|127\.0\.0\.1|fr\.local|192\.168\.[0-9]+\.[0-9]+)$ [NC]
-RewriteRule ^ - [L]
-
-# Let ACME http-01 pass BEFORE any redirect (needed for auto-renew)
+# 0) Let ACME http-01 pass BEFORE any other rule (needed for auto-renew)
 RewriteCond %{REQUEST_URI} ^/.well-known/acme-challenge/
 RewriteRule - - [L]

-# HTTPS redirect (enable ONE of these, comment the other)
+# 1) Block hidden files/dirs anywhere EXCEPT .well-known (path-aware)
+# Prevents requests like /.env, /.git/config, /.ssh/id_rsa, etc.
+RewriteRule "(^|/)\.(?!well-known/)" - [F]
+
+# 2) Deny direct access to PHP outside /api/
+# This stops scanners from hitting /index.php, /admin.php, /wso.php, etc.
+RewriteCond %{REQUEST_URI} !^/api/
+RewriteRule \.php$ - [F]
+
+# 3) Never redirect local/dev hosts
+RewriteCond %{HTTP_HOST} ^(localhost|127\.0\.0\.1|fr\.local|192\.168\.[0-9]+\.[0-9]+)$ [NC]
+RewriteRule ^ - [L]
+
+# 4) HTTPS redirect (enable ONE of these, comment the other)

 # A) Direct TLS on this server
 #RewriteCond %{HTTPS} !=on
@@ -35,7 +45,7 @@ RewriteRule - - [L]
 #RewriteCond %{HTTPS} !=on
 #RewriteRule ^ https://%{HTTP_HOST}%{REQUEST_URI} [L,R=301]

-# Mark versioned assets (?v=...) with env flag for caching rules below
+# 5) Mark versioned assets (?v=...) with env flag for caching rules below
 RewriteCond %{QUERY_STRING} (^|&)v= [NC]
 RewriteRule ^ - [E=IS_VER:1]
 </IfModule>
@@ -98,7 +108,6 @@ RewriteRule ^ - [E=IS_VER:1]

 # ---------------- Compression ----------------
 <IfModule mod_brotli.c>
-  # Do NOT set BrotliCompressionQuality in .htaccess (vhost/server only)
   AddOutputFilterByType BROTLI_COMPRESS text/html text/css application/javascript application/json image/svg+xml
 </IfModule>
 <IfModule mod_deflate.c>
public/api/file/downloadZipFile.php (new file, 24 lines)

@@ -0,0 +1,24 @@
<?php
// public/api/file/downloadZipFile.php

/**
 * @OA\Get(
 *     path="/api/file/downloadZipFile.php",
 *     summary="Download a finished ZIP by token",
 *     description="Streams the zip once; token is one-shot.",
 *     operationId="downloadZipFile",
 *     tags={"Files"},
 *     security={{"cookieAuth": {}}},
 *     @OA\Parameter(name="k", in="query", required=true, @OA\Schema(type="string"), description="Job token"),
 *     @OA\Parameter(name="name", in="query", required=false, @OA\Schema(type="string"), description="Suggested filename"),
 *     @OA\Response(response=200, description="ZIP stream"),
 *     @OA\Response(response=401, description="Unauthorized"),
 *     @OA\Response(response=404, description="Not found")
 * )
 */

require_once __DIR__ . '/../../../config/config.php';
require_once PROJECT_ROOT . '/src/controllers/FileController.php';

$controller = new FileController();
$controller->downloadZipFile();
public/api/file/zipStatus.php (new file, 23 lines)

@@ -0,0 +1,23 @@
<?php
// public/api/file/zipStatus.php

/**
 * @OA\Get(
 *     path="/api/file/zipStatus.php",
 *     summary="Check status of a background ZIP build",
 *     description="Returns status for the authenticated user's token.",
 *     operationId="zipStatus",
 *     tags={"Files"},
 *     security={{"cookieAuth": {}}},
 *     @OA\Parameter(name="k", in="query", required=true, @OA\Schema(type="string"), description="Job token"),
 *     @OA\Response(response=200, description="Status payload"),
 *     @OA\Response(response=401, description="Unauthorized"),
 *     @OA\Response(response=404, description="Not found")
 * )
 */

require_once __DIR__ . '/../../../config/config.php';
require_once PROJECT_ROOT . '/src/controllers/FileController.php';

$controller = new FileController();
$controller->zipStatus();
@@ -1925,4 +1925,18 @@ body {
 .status-badge.progress {
   border-color: rgba(250,204,21,.35); /* amber-ish */
   background: rgba(250,204,21,.15);
 }
+#downloadProgressModal .modal-body,
+#downloadProgressModal .rise-modal-body,
+#downloadProgressModal .modal-content {
+  min-height: 88px;
+  overflow: hidden;
+}
+
+#downloadProgressText {
+  white-space: nowrap;
+  overflow: hidden;
+  text-overflow: ellipsis;
+}
+
+#downloadProgressBarOuter { height: 10px; }
adminPanel.js

@@ -58,7 +58,7 @@ function wireHeaderTitleLive() {

 function renderMaskedInput({ id, label, hasValue, isSecret = false }) {
   const type = isSecret ? 'password' : 'text';
-  const disabled = hasValue ? 'disabled data-replace="0" placeholder="•••••• (saved)"' : '';
+  const disabled = hasValue ? 'disabled data-replace="0" placeholder="•••••• (saved)"' : 'data-replace="1"';
   const replaceBtn = hasValue
     ? `<button type="button" class="btn btn-sm btn-outline-secondary" data-replace-for="${id}">Replace</button>`
     : '';
@@ -1070,11 +1070,15 @@ function handleSave() {
   const idEl = document.getElementById("oidcClientId");
   const scEl = document.getElementById("oidcClientSecret");

-  if (idEl?.dataset.replace === '1' && idEl.value.trim() !== '') {
-    payload.oidc.clientId = idEl.value.trim();
+  const idVal = idEl?.value.trim() || '';
+  const secVal = scEl?.value.trim() || '';
+  const idFirstTime = idEl && !idEl.hasAttribute('data-replace');   // no saved value yet
+  const secFirstTime = scEl && !scEl.hasAttribute('data-replace');  // no saved value yet
+  if ((idEl?.dataset.replace === '1' || idFirstTime) && idVal !== '') {
+    payload.oidc.clientId = idVal;
   }
-  if (scEl?.dataset.replace === '1' && scEl.value.trim() !== '') {
-    payload.oidc.clientSecret = scEl.value.trim();
+  if ((scEl?.dataset.replace === '1' || secFirstTime) && secVal !== '') {
+    payload.oidc.clientSecret = secVal;
   }

   const ooSecretEl = document.getElementById("ooJwtSecret");
@@ -119,7 +119,7 @@ export async function handleCreateFile(e) {
     method: 'POST',
     credentials: 'include',
     headers: {
-      'Content-Type':'application/json',
+      'Content-Type': 'application/json',
       'X-CSRF-Token': window.csrfToken
     },
     // ⚠️ must send `name`, not `filename`
@@ -139,7 +139,7 @@ export async function handleCreateFile(e) {
 document.addEventListener('DOMContentLoaded', () => {
   const cancel = document.getElementById('cancelCreateFile');
   const confirm = document.getElementById('confirmCreateFile');
   if (cancel) cancel.addEventListener('click', () => document.getElementById('createFileModal').style.display = 'none');
   if (confirm) confirm.addEventListener('click', handleCreateFile);
 });

@@ -265,7 +265,7 @@ document.addEventListener("DOMContentLoaded", () => {
   const cancelZipBtn = document.getElementById("cancelDownloadZip");
   const confirmZipBtn = document.getElementById("confirmDownloadZip");
   const cancelCreate = document.getElementById('cancelCreateFile');

   if (cancelCreate) {
     cancelCreate.addEventListener('click', () => {
       document.getElementById('createFileModal').style.display = 'none';
@@ -305,7 +305,7 @@ document.addEventListener("DOMContentLoaded", () => {
         showToast(err.message || t('error_creating_file'));
       }
     });
-    attachEnterKeyListener('createFileModal','confirmCreateFile');
+    attachEnterKeyListener('createFileModal', 'confirmCreateFile');
   }

   // 1) Cancel button hides the name modal
@@ -321,63 +321,187 @@ document.addEventListener("DOMContentLoaded", () => {
     confirmZipBtn.addEventListener("click", async () => {
       // a) Validate ZIP filename
       let zipName = document.getElementById("zipFileNameInput").value.trim();
-      if (!zipName) {
-        showToast("Please enter a name for the zip file.");
-        return;
-      }
-      if (!zipName.toLowerCase().endsWith(".zip")) {
-        zipName += ".zip";
-      }
+      if (!zipName) { showToast("Please enter a name for the zip file."); return; }
+      if (!zipName.toLowerCase().endsWith(".zip")) zipName += ".zip";

-      // b) Hide the name‐input modal, show the spinner modal
+      // b) Hide the name‐input modal, show the progress modal
       zipNameModal.style.display = "none";
       progressModal.style.display = "block";

-      // c) (Optional) update the “Preparing…” text if you gave it an ID
+      // c) Title text (optional)
       const titleEl = document.getElementById("downloadProgressTitle");
       if (titleEl) titleEl.textContent = `Preparing ${zipName}…`;

-      try {
-        // d) POST and await the ZIP blob
-        const res = await fetch("/api/file/downloadZip.php", {
-          method: "POST",
-          credentials: "include",
-          headers: {
-            "Content-Type": "application/json",
-            "X-CSRF-Token": window.csrfToken
-          },
-          body: JSON.stringify({
-            folder: window.currentFolder || "root",
-            files: window.filesToDownload
-          })
-        });
-        if (!res.ok) {
-          const txt = await res.text();
-          throw new Error(txt || `Status ${res.status}`);
-        }
-
-        const blob = await res.blob();
-        if (!blob || blob.size === 0) {
-          throw new Error("Received empty ZIP file.");
-        }
-
-        // e) Hand off to the browser’s download manager
-        const url = URL.createObjectURL(blob);
-        const a = document.createElement("a");
-        a.href = url;
-        a.download = zipName;
-        document.body.appendChild(a);
-        a.click();
-        URL.revokeObjectURL(url);
-        a.remove();
-
-      } catch (err) {
-        console.error("Error downloading ZIP:", err);
-        showToast("Error: " + err.message);
-      } finally {
-        // f) Always hide spinner modal
-        progressModal.style.display = "none";
+      // d) Queue the job
+      const res = await fetch("/api/file/downloadZip.php", {
+        method: "POST",
+        credentials: "include",
+        headers: { "Content-Type": "application/json", "X-CSRF-Token": window.csrfToken },
+        body: JSON.stringify({ folder: window.currentFolder || "root", files: window.filesToDownload })
+      });
+      const jsr = await res.json().catch(() => ({}));
+      if (!res.ok || !jsr.ok) {
+        const msg = (jsr && jsr.error) ? jsr.error : `Status ${res.status}`;
+        throw new Error(msg);
       }
+      const token = jsr.token;
+      const statusUrl = jsr.statusUrl;
+      const downloadUrl = jsr.downloadUrl + "&name=" + encodeURIComponent(zipName);
+
+      // Ensure a progress UI exists in the modal
+      function ensureZipProgressUI() {
+        const modalEl = document.getElementById("downloadProgressModal");
+        if (!modalEl) {
+          // really shouldn't happen, but fall back to body
+          console.warn("downloadProgressModal not found; falling back to document.body");
+        }
+        // Prefer a dedicated content node inside the modal
+        let host =
+          (modalEl && modalEl.querySelector("#downloadProgressContent")) ||
+          (modalEl && modalEl.querySelector(".modal-body")) ||
+          (modalEl && modalEl.querySelector(".rise-modal-body")) ||
+          (modalEl && modalEl.querySelector(".modal-content")) ||
+          (modalEl && modalEl.querySelector(".content")) ||
+          null;
+
+        // If no suitable container, create one inside the modal
+        if (!host) {
+          host = document.createElement("div");
+          host.id = "downloadProgressContent";
+          (modalEl || document.body).appendChild(host);
+        }
+
+        // Helper: ensure/move an element with given id into host
+        function ensureInHost(id, tag, init) {
+          let el = document.getElementById(id);
+          if (el && el.parentElement !== host) host.appendChild(el); // move if it exists elsewhere
+          if (!el) {
+            el = document.createElement(tag);
+            el.id = id;
+            if (typeof init === "function") init(el);
+            host.appendChild(el);
+          }
+          return el;
+        }
+
+        // Title
+        const title = ensureInHost("downloadProgressTitle", "div", (el) => {
+          el.style.marginBottom = "8px";
+          el.textContent = "Preparing…";
+        });
+
+        // Progress bar (native <progress>)
+        const bar = (function () {
+          let el = document.getElementById("downloadProgressBar");
+          if (el && el.parentElement !== host) host.appendChild(el); // move into modal
+          if (!el) {
+            el = document.createElement("progress");
+            el.id = "downloadProgressBar";
+            host.appendChild(el);
+          }
+          el.max = 100;
+          el.value = 0;
+          el.style.display = ""; // override any inline display:none
+          el.style.width = "100%";
+          el.style.height = "1.1em";
+          return el;
+        })();
+
+        // Text line
+        const text = ensureInHost("downloadProgressText", "div", (el) => {
+          el.style.marginTop = "8px";
+          el.style.fontSize = "0.9rem";
+          el.style.whiteSpace = "nowrap";
+          el.style.overflow = "hidden";
+          el.style.textOverflow = "ellipsis";
+        });
+
+        // Optional spinner hider
+        const hideSpinner = () => {
+          const sp = document.getElementById("downloadSpinner");
+          if (sp) sp.style.display = "none";
+        };
+
+        return { bar, text, title, hideSpinner };
+      }
+
+      function humanBytes(n) {
+        if (!Number.isFinite(n) || n < 0) return "";
+        const u = ["B", "KB", "MB", "GB", "TB"]; let i = 0, x = n;
+        while (x >= 1024 && i < u.length - 1) { x /= 1024; i++; }
+        return x.toFixed(x >= 10 || i === 0 ? 0 : 1) + " " + u[i];
+      }
+      function mmss(sec) {
+        sec = Math.max(0, sec | 0);
+        const m = (sec / 60) | 0, s = sec % 60;
+        return `${m}:${s.toString().padStart(2, '0')}`;
+      }
+
+      const ui = ensureZipProgressUI();
+      const t0 = Date.now();
+
+      // e) Poll until ready
+      while (true) {
+        await new Promise(r => setTimeout(r, 1200));
+        const s = await fetch(`${statusUrl}&_=${Date.now()}`, {
+          credentials: "include", cache: "no-store",
+        }).then(r => r.json());
+
+        if (s.error) throw new Error(s.error);
+        if (ui.title) ui.title.textContent = `Preparing ${zipName}…`;
+
+        // --- RENDER PROGRESS ---
+        if (typeof s.pct === "number" && ui.bar && ui.text) {
+          if ((s.phase !== 'finalizing') && (s.pct < 99)) {
+            ui.hideSpinner && ui.hideSpinner();
+            const filesDone = s.filesDone ?? 0;
+            const filesTotal = s.filesTotal ?? 0;
+            const bytesDone = s.bytesDone ?? 0;
+            const bytesTotal = s.bytesTotal ?? 0;
+
+            // Determinate 0–98% while enumerating
+            const pct = Math.max(0, Math.min(98, s.pct | 0));
+            if (!ui.bar.hasAttribute("value")) ui.bar.value = 0;
+            ui.bar.value = pct;
+            ui.text.textContent =
+              `${pct}% — ${filesDone}/${filesTotal} files, ${humanBytes(bytesDone)} / ${humanBytes(bytesTotal)}`;
+          } else {
+            // FINALIZING: keep progress at 100% and show timer + selected totals
+            if (!ui.bar.hasAttribute("value")) ui.bar.value = 100;
+            ui.bar.value = 100; // lock at 100 during finalizing
+            const since = s.finalizeAt ? Math.max(0, (Date.now() / 1000 | 0) - (s.finalizeAt | 0)) : 0;
+            const selF = s.selectedFiles ?? s.filesTotal ?? 0;
+            const selB = s.selectedBytes ?? s.bytesTotal ?? 0;
+            ui.text.textContent = `Finalizing… ${mmss(since)} — ${selF} file${selF === 1 ? '' : 's'}, ~${humanBytes(selB)}`;
+          }
+        } else if (ui.text) {
+          ui.text.textContent = "Still preparing…";
+        }
+        // --- /RENDER ---
+
+        if (s.ready) {
+          // Snap to 100 and close modal just before download
+          if (ui.bar) { ui.bar.max = 100; ui.bar.value = 100; }
+          progressModal.style.display = "none";
+          await new Promise(r => setTimeout(r, 0));
+          break;
+        }
+        if (Date.now() - t0 > 15 * 60 * 1000) throw new Error("Timed out preparing ZIP");
+      }
+
+      // f) Trigger download
+      const a = document.createElement("a");
+      a.href = downloadUrl;
+      a.download = zipName;
+      a.style.display = "none";
+      document.body.appendChild(a);
+      a.click();
+      a.remove();
+
+      // g) Reset for next time
+      if (ui.bar) ui.bar.value = 0;
+      if (ui.text) ui.text.textContent = "";
+      if (Array.isArray(window.filesToDownload)) window.filesToDownload = [];
     });
   }
 });
@@ -694,10 +818,10 @@ document.addEventListener("DOMContentLoaded", () => {
   });

 document.addEventListener('DOMContentLoaded', () => {
   const btn = document.getElementById('createBtn');
   const menu = document.getElementById('createMenu');
   const fileOpt = document.getElementById('createFileOption');
-  const folderOpt= document.getElementById('createFolderOption');
+  const folderOpt = document.getElementById('createFolderOption');

   // Toggle dropdown on click
   btn.addEventListener('click', (e) => {
public/js/version.js

@@ -1,2 +1,2 @@
 // generated by CI
-window.APP_VERSION = 'v1.8.3';
+window.APP_VERSION = 'v1.8.9';
src/cli/zip_worker.php (new file, 179 lines)

@@ -0,0 +1,179 @@
#!/usr/bin/env php
<?php
declare(strict_types=1);

require __DIR__ . '/../../config/config.php';
require __DIR__ . '/../../src/models/FileModel.php';

$token = $argv[1] ?? '';
$token = preg_replace('/[^a-f0-9]/','',$token);
if ($token === '') { fwrite(STDERR, "No token\n"); exit(1); }

$root   = rtrim((string)META_DIR, '/\\') . '/ziptmp';
$tokDir = $root . '/.tokens';
$logDir = $root . '/.logs';
@mkdir($tokDir, 0775, true);
@mkdir($logDir, 0775, true);

$tokFile = $tokDir . '/' . $token . '.json';
$logFile = $logDir . '/WORKER-' . $token . '.log';

file_put_contents($logFile, "[".date('c')."] worker start token={$token}\n", FILE_APPEND);

// Keep libzip temp files on same FS as final zip (prevents cross-device rename failures)
@mkdir($root, 0775, true);
@putenv('TMPDIR='.$root);
@ini_set('sys_temp_dir', $root);

// Small janitor: purge old tokens/logs (> 6h)
$now = time();
foreach (glob($tokDir.'/*.json') ?: [] as $f) { if (is_file($f) && ($now - @filemtime($f)) > 21600) @unlink($f); }
foreach (glob($logDir.'/WORKER-*.log') ?: [] as $f) { if (is_file($f) && ($now - @filemtime($f)) > 21600) @unlink($f); }

// Helpers to read/write the token file safely
$job = json_decode((string)@file_get_contents($tokFile), true) ?: [];

$save = function() use (&$job, $tokFile) {
    @file_put_contents($tokFile, json_encode($job, JSON_PRETTY_PRINT), LOCK_EX);
    @clearstatcache(true, $tokFile);
};

$touchPhase = function(string $phase) use (&$job, $save) {
    $job['phase'] = $phase;
    $save();
};

// Init timing
if (empty($job['startedAt'])) {
    $job['startedAt'] = time();
}
$job['status'] = 'working';
$job['error']  = null;
$save();

// Build the list of files to zip using the model (same validation FileRise uses)
try {
    // Reuse FileModel’s validation by calling it but not keeping the zip; we’ll enumerate sizes here.
    $folder = (string)($job['folder'] ?? 'root');
    $names  = (array)($job['files'] ?? []);

    // Resolve folder path similarly to createZipArchive
    $baseDir = realpath(UPLOAD_DIR);
    if ($baseDir === false) {
        throw new RuntimeException('Uploads directory not configured correctly.');
    }
    if (strtolower($folder) === 'root' || $folder === "") {
        $folderPathReal = $baseDir;
    } else {
        if (strpos($folder, '..') !== false) throw new RuntimeException('Invalid folder name.');
        $parts = explode('/', trim($folder, "/\\ "));
        foreach ($parts as $part) {
            if ($part === '' || !preg_match(REGEX_FOLDER_NAME, $part)) {
                throw new RuntimeException('Invalid folder name.');
            }
        }
        $folderPath = rtrim(UPLOAD_DIR, '/\\') . DIRECTORY_SEPARATOR . implode(DIRECTORY_SEPARATOR, $parts);
        $folderPathReal = realpath($folderPath);
        if ($folderPathReal === false || strpos($folderPathReal, $baseDir) !== 0) {
            throw new RuntimeException('Folder not found.');
        }
    }

    // Collect files (only regular files)
    $filesToZip = [];
    foreach ($names as $nm) {
        $bn = basename(trim((string)$nm));
        if (!preg_match(REGEX_FILE_NAME, $bn)) continue;
        $fp = $folderPathReal . DIRECTORY_SEPARATOR . $bn;
        if (is_file($fp)) $filesToZip[] = $fp;
    }
    if (!$filesToZip) throw new RuntimeException('No valid files to zip.');

    // Totals for progress
    $filesTotal = count($filesToZip);
    $bytesTotal = 0;
    foreach ($filesToZip as $fp) {
        $sz = @filesize($fp);
        if ($sz !== false) $bytesTotal += (int)$sz;
    }

    $job['filesTotal'] = $filesTotal;
    $job['bytesTotal'] = $bytesTotal;
    $job['filesDone']  = 0;
    $job['bytesDone']  = 0;
    $job['pct']        = 0;
    $job['current']    = null;
    $job['phase']      = 'zipping';
    $save();

    // Create final zip path in META_DIR/ziptmp
    $zipName = 'download-' . date('Ymd-His') . '-' . bin2hex(random_bytes(4)) . '.zip';
    $zipPath = $root . DIRECTORY_SEPARATOR . $zipName;

    $zip = new ZipArchive();
    if ($zip->open($zipPath, ZipArchive::CREATE | ZipArchive::OVERWRITE) !== true) {
        throw new RuntimeException('Could not create zip archive.');
    }

    // Enumerate files; report up to 98%
    $bytesDone = 0;
    $filesDone = 0;
    foreach ($filesToZip as $fp) {
        $bn = basename($fp);
        $zip->addFile($fp, $bn);

        $filesDone++;
        $sz = @filesize($fp);
        if ($sz !== false) $bytesDone += (int)$sz;

        $job['filesDone'] = $filesDone;
        $job['bytesDone'] = $bytesDone;
        $job['current']   = $bn;

        $pct = ($bytesTotal > 0) ? (int) floor(($bytesDone / $bytesTotal) * 98) : 0;
        if ($pct < 0) $pct = 0;
        if ($pct > 98) $pct = 98;
        if ($pct > (int)($job['pct'] ?? 0)) $job['pct'] = $pct;

        $save();
    }

    // Finalizing (this is where libzip writes & renames)
    $job['pct']        = max((int)($job['pct'] ?? 0), 99);
    $job['phase']      = 'finalizing';
    $job['finalizeAt'] = time();

    // Publish selected totals for a truthful UI during finalizing,
    // and clear incremental fields so the UI doesn't show "7/7 14 GB / 14 GB" prematurely.
    $job['selectedFiles'] = $filesTotal;
    $job['selectedBytes'] = $bytesTotal;
    $job['filesDone'] = null;
    $job['bytesDone'] = null;
    $job['current']   = null;

    $save();

    // ---- finalize the zip on disk ----
    $ok = $zip->close();
    $statusStr = method_exists($zip, 'getStatusString') ? $zip->getStatusString() : '';

    if (!$ok || !is_file($zipPath)) {
        $job['status'] = 'error';
        $job['error']  = 'Failed to finalize ZIP' . ($statusStr ? " ($statusStr)" : '');
        $save();
        file_put_contents($logFile, "[".date('c')."] error: ".$job['error']."\n", FILE_APPEND);
        exit(0);
    }

    $job['status']  = 'done';
    $job['zipPath'] = $zipPath;
    $job['pct']     = 100;
    $job['phase']   = 'finalized';
    $save();
    file_put_contents($logFile, "[".date('c')."] done zip={$zipPath}\n", FILE_APPEND);
} catch (Throwable $e) {
    $job['status'] = 'error';
    $job['error']  = 'Worker exception: '.$e->getMessage();
    $save();
    file_put_contents($logFile, "[".date('c')."] exception: ".$e->getMessage()."\n", FILE_APPEND);
}
src/controllers/FileController.php

@@ -190,6 +190,59 @@ class FileController
         return $ok ? null : "Forbidden: folder scope violation.";
     }

+    private function spawnZipWorker(string $token, string $tokFile, string $logDir): array
+    {
+        $worker = realpath(PROJECT_ROOT . '/src/cli/zip_worker.php');
+        if (!$worker || !is_file($worker)) {
+            return ['ok'=>false, 'error'=>'zip_worker.php not found'];
+        }
+
+        // Find a PHP CLI binary that actually works
+        $candidates = array_values(array_filter([
+            PHP_BINARY ?: null,
+            '/usr/local/bin/php',
+            '/usr/bin/php',
+            '/bin/php'
+        ]));
+        $php = null;
+        foreach ($candidates as $bin) {
+            if (!$bin) continue;
+            $rc = 1;
+            @exec(escapeshellcmd($bin).' -v >/dev/null 2>&1', $o, $rc);
+            if ($rc === 0) { $php = $bin; break; }
+        }
+        if (!$php) {
+            return ['ok'=>false, 'error'=>'No working php CLI found'];
+        }
+
+        $logFile = $logDir . DIRECTORY_SEPARATOR . 'WORKER-' . $token . '.log';
+
+        // Ensure TMPDIR is on the same FS as the final zip; actually apply it to the child process.
+        $tmpDir = rtrim((string)META_DIR, '/\\') . '/ziptmp';
+        @mkdir($tmpDir, 0775, true);
+
+        // Build one sh -c string so env + nohup + echo $! are in the same shell
+        $cmdStr =
+            'export TMPDIR=' . escapeshellarg($tmpDir) . ' ; ' .
+            'nohup ' . escapeshellcmd($php) . ' ' . escapeshellarg($worker) . ' ' . escapeshellarg($token) .
+            ' >> ' . escapeshellarg($logFile) . ' 2>&1 & echo $!';
+
+        $pid = @shell_exec('/bin/sh -c ' . escapeshellarg($cmdStr));
+        $pid = is_string($pid) ? (int)trim($pid) : 0;
+
+        // Persist spawn metadata into token (best-effort)
+        $job = json_decode((string)@file_get_contents($tokFile), true) ?: [];
+        $job['spawn'] = [
+            'ts'  => time(),
+            'php' => $php,
+            'pid' => $pid,
+            'log' => $logFile
+        ];
+        @file_put_contents($tokFile, json_encode($job, JSON_PRETTY_PRINT), LOCK_EX);
+
+        return $pid > 0 ? ['ok'=>true] : ['ok'=>false, 'error'=>'spawn returned no PID'];
+    }
+
     // --- small helpers ---
     private function _jsonStart(): void {
         if (session_status() !== PHP_SESSION_ACTIVE) session_start();
@@ -665,78 +718,214 @@ public function deleteFiles()
         exit;
     }

-    public function downloadZip()
+    public function zipStatus()
     {
-        $this->_jsonStart();
-        try {
-            if (!$this->_checkCsrf()) return;
-            if (!$this->_requireAuth()) return;
-
-            $data = $this->_readJsonBody();
-            if (!is_array($data) || !isset($data['folder'], $data['files']) || !is_array($data['files'])) {
-                $this->_jsonOut(["error" => "Invalid input."], 400); return;
-            }
-
-            $folder = $this->_normalizeFolder($data['folder']);
-            $files  = $data['files'];
-            if (!$this->_validFolder($folder)) { $this->_jsonOut(["error"=>"Invalid folder name."], 400); return; }
-
-            $username = $_SESSION['username'] ?? '';
-            $perms = $this->loadPerms($username);
-
-            // Optional zip gate by account flag
-            if (!$this->isAdmin($perms) && !empty($perms['disableZip'])) {
-                $this->_jsonOut(["error" => "ZIP downloads are not allowed for your account."], 403); return;
+        if (!$this->_requireAuth()) { http_response_code(401); header('Content-Type: application/json'); echo json_encode(["error"=>"Unauthorized"]); return; }
+        $username = $_SESSION['username'] ?? '';
+        $token = isset($_GET['k']) ? preg_replace('/[^a-f0-9]/','',(string)$_GET['k']) : '';
+        if ($token === '' || strlen($token) < 8) { http_response_code(400); header('Content-Type: application/json'); echo json_encode(["error"=>"Bad token"]); return; }
+
+        $tokFile = rtrim((string)META_DIR, '/\\') . '/ziptmp/.tokens/' . $token . '.json';
+        if (!is_file($tokFile)) { http_response_code(404); header('Content-Type: application/json'); echo json_encode(["error"=>"Not found"]); return; }
+        $job = json_decode((string)@file_get_contents($tokFile), true) ?: [];
+        if (($job['user'] ?? '') !== $username) { http_response_code(403); header('Content-Type: application/json'); echo json_encode(["error"=>"Forbidden"]); return; }
+
+        $ready = (($job['status'] ?? '') === 'done') && !empty($job['zipPath']) && is_file($job['zipPath']);
+
+        $out = [
+            'status'     => $job['status'] ?? 'unknown',
+            'error'      => $job['error'] ?? null,
+            'ready'      => $ready,
+            // progress (if present)
+            'pct'        => $job['pct'] ?? null,
+            'filesDone'  => $job['filesDone'] ?? null,
+            'filesTotal' => $job['filesTotal'] ?? null,
+            'bytesDone'  => $job['bytesDone'] ?? null,
+            'bytesTotal' => $job['bytesTotal'] ?? null,
+            'current'    => $job['current'] ?? null,
+            'phase'      => $job['phase'] ?? null,
+            // timing (always include for UI)
+            'startedAt'  => $job['startedAt'] ?? null,
+            'finalizeAt' => $job['finalizeAt'] ?? null,
+        ];
+
+        if ($ready) {
+            $out['size'] = @filesize($job['zipPath']) ?: null;
+            $out['downloadUrl'] = '/api/file/downloadZipFile.php?k=' . urlencode($token);
         }

-            $ignoreOwnership = $this->isAdmin($perms)
-                || ($perms['bypassOwnership'] ?? (defined('DEFAULT_BYPASS_OWNERSHIP') ? DEFAULT_BYPASS_OWNERSHIP : false));
-
-            // Ancestor-owner counts as full view
-            $fullView = $ignoreOwnership
-                || ACL::canRead($username, $perms, $folder)
-                || $this->ownsFolderOrAncestor($folder, $username, $perms);
-            $ownOnly = !$fullView && ACL::hasGrant($username, $folder, 'read_own');
-
-            if (!$fullView && !$ownOnly) {
-                $this->_jsonOut(["error" => "Forbidden: no view access to this folder."], 403); return;
-            }
-
-            // If own-only, ensure all files are owned by the user
-            if ($ownOnly) {
-                $meta = $this->loadFolderMetadata($folder);
-                foreach ($files as $f) {
-                    $bn = basename((string)$f);
-                    if (!isset($meta[$bn]['uploader']) || strcasecmp((string)$meta[$bn]['uploader'], $username) !== 0) {
-                        $this->_jsonOut(["error" => "Forbidden: you are not the owner of '{$bn}'."], 403); return;
+        header('Content-Type: application/json');
+        header('Cache-Control: no-store, no-cache, must-revalidate, max-age=0');
+        header('Pragma: no-cache');
+        header('Expires: 0');
+        echo json_encode($out);
+    }
+
+    public function downloadZipFile()
+    {
+        if (!isset($_SESSION['authenticated']) || $_SESSION['authenticated'] !== true) { http_response_code(401); echo "Unauthorized"; return; }
+        $username = $_SESSION['username'] ?? '';
+        $token = isset($_GET['k']) ? preg_replace('/[^a-f0-9]/','',(string)$_GET['k']) : '';
+        if ($token === '' || strlen($token) < 8) { http_response_code(400); echo "Bad token"; return; }
+
+        $tokFile = rtrim((string)META_DIR, '/\\') . '/ziptmp/.tokens/' . $token . '.json';
+        if (!is_file($tokFile)) { http_response_code(404); echo "Not found"; return; }
+        $job = json_decode((string)@file_get_contents($tokFile), true) ?: [];
+        @unlink($tokFile); // one-shot token
+
+        if (($job['user'] ?? '') !== $username) { http_response_code(403); echo "Forbidden"; return; }
+        $zip = (string)($job['zipPath'] ?? '');
+        $zipReal = realpath($zip);
+        $root = realpath(rtrim((string)META_DIR, '/\\') . '/ziptmp');
+        if (!$zipReal || !$root || strpos($zipReal, $root) !== 0 || !is_file($zipReal)) { http_response_code(404); echo "Not found"; return; }
+
+        @session_write_close();
+        @set_time_limit(0);
+        @ignore_user_abort(true);
+        if (function_exists('apache_setenv')) @apache_setenv('no-gzip','1');
+        @ini_set('zlib.output_compression','0');
+        @ini_set('output_buffering','off');
+        while (ob_get_level()>0) @ob_end_clean();
+
+        @clearstatcache(true, $zipReal);
+        $name = isset($_GET['name']) ? preg_replace('/[^A-Za-z0-9._-]/','_', (string)$_GET['name']) : 'files.zip';
+        if ($name === '' || str_ends_with($name,'.')) $name = 'files.zip';
+        $size = (int)@filesize($zipReal);
+
+        header('X-Accel-Buffering: no');
+        header('X-Content-Type-Options: nosniff');
+        header('Content-Type: application/zip');
+        header('Content-Disposition: attachment; filename="'.$name.'"');
+        if ($size>0) header('Content-Length: '.$size);
+        header('Cache-Control: no-store, no-cache, must-revalidate');
+        header('Pragma: no-cache');
+
+        readfile($zipReal);
+        @unlink($zipReal);
+    }
+
+    public function downloadZip()
+    {
+        try {
+            if (!$this->_checkCsrf()) { $this->_jsonOut(["error"=>"Bad CSRF"],400); return; }
+            if (!$this->_requireAuth()) { $this->_jsonOut(["error"=>"Unauthorized"],401); return; }
+
+            $data = $this->_readJsonBody();
+            if (!is_array($data) || !isset($data['folder'], $data['files']) || !is_array($data['files'])) {
+                $this->_jsonOut(["error" => "Invalid input."], 400); return;
+            }
+
+            $folder = $this->_normalizeFolder($data['folder']);
+            $files  = $data['files'];
+            if (!$this->_validFolder($folder)) { $this->_jsonOut(["error"=>"Invalid folder name."], 400); return; }
+
+            $username = $_SESSION['username'] ?? '';
+            $perms = $this->loadPerms($username);
+
+            // Optional zip gate by account flag
+            if (!$this->isAdmin($perms) && !empty($perms['disableZip'])) {
+                $this->_jsonOut(["error" => "ZIP downloads are not allowed for your account."], 403); return;
+            }
+
+            $ignoreOwnership = $this->isAdmin($perms)
+                || ($perms['bypassOwnership'] ?? (defined('DEFAULT_BYPASS_OWNERSHIP') ? DEFAULT_BYPASS_OWNERSHIP : false));
+
+            // Ancestor-owner counts as full view
+            $fullView = $ignoreOwnership
+                || ACL::canRead($username, $perms, $folder)
+                || $this->ownsFolderOrAncestor($folder, $username, $perms);
+            $ownOnly = !$fullView && ACL::hasGrant($username, $folder, 'read_own');
+
+            if (!$fullView && !$ownOnly) { $this->_jsonOut(["error" => "Forbidden: no view access to this folder."], 403); return; }
+
+            // If own-only, ensure all files are owned by the user
+            if ($ownOnly) {
+                $meta = $this->loadFolderMetadata($folder);
+                foreach ($files as $f) {
+                    $bn = basename((string)$f);
+                    if (!isset($meta[$bn]['uploader']) || strcasecmp((string)$meta[$bn]['uploader'], $username) !== 0) {
+                        $this->_jsonOut(["error" => "Forbidden: you are not the owner of '{$bn}'."], 403); return;
                     }
                 }
             }

-            $result = FileModel::createZipArchive($folder, $files);
-            if (isset($result['error'])) {
-                $this->_jsonOut(["error" => $result['error']], 400); return;
+            $root   = rtrim((string)META_DIR, '/\\') . DIRECTORY_SEPARATOR . 'ziptmp';
+            $tokDir = $root . DIRECTORY_SEPARATOR . '.tokens';
+            $logDir = $root . DIRECTORY_SEPARATOR . '.logs';
+            if (!is_dir($tokDir)) @mkdir($tokDir, 0700, true);
|
||||||
|
if (!is_dir($logDir)) @mkdir($logDir, 0700, true);
|
||||||
|
@chmod($tokDir, 0700);
|
||||||
|
@chmod($logDir, 0700);
|
||||||
|
if (!is_dir($tokDir) || !is_writable($tokDir)) {
|
||||||
|
$this->_jsonOut(["error"=>"ZIP token dir not writable."],500); return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Light janitor: purge old tokens/logs > 6h (best-effort)
|
||||||
|
$now = time();
|
||||||
|
foreach ((glob($tokDir . DIRECTORY_SEPARATOR . '*.json') ?: []) as $tf) {
|
||||||
|
if (is_file($tf) && ($now - (int)@filemtime($tf)) > 21600) { @unlink($tf); }
|
||||||
|
}
|
||||||
|
foreach ((glob($logDir . DIRECTORY_SEPARATOR . 'WORKER-*.log') ?: []) as $lf) {
|
||||||
|
if (is_file($lf) && ($now - (int)@filemtime($lf)) > 21600) { @unlink($lf); }
|
||||||
|
}
|
||||||
|
|
||||||
|
// Per-user and global caps (simple anti-DoS)
|
||||||
|
$perUserCap = 2; // tweak if desired
|
||||||
|
$globalCap = 8; // tweak if desired
|
||||||
|
|
||||||
|
$tokens = glob($tokDir . DIRECTORY_SEPARATOR . '*.json') ?: [];
|
||||||
|
$mine = 0; $all = 0;
|
||||||
|
foreach ($tokens as $tf) {
|
||||||
|
$job = json_decode((string)@file_get_contents($tf), true) ?: [];
|
||||||
|
$st = $job['status'] ?? 'unknown';
|
||||||
|
if ($st === 'queued' || $st === 'working' || $st === 'finalizing') {
|
||||||
|
$all++;
|
||||||
|
if (($job['user'] ?? '') === $username) $mine++;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
if ($mine >= $perUserCap) { $this->_jsonOut(["error"=>"You already have ZIP jobs running. Try again shortly."], 429); return; }
|
||||||
|
if ($all >= $globalCap) { $this->_jsonOut(["error"=>"ZIP queue is busy. Try again shortly."], 429); return; }
|
||||||
|
|
||||||
$zipPath = $result['zipPath'] ?? null;
|
// Create job token
|
||||||
if (!$zipPath || !file_exists($zipPath)) { $this->_jsonOut(["error"=>"ZIP archive not found."], 500); return; }
|
$token = bin2hex(random_bytes(16));
|
||||||
|
$tokFile = $tokDir . DIRECTORY_SEPARATOR . $token . '.json';
|
||||||
|
$job = [
|
||||||
|
'user' => $username,
|
||||||
|
'folder' => $folder,
|
||||||
|
'files' => array_values($files),
|
||||||
|
'status' => 'queued',
|
||||||
|
'ctime' => time(),
|
||||||
|
'startedAt' => null,
|
||||||
|
'finalizeAt' => null,
|
||||||
|
'zipPath' => null,
|
||||||
|
'error' => null
|
||||||
|
];
|
||||||
|
if (file_put_contents($tokFile, json_encode($job, JSON_PRETTY_PRINT), LOCK_EX) === false) {
|
||||||
|
$this->_jsonOut(["error"=>"Failed to create zip job."],500); return;
|
||||||
|
}
|
||||||
|
|
||||||
// switch to file streaming
|
// Robust spawn (detect php CLI, log, record PID)
|
||||||
header_remove('Content-Type');
|
$spawn = $this->spawnZipWorker($token, $tokFile, $logDir);
|
||||||
header('Content-Type: application/zip');
|
if (!$spawn['ok']) {
|
||||||
header('Content-Disposition: attachment; filename="files.zip"');
|
$job['status'] = 'error';
|
||||||
header('Content-Length: ' . filesize($zipPath));
|
$job['error'] = 'Spawn failed: '.$spawn['error'];
|
||||||
header('Cache-Control: no-store, no-cache, must-revalidate');
|
@file_put_contents($tokFile, json_encode($job, JSON_PRETTY_PRINT), LOCK_EX);
|
||||||
header('Pragma: no-cache');
|
$this->_jsonOut(["error"=>"Failed to enqueue ZIP: ".$spawn['error']], 500);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
readfile($zipPath);
|
$this->_jsonOut([
|
||||||
@unlink($zipPath);
|
'ok' => true,
|
||||||
exit;
|
'token' => $token,
|
||||||
} catch (Throwable $e) {
|
'status' => 'queued',
|
||||||
error_log('FileController::downloadZip error: '.$e->getMessage().' @ '.$e->getFile().':'.$e->getLine());
|
'statusUrl' => '/api/file/zipStatus.php?k=' . urlencode($token),
|
||||||
$this->_jsonOut(['error' => 'Internal server error while preparing ZIP.'], 500);
|
'downloadUrl' => '/api/file/downloadZipFile.php?k=' . urlencode($token)
|
||||||
} finally { $this->_jsonEnd(); }
|
]);
|
||||||
|
} catch (Throwable $e) {
|
||||||
|
error_log('FileController::downloadZip enqueue error: '.$e->getMessage().' @ '.$e->getFile().':'.$e->getLine());
|
||||||
|
$this->_jsonOut(['error' => 'Internal error while queuing ZIP.'], 500);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
public function extractZip()
|
public function extractZip()
|
||||||
{
|
{
|
||||||
|
|||||||
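For readers following the controller change above: downloadZip() now only enqueues a job and returns a token, zipStatus.php reports progress for that token, and downloadZipFile.php streams the finished archive once via a one-shot token. Below is a minimal polling sketch of that flow. The statusUrl/downloadUrl paths and the JSON fields ('ready', 'error') come from the diff; the enqueue endpoint path, host, session cookie, and CSRF header name are assumptions.

```php
<?php
// Minimal polling client for the async ZIP flow shown in the diff above.
// Assumptions: the enqueue endpoint path (/api/file/downloadZip.php), the host,
// the session cookie, and the CSRF header name are illustrative; statusUrl,
// downloadUrl, 'ready', and 'error' come from the controller's JSON responses.

$base   = 'https://files.example.com';   // assumed host
$cookie = 'PHPSESSID=...';               // assumed authenticated session cookie
$csrf   = '...';                         // assumed CSRF token value

function callJson(string $url, ?array $body, string $cookie, string $csrf): array {
    $ch = curl_init($url);
    curl_setopt_array($ch, [
        CURLOPT_RETURNTRANSFER => true,
        CURLOPT_COOKIE         => $cookie,
        CURLOPT_HTTPHEADER     => ['Content-Type: application/json', 'X-CSRF-Token: ' . $csrf],
    ]);
    if ($body !== null) {
        curl_setopt($ch, CURLOPT_POSTFIELDS, json_encode($body)); // POST when a body is given
    }
    $raw = curl_exec($ch);
    curl_close($ch);
    return json_decode((string)$raw, true) ?: [];
}

// 1) Enqueue the job (endpoint name assumed).
$job = callJson($base . '/api/file/downloadZip.php',
                ['folder' => 'root', 'files' => ['a.txt', 'b.txt']], $cookie, $csrf);

// 2) Poll the statusUrl returned by the controller until ready or errored.
$status = [];
while (true) {
    sleep(2);
    $status = callJson($base . ($job['statusUrl'] ?? ''), null, $cookie, $csrf);
    if (!empty($status['ready']) || !empty($status['error'])) {
        break;
    }
}

// 3) Download once via the one-shot token URL.
if (!empty($status['ready'])) {
    $ch = curl_init($base . $status['downloadUrl']);
    $fp = fopen('files.zip', 'wb');
    curl_setopt_array($ch, [CURLOPT_FILE => $fp, CURLOPT_COOKIE => $cookie]);
    curl_exec($ch);
    curl_close($ch);
    fclose($fp);
}
```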
@@ -557,59 +557,104 @@ class FileModel {
      * @return array An associative array with either an "error" key or a "zipPath" key.
      */
     public static function createZipArchive($folder, $files) {
-        // Validate and build folder path.
-        $folder = trim($folder) ?: 'root';
+        // Purge old temp zips > 6h (best-effort)
+        $zipRoot = rtrim((string)META_DIR, '/\\') . DIRECTORY_SEPARATOR . 'ziptmp';
+        $now = time();
+        foreach ((glob($zipRoot . DIRECTORY_SEPARATOR . 'download-*.zip') ?: []) as $zp) {
+            if (is_file($zp) && ($now - (int)@filemtime($zp)) > 21600) { @unlink($zp); }
+        }
+
+        // Normalize and validate target folder
+        $folder = trim((string)$folder) ?: 'root';
         $baseDir = realpath(UPLOAD_DIR);
         if ($baseDir === false) {
             return ["error" => "Uploads directory not configured correctly."];
         }
 
         if (strtolower($folder) === 'root' || $folder === "") {
             $folderPathReal = $baseDir;
         } else {
-            // Prevent path traversal.
             if (strpos($folder, '..') !== false) {
                 return ["error" => "Invalid folder name."];
             }
-            $folderPath = rtrim(UPLOAD_DIR, '/\\') . DIRECTORY_SEPARATOR . trim($folder, "/\\ ");
+            $parts = explode('/', trim($folder, "/\\ "));
+            foreach ($parts as $part) {
+                if ($part === '' || !preg_match(REGEX_FOLDER_NAME, $part)) {
+                    return ["error" => "Invalid folder name."];
+                }
+            }
+            $folderPath = rtrim(UPLOAD_DIR, '/\\') . DIRECTORY_SEPARATOR . implode(DIRECTORY_SEPARATOR, $parts);
             $folderPathReal = realpath($folderPath);
             if ($folderPathReal === false || strpos($folderPathReal, $baseDir) !== 0) {
                 return ["error" => "Folder not found."];
             }
         }
 
-        // Validate each file and build an array of files to zip.
+        // Collect files to zip (only regular files in the chosen folder)
         $filesToZip = [];
         foreach ($files as $fileName) {
-            // Validate file name using REGEX_FILE_NAME.
-            $fileName = basename(trim($fileName));
+            $fileName = basename(trim((string)$fileName));
             if (!preg_match(REGEX_FILE_NAME, $fileName)) {
                 continue;
             }
             $fullPath = $folderPathReal . DIRECTORY_SEPARATOR . $fileName;
-            if (file_exists($fullPath)) {
+            // Skip symlinks (avoid archiving outside targets via links)
+            if (is_link($fullPath)) {
+                continue;
+            }
+            if (is_file($fullPath)) {
                 $filesToZip[] = $fullPath;
             }
         }
         if (empty($filesToZip)) {
             return ["error" => "No valid files found to zip."];
         }
 
-        // Create a temporary ZIP file.
-        $tempZip = tempnam(sys_get_temp_dir(), 'zip');
-        unlink($tempZip); // Remove the temp file so that ZipArchive can create a new file.
-        $tempZip .= '.zip';
-        $zip = new ZipArchive();
-        if ($zip->open($tempZip, ZipArchive::CREATE) !== TRUE) {
+        // Workspace on the big disk: META_DIR/ziptmp
+        $work = rtrim((string)META_DIR, '/\\') . DIRECTORY_SEPARATOR . 'ziptmp';
+        if (!is_dir($work)) { @mkdir($work, 0775, true); }
+        if (!is_dir($work) || !is_writable($work)) {
+            return ["error" => "ZIP temp dir not writable: " . $work];
+        }
+
+        // Optional sanity: ensure there is roughly enough free space
+        $totalSize = 0;
+        foreach ($filesToZip as $fp) {
+            $sz = @filesize($fp);
+            if ($sz !== false) $totalSize += (int)$sz;
+        }
+        $free = @disk_free_space($work);
+        // Add ~20MB overhead and a 5% cushion
+        if ($free !== false && $totalSize > 0) {
+            $needed = (int)ceil($totalSize * 1.05) + (20 * 1024 * 1024);
+            if ($free < $needed) {
+                return ["error" => "Insufficient free space in ZIP workspace."];
+            }
+        }
+
+        @set_time_limit(0);
+
+        // Create the ZIP path inside META_DIR/ziptmp (libzip temp stays on same FS)
+        $zipName = 'download-' . date('Ymd-His') . '-' . bin2hex(random_bytes(4)) . '.zip';
+        $zipPath = $work . DIRECTORY_SEPARATOR . $zipName;
+
+        $zip = new \ZipArchive();
+        if ($zip->open($zipPath, \ZipArchive::CREATE | \ZipArchive::OVERWRITE) !== true) {
             return ["error" => "Could not create zip archive."];
         }
-        // Add each file using its base name.
         foreach ($filesToZip as $filePath) {
+            // Add using basename at the root of the zip (matches current behavior)
             $zip->addFile($filePath, basename($filePath));
         }
-        $zip->close();
-        return ["zipPath" => $tempZip];
+        if (!$zip->close()) {
+            // Commonly indicates disk full at finalize
+            return ["error" => "Failed to finalize ZIP (disk full?)."];
+        }
+
+        // Success: controller will readfile() and unlink()
+        return ["zipPath" => $zipPath];
     }
 
     /**
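The free-space guard in createZipArchive() above requires roughly the total size of the selected files plus a 5% cushion and about 20 MB of overhead before it will build the archive. A standalone restatement of that arithmetic, with an illustrative function name that is not part of the codebase:

```php
<?php
// Illustrative restatement of the workspace check used in createZipArchive():
// needed = ceil(totalSize * 1.05) + 20 MiB, compared against disk_free_space().
function zipWorkspaceHasRoom(array $filePaths, string $workDir): bool {
    $totalSize = 0;
    foreach ($filePaths as $fp) {
        $sz = @filesize($fp);
        if ($sz !== false) {
            $totalSize += (int)$sz;
        }
    }
    $free = @disk_free_space($workDir);
    if ($free === false || $totalSize === 0) {
        return true; // same best-effort behavior: only reject when both sides can be measured
    }
    $needed = (int)ceil($totalSize * 1.05) + (20 * 1024 * 1024);
    return $free >= $needed;
}
```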
@@ -623,15 +668,23 @@ class FileModel {
         $errors = [];
         $allSuccess = true;
         $extractedFiles = [];
 
+        // Config toggles
+        $SKIP_DOTFILES = defined('SKIP_DOTFILES_ON_EXTRACT') ? (bool)SKIP_DOTFILES_ON_EXTRACT : true;
+
+        // Hard limits to mitigate zip-bombs (tweak via defines if you like)
+        $MAX_UNZIP_BYTES = defined('MAX_UNZIP_BYTES') ? (int)MAX_UNZIP_BYTES : (200 * 1024 * 1024 * 1024); // 200 GiB
+        $MAX_UNZIP_FILES = defined('MAX_UNZIP_FILES') ? (int)MAX_UNZIP_FILES : 20000;
+
         $baseDir = realpath(UPLOAD_DIR);
         if ($baseDir === false) {
             return ["error" => "Uploads directory not configured correctly."];
         }
 
         // Build target dir
         if (strtolower(trim($folder) ?: '') === "root") {
             $relativePath = "";
+            $folderNorm = "root";
         } else {
             $parts = explode('/', trim($folder, "/\\"));
             foreach ($parts as $part) {
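The extraction limits introduced above default to 200 GiB of uncompressed data and 20,000 entries, with hidden (dot) entries skipped, unless the deployment defines its own values. A hedged sketch of site-level overrides follows; the constant names are the ones read by extractZip(), while placing them in a local config file is an assumption about the deployment:

```php
<?php
// Example site-level overrides for the extraction guards introduced above.
// The constant names are the ones read by extractZip(); where this file lives
// (e.g. a local config include) is deployment-specific and assumed here.
if (!defined('MAX_UNZIP_BYTES')) {
    define('MAX_UNZIP_BYTES', 50 * 1024 * 1024 * 1024); // cap extraction at 50 GiB
}
if (!defined('MAX_UNZIP_FILES')) {
    define('MAX_UNZIP_FILES', 5000); // cap the number of extracted entries
}
if (!defined('SKIP_DOTFILES_ON_EXTRACT')) {
    define('SKIP_DOTFILES_ON_EXTRACT', true); // ignore hidden entries instead of extracting them
}
```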
@@ -640,9 +693,10 @@ class FileModel {
                 }
             }
             $relativePath = implode(DIRECTORY_SEPARATOR, $parts) . DIRECTORY_SEPARATOR;
+            $folderNorm = implode('/', $parts); // normalized with forward slashes for metadata helpers
         }
 
         $folderPath = $baseDir . DIRECTORY_SEPARATOR . $relativePath;
         if (!is_dir($folderPath) && !mkdir($folderPath, 0775, true)) {
             return ["error" => "Folder not found and cannot be created."];
         }
@@ -650,17 +704,74 @@ class FileModel {
         if ($folderPathReal === false || strpos($folderPathReal, $baseDir) !== 0) {
             return ["error" => "Folder not found."];
         }
 
-        // Prepare metadata container
-        $metadataFile = self::getMetadataFilePath($folder);
-        $destMetadata = file_exists($metadataFile) ? (json_decode(file_get_contents($metadataFile), true) ?: []) : [];
+        // Metadata cache per folder to avoid many reads/writes
+        $metaCache = [];
+        $getMeta = function(string $folderStr) use (&$metaCache) {
+            if (!isset($metaCache[$folderStr])) {
+                $mf = self::getMetadataFilePath($folderStr);
+                $metaCache[$folderStr] = file_exists($mf) ? (json_decode(file_get_contents($mf), true) ?: []) : [];
+            }
+            return $metaCache[$folderStr];
+        };
+        $putMeta = function(string $folderStr, array $meta) use (&$metaCache) {
+            $metaCache[$folderStr] = $meta;
+        };
+
         $safeFileNamePattern = REGEX_FILE_NAME;
         $actor = $_SESSION['username'] ?? 'Unknown';
         $now = date(DATE_TIME_FORMAT);
+
+        // --- Helpers ---
+
+        // Reject absolute paths, traversal, drive letters
+        $isUnsafeEntryPath = function(string $entry) : bool {
+            $e = str_replace('\\', '/', $entry);
+            if ($e === '' || str_contains($e, "\0")) return true;
+            if (str_starts_with($e, '/')) return true; // absolute nix path
+            if (preg_match('/^[A-Za-z]:[\\/]/', $e)) return true; // Windows drive
+            if (str_contains($e, '../') || str_contains($e, '..\\')) return true;
+            return false;
+        };
+
+        // Validate each subfolder name in the path using REGEX_FOLDER_NAME
+        $validEntrySubdirs = function(string $entry) : bool {
+            $e = trim(str_replace('\\', '/', $entry), '/');
+            if ($e === '') return true;
+            $dirs = explode('/', $e);
+            array_pop($dirs); // remove basename; we only validate directories here
+            foreach ($dirs as $d) {
+                if ($d === '' || !preg_match(REGEX_FOLDER_NAME, $d)) return false;
+            }
+            return true;
+        };
+
+        // NEW: hidden path detector — true if ANY segment starts with '.'
+        $isHiddenDotPath = function(string $entry) : bool {
+            $e = trim(str_replace('\\', '/', $entry), '/');
+            if ($e === '') return false;
+            foreach (explode('/', $e) as $seg) {
+                if ($seg !== '' && $seg[0] === '.') return true;
+            }
+            return false;
+        };
+
+        // Generalized metadata stamper: writes to the specified folder's metadata.json
+        $stampMeta = function(string $folderStr, string $basename) use (&$getMeta, &$putMeta, $actor, $now) {
+            $meta = $getMeta($folderStr);
+            $meta[$basename] = [
+                'uploaded' => $now,
+                'modified' => $now,
+                'uploader' => $actor,
+            ];
+            $putMeta($folderStr, $meta);
+        };
+
+        // No PHP execution time limit during heavy work
+        @set_time_limit(0);
 
         foreach ($files as $zipFileName) {
-            $zipBase = basename(trim($zipFileName));
+            $zipBase = basename(trim((string)$zipFileName));
             if (strtolower(substr($zipBase, -4)) !== '.zip') {
                 continue;
             }
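The helper closures added above classify each archive entry name before anything is written to disk. The following standalone restatement of two of those checks, with illustrative function names and sample entries, shows what gets rejected or skipped:

```php
<?php
// Standalone restatement of the entry checks used during the pre-scan:
// reject empty names, NUL bytes, absolute paths, drive letters, and traversal;
// separately flag paths where any segment starts with a dot (hidden entries).
function isUnsafeEntryPath(string $entry): bool {
    $e = str_replace('\\', '/', $entry);
    if ($e === '' || str_contains($e, "\0")) return true;
    if (str_starts_with($e, '/')) return true;             // absolute *nix path
    if (preg_match('/^[A-Za-z]:[\\/]/', $e)) return true;  // Windows drive path
    if (str_contains($e, '../') || str_contains($e, '..\\')) return true;
    return false;
}

function isHiddenDotPath(string $entry): bool {
    $e = trim(str_replace('\\', '/', $entry), '/');
    if ($e === '') return false;
    foreach (explode('/', $e) as $seg) {
        if ($seg !== '' && $seg[0] === '.') return true;
    }
    return false;
}

var_dump(isUnsafeEntryPath('docs/readme.txt'));   // false: plain relative entry
var_dump(isUnsafeEntryPath('../../etc/passwd'));  // true: parent traversal
var_dump(isUnsafeEntryPath('C:\\Windows\\x'));    // true: drive-letter path
var_dump(isHiddenDotPath('.git/config'));         // true: hidden segment
```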
@@ -669,76 +780,135 @@ class FileModel {
                 $allSuccess = false;
                 continue;
             }
 
             $zipFilePath = $folderPathReal . DIRECTORY_SEPARATOR . $zipBase;
             if (!file_exists($zipFilePath)) {
                 $errors[] = "$zipBase does not exist in folder.";
                 $allSuccess = false;
                 continue;
             }
 
-            $zip = new ZipArchive();
-            if ($zip->open($zipFilePath) !== TRUE) {
+            $zip = new \ZipArchive();
+            if ($zip->open($zipFilePath) !== true) {
                 $errors[] = "Could not open $zipBase as a zip file.";
                 $allSuccess = false;
                 continue;
             }
 
-            // Minimal Zip Slip guard: fail if any entry looks unsafe
+            // ---- Pre-scan: safety and size limits + build allow-list (skip dotfiles) ----
             $unsafe = false;
+            $totalUncompressed = 0;
+            $fileCount = 0;
+            $allowedEntries = []; // names to extract (files and/or directories)
+            $allowedFiles = []; // only files (for metadata stamping)
+
             for ($i = 0; $i < $zip->numFiles; $i++) {
-                $entryName = $zip->getNameIndex($i);
-                if ($entryName === false) { $unsafe = true; break; }
-                // Absolute paths, parent traversal, or Windows drive paths
-                if (strpos($entryName, '../') !== false || strpos($entryName, '..\\') !== false ||
-                    str_starts_with($entryName, '/') || preg_match('/^[A-Za-z]:[\\\\\\/]/', $entryName)) {
+                $stat = $zip->statIndex($i);
+                $name = $zip->getNameIndex($i);
+                if ($name === false || !$stat) { $unsafe = true; break; }
+                $isDir = str_ends_with($name, '/');
+
+                // Basic path checks
+                if ($isUnsafeEntryPath($name) || !$validEntrySubdirs($name)) { $unsafe = true; break; }
+
+                // Skip hidden entries (any segment starts with '.')
+                if ($SKIP_DOTFILES && $isHiddenDotPath($name)) {
+                    continue; // just ignore; do not treat as unsafe
+                }
+
+                // Detect symlinks via external attributes (best-effort)
+                $mode = (isset($stat['external_attributes']) ? (($stat['external_attributes'] >> 16) & 0xF000) : 0);
+                if ($mode === 0120000) { // S_IFLNK
                     $unsafe = true; break;
                 }
+
+                // Track limits only for files we're going to extract
+                if (!$isDir) {
+                    $fileCount++;
+                    $sz = isset($stat['size']) ? (int)$stat['size'] : 0;
+                    $totalUncompressed += $sz;
+                    if ($fileCount > $MAX_UNZIP_FILES || $totalUncompressed > $MAX_UNZIP_BYTES) {
+                        $unsafe = true; break;
+                    }
+                    $allowedFiles[] = $name;
+                }
+
+                $allowedEntries[] = $name;
             }
 
             if ($unsafe) {
                 $zip->close();
-                $errors[] = "$zipBase contains unsafe paths; extraction aborted.";
+                $errors[] = "$zipBase contains unsafe or oversized contents; extraction aborted.";
                 $allSuccess = false;
                 continue;
             }
 
-            // Extract safely (whole archive) after precheck
-            if (!$zip->extractTo($folderPathReal)) {
+            // Nothing to extract after filtering?
+            if (empty($allowedEntries)) {
+                $zip->close();
+                // Treat as success (nothing visible to extract), but informatively note it
+                $errors[] = "$zipBase contained only hidden or unsupported entries.";
+                $allSuccess = false; // or keep true if you'd rather not mark as failure
+                continue;
+            }
+
+            // ---- Extract ONLY the allowed entries ----
+            if (!$zip->extractTo($folderPathReal, $allowedEntries)) {
                 $errors[] = "Failed to extract $zipBase.";
                 $allSuccess = false;
                 $zip->close();
                 continue;
             }
 
-            // Stamp metadata for extracted regular files
-            for ($i = 0; $i < $zip->numFiles; $i++) {
-                $entryName = $zip->getNameIndex($i);
-                if ($entryName === false) continue;
-
-                $basename = basename($entryName);
+            // ---- Stamp metadata for files in the target folder AND nested subfolders (allowed files only) ----
+            foreach ($allowedFiles as $entryName) {
+                // Normalize entry path for filesystem checks
+                $entryFsRel = str_replace(['\\'], '/', $entryName);
+                $entryFsRel = ltrim($entryFsRel, '/'); // ensure relative
+
+                // Skip any directories (shouldn't be listed here, but defend anyway)
+                if ($entryFsRel === '' || str_ends_with($entryFsRel, '/')) continue;
+
+                $basename = basename($entryFsRel);
                 if ($basename === '' || !preg_match($safeFileNamePattern, $basename)) continue;
 
-                // Only stamp files that actually exist after extraction
-                $target = $folderPathReal . DIRECTORY_SEPARATOR . $entryName;
-                $isDir = str_ends_with($entryName, '/') || is_dir($target);
-                if ($isDir) continue;
-
-                $extractedFiles[] = $basename;
-                $destMetadata[$basename] = [
-                    'uploaded' => $now,
-                    'modified' => $now,
-                    'uploader' => $actor,
-                    // no tags by default
-                ];
+                // Decide which folder's metadata to update:
+                // - top-level files -> $folderNorm
+                // - nested files -> corresponding "<folderNorm>/<sub/dir>" (or "sub/dir" if folderNorm is 'root')
+                $relDir = str_replace('\\', '/', trim(dirname($entryFsRel), '.'));
+                $relDir = ($relDir === '.' ? '' : trim($relDir, '/'));
+
+                $targetFolderNorm = ($relDir === '' || $relDir === '.')
+                    ? $folderNorm
+                    : (($folderNorm === 'root') ? $relDir : ($folderNorm . '/' . $relDir));
+
+                // Only stamp if the file actually exists on disk after extraction
+                $targetAbs = $folderPathReal . DIRECTORY_SEPARATOR . str_replace('/', DIRECTORY_SEPARATOR, $entryFsRel);
+                if (is_file($targetAbs)) {
+                    // Preserve list behavior: only include top-level extracted names
+                    if ($relDir === '' || $relDir === '.') {
+                        $extractedFiles[] = $basename;
+                    }
+                    $stampMeta($targetFolderNorm, $basename);
+                }
             }
 
             $zip->close();
         }
 
-        if (file_put_contents($metadataFile, json_encode($destMetadata, JSON_PRETTY_PRINT), LOCK_EX) === false) {
-            $errors[] = "Failed to update metadata.";
-            $allSuccess = false;
+        // Persist metadata for any touched folder(s)
+        foreach ($metaCache as $folderStr => $meta) {
+            $metadataFile = self::getMetadataFilePath($folderStr);
+            if (!is_dir(dirname($metadataFile))) {
+                @mkdir(dirname($metadataFile), 0775, true);
+            }
+            if (file_put_contents($metadataFile, json_encode($meta, JSON_PRETTY_PRINT), LOCK_EX) === false) {
+                $errors[] = "Failed to update metadata for {$folderStr}.";
+                $allSuccess = false;
+            }
         }
 
         return $allSuccess
             ? ["success" => true, "extractedFiles" => $extractedFiles]
             : ["success" => false, "error" => implode(" ", $errors)];
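Two details of the pre-scan above are easy to miss: a symlink entry is recognized by the Unix file-type bits (0120000, S_IFLNK) carried in the high 16 bits of the entry's external attributes, and ZipArchive::extractTo() is handed an explicit array so only allow-listed entries are written. The sketch below shows the same idea; it reads the attribute word via ZipArchive::getExternalAttributesIndex() rather than statIndex(), and the archive and destination paths are illustrative:

```php
<?php
// Skip symlink entries and extract only an explicit allow-list.
// The diff reads external attributes from statIndex(); this sketch uses
// getExternalAttributesIndex(), which exposes the same high-16-bit Unix mode.
$zip = new \ZipArchive();
if ($zip->open('example.zip') === true) {                 // illustrative archive name
    $allowed = [];
    for ($i = 0; $i < $zip->numFiles; $i++) {
        $name = $zip->getNameIndex($i);
        if ($name === false) {
            continue;
        }
        $opsys = 0;
        $attr  = 0;
        if ($zip->getExternalAttributesIndex($i, $opsys, $attr)
            && $opsys === \ZipArchive::OPSYS_UNIX
            && ((($attr >> 16) & 0xF000) === 0120000)) {  // S_IFLNK: symlink entry
            continue;                                     // skip it entirely
        }
        $allowed[] = $name;
    }
    if ($allowed !== []) {
        // Passing an array limits extraction to exactly these entry names.
        $zip->extractTo('/tmp/extract-target', $allowed); // illustrative destination
    }
    $zip->close();
}
```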