Restructure repository into administration and website areas
This commit is contained in:
9
administration/Dockerfile
Normal file
9
administration/Dockerfile
Normal file
@@ -0,0 +1,9 @@
# Minimal PHP runtime image for serving the static site snapshot via
# PHP's built-in web server.
FROM php:8.3-cli-alpine

# All repository files live under /app inside the container.
WORKDIR /app

# Copy the full repository (administration scripts + website assets).
COPY . /app

# Port the built-in server listens on (published by compose / routed by Traefik).
EXPOSE 4173

# Route every request through the editor server script. opcache is disabled
# for CLI so file edits on mounted volumes take effect immediately.
CMD ["php", "-d", "opcache.enable_cli=0", "-S", "0.0.0.0:4173", "administration/scripts/editor_server.php"]
60
administration/README.md
Normal file
60
administration/README.md
Normal file
@@ -0,0 +1,60 @@
# Administration (Technik)

Dieser Bereich ist für Deployment, Betrieb und technische Wartung.

## Struktur

- `Dockerfile`
- `docker-compose.yml`
- `docker-compose.traefik-routes.yml`
- `scripts/` (Server, Extraktion, Route-Generator)
- `docs/` (Planungs-/Brainstorm-Dokumente)

## Voraussetzungen

- Docker + Docker Compose
- Traefik mit externem Netzwerk `proxy`

## Lokaler Editor-Server

```bash
./administration/scripts/run_editor_server.sh
```

Aufruf: `http://127.0.0.1:4173/`

## Traefik Deploy

```bash
docker compose -f administration/docker-compose.traefik-routes.yml up -d --build
```

## Neue Route erzeugen

```bash
./administration/scripts/add-webpage.sh webpage4 mydomain.de
```

Danach:

```bash
docker compose -f administration/docker-compose.traefik-routes.yml up -d --build
```

## Security / Editor Auth

- Unclaimed by default (Viewer-Rolle)
- Claim/Login/Reset über API im `editor_server.php`
- Sensible Dateien liegen unter `website/content/` und sind via HTTP blockiert:
  - `.editor-credentials.json`
  - `.editor-reset.json`
  - `.editor-rate-limit.json`

## Brute-Force Schutz

Buckets:

- `login_account`, `login_global`
- `reset_request_account`, `reset_request_global`
- `reset_confirm_account`, `reset_confirm_global`

Implementierung: `administration/scripts/editor_server.php`
73
administration/docker-compose.traefik-routes.yml
Normal file
73
administration/docker-compose.traefik-routes.yml
Normal file
@@ -0,0 +1,73 @@
# Traefik-routed site instances. Each service serves one snapshot copy from
# /srv/ikfreunde/<name>/ behind https://mydomain.de/<name>/.
# New service stanzas are appended by administration/scripts/add-webpage.sh,
# which inserts them just before the top-level "networks:" key.
services:
  webpage1:
    build:
      context: ..
      dockerfile: administration/Dockerfile
    container_name: ikfreunde-webpage1
    volumes:
      - /srv/ikfreunde/webpage1/ikfreunde.com.html:/app/website/ikfreunde.com.html
      - /srv/ikfreunde/webpage1/site-content.de.json:/app/website/content/site-content.de.json
    restart: unless-stopped
    networks:
      - proxy
    labels:
      - traefik.enable=true
      - traefik.http.routers.webpage1.rule=Host(`mydomain.de`) && PathPrefix(`/webpage1`)
      - traefik.http.routers.webpage1.entrypoints=websecure
      - traefik.http.routers.webpage1.tls=true
      - traefik.http.services.webpage1.loadbalancer.server.port=4173
      - traefik.http.routers.webpage1.middlewares=webpage1-slash,webpage1-strip
      # Redirect /webpage1 -> /webpage1/ so relative asset URLs resolve;
      # $$ escapes the capture-group reference for compose interpolation.
      - traefik.http.middlewares.webpage1-slash.redirectregex.regex=^https?://([^/]+)/webpage1$
      - traefik.http.middlewares.webpage1-slash.redirectregex.replacement=https://$${1}/webpage1/
      - traefik.http.middlewares.webpage1-slash.redirectregex.permanent=true
      - traefik.http.middlewares.webpage1-strip.stripprefix.prefixes=/webpage1

  webpage2:
    build:
      context: ..
      dockerfile: administration/Dockerfile
    container_name: ikfreunde-webpage2
    volumes:
      - /srv/ikfreunde/webpage2/ikfreunde.com.html:/app/website/ikfreunde.com.html
      - /srv/ikfreunde/webpage2/site-content.de.json:/app/website/content/site-content.de.json
    restart: unless-stopped
    networks:
      - proxy
    labels:
      - traefik.enable=true
      - traefik.http.routers.webpage2.rule=Host(`mydomain.de`) && PathPrefix(`/webpage2`)
      - traefik.http.routers.webpage2.entrypoints=websecure
      - traefik.http.routers.webpage2.tls=true
      - traefik.http.services.webpage2.loadbalancer.server.port=4173
      - traefik.http.routers.webpage2.middlewares=webpage2-slash,webpage2-strip
      - traefik.http.middlewares.webpage2-slash.redirectregex.regex=^https?://([^/]+)/webpage2$
      - traefik.http.middlewares.webpage2-slash.redirectregex.replacement=https://$${1}/webpage2/
      - traefik.http.middlewares.webpage2-slash.redirectregex.permanent=true
      - traefik.http.middlewares.webpage2-strip.stripprefix.prefixes=/webpage2

  webpage3:
    build:
      context: ..
      dockerfile: administration/Dockerfile
    container_name: ikfreunde-webpage3
    volumes:
      - /srv/ikfreunde/webpage3/ikfreunde.com.html:/app/website/ikfreunde.com.html
      - /srv/ikfreunde/webpage3/site-content.de.json:/app/website/content/site-content.de.json
    restart: unless-stopped
    networks:
      - proxy
    labels:
      - traefik.enable=true
      - traefik.http.routers.webpage3.rule=Host(`mydomain.de`) && PathPrefix(`/webpage3`)
      - traefik.http.routers.webpage3.entrypoints=websecure
      - traefik.http.routers.webpage3.tls=true
      - traefik.http.services.webpage3.loadbalancer.server.port=4173
      - traefik.http.routers.webpage3.middlewares=webpage3-slash,webpage3-strip
      - traefik.http.middlewares.webpage3-slash.redirectregex.regex=^https?://([^/]+)/webpage3$
      - traefik.http.middlewares.webpage3-slash.redirectregex.replacement=https://$${1}/webpage3/
      - traefik.http.middlewares.webpage3-slash.redirectregex.permanent=true
      - traefik.http.middlewares.webpage3-strip.stripprefix.prefixes=/webpage3

networks:
  # Pre-existing Traefik network; must be created outside this compose file.
  proxy:
    external: true
13
administration/docker-compose.yml
Normal file
13
administration/docker-compose.yml
Normal file
@@ -0,0 +1,13 @@
# Local editor stack: mounts the repository read-write at /app and serves the
# editor on http://127.0.0.1:4173/ via PHP's built-in server.
services:
  editor:
    build:
      context: ..
      dockerfile: administration/Dockerfile
    container_name: ikfreunde-editor
    ports:
      - "4173:4173"
    volumes:
      # Live-mount the whole repository so edits persist to the host.
      - ..:/app
    working_dir: /app
    # opcache disabled so file changes are picked up without restart.
    command: ["php", "-d", "opcache.enable_cli=0", "-S", "0.0.0.0:4173", "administration/scripts/editor_server.php"]
    restart: unless-stopped
@@ -0,0 +1,43 @@
---
date: 2026-03-03
topic: dom-json-wysiwyg-sync
---

# DOM to JSON + WYSIWYG Sync

## What We're Building

A content workflow for this static site that starts with DOM-to-JSON extraction and then enables inline visual editing. The immediate goal is a JSON file generated from the current HTML snapshot that captures:

- visible page text
- page metadata (`title`, description, Open Graph/Twitter meta)
- image content (`img src` and `alt`)

The JSON should preserve subtopics and be structured by DOM context so it maps cleanly to the existing page. The follow-up goal is a lightweight WYSIWYG editor that lets humans change text directly in the page and update image `src`/`alt`, with synchronization between HTML and JSON.

## Why This Approach

The chosen direction prioritizes low-friction adoption in an existing static snapshot where editors still work directly in HTML. A DOM-to-JSON first pass minimizes upfront modeling effort and captures the current content state quickly. Then, a WYSIWYG layer provides direct manipulation while preserving visual context.

YAGNI rationale: avoid building a full CMS or strict schema-first localization system now. Start with extraction and practical editing primitives, then evolve only if editorial complexity requires it.

## Key Decisions

- Output format: Nested JSON with subtopics.
- Extraction scope: Visible page text plus metadata (title/description/Open Graph/Twitter).
- Key organization: DOM-first grouping with section-based top-level subtopics.
- Duplicate handling: Hybrid strategy. Keep section-local duplicates; dedupe only global/common content.
- Product scope: Include both stages in feature intent, but implement DOM-to-JSON extraction first.
- Sync model (future WYSIWYG): Bidirectional HTML ↔ JSON sync.
- Image editing v1 (future WYSIWYG): Edit `img src` and `alt` only (exclude `srcset`/`picture` initially).
- WYSIWYG editing scope v1 (future): Content updates only (text and image properties), no styling/layout editing.
- Content identity: Hybrid identification. Reuse existing selectors/IDs where possible and add `data-*` IDs only where needed.

## Resolved Questions

- JSON shape should include subtopics rather than flat keys.
- Metadata must be part of extraction.
- Human editors will still work against the HTML site, so DOM-oriented extraction is preferred.
- Full responsive image source editing is deferred to avoid layout breakage risk.

## Open Questions

- None currently.

## Next Steps

1. Proceed to planning (`/prompts:workflows-plan`) focused only on step 1: extraction pipeline and JSON structure contract.
2. Follow with a second planning pass for step 2: WYSIWYG editing and robust bi-directional sync rules.
@@ -0,0 +1,42 @@
---
date: 2026-03-04
topic: wysiwyg-inline-editor-sync
---

# WYSIWYG Inline Editor + HTML/JSON Sync

## What We're Building

A local-first WYSIWYG editing mode for the static HTML snapshot where creators can edit page content directly on the rendered page. Text becomes editable on double-click using inline `contenteditable` behavior. Images open a small overlay for editing `src` and `alt` only.

Edits must update both representations: the HTML content and the extracted content JSON. Persistence is local file write (not browser-only), so creators can save directly to project files during offline editing.

This phase is content-only. It explicitly excludes CSS/layout editing.

## Why This Approach

Approach A (in-page edit mode + local save service) was selected because it matches the intended user behavior: fast direct edits where content lives visually. It minimizes training and friction for non-technical editors who already think in terms of "change this heading here."

YAGNI rationale: keep the first editor narrow and reliable. Only text and image content are editable, and image validation is warning-based (not blocking) to avoid blocking workflows while still reducing accidental visual breakage.

## Key Decisions

- Interaction model: Double-click inline editing for text.
- Save behavior: Auto-save on blur/enter plus manual save/undo controls.
- Image editing v1: Overlay for `src` + `alt` only.
- Ratio validation: Warning (non-blocking), threshold set to 15% difference between current and replacement image aspect ratios.
- Persistence model: Direct local file writes to `.html` and `.json` through a local helper service.
- Sync model: Bidirectional HTML ↔ JSON.
- Conflict default: HTML wins when both sides changed for the same mapped key.
- Scope guard: No CSS/layout editing in this phase.

## Resolved Questions

- Inline editing is preferred over panel-based editing.
- Save UX should support both auto-save and explicit controls.
- Image editing should remain limited to `src`/`alt` in v1.
- Ratio checks are advisory to preserve editor flow.
- Local file write capability is required for practical offline usage.

## Open Questions

- None currently.

## Next Steps

1. Move to planning for step 2 (`/prompts:workflows-plan`) with focus on editor UX rules, content identity mapping, and local save contract.
2. Keep implementation split into phases: text editing first, then image overlay and ratio warning behavior.
@@ -0,0 +1,88 @@
---
title: "feat: DOM-to-JSON content extraction for static snapshot"
type: feat
status: completed
date: 2026-03-04
---

# feat: DOM-to-JSON content extraction for static snapshot

## Overview

Create a first-stage extraction workflow that converts the existing HTML snapshot into a nested JSON content file. This plan is intentionally limited to extraction and content mapping. It does not include building the WYSIWYG editor yet.

## Problem Statement / Motivation

Content updates are currently tied to manual HTML edits. A JSON representation is needed so text and selected image properties can be adapted more easily and later edited through an interface.

## Proposed Solution

Build a deterministic DOM-to-JSON extraction flow for `ikfreunde.com.html` that captures visible text, selected metadata, and image fields (`src`, `alt`).

The JSON structure should be DOM-first with section-based top-level subtopics, matching the brainstorm decisions and keeping context for editors. Duplicate text handling should follow the agreed hybrid policy: keep section-local duplicates; dedupe only clearly global/common items.

## Scope

In scope:

- Extract visible text content from page sections
- Extract metadata: `title`, `description`, Open Graph, Twitter
- Extract image fields: `img src`, `img alt`
- Produce nested JSON output aligned with DOM sections
- Define stable content identity strategy (reuse existing selectors/IDs; add `data-*` only when needed)

Out of scope:

- WYSIWYG editing UI
- Styling/layout editing
- Full responsive image source editing (`srcset`, `picture`)
- Full bidirectional sync mechanics

## Technical Considerations

- The repository is a static snapshot with bundled/minified assets; there is no existing i18n framework.
- Extraction rules must avoid pulling non-content technical strings from scripts/styles.
- Section mapping should remain stable even if content text changes.
- Output should be deterministic so repeated runs produce predictable key ordering/paths.

## SpecFlow Analysis

Primary flow:

1. Input snapshot HTML is parsed.
2. Eligible text nodes and target attributes are identified.
3. Content is grouped by top-level page sections.
4. Metadata and image fields are merged into the same JSON tree.
5. Output JSON is written.

Edge cases to cover:

- Empty or whitespace-only nodes
- Repeated text across sections
- Links/buttons with nested elements
- Missing `alt` attributes
- Cookie/modal/footer content that may be conditionally visible

## Acceptance Criteria

- [x] A single extraction run generates one nested JSON file from `ikfreunde.com.html`.
- [x] JSON includes visible page text grouped by section subtopics.
- [x] JSON includes `title`, `description`, Open Graph, and Twitter metadata values.
- [x] JSON includes `img src` and `img alt` values where present.
- [x] Duplicate policy is applied: section-local duplicates kept; global/common duplicates deduped.
- [x] Extraction excludes JS/CSS artifacts and non-content noise.
- [x] Re-running extraction on unchanged input produces stable output structure.

## Success Metrics

- Editors can locate and update target strings in JSON without editing HTML directly.
- JSON organization is understandable by section/context without reverse-engineering selectors.
- No unintended layout/content regressions in source HTML (read-only extraction phase).

## Dependencies & Risks

Dependencies:

- Final agreement on section boundaries for grouping
- Final output file location/name convention

Risks:

- Over-extraction of non-user-facing strings
- Unstable keys if selector strategy is inconsistent
- Ambiguity around "global/common" duplicate classification

Mitigations:

- Explicit extraction allowlist for elements/attributes
- Deterministic key-generation policy
- Documented duplicate decision rules with examples

## References & Research

- Brainstorm: `docs/brainstorms/2026-03-03-dom-json-wysiwyg-sync-brainstorm.md`
- Source snapshot: `ikfreunde.com.html`
- Existing site bundle references: `ikfreunde.com_files/*`
@@ -0,0 +1,103 @@
---
title: "feat: Inline WYSIWYG editor with HTML-JSON sync"
type: feat
status: completed
date: 2026-03-04
---

# feat: Inline WYSIWYG editor with HTML-JSON sync

## Overview

Build step 2 of the content workflow: a local-first WYSIWYG editor for the existing static page where creators can directly edit content on the rendered page and persist changes to both HTML and JSON.

This plan is limited to content editing and synchronization behavior. It explicitly excludes style/layout editing.

## Problem Statement / Motivation

The repository now has extractable structured content (`content/site-content.de.json`) but no practical editing surface for creators. Editors need direct, low-friction page editing (double-click text, click image) while keeping HTML and JSON in sync.

## Proposed Solution

Add an in-page edit mode with inline `contenteditable` text editing and an image overlay editor for `src` and `alt`. Implement autosave (blur/enter) plus manual save/undo controls. Persist edits via a local helper service that writes both `ikfreunde.com.html` and `content/site-content.de.json`.

Synchronization is bidirectional in model intent, with conflict default set to HTML wins when the same mapped key diverges.

## Scope

In scope:

- Text editing on double-click for editable content nodes
- Image editing overlay for `src` and `alt`
- Aspect-ratio warning (non-blocking) at 15% threshold
- Autosave + manual save/undo controls
- Local persistence endpoint to write HTML + JSON
- Content identity mapping between DOM elements and JSON keys
- Conflict handling policy: HTML wins

Out of scope:

- CSS/layout editing
- `srcset`/`picture` editing
- Multi-user collaboration or remote persistence
- Authentication/authorization layer

## Technical Considerations

- Current site scripts are bundled/minified, so editor behavior should be isolated in a dedicated script layer.
- Content identity mapping must be stable enough for repeat edits and sync.
- Editing rules should avoid hidden/system nodes (cookie mechanics/scripts/non-content regions unless explicitly intended).
- Local persistence requires a trusted local helper process and clear file write boundaries.
- Undo scope must be defined (session-level content undo, not full VCS-like history).

## SpecFlow Analysis

Primary flow:

1. Editor enters edit mode.
2. User double-clicks text node and edits inline.
3. User blur/enter triggers autosave path.
4. Mapping resolves edited node to JSON key.
5. HTML and JSON are both updated and persisted via local helper.

Image flow:

1. User selects editable image.
2. Overlay opens with current `src` and `alt`.
3. New source is validated; ratio warning shown if aspect ratio differs by >15%.
4. User can still save despite warning.
5. HTML and JSON are updated and persisted.

Conflict flow:

1. Divergent values detected for same mapped key.
2. Default resolution applies: HTML value wins.
3. JSON is reconciled to HTML on save.

## Acceptance Criteria

- [x] Double-click enables inline text editing on intended content elements.
- [x] Text autosaves on blur/enter and also supports explicit save/undo controls.
- [x] Clicking editable images opens an overlay with `src` and `alt` fields.
- [x] Image ratio check warns (non-blocking) when replacement differs by >15% aspect ratio.
- [x] Save operation persists both `ikfreunde.com.html` and `content/site-content.de.json`.
- [x] Sync mapping updates the correct JSON key for edited text/image values.
- [x] Conflict resolution follows HTML-wins default.
- [x] No CSS/layout properties are modified by editor actions.

## Success Metrics

- Editors can modify headings/body text/images directly on page without manual JSON editing.
- Saved output remains consistent between HTML and JSON for edited items.
- Editing interactions feel immediate and require minimal training.
- No unintended style/layout changes caused by the editor.

## Dependencies & Risks

Dependencies:

- Defined DOM↔JSON mapping contract for editable nodes
- Local helper service runtime available in the editor environment

Risks:

- Incorrect key mapping leading to wrong JSON updates
- Over-editability (allowing non-content nodes)
- Unexpected side effects from integrating with existing bundled scripts
- File write race conditions during rapid autosave

Mitigations:

- Explicit editable-node allowlist and mapping tests
- Isolated editor namespace/events
- Debounced autosave + write serialization
- Dry-run/preview diagnostics for mapping during development

## References & Research

- Brainstorm input: `docs/brainstorms/2026-03-04-wysiwyg-inline-editor-sync-brainstorm.md`
- Prior extraction plan: `docs/plans/2026-03-04-feat-dom-to-json-content-extraction-plan.md`
- Extractor/source contract: `scripts/extract_dom_content.php`, `content/site-content.de.json`
- Target HTML: `ikfreunde.com.html`
98
administration/scripts/add-webpage.sh
Executable file
98
administration/scripts/add-webpage.sh
Executable file
@@ -0,0 +1,98 @@
#!/usr/bin/env bash
# add-webpage.sh — provision a new Traefik-routed site instance.
#
# Usage: add-webpage.sh <route-name> [domain]
#
# Creates /srv/ikfreunde/<route-name>/ seeded with the default HTML snapshot
# and content JSON, then appends a matching service stanza to
# docker-compose.traefik-routes.yml (inserted just before the top-level
# "networks:" key). Idempotent: existing files/services are left untouched.
#
# Overridable environment: ROOT_BASE (data root), COMPOSE_FILE (compose path).
set -euo pipefail

if [[ $# -lt 1 || $# -gt 2 ]]; then
  echo "Usage: $0 <route-name> [domain]"
  echo "Example: $0 webpage4 mydomain.de"
  exit 1
fi

NAME="$1"
DOMAIN="${2:-mydomain.de}"

# Route names become service names, container names and URL path segments,
# so restrict them to a conservative character set.
if [[ ! "$NAME" =~ ^[a-zA-Z0-9][a-zA-Z0-9_-]*$ ]]; then
  echo "Invalid route name: $NAME"
  echo "Allowed: letters, numbers, underscore, dash"
  exit 1
fi

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
ROOT_BASE="${ROOT_BASE:-/srv/ikfreunde}"
COMPOSE_FILE="${COMPOSE_FILE:-$ROOT_DIR/administration/docker-compose.traefik-routes.yml}"
ROOT="${ROOT_BASE}/${NAME}"

if [[ ! -f "$COMPOSE_FILE" ]]; then
  echo "Compose file not found: $COMPOSE_FILE"
  exit 1
fi

mkdir -p "$ROOT"

# Seed per-route content from the repository defaults (only when missing).
if [[ ! -f "$ROOT/ikfreunde.com.html" ]]; then
  cp "$ROOT_DIR/website/ikfreunde.com.html" "$ROOT/ikfreunde.com.html"
  echo "Created: $ROOT/ikfreunde.com.html"
fi

if [[ ! -f "$ROOT/site-content.de.json" ]]; then
  cp "$ROOT_DIR/website/content/site-content.de.json" "$ROOT/site-content.de.json"
  echo "Created: $ROOT/site-content.de.json"
fi

# Use grep instead of rg (ripgrep) so the script only needs POSIX tools.
# NAME is restricted to [a-zA-Z0-9_-] above, so it is regex-safe here.
if grep -q "^  ${NAME}:$" "$COMPOSE_FILE"; then
  echo "Service '${NAME}' already exists in $COMPOSE_FILE"
else
  block_file="$(mktemp)"
  tmp_file="$(mktemp)"

  cat > "$block_file" <<YAML

  ${NAME}:
    build:
      context: ..
      dockerfile: administration/Dockerfile
    container_name: ikfreunde-${NAME}
    volumes:
      - ${ROOT}/ikfreunde.com.html:/app/website/ikfreunde.com.html
      - ${ROOT}/site-content.de.json:/app/website/content/site-content.de.json
    restart: unless-stopped
    networks:
      - proxy
    labels:
      - traefik.enable=true
      - traefik.http.routers.${NAME}.rule=Host(\`${DOMAIN}\`) && PathPrefix(\`/${NAME}\`)
      - traefik.http.routers.${NAME}.entrypoints=websecure
      - traefik.http.routers.${NAME}.tls=true
      - traefik.http.services.${NAME}.loadbalancer.server.port=4173
      - traefik.http.routers.${NAME}.middlewares=${NAME}-slash,${NAME}-strip
      - traefik.http.middlewares.${NAME}-slash.redirectregex.regex=^https?://([^/]+)/${NAME}$
      - traefik.http.middlewares.${NAME}-slash.redirectregex.replacement=https://\$\${1}/${NAME}/
      - traefik.http.middlewares.${NAME}-slash.redirectregex.permanent=true
      - traefik.http.middlewares.${NAME}-strip.stripprefix.prefixes=/${NAME}
YAML

  # Insert the new service block just before the top-level "networks:" key;
  # fall back to appending at EOF if that key is missing.
  awk -v block_file="$block_file" '
    BEGIN { inserted = 0 }
    /^networks:/ && inserted == 0 {
      while ((getline line < block_file) > 0) print line
      close(block_file)
      inserted = 1
    }
    { print }
    END {
      if (inserted == 0) {
        while ((getline line < block_file) > 0) print line
        close(block_file)
      }
    }
  ' "$COMPOSE_FILE" > "$tmp_file"

  mv "$tmp_file" "$COMPOSE_FILE"
  rm -f "$block_file"

  echo "Inserted service '${NAME}' into $COMPOSE_FILE"
fi

echo
echo "Next steps:"
echo "1) docker compose -f $COMPOSE_FILE up -d --build"
echo "2) Open: https://${DOMAIN}/${NAME}/"
1014
administration/scripts/editor_server.php
Normal file
1014
administration/scripts/editor_server.php
Normal file
File diff suppressed because it is too large
Load Diff
8
administration/scripts/extract_content.sh
Executable file
8
administration/scripts/extract_content.sh
Executable file
@@ -0,0 +1,8 @@
#!/usr/bin/env bash
# extract_content.sh — run the DOM-to-JSON content extraction.
#
# Usage: extract_content.sh [input_html] [output_json]
# Defaults: website/ikfreunde.com.html -> website/content/site-content.de.json
set -euo pipefail

INPUT_HTML="${1:-website/ikfreunde.com.html}"
OUTPUT_JSON="${2:-website/content/site-content.de.json}"

# Ensure the output directory exists before the extractor writes into it.
mkdir -p "$(dirname "$OUTPUT_JSON")"
php -d opcache.enable_cli=0 administration/scripts/extract_dom_content.php "$INPUT_HTML" "$OUTPUT_JSON"
318
administration/scripts/extract_dom_content.php
Executable file
318
administration/scripts/extract_dom_content.php
Executable file
@@ -0,0 +1,318 @@
#!/usr/bin/env php
<?php
declare(strict_types=1);

// extract_dom_content.php — DOM-to-JSON content extraction for the static
// site snapshot. Reads an HTML file, extracts metadata, shared regions
// (navigation / cookie layer / footer) and main page sections, and writes
// the result as pretty-printed UTF-8 JSON.

if ($argc < 3) {
    fwrite(STDERR, "Usage: php administration/scripts/extract_dom_content.php <input_html> <output_json>\n");
    exit(1);
}

$inputHtml = $argv[1];
$outputJson = $argv[2];

if (!is_file($inputHtml)) {
    fwrite(STDERR, "Input file not found: {$inputHtml}\n");
    exit(1);
}

// The snapshot is real-world (non-strict) HTML; collect libxml errors
// internally instead of spamming stderr.
libxml_use_internal_errors(true);
$dom = new DOMDocument();
$html = file_get_contents($inputHtml);
if ($html === false) {
    fwrite(STDERR, "Failed to read input file: {$inputHtml}\n");
    exit(1);
}

// LIBXML_NONET blocks network access during parsing (no external DTD fetches).
$loaded = $dom->loadHTML($html, LIBXML_NOERROR | LIBXML_NOWARNING | LIBXML_NONET);
if (!$loaded) {
    fwrite(STDERR, "Failed to parse HTML: {$inputHtml}\n");
    exit(1);
}

$xpath = new DOMXPath($dom);

// Top-level output shape: meta / shared / sections.
$data = [
    'meta' => extractMeta($xpath, $dom),
    'shared' => extractShared($xpath),
    'sections' => extractMainSections($xpath),
];

$json = json_encode($data, JSON_PRETTY_PRINT | JSON_UNESCAPED_SLASHES | JSON_UNESCAPED_UNICODE);
if ($json === false) {
    fwrite(STDERR, "Failed to encode JSON output.\n");
    exit(1);
}

if (file_put_contents($outputJson, $json . "\n") === false) {
    fwrite(STDERR, "Failed to write output file: {$outputJson}\n");
    exit(1);
}

fwrite(STDOUT, "Wrote extracted content to {$outputJson}\n");
/**
 * Extract page metadata from the document.
 *
 * Collects the <title> text, the meta description, all Open Graph entries
 * (matched by `property` or `name` starting with "og:"), all Twitter card
 * entries (name starting with "twitter:"), and a small allowlist of other
 * named meta tags. Keys within each group are sorted for deterministic
 * output across runs.
 *
 * @param DOMXPath    $xpath XPath helper bound to $dom.
 * @param DOMDocument $dom   Parsed HTML document.
 * @return array{title:string,description:string,open_graph:array,twitter:array,other:array}
 */
function extractMeta(DOMXPath $xpath, DOMDocument $dom): array
{
    $meta = [
        'title' => normalizeText($dom->getElementsByTagName('title')->item(0)?->textContent ?? ''),
        'description' => firstAttrValue($xpath, "//meta[@name='description']", 'content'),
        'open_graph' => [],
        'twitter' => [],
        'other' => [],
    ];

    // Open Graph tags may use either `property` (standard) or `name`.
    $ogNodes = $xpath->query("//meta[starts-with(@property, 'og:') or starts-with(@name, 'og:')]");
    if ($ogNodes instanceof DOMNodeList) {
        foreach ($ogNodes as $node) {
            if (!$node instanceof DOMElement) {
                continue;
            }
            $name = trim((string) ($node->getAttribute('property') ?: $node->getAttribute('name')));
            $value = normalizeText((string) $node->getAttribute('content'));
            if ($name !== '' && $value !== '') {
                $meta['open_graph'][$name] = $value;
            }
        }
    }

    $twitterNodes = $xpath->query("//meta[starts-with(@name, 'twitter:')]");
    if ($twitterNodes instanceof DOMNodeList) {
        foreach ($twitterNodes as $node) {
            if (!$node instanceof DOMElement) {
                continue;
            }
            $name = trim((string) $node->getAttribute('name'));
            $value = normalizeText((string) $node->getAttribute('content'));
            if ($name !== '' && $value !== '') {
                $meta['twitter'][$name] = $value;
            }
        }
    }

    // Allowlist of additional named meta tags worth preserving.
    $otherMetaNames = ['title'];
    foreach ($otherMetaNames as $metaName) {
        $value = firstAttrValue($xpath, "//meta[@name='{$metaName}']", 'content');
        if ($value !== '') {
            $meta['other'][$metaName] = $value;
        }
    }

    // Sort each group so repeated runs produce byte-stable JSON.
    ksort($meta['open_graph']);
    ksort($meta['twitter']);
    ksort($meta['other']);

    return $meta;
}
/**
 * Extract content from page regions shared across views: the navigation
 * header, the cookie layer, and the footer.
 *
 * Texts are deduplicated globally across these regions: each distinct string
 * gets one sequential key ("common_001", "common_002", ...) stored under
 * `common_texts`, and each region records only the ordered list of keys it
 * references (`text_keys`) plus its extracted images. Regions whose root
 * element is not found in the document are skipped.
 *
 * @param DOMXPath $xpath XPath helper bound to the parsed document.
 * @return array{common_texts:array,navigation:array,cookie_layer:array,footer:array}
 */
function extractShared(DOMXPath $xpath): array
{
    $commonTexts = [];
    $textToKey = [];
    $shared = [
        'common_texts' => [],
        'navigation' => ['text_keys' => [], 'images' => []],
        'cookie_layer' => ['text_keys' => [], 'images' => []],
        'footer' => ['text_keys' => [], 'images' => []],
    ];

    // Root nodes for each shared region; any of these may be absent.
    $sharedRoots = [
        'navigation' => firstNode($xpath, "//header[contains(@class, 'page-head')]"),
        'cookie_layer' => firstNode($xpath, "//*[@id='cookie-layer']"),
        'footer' => firstNode($xpath, "//footer[contains(@class, 'site-footer')]"),
    ];

    foreach ($sharedRoots as $name => $root) {
        if (!$root instanceof DOMNode) {
            continue;
        }

        $texts = extractTexts($xpath, $root);
        foreach ($texts as $text) {
            if (!isset($textToKey[$text])) {
                // Zero-padded sequential key keeps ordering stable and sortable.
                $key = 'common_' . str_pad((string) (count($commonTexts) + 1), 3, '0', STR_PAD_LEFT);
                $textToKey[$text] = $key;
                $commonTexts[$key] = $text;
            }
            $shared[$name]['text_keys'][] = $textToKey[$text];
        }

        $shared[$name]['images'] = extractImages($xpath, $root);
    }

    $shared['common_texts'] = $commonTexts;
    return $shared;
}
|
||||
/**
 * Extract the main-content sections of the page, keyed by logical name.
 *
 * Each entry maps a section name to its keyed texts (see
 * extractKeyValueTexts) and images; sections whose XPath query matches
 * nothing on the current page are simply absent from the result.
 *
 * @return array<string, array{texts: array<string,string>, images: array}>
 */
function extractMainSections(DOMXPath $xpath): array
{
    // Logical section name -> XPath locating its <section> root inside <main>.
    $sectionQueries = [
        'hero' => "//main//section[contains(@class, 'module-hero-teaser')]",
        'page_header' => "//main//section[contains(@class, 'page-header')]",
        'projects' => "//main//section[contains(@class, 'module-projects-teaser')]",
        'services' => "//main//section[.//*[contains(@class, 'services-teaser__content')]]",
        'team' => "(//main//section[contains(@class, 'text-image')])[1]",
        'awards' => "//main//section[.//*[contains(@class, 'awards-teaser__content')]]",
        'contact' => "//main//section[contains(@class, 'contact-teaser')]",
        'clients' => "//main//section[.//*[contains(@class, 'clients-teaser__content')]]",
        'partners' => "(//main//section[contains(@class, 'text-image')])[2]",
    ];

    $sections = [];
    foreach ($sectionQueries as $sectionName => $sectionQuery) {
        $sectionRoot = firstNode($xpath, $sectionQuery);
        if ($sectionRoot instanceof DOMNode) {
            $sections[$sectionName] = [
                'texts' => extractKeyValueTexts($xpath, $sectionRoot),
                'images' => extractImages($xpath, $sectionRoot),
            ];
        }
    }

    return $sections;
}
|
||||
|
||||
/**
 * Collect the visible, non-empty text fragments below $root, keyed by
 * the parent element's tag with a per-tag running counter
 * (e.g. h2_001, p_001, p_002), in document order.
 *
 * Text inside non-rendered containers (script/style/svg/...; see
 * isVisibleTextNode) and whitespace-only fragments are skipped.
 *
 * @return array<string,string> key => normalized text
 */
function extractKeyValueTexts(DOMXPath $xpath, DOMNode $root): array
{
    $found = $xpath->query('.//text()', $root);
    if (!$found instanceof DOMNodeList) {
        return [];
    }

    $perTagCount = []; // tag name -> how many keyed texts seen so far
    $keyed = [];

    foreach ($found as $candidate) {
        if (!$candidate instanceof DOMText) {
            continue;
        }

        $text = normalizeText($candidate->wholeText);
        if ($text === '' || !isVisibleTextNode($candidate)) {
            continue;
        }

        $holder = $candidate->parentNode;
        if (!$holder instanceof DOMElement) {
            continue;
        }

        $tag = strtolower($holder->tagName);
        $perTagCount[$tag] = ($perTagCount[$tag] ?? 0) + 1;
        $keyed[$tag . '_' . str_pad((string) $perTagCount[$tag], 3, '0', STR_PAD_LEFT)] = $text;
    }

    return $keyed;
}
|
||||
|
||||
/**
 * Return all visible, non-empty text fragments below $root as a flat
 * list of normalized strings, in document order.
 *
 * @return string[]
 */
function extractTexts(DOMXPath $xpath, DOMNode $root): array
{
    $found = $xpath->query('.//text()', $root);
    if (!$found instanceof DOMNodeList) {
        return [];
    }

    $collected = [];
    foreach ($found as $candidate) {
        if (!$candidate instanceof DOMText) {
            continue;
        }

        $text = normalizeText($candidate->wholeText);
        // Keep only fragments that survive normalization and would render.
        if ($text !== '' && isVisibleTextNode($candidate)) {
            $collected[] = $text;
        }
    }

    return $collected;
}
|
||||
|
||||
/**
 * Collect every <img> below $root that carries a non-empty src attribute.
 *
 * Keys are img_001, img_002, ... in document order; each value holds the
 * trimmed src and the normalized alt text ('' when alt is absent).
 *
 * @return array<string, array{src: string, alt: string}>
 */
function extractImages(DOMXPath $xpath, DOMNode $root): array
{
    $found = $xpath->query('.//img[@src]', $root);
    if (!$found instanceof DOMNodeList) {
        return [];
    }

    $images = [];
    foreach ($found as $candidate) {
        if (!$candidate instanceof DOMElement) {
            continue;
        }

        $src = trim((string) $candidate->getAttribute('src'));
        if ($src === '') {
            // @src matched but held only whitespace — nothing to reference.
            continue;
        }

        $key = 'img_' . str_pad((string) (count($images) + 1), 3, '0', STR_PAD_LEFT);
        $images[$key] = [
            'src' => $src,
            'alt' => normalizeText((string) $candidate->getAttribute('alt')),
        ];
    }

    return $images;
}
|
||||
|
||||
/**
 * Decide whether a text node would render on the page: false when any
 * ancestor element is a tag whose content is never displayed
 * (script, style, noscript, template, svg internals).
 */
function isVisibleTextNode(DOMText $textNode): bool
{
    // Tags whose text content never renders.
    $hiddenTags = [
        'script', 'style', 'noscript', 'template', 'svg', 'path', 'defs',
    ];

    // Walk up to the document root, checking every element ancestor.
    for ($ancestor = $textNode->parentNode; $ancestor instanceof DOMNode; $ancestor = $ancestor->parentNode) {
        if ($ancestor instanceof DOMElement
            && in_array(strtolower($ancestor->tagName), $hiddenTags, true)) {
            return false;
        }
    }

    return true;
}
|
||||
|
||||
/**
 * Collapse all whitespace runs (including CR/LF/tab) in $value to a
 * single space and strip leading/trailing whitespace.
 */
function normalizeText(string $value): string
{
    // Map line breaks and tabs to plain spaces first, then collapse runs.
    $flattened = strtr($value, ["\r" => ' ', "\n" => ' ', "\t" => ' ']);
    $collapsed = preg_replace('/\s+/u', ' ', $flattened);

    // preg_replace yields null on error (e.g. invalid UTF-8): fall back
    // to the un-collapsed string, matching the original ?? behavior.
    return trim($collapsed ?? $flattened);
}
|
||||
|
||||
/**
 * Run an XPath query and return its first match, or null when the query
 * fails (invalid expression) or matches nothing.
 */
function firstNode(DOMXPath $xpath, string $query): ?DOMNode
{
    $result = $xpath->query($query);
    if ($result instanceof DOMNodeList && $result->length > 0) {
        $first = $result->item(0);
        if ($first instanceof DOMNode) {
            return $first;
        }
    }

    return null;
}
|
||||
|
||||
/**
 * Resolve $query to its first match and read one attribute off it,
 * normalized; '' when nothing matched or the match is not an element.
 */
function firstAttrValue(DOMXPath $xpath, string $query, string $attr): string
{
    $match = firstNode($xpath, $query);

    return $match instanceof DOMElement
        ? normalizeText((string) $match->getAttribute($attr))
        : '';
}
|
||||
4
administration/scripts/new-route.sh
Executable file
4
administration/scripts/new-route.sh
Executable file
@@ -0,0 +1,4 @@
|
||||
#!/usr/bin/env bash
# Backward-compatible wrapper: forwards all arguments to add-webpage.sh
# located next to this script.
set -euo pipefail

script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
exec "$script_dir/add-webpage.sh" "$@"
|
||||
10
administration/scripts/run_editor_server.sh
Executable file
10
administration/scripts/run_editor_server.sh
Executable file
@@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env bash
# Start the local editor server (PHP built-in web server) from the repo root.
# Usage: run_editor_server.sh [port]   (default: 4173)
set -euo pipefail

# Script lives in administration/scripts/ — repo root is two levels up.
repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$repo_root"

port="${1:-4173}"

# Expose the site entry page under the name the server expects.
ln -sf ikfreunde.com.html website/index.html
# opcache disabled so edits to the PHP router take effect immediately.
php -d opcache.enable_cli=0 -S 127.0.0.1:"$port" administration/scripts/editor_server.php
|
||||
33
administration/serve-offline.sh
Executable file
33
administration/serve-offline.sh
Executable file
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/env bash
# Serve the static website locally in the background via python3 http.server.
# Records the server PID in administration/.offline-server.pid and logs to
# administration/.offline-server.log.
# Usage: serve-offline.sh [port]   (default: 4173)
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
WEB_DIR="$ROOT_DIR/website"
PORT="${1:-4173}"
PID_FILE="$ROOT_DIR/administration/.offline-server.pid"
LOG_FILE="$ROOT_DIR/administration/.offline-server.log"

# Reuse a live server if the recorded PID still responds to signal 0.
# NOTE(review): the URL printed here uses the port passed *now*, which may
# differ from the port the already-running instance was started on — confirm.
if [[ -f "$PID_FILE" ]] && kill -0 "$(cat "$PID_FILE")" 2>/dev/null; then
  echo "Offline server already running on PID $(cat "$PID_FILE")."
  echo "Open: http://127.0.0.1:${PORT}/"
  exit 0
fi

# Drop any stale PID file left behind by a dead server.
rm -f "$PID_FILE"

# Expose the site entry page under the name http.server serves by default.
ln -sf ikfreunde.com.html "$WEB_DIR/index.html"
nohup python3 -m http.server "$PORT" --bind 127.0.0.1 --directory "$WEB_DIR" >"$LOG_FILE" 2>&1 &
SERVER_PID=$!
echo "$SERVER_PID" > "$PID_FILE"

# Give the server a moment to bind; report failure if it died immediately
# (e.g. port already in use — details end up in the log).
sleep 0.3
if ! kill -0 "$SERVER_PID" 2>/dev/null; then
  echo "Failed to start offline server on port $PORT."
  echo "Check log: $LOG_FILE"
  rm -f "$PID_FILE"
  exit 1
fi

echo "Offline server started (PID $SERVER_PID)."
echo "Open: http://127.0.0.1:${PORT}/"
echo "Log: $LOG_FILE"
|
||||
20
administration/stop-offline.sh
Executable file
20
administration/stop-offline.sh
Executable file
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env bash
# Stop the background offline web server started by serve-offline.sh,
# using the PID recorded in administration/.offline-server.pid.
set -euo pipefail

root_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
PID_FILE="$root_dir/administration/.offline-server.pid"

# Nothing to do when no PID was ever recorded (or it was already cleaned up).
if [[ ! -f "$PID_FILE" ]]; then
  echo "No PID file found. Server may already be stopped."
  exit 0
fi

PID="$(cat "$PID_FILE")"

# Only send SIGTERM when the recorded process is actually alive.
if kill -0 "$PID" 2>/dev/null; then
  kill "$PID"
  echo "Stopped offline server PID $PID."
else
  echo "Process $PID is not running."
fi

# Always remove the PID file so the next start is clean.
rm -f "$PID_FILE"
|
||||
Reference in New Issue
Block a user