11 Commits

140 changed files with 12015 additions and 53 deletions

87
Jenkinsfile vendored
View File

@@ -6,9 +6,19 @@ pipeline {
disableConcurrentBuilds()
skipDefaultCheckout(true)
}
parameters {
booleanParam(
name: 'RUN_DEMO_PURGE',
defaultValue: false,
description: 'Run a one-time demo catalogue purge before the normal idea marketplace seed and launch prep.'
)
}
environment {
PYENVPIPELINE_VIRTUALENV = '1'
GIT_SSH_COMMAND = 'ssh -o StrictHostKeyChecking=accept-new'
STAGING_AUDIT_PROJECT_NAME = 'mandelstudio'
STAGING_AUDIT_PROJECT_DIR = '/home/www-mandelstudio/mandelstudio'
STAGING_AUDIT_MANAGE = '/var/lib/virtualenv/mandelstudio/bin/manage.py'
}
stages {
@@ -32,6 +42,30 @@ pipeline {
stage('Build') {
steps {
sh '''
STABLE_INDEX_URL=${STABLE_INDEX_URL:-https://pypi.mandelblog.com/mandel/stable/+simple/}
TESTING_INDEX_URL=${TESTING_INDEX_URL:-https://pypi.mandelblog.com/mandel/testing/+simple/}
ROOT_INDEX_URL=${PIP_EXTRA_INDEX_URL:-https://pypi.mandelblog.com/root/pypi/+simple/}
export STABLE_INDEX_URL
if python3 - <<'PY'
import os
import sys
from urllib.request import Request, urlopen
from urllib.error import URLError, HTTPError
url = os.environ["STABLE_INDEX_URL"]
try:
req = Request(url, method='HEAD')
with urlopen(req, timeout=10) as response:
sys.exit(0 if response.status < 400 else 1)
except HTTPError as exc:
sys.exit(0 if exc.code < 400 else 1)
except URLError:
sys.exit(1)
PY
then
echo "devpi stable index available, but stable-first install is not enabled yet"
else
echo "devpi stable index not available, using testing as production source"
fi
if command -v sudo >/dev/null 2>&1 && sudo -n true >/dev/null 2>&1; then
sudo apt-get update -y
sudo apt-get install -y python3-venv python3-pip make build-essential libpq-dev \
@@ -48,14 +82,20 @@ pipeline {
. .venv/bin/activate
pip install coverage
pip install --upgrade pip "setuptools==69.5.1" wheel
PIP_INDEX_URL=${PIP_INDEX_URL:-https://pypi.mandelblog.com/mandel/testing/+simple/} \
PIP_EXTRA_INDEX_URL=${PIP_EXTRA_INDEX_URL:-https://pypi.mandelblog.com/root/pypi/+simple/} \
PIP_INDEX_URL="$TESTING_INDEX_URL" \
PIP_EXTRA_INDEX_URL="$ROOT_INDEX_URL" \
pip install --no-build-isolation --pre --editable . setuptools wheel --upgrade --upgrade-strategy=eager --use-deprecated=legacy-resolver
cp "${JOB_BASE_NAME}/ocyan.json" "${JOB_BASE_NAME}/${JOB_BASE_NAME}.json"
pip install ruff vdt.versionplugin.wheel
pip install --upgrade "setuptools==69.5.1" wheel
python3 scripts/validate_payment_provider_config.py
manage.py migrate --no-input --skip-checks
manage.py loaddemodata || true
if [ "${RUN_DEMO_PURGE}" = "true" ]; then
manage.py purge_demo_data
fi
manage.py seed_idea_marketplace
manage.py prepare_idea_marketplace_launch --apply-homepage-copy --purge-demo-pages
manage.py validate_idea_marketplace_launch
manage.py collectstatic --no-input --verbosity=0
pip install "httpx<0.28"
'''
@@ -74,7 +114,7 @@ pipeline {
steps {
sh '''
. .venv/bin/activate
python -m compileall -q setup.py mandelstudio
python -m compileall -q setup.py mandelstudio mandelblog_content_guard
'''
}
post {
@@ -86,6 +126,41 @@ pipeline {
}
}
}
stage('Deploy Staging') {
steps {
echo 'Triggering staging deploy for mandelstudio after successful CI build.'
build job: 'deploy-project-stg',
wait: true,
propagate: true,
parameters: [string(name: 'PROJECT_NAME', value: 'mandelstudio')]
}
}
stage('Post-Deploy Multilingual Audit') {
agent { label 'built-in' }
options {
timeout(time: 10, unit: 'MINUTES')
}
steps {
deleteDir()
checkout scm
sh 'mkdir -p artifacts && chmod +x scripts/run_remote_multilingual_audit.sh'
sh './scripts/run_remote_multilingual_audit.sh'
script {
int status = sh(script: 'python3 scripts/multilingual_audit_ci.py --json artifacts/multilingual-audit.json', returnStatus: true)
if (status == 2) {
error('Block-level multilingual issues detected or audit execution failed.')
}
if (status == 1) {
unstable('Warn-level multilingual issues detected.')
}
}
}
post {
always {
archiveArtifacts artifacts: 'artifacts/multilingual-audit.json', onlyIfSuccessful: false
}
}
}
}
post {
always {
@@ -97,10 +172,6 @@ pipeline {
. .venv/bin/activate
pip install coverage
'''
echo 'Triggering staging deploy for mandelstudio after successful CI build.'
build job: 'deploy-project-stg',
wait: false,
parameters: [string(name: 'PROJECT_NAME', value: 'mandelstudio')]
}
failure {
emailext subject: "JENKINS-NOTIFICATION: ${currentBuild.currentResult}: Job '${env.JOB_NAME} #${env.BUILD_NUMBER}'",

View File

@@ -0,0 +1,63 @@
#!/usr/bin/env groovy
pipeline {
    // No default executor: every stage that runs steps must declare its own
    // agent ('agent none' stages cannot run sh/checkout steps themselves).
    agent none
    triggers {
        // Nightly run at a Jenkins-chosen minute within the 02:00 hour.
        cron('H 2 * * *')
    }
    options {
        disableConcurrentBuilds()
        skipDefaultCheckout(true)
    }
    environment {
        // Remote staging host and paths consumed by
        // scripts/run_remote_multilingual_audit.sh.
        STAGING_AUDIT_HOST = 'root@49.12.204.96'
        STAGING_AUDIT_PROJECT_DIR = '/home/www-mandelstudio/mandelstudio'
        STAGING_AUDIT_MANAGE = '/var/lib/virtualenv/mandelstudio/bin/manage.py'
        STAGING_AUDIT_SSH_CREDENTIALS_ID = 'staging-root-ssh'
    }
    stages {
        stage('Checkout') {
            // FIX: with 'agent none' at the pipeline level, a stage running
            // sh steps must declare its own agent or the build fails with a
            // missing-node-context error. Use the same label as the audit
            // stage so both stages share one workspace on the same node.
            agent { label 'built-in' }
            steps {
                withCredentials([sshUserPrivateKey(credentialsId: 'gitea-ssh', keyFileVariable: 'GIT_KEYFILE')]) {
                    sh '''
                        export GIT_SSH_COMMAND="ssh -i $GIT_KEYFILE -o StrictHostKeyChecking=accept-new"
                        if [ -d .git ]; then
                            git remote set-url origin ssh://git@git.mandelblog.com:2222/salt/mandelstudio.git
                            git fetch --tags --force --progress origin +refs/heads/master:refs/remotes/origin/master
                        else
                            git clone ssh://git@git.mandelblog.com:2222/salt/mandelstudio.git .
                            git fetch --tags --force --progress origin +refs/heads/master:refs/remotes/origin/master
                        fi
                        git checkout -f refs/remotes/origin/master
                    '''
                }
            }
        }
        stage('Nightly Multilingual Audit') {
            agent { label 'built-in' }
            options {
                timeout(time: 10, unit: 'MINUTES')
            }
            steps {
                // Preserve last night's artifact (if any) so the CI script
                // can report per-locale regressions; '|| true' keeps the very
                // first run green when no previous artifact exists.
                sh 'mkdir -p artifacts && [ -f artifacts/multilingual-audit.json ] && cp artifacts/multilingual-audit.json artifacts/previous-multilingual-audit.json || true'
                withCredentials([sshUserPrivateKey(credentialsId: env.STAGING_AUDIT_SSH_CREDENTIALS_ID, keyFileVariable: 'STAGING_SSH_KEYFILE')]) {
                    // Match the main pipeline: make sure the helper is
                    // executable even if the checkout lost the exec bit.
                    sh 'chmod +x scripts/run_remote_multilingual_audit.sh'
                    sh './scripts/run_remote_multilingual_audit.sh'
                }
                script {
                    // Exit codes: 0 = clean, 1 = warn-level issues (mark
                    // UNSTABLE), 2 = block-level issues or audit failure.
                    int status = sh(script: 'python3 scripts/multilingual_audit_ci.py --json artifacts/multilingual-audit.json --previous-json artifacts/previous-multilingual-audit.json', returnStatus: true)
                    if (status == 2) {
                        error('Block-level multilingual issues detected or audit execution failed.')
                    }
                    if (status == 1) {
                        unstable('Warn-level multilingual issues detected.')
                    }
                }
            }
            post {
                always {
                    // Archive even on failure so a machine-readable result is
                    // always available.
                    archiveArtifacts artifacts: 'artifacts/multilingual-audit.json,artifacts/previous-multilingual-audit.json', onlyIfSuccessful: false
                }
            }
        }
    }
}

View File

@@ -0,0 +1,142 @@
# CI Multilingual Audit
## Purpose
The multilingual audit verifies that public content stays locale-correct across all active MandelBlog languages after deploy.
It checks rendered, user-facing text for:
- mixed-language fragments
- foreign UI labels
- weak or generic badge labels flagged by policy
- locale-specific normalization problems
It does not modify content in CI. It only audits and reports.
## Jenkins jobs
### Main pipeline: `mandelstudio`
The main pipeline runs a post-deploy multilingual audit after staging deployment completes.
Stages relevant to multilingual quality:
1. `Deploy Staging`
2. `Post-Deploy Multilingual Audit`
The audit stage runs remotely on staging:
```bash
python manage.py audit_locales --format=json
```
Artifact archived:
- `artifacts/multilingual-audit.json`
### Nightly pipeline
Pipeline source:
- `Jenkinsfile.multilingual-nightly`
Schedule:
- `H 2 * * *`
The nightly job:
- runs the full multilingual audit on staging
- archives the latest JSON artifact
- compares the current artifact against the previous artifact
- prints regressions by locale
## Build result policy
The audit summary is interpreted as follows:
- `SUCCESS`
- all locales have `block=0` and `warn=0`
- `UNSTABLE`
- at least one locale has `warn > 0`
- deploy is not blocked
- `FAILURE`
- at least one locale has `block > 0`
- or audit execution itself fails
This keeps deploys safe without making warning-level cleanup a hard blocker.
## Required Jenkins credential
Credential location:
- `Manage Jenkins -> Credentials -> System -> Global credentials`
Credential to add:
- `Kind`: `SSH Username with private key`
- `ID`: `staging-root-ssh`
- `Username`: `root`
- `Private key`: staging SSH key
Current implementation uses the following environment defaults:
- `STAGING_AUDIT_HOST=root@49.12.204.96`
- `STAGING_AUDIT_PROJECT_DIR=/home/www-mandelstudio/mandelstudio`
- `STAGING_AUDIT_MANAGE=/var/lib/virtualenv/mandelstudio/bin/manage.py`
## Console summary
The Jenkins stage prints a per-locale summary like this:
```text
LOCALE en: issues_found=0 issues_remaining=0 block=0 warn=0 log=0
```
Nightly runs also print regressions when present:
```text
REGRESSIONS:
- es: remaining=+2 block=+0 warn=+2 log=+0
```
If no regressions exist:
```text
REGRESSIONS: none
```
## Artifact structure
Archived file:
- `artifacts/multilingual-audit.json`
Expected clean structure:
- `run_id`
- `total_urls_checked`
- `issues_found`
- `summary`
- `issues`
Failure artifacts may also contain:
- `error`
This happens when the remote audit times out or fails, and is intentional so Jenkins still archives a machine-readable result.
## Local rerun
To rerun the same remote audit flow locally:
```bash
export STAGING_AUDIT_HOST='root@49.12.204.96'
export STAGING_AUDIT_PROJECT_DIR='/home/www-mandelstudio/mandelstudio'
export STAGING_AUDIT_MANAGE='/var/lib/virtualenv/mandelstudio/bin/manage.py'
./scripts/run_remote_multilingual_audit.sh
python3 scripts/multilingual_audit_ci.py --json artifacts/multilingual-audit.json
```
To compare against a previous artifact:
```bash
python3 scripts/multilingual_audit_ci.py \
--json artifacts/multilingual-audit.json \
--previous-json artifacts/previous-multilingual-audit.json
```
## Fixing issues by locale
Recommended response sequence for any locale that returns warnings or blocks:
1. run scoped dry-run audit for the affected locale/pages
2. inspect before/after rewrite candidates
3. apply controlled rewrite only to affected pages
4. rerun post-audit
5. manually verify rendered output
Do not bulk rewrite a locale tree without scoped review first.
## Operational notes
- remote audit execution has a timeout
- audit failure still produces JSON output for Jenkins archiving
- missing previous nightly artifact is handled gracefully
## Hardening candidates
These are operational follow-ups only. They are not required for current behavior.
- replace `root` SSH with a dedicated deploy/audit user
- move host/path configuration into Jenkins-managed environment variables or folder-level config
- document SSH credential rotation procedure

View File

@@ -0,0 +1 @@
# NOTE(review): default_app_config is deprecated since Django 3.2 and ignored
# from Django 4.1 on; Django auto-discovers the single AppConfig declared in
# apps.py. Safe to remove once the project no longer supports Django < 3.2 —
# confirm the supported Django range before dropping it.
default_app_config = "mandelblog_content_guard.apps.MandelblogContentGuardConfig"

View File

@@ -0,0 +1,25 @@
from .base import BaseLanguageAgent
from .de import GermanAgent
from .en import EnglishAgent
from .es import SpanishAgent
from .fr import FrenchAgent
from .it import ItalianAgent
from .nl import DutchAgent
from .pt import PortugueseAgent
from .ru import RussianAgent
# Maps ISO 639-1 locale codes to their specialised agent classes.
AGENT_REGISTRY = {
    "nl": DutchAgent,
    "en": EnglishAgent,
    "de": GermanAgent,
    "fr": FrenchAgent,
    "es": SpanishAgent,
    "it": ItalianAgent,
    "pt": PortugueseAgent,
    "ru": RussianAgent,
}


def get_language_agent(locale_code: str) -> BaseLanguageAgent:
    """Instantiate the agent registered for *locale_code*.

    Unknown locale codes fall back to the generic BaseLanguageAgent.
    """
    try:
        agent_class = AGENT_REGISTRY[locale_code]
    except KeyError:
        agent_class = BaseLanguageAgent
    return agent_class()

View File

@@ -0,0 +1,187 @@
from __future__ import annotations
import re
from collections import defaultdict
from typing import Any
from django.utils.module_loading import import_string
from ..settings import get_rewrite_backend
class BaseLanguageAgent:
    """Shared rule-based cleanup/rewrite pipeline for locale content agents.

    Subclasses override the class attributes (locale, tone, vocabulary maps,
    CTA defaults) and optionally post_cleanup_text() to add locale-specific
    normalization. An optional rewrite backend resolved from settings may
    further rewrite text after rule-based cleanup.
    """

    # Locale code this agent handles; subclasses override.
    locale = "nl"
    # Tone hint embedded in the rewrite-backend prompt.
    tone = "business"
    # Formality hint embedded in the rewrite-backend prompt.
    preferred_formality = "neutral"
    # Keyword -> canonical call-to-action text (see normalize_cta).
    cta_defaults: dict[str, str] = {}
    # Unconditional source -> target replacements applied during cleanup.
    vocabulary_map: dict[str, str] = {}
    # field-path token -> (source -> target) replacements, applied only when
    # the token occurs in the lowercased field path.
    contextual_vocabulary_map: dict[str, dict[str, str]] = {}
    # (compiled pattern, replacement template) pairs that strip
    # translation-artifact wrappers down to the quoted payload text.
    cleanup_patterns: tuple[tuple[re.Pattern[str], str], ...] = (
        # English artifact: '... is <language>, not Dutch ...: "<quote>"'.
        (
            re.compile(
                r"""^.*?\bis\s+(?:German|Spanish|French|Italian|Portuguese|Dutch),\s+not\s+Dutch.*?(?::\s*|\"\.\s*)(?P<quote>.+?)\"?\.?\s*$""",
                re.IGNORECASE,
            ),
            "{quote}",
        ),
        # English artifact: 'translation from ...: "<quote>"'.
        (
            re.compile(
                r"""^.*?\btranslation\s+from\s+.*?(?::\s*|\"\.\s*)(?P<quote>.+?)\"?\.?\s*$""",
                re.IGNORECASE,
            ),
            "{quote}",
        ),
        # Spanish artifact: 'traducido/traducida al ...: "<quote>"'.
        (
            re.compile(
                r"""^.*?\btraducid[oa]\s+al\s+.*?(?::\s*|\"\.\s*)(?P<quote>.+?)\"?\.?\s*$""",
                re.IGNORECASE,
            ),
            "{quote}",
        ),
        # Russian artifact: 'перевод с ...: "<quote>"'.
        (
            re.compile(
                r"""^.*?\bперевод\s+с\s+.*?(?::\s*|\"\.\s*)(?P<quote>.+?)\"?\.?\s*$""",
                re.IGNORECASE,
            ),
            "{quote}",
        ),
        # Spanish artifact: 'La entrada "<quote>" está en alemán ...'.
        (
            re.compile(
                r"""^\s*La\s+entrada\s+\"?(?P<quote>.+?)\"?\s+está\s+en\s+alemán.*$""",
                re.IGNORECASE,
            ),
            "{quote}",
        ),
    )

    def __init__(self) -> None:
        # Resolved once per agent instance; None when no backend is configured.
        self.backend = self._load_backend()

    def _load_backend(self):
        """Import and return the configured rewrite backend, or None."""
        backend_path = get_rewrite_backend()
        if not backend_path:
            return None
        return import_string(backend_path)

    def backend_prompt(self, field_path: str, text: str) -> str:
        """Build the instruction prompt passed to the rewrite backend."""
        return (
            f"Rewrite the following {self.locale} website copy for a small-business "
            f"website in a natural, professional, sales-driven tone. Preserve meaning, "
            f"remove translation artifacts, keep it concise, and do not add commentary.\n"
            f"Field: {field_path}\n"
            f"Locale: {self.locale}\n"
            f"Tone: {self.tone}\n"
            f"Formality: {self.preferred_formality}\n"
            f"Text: {text}"
        )

    def _contextual_replacements(self, field_path: str) -> dict[str, str]:
        """Merge all contextual maps whose token occurs in the field path."""
        lowered = field_path.lower()
        replacements: dict[str, str] = {}
        for token, mapping in self.contextual_vocabulary_map.items():
            if token in lowered:
                replacements.update(mapping)
        return replacements

    def post_cleanup_text(self, text: str, field_path: str = "") -> str:
        """Hook for locale-specific normalization; identity by default."""
        return text

    def _apply_replacements(self, text: str, replacements: dict[str, str]) -> str:
        """Apply replacements longest-source-first.

        Single word-like sources are replaced only on word boundaries so they
        do not touch substrings of larger words; multi-word phrases use plain
        substring substitution.
        """
        cleaned = text
        phrase_replacements = {}
        token_replacements = {}
        for source, target in replacements.items():
            if not source:
                continue
            # A source made of a single word-like token (letters, Latin-1
            # accents, hyphens) gets boundary-guarded replacement.
            if re.fullmatch(r"[\wÀ-ÿ-]+", source, flags=re.UNICODE):
                token_replacements[source] = target
            else:
                phrase_replacements[source] = target
        # Longest first so overlapping phrases do not clobber each other.
        for source, target in sorted(phrase_replacements.items(), key=lambda item: len(item[0]), reverse=True):
            cleaned = cleaned.replace(source, target)
        for source, target in sorted(token_replacements.items(), key=lambda item: len(item[0]), reverse=True):
            pattern = re.compile(rf"(?<![\wÀ-ÿ-]){re.escape(source)}(?![\wÀ-ÿ-])", re.UNICODE)
            cleaned = pattern.sub(target, cleaned)
        return cleaned

    def cleanup_text(self, text: str, field_path: str = "") -> str:
        """Run the full rule-based cleanup chain and collapse whitespace.

        Order: artifact-stripping patterns, unconditional vocabulary,
        contextual vocabulary, locale hook, whitespace normalization.
        """
        cleaned = text.strip()
        for pattern, replacement in self.cleanup_patterns:
            match = pattern.match(cleaned)
            if not match:
                continue
            cleaned = replacement.format(**match.groupdict()).strip()
        cleaned = self._apply_replacements(cleaned, self.vocabulary_map)
        cleaned = self._apply_replacements(cleaned, self._contextual_replacements(field_path))
        cleaned = self.post_cleanup_text(cleaned, field_path=field_path)
        return re.sub(r"\s+", " ", cleaned).strip()

    def normalize_cta(self, text: str, field_path: str = "") -> str:
        """Clean the text, then swap in a canonical CTA on keyword match.

        The first cta_defaults keyword found (in declaration order) wins.
        """
        normalized = self.cleanup_text(text, field_path=field_path)
        lowered = normalized.lower()
        for keyword, replacement in self.cta_defaults.items():
            if keyword in lowered:
                return replacement
        return normalized

    def rewrite(self, text: str, field_path: str = "", issues: list[Any] | None = None) -> str:
        """Cleanup plus optional CTA normalization and backend rewriting."""
        cleaned = self.cleanup_text(text, field_path=field_path)
        lowered_path = field_path.lower()
        if any(token in lowered_path for token in ("cta", "button", "link_text", "submit")):
            cleaned = self.normalize_cta(cleaned, field_path=field_path)
        elif issues and any(
            issue.issue_type in {"generic_badge_label", "foreign_ui_label", "weak_marketing_copy", "mixed_locale_heading"}
            for issue in issues
        ):
            # Second cleanup pass over the already-cleaned text for these
            # flagged issue types.
            cleaned = self.cleanup_text(cleaned, field_path=field_path)
        if self.backend:
            rewritten = self.backend(
                locale=self.locale,
                field_path=field_path,
                text=cleaned,
                prompt=self.backend_prompt(field_path, cleaned),
            )
            # Only accept non-empty string results from the backend.
            if isinstance(rewritten, str) and rewritten.strip():
                cleaned = rewritten.strip()
        return cleaned

    def process_block(self, block_data: Any, field_path: str = "", issue_map: dict[str, list[Any]] | None = None) -> tuple[Any, bool]:
        """Recursively clean/rewrite a nested block structure.

        Returns (new_value, changed). Dicts and lists are walked with dotted /
        indexed field paths; strings are rewritten when they have recorded
        issues or a CTA-like path token, otherwise only cleaned. All other
        values pass through untouched.
        """
        issue_map = issue_map or {}
        if isinstance(block_data, dict):
            changed = False
            output = {}
            for key, value in block_data.items():
                child_path = f"{field_path}.{key}" if field_path else str(key)
                new_value, child_changed = self.process_block(value, child_path, issue_map)
                output[key] = new_value
                changed = changed or child_changed
            return output, changed
        if isinstance(block_data, list):
            changed = False
            output = []
            for index, value in enumerate(block_data):
                child_path = f"{field_path}[{index}]"
                new_value, child_changed = self.process_block(value, child_path, issue_map)
                output.append(new_value)
                changed = changed or child_changed
            return output, changed
        if isinstance(block_data, str):
            issues = issue_map.get(field_path, [])
            # NOTE(review): unlike rewrite(), this token check does not
            # lowercase field_path first — mixed-case paths would skip the
            # match; confirm field paths are always lowercase.
            needs_rewrite = bool(issues) or any(
                token in field_path for token in ("cta", "button", "label", "placeholder", "help_text")
            )
            if not needs_rewrite:
                cleaned = self.cleanup_text(block_data)
                return cleaned, cleaned != block_data
            rewritten = self.rewrite(block_data, field_path=field_path, issues=issues)
            return rewritten, rewritten != block_data
        return block_data, False

    def build_issue_map(self, issues: list[Any]) -> dict[str, list[Any]]:
        """Group issues by field_path, skipping issues without one."""
        issue_map: dict[str, list[Any]] = defaultdict(list)
        for issue in issues:
            if issue.field_path:
                issue_map[issue.field_path].append(issue)
        return issue_map

View File

@@ -0,0 +1,23 @@
from .base import BaseLanguageAgent
from ..normalizers import normalize_de_text
from ..system_strings import build_system_vocabulary
class GermanAgent(BaseLanguageAgent):
    """German ("de") copy agent: formal Sie register, trust-oriented tone."""

    locale = "de"
    tone = "professional and trustworthy"
    preferred_formality = "formal Sie"
    # Shared system strings for German, limited to the transparent-investment key.
    vocabulary_map = dict(build_system_vocabulary("de", ("transparent_investment",)))
    # Keyword -> canonical German call-to-action; first keyword found wins.
    cta_defaults = dict(
        starter="Starter-Gespräch planen",
        business="Beratungsgespräch planen",
        support="Support anfragen",
        service="Dienstleistungen anzeigen",
        project="Projekt starten",
        kontakt="Einführungsgespräch planen",
    )

    def post_cleanup_text(self, text: str, field_path: str = "") -> str:
        """Apply German-specific normalization after generic cleanup."""
        return normalize_de_text(text, field_path=field_path)

View File

@@ -0,0 +1,34 @@
from .base import BaseLanguageAgent
from ..normalizers import normalize_en_text
from ..system_strings import build_contextual_system_vocabulary, build_system_vocabulary
class EnglishAgent(BaseLanguageAgent):
    """English ("en") copy agent: direct, business-friendly register."""

    locale = "en"
    tone = "business-friendly and direct"
    preferred_formality = "neutral"
    # Shared system strings for English, limited to the listed keys.
    vocabulary_map = dict(
        build_system_vocabulary(
            "en", ("plan_badge", "services_badge", "transparent_label", "transparent_investment")
        )
    )
    _system_contextual = build_contextual_system_vocabulary("en", ("plan_badge", "services_badge", "transparent_label"))
    # One replacement table per field-path token recognised by the base agent.
    contextual_vocabulary_map = {
        "badge": dict(_system_contextual.get("badge", {})),
        "label": dict(_system_contextual.get("label", {})),
        "metric": dict(_system_contextual.get("metric", {})),
        "stat": dict(_system_contextual.get("stat", {})),
        "title": dict(_system_contextual.get("title", {})),
        "heading": dict(_system_contextual.get("heading", {})),
        "rendered": dict(_system_contextual.get("rendered", {})),
    }
    # Keyword -> canonical English call-to-action; first keyword found wins.
    cta_defaults = dict(
        starter="Book starter call",
        business="Book business call",
        support="View support",
        service="View services",
        project="Start your project",
        quote="Request a quote",
        contact="Book intro call",
    )

    def post_cleanup_text(self, text: str, field_path: str = "") -> str:
        """Apply English-specific normalization after generic cleanup."""
        return normalize_en_text(text, field_path=field_path)

View File

@@ -0,0 +1,43 @@
from .base import BaseLanguageAgent
from ..normalizers import normalize_es_text
from ..system_strings import build_contextual_system_vocabulary, build_system_vocabulary
class SpanishAgent(BaseLanguageAgent):
    """Spanish ("es") copy agent: formal register, business-focused tone."""

    locale = "es"
    tone = "clear and business-focused"
    preferred_formality = "formal"
    # Shared system strings for Spanish, limited to the listed keys.
    vocabulary_map = dict(
        build_system_vocabulary(
            "es",
            (
                "plan_badge",
                "response_time",
                "without_commitment",
                "transparent_label",
                "transparent_investment",
            ),
        )
    )
    _system_contextual = build_contextual_system_vocabulary("es", ("plan_badge", "transparent_label"))
    # One replacement table per field-path token recognised by the base agent.
    contextual_vocabulary_map = {
        "badge": dict(_system_contextual.get("badge", {})),
        "label": dict(_system_contextual.get("label", {})),
        "metric": dict(_system_contextual.get("metric", {})),
        "stat": dict(_system_contextual.get("stat", {})),
        "title": dict(_system_contextual.get("title", {})),
        "heading": dict(_system_contextual.get("heading", {})),
        "rendered": dict(_system_contextual.get("rendered", {})),
    }
    # Keyword -> canonical Spanish call-to-action; first keyword found wins.
    cta_defaults = dict(
        starter="Reservar llamada inicial",
        business="Reservar llamada comercial",
        support="Solicitar soporte",
        service="Mostrar los servicios",
        project="Inicia tu proyecto",
        quote="Solicitar propuesta",
        contact="Planificar la reunión inicial",
    )

    def post_cleanup_text(self, text: str, field_path: str = "") -> str:
        """Apply Spanish-specific normalization after generic cleanup."""
        return normalize_es_text(text, field_path=field_path)

View File

@@ -0,0 +1,66 @@
from .base import BaseLanguageAgent
from ..system_strings import build_contextual_system_vocabulary, build_system_vocabulary
class FrenchAgent(BaseLanguageAgent):
    """French ("fr") copy agent: formal, commercial register.

    FIX: several emitted CTA/copy strings were missing the French elision
    apostrophe ("lentretien", "léchange", "dentreprise"); restored to
    "l'entretien", "l'échange", "d'entreprise". These strings are replacement
    outputs, not match keys, so matching behavior is unaffected.
    """

    locale = "fr"
    tone = "professional and commercial"
    preferred_formality = "formal"
    # Keyword -> canonical French call-to-action; first keyword found wins.
    cta_defaults = {
        "starter": "Planifier l'entretien de départ",
        "business": "Planifier l'entretien commercial",
        "support": "Voir le support",
        "service": "Afficher les services",
        "project": "Lancez votre projet",
        "devis": "Demander un devis",
        "contact": "Planifier l'échange",
    }
    # System strings plus manual fixes for mixed-language fragments observed
    # in audits (German/Dutch/English sources -> French).
    vocabulary_map = {
        **build_system_vocabulary("fr"),
        "SERVICES": "PRESTATIONS",
        "New": "Nouveau",
        "Popular": "Populaire",
        "Erstes Produktionsprojekt erfolgreich abgeschlossen.": "Premier projet de production livré avec succès.",
        "Von Kickoff bis zum Launch mit einem klaren Umfang.": "Du cadrage au lancement avec un périmètre clair.",
        "Demande d'admission initiale": "Planifier un échange initial",
        "Geschäftsprozess besprechen": "Échanger sur votre processus métier",
        "Entretien d'accueil": "Entretien initial",
        "Vraag over diensten": "Question sur les services",
        "Konkrete erste Schätzung": "Première estimation concrète",
        "Ansatz, der zu Ihrem Budget passt": "Approche adaptée à votre budget",
        "Detailliertes Seitenlayout": "Structure détaillée des pages",
        "Investition": "investissement",
        "Unverbindliches Gespräch, klares Angebot": "Sans engagement, offre claire",
        "Bereit, mit der Business-Website zu starten?": "Prêt à démarrer votre site d'entreprise ?",
        "Planifier un échange business": "Planifier un échange commercial",
        "Aucune carte bancaire requise": "Sans engagement",
    }
    _system_contextual = build_contextual_system_vocabulary("fr")
    # One replacement table per field-path token recognised by the base agent.
    contextual_vocabulary_map = {
        "badge": {
            **_system_contextual.get("badge", {}),
            "Popular": "Le plus demandé",
        },
        "label": {
            **_system_contextual.get("label", {}),
            "Popular": "Le plus demandé",
        },
        "metric": {
            **_system_contextual.get("metric", {}),
        },
        "stat": {
            **_system_contextual.get("stat", {}),
        },
        "title": {
            **_system_contextual.get("title", {}),
            "SERVICES": "PRESTATIONS",
        },
        "heading": {
            **_system_contextual.get("heading", {}),
            "SERVICES": "PRESTATIONS",
        },
        "rendered": {
            **_system_contextual.get("rendered", {}),
            "SERVICES": "PRESTATIONS",
        },
    }

View File

@@ -0,0 +1,42 @@
from .base import BaseLanguageAgent
from ..normalizers import normalize_it_text
from ..system_strings import build_contextual_system_vocabulary, build_system_vocabulary
class ItalianAgent(BaseLanguageAgent):
    """Italian ("it") copy agent: approachable professional register."""

    locale = "it"
    tone = "professional and approachable"
    preferred_formality = "polite"
    # Shared system strings for Italian, limited to the listed keys.
    vocabulary_map = dict(
        build_system_vocabulary(
            "it",
            (
                "weeks_1_2",
                "without_commitment",
                "transparent_label",
                "transparent_investment",
                "customization_integrations",
                "multilingual_rollout",
            ),
        )
    )
    _system_contextual = build_contextual_system_vocabulary("it", ("transparent_label",))
    # One replacement table per field-path token recognised by the base agent.
    contextual_vocabulary_map = {
        "badge": dict(_system_contextual.get("badge", {})),
        "label": dict(_system_contextual.get("label", {})),
        "metric": dict(_system_contextual.get("metric", {})),
        "stat": dict(_system_contextual.get("stat", {})),
        "rendered": dict(_system_contextual.get("rendered", {})),
    }
    # Keyword -> canonical Italian call-to-action; first keyword found wins.
    cta_defaults = dict(
        starter="Prenota una call iniziale",
        business="Pianifica la call business",
        support="Richiedi supporto",
        service="Mostra i servizi",
        project="Avvia il tuo progetto",
        quote="Richiedi una proposta",
        contact="Pianifica la riunione introduttiva",
    )

    def post_cleanup_text(self, text: str, field_path: str = "") -> str:
        """Apply Italian-specific normalization after generic cleanup."""
        return normalize_it_text(text, field_path=field_path)

View File

@@ -0,0 +1,20 @@
from .base import BaseLanguageAgent
from ..normalizers import normalize_nl_text
class DutchAgent(BaseLanguageAgent):
    """Dutch ("nl") copy agent: professional je/jij register."""

    locale = "nl"
    tone = "zakelijk en duidelijk"
    preferred_formality = "je/jij professioneel"
    # Keyword -> canonical Dutch call-to-action; first keyword found wins.
    cta_defaults = dict(
        starter="Plan startergesprek",
        business="Plan zakelijk gesprek",
        support="Bekijk support",
        service="Bekijk diensten",
        project="Start jouw project",
        contact="Plan kennismaking",
        offerte="Vraag voorstel aan",
    )

    def post_cleanup_text(self, text: str, field_path: str = "") -> str:
        """Apply Dutch-specific normalization after generic cleanup."""
        return normalize_nl_text(text, field_path=field_path)

View File

@@ -0,0 +1,111 @@
from .base import BaseLanguageAgent
from ..system_strings import build_contextual_system_vocabulary, build_system_vocabulary
class PortugueseAgent(BaseLanguageAgent):
    """Portuguese ("pt") copy agent with a large hand-curated replacement map.

    The vocabulary map fixes mixed-language fragments (Spanish, French,
    German, Italian, Dutch sources) observed in audit output, including some
    long concatenated rendered-text spans.
    """

    locale = "pt"
    tone = "business-focused and practical"
    preferred_formality = "neutral"
    # Keyword -> canonical Portuguese call-to-action; first keyword found wins.
    cta_defaults = {
        "starter": "Agendar chamada inicial",
        "business": "Agendar chamada comercial",
        "support": "Ver suporte",
        "service": "Ver serviços",
        "project": "Iniciar o seu projeto",
        "proposta": "Pedir proposta",
        "contact": "Agendar reunião introdutória",
    }
    # Shared system strings plus manual source -> target fixes. Long entries
    # near the bottom target whole concatenated rendered spans; order does not
    # matter for matching (base agent applies longest-source-first).
    vocabulary_map = {
        **build_system_vocabulary("pt"),
        "SERVICES": "SERVIÇOS",
        "New": "Novo",
        "Popular": "Em destaque",
        "Siti web e negozi online": "Sites e lojas online",
        "Siti web e negozi online che sono rapidamente online e facili da gestire": "Sites e lojas online que ficam no ar rapidamente e são fáceis de gerir",
        "Caso de cliente en directo": "Caso real de cliente",
        "El primer proyecto de producción finalizado con éxito.": "O primeiro projeto de produção foi concluído com sucesso.",
        "Más sobre el proceso": "Mais sobre o processo",
        "Modifiez simplement vous-même.": "Edite facilmente por conta própria.",
        "Opciones de la tienda web Mantenimiento y soporte Suporte mensal opcional para atualizações e estabilidade.": "Opções da loja online Manutenção e suporte Suporte mensal opcional para atualizações e estabilidade.",
        "Opciones de la tienda web": "Opções da loja online",
        "Planes de soporte": "Planos de suporte",
        "Multilingüe": "Multilingue",
        "Suivi + corrections": "Acompanhamento e correções",
        "Mejoras mensuales": "Melhorias mensais",
        "¿A qué velocidad puede comenzar?": "Com que rapidez podem começar?",
        "¿Puedo editar textos e imágenes yo mismo?": "Posso editar textos e imagens por conta própria?",
        "Einzelhandelsunternehmer": "Comerciante",
        "lifestyle": "estilo de vida",
        "À partir de 3 750 €": "A partir de 3.750 €",
        "Transparente sobre o planejamento, o processo e a gestão.": "Clareza sobre o planeamento, o processo e a gestão.",
        "Einzelhandelsinhaber Petite boutique en ligne Forfaits de services (à partir de) Pontos de partida transparentes.": "Comerciantes Pequena loja online Pacotes de serviço (a partir de) Pontos de partida claros.",
        "Unsere Serviços": "Os nossos serviços",
        "Unsere Serviços: vom schnellen Start bis zu skalierbarem Wachstum": "Os nossos serviços: do lançamento rápido ao crescimento escalável",
        "Elija el camino": "Escolha o caminho certo",
        "Elija el camino que corresponda a su fase: sitio de inicio, sitio empresarial, tienda en línea o soporte continuo.": "Escolha o caminho certo para a sua fase: site inicial, site empresarial, loja online ou suporte contínuo.",
        "Début en direct": "Lançamento rápido",
        "Demande d'admission initiale": "Agendar conversa inicial",
        "Site Web d'Entreprise": "Site empresarial",
        "Hablar sobre el proceso empresarial": "Falar sobre o processo do negócio",
        "Mise en place de boutique en ligne": "Implementação de loja online",
        "Maintenance & gestion": "Manutenção e gestão",
        "Afficher le plan de soutien": "Ver suporte",
        "Introducción multilingüe": "Lançamento multilingue",
        "Forfaits de services (à partir de)": "Pacotes de serviço (a partir de)",
        "Schnell online mit einer starken Basis": "Rápido online com uma base sólida",
        "Startseite + Kernseiten": "Página inicial + páginas essenciais",
        "Optimizado para móviles": "Otimizado para mobile",
        "Gestisca lei stesso il contenuto": "Gerir o conteúdo com autonomia",
        "Detailliertes Seitenlayout": "Estrutura detalhada das páginas",
        "Unverbindliches Gespräch, klares Angebot": "Sem compromisso, proposta clara",
        "Mehr Struktur und Konversion": "Mais estrutura e foco em conversão",
        "Sections axées sur la conversion": "Secções orientadas para conversão",
        "Base prête pour le SEO": "Base pronta para SEO",
        "Katalog + Kasse": "Catálogo + checkout",
        "Zahlungen und Auftragsfluss": "Pagamentos e fluxo de encomendas",
        "Wachstumsbereite Grundlage": "Base pronta para crescimento",
        "Soporte y crecimiento": "Suporte e crescimento",
        "Amélioration continue": "Melhoria contínua",
        "Desde 149 € al mes.": "Desde 149 € por mês.",
        "Ab 2.250 €": "A partir de 2.250 €",
        "Boutique en ligne": "Loja online",
        "Sales-ready mit skalierbarem Stack": "Preparada para vender com uma base escalável",
        "Agendar conversa sobre o serviço Ver resultados do projeto 1-2 Wochen Début en direct 4.9/5 Kundenschätzung 100% Bearbeitbar Visão geral dos serviços Cada serviço é projetado para melhorar a faturação, a confiança e a controlabilidade.": "Agendar conversa sobre o serviço Ver resultados do projeto 1 a 2 semanas Lançamento rápido 4.9/5 Avaliação dos clientes 100% Editável Visão geral dos serviços Cada serviço foi concebido para aumentar a faturação, reforçar a confiança e dar mais controlo à sua equipa.",
        "Site inicial Schnell online mit einer starken Basis A partir de 1.250 € Agendar chamada inicial Startseite + Kernseiten Optimizado para móviles Gestisca lei stesso il contenuto Recomendado Site Web d'Entreprise Mehr Struktur und Konversion Ab 2.250 € Agendar chamada comercial Detailliertes Seitenlayout Sections axées sur la conversion Base prête pour le SEO Boutique en ligne Sales-ready mit skalierbarem Stack À partir de 3 750 € Iniciar o processo da loja online Katalog + Kasse Zahlungen und Auftragsfluss Wachstumsbereite Grundlage Soporte y crecimiento Amélioration continue Desde 149 € al mes.": "Site inicial Rápido online com uma base sólida A partir de 1.250 € Agendar chamada inicial Página inicial + páginas essenciais Otimizado para mobile Gerir o conteúdo com autonomia Recomendado Site empresarial Mais estrutura e foco em conversão A partir de 2.250 € Agendar chamada comercial Estrutura detalhada das páginas Secções orientadas para conversão Base pronta para SEO Loja online Preparada para vender com uma base escalável A partir de 3.750 € Iniciar o processo da loja online Catálogo + checkout Pagamentos e fluxo de encomendas Base pronta para crescimento Suporte e crescimento Melhoria contínua Desde 149 € por mês.",
        "Perguntas frequentes Transparente sobre o planejamento, o processo e a gestão.": "Perguntas frequentes Clareza sobre o planeamento, o processo e a gestão.",
        'Ver serviços New La entrada "Unterstützung oder Erweiterung" está en alemán, no en neerlandés.': "Ver serviços Novo Suporte ou expansão",
        "Unterstützung oder Erweiterung": "Suporte ou expansão",
        'La entrada "Unterstützung oder Erweiterung"': "Suporte ou expansão",
        'La entrada "Unterstützung oder Erweiterung" está en alemán, no en neerlandés. Traducido al francés, es: "Suporte ou expansão".': "Suporte ou expansão",
        "Sem cartão de crédito": "Sem compromisso",
    }
    _system_contextual = build_contextual_system_vocabulary("pt")
    # One replacement table per field-path token recognised by the base agent.
    contextual_vocabulary_map = {
        "badge": {
            **_system_contextual.get("badge", {}),
            "Popular": "Escolha frequente",
        },
        "label": {
            **_system_contextual.get("label", {}),
            "Popular": "Escolha frequente",
        },
        "metric": {
            **_system_contextual.get("metric", {}),
        },
        "stat": {
            **_system_contextual.get("stat", {}),
        },
        "title": {
            "SERVICES": "SERVIÇOS",
            "Popular": "Em destaque",
        },
        "heading": {
            "SERVICES": "SERVIÇOS",
            "Popular": "Em destaque",
        },
        "rendered": {
            **_system_contextual.get("rendered", {}),
            "SERVICES": "SERVIÇOS",
            "Popular": "Em destaque",
        },
    }

View File

@@ -0,0 +1,39 @@
from .base import BaseLanguageAgent
from ..normalizers import normalize_ru_text
from ..system_strings import build_contextual_system_vocabulary, build_system_vocabulary
class RussianAgent(BaseLanguageAgent):
    """Locale agent for Russian ("ru") content rewriting and cleanup."""

    # BaseLanguageAgent configuration: how rewritten Russian copy should sound.
    locale = "ru"
    tone = "professional and confident"
    preferred_formality = "neutral polite"
    # Source strings mapped to their Russian replacements, limited to the
    # three shared system-string keys listed here.
    vocabulary_map = {
        **build_system_vocabulary(
            "ru",
            (
                "customization_integrations",
                "detailed_page_structure",
                "without_commitment",
            ),
        ),
    }
    # Context-sensitive replacements (badge/label/metric/stat/rendered) taken
    # from the shared system-string registry for the two keys below.
    _system_contextual = build_contextual_system_vocabulary("ru", ("plan_badge", "transparent_label"))
    contextual_vocabulary_map = {
        "badge": {**_system_contextual.get("badge", {})},
        "label": {**_system_contextual.get("label", {})},
        "metric": {**_system_contextual.get("metric", {})},
        "stat": {**_system_contextual.get("stat", {})},
        "rendered": {**_system_contextual.get("rendered", {})},
    }
    # Default Russian call-to-action texts by CTA role.
    cta_defaults = {
        "starter": "Запланировать стартовую консультацию",
        "business": "Обсудить бизнес-проект",
        "support": "Посмотреть поддержку",
        "service": "Посмотреть услуги",
        "project": "Запустить свой проект",
        "contact": "Отправить запрос",
        "quote": "Получить предложение",
    }

    def post_cleanup_text(self, text: str, field_path: str = "") -> str:
        """Run the shared Russian text normaliser as a final cleanup pass."""
        return normalize_ru_text(text, field_path=field_path)

View File

@@ -0,0 +1,16 @@
from __future__ import annotations
from .agents import get_language_agent
from .validators.multilingual import validate_ai_text_or_raise
def guard_ai_output(locale_code: str, field_path: str, value: str) -> str:
    """Validate AI-generated text for a locale and return it unchanged.

    Propagates whatever exception validate_ai_text_or_raise raises when the
    text fails the multilingual integrity checks.
    """
    validate_ai_text_or_raise(locale_code, field_path, value)
    return value
def rewrite_ai_output(locale_code: str, field_path: str, value: str) -> str:
    """Rewrite text through the locale's agent, then validate the result.

    Returns the rewritten text; raises if the rewrite still fails validation.
    """
    result = get_language_agent(locale_code).rewrite(value, field_path=field_path)
    validate_ai_text_or_raise(locale_code, field_path, result)
    return result

View File

@@ -0,0 +1,10 @@
from django.apps import AppConfig
class MandelblogContentGuardConfig(AppConfig):
    """Django app config for the multilingual content guard."""

    default_auto_field = "django.db.models.BigAutoField"
    name = "mandelblog_content_guard"
    verbose_name = "MandelBlog Content Guard"

    def ready(self):
        """Hook called once the app registry is ready."""
        # Imported purely for its side effect of registering the pre-save
        # signal receivers defined in signals.py.
        from . import signals  # noqa: F401

View File

@@ -0,0 +1,3 @@
from .visible_text import VisibleTextExtractor, extract_visible_rendered_text, normalize_text

# Public API of the extractors package.
__all__ = ["VisibleTextExtractor", "extract_visible_rendered_text", "normalize_text"]

View File

@@ -0,0 +1,85 @@
from __future__ import annotations
import html
import re
from html.parser import HTMLParser
# Tags whose text content is collected as user-visible page copy.
VISIBLE_TEXT_TAGS = {"h1", "h2", "h3", "h4", "h5", "h6", "p", "button", "a", "label", "li"}
# Tags whose content is never rendered and is skipped entirely.
IGNORED_TAGS = {"script", "style", "noscript", "template"}
def html_unescape(value: str) -> str:
    """Decode HTML character references (named and numeric) in ``value``."""
    return html.unescape(value)
def normalize_text(value: str) -> str:
    """Unescape HTML entities, collapse whitespace runs to single spaces, trim."""
    return re.sub(r"\s+", " ", html.unescape(value)).strip()
class VisibleTextExtractor(HTMLParser):
    """HTML parser that collects the visible text of a rendered page.

    Text is collected only while inside one of VISIBLE_TEXT_TAGS, outside any
    IGNORED_TAGS subtree, and not under a hidden element. Each visible
    element's text is flushed as one normalized line into ``lines``.
    """

    def __init__(self) -> None:
        super().__init__(convert_charrefs=True)
        # Nesting depth inside ignored tags (script/style/noscript/template).
        self.ignored_depth = 0
        # One entry per open non-ignored tag: True if that tag is hidden.
        self.hidden_stack: list[bool] = []
        # Currently open visible-text tags.
        self.visible_tag_stack: list[str] = []
        # Text chunks accumulated for the line currently being built.
        self.current_chunks: list[str] = []
        # Completed, normalized lines of visible text.
        self.lines: list[str] = []

    def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None:
        """Track tag nesting; start collecting when a visible tag opens."""
        lowered = tag.lower()
        attrs_dict = {key.lower(): (value or "") for key, value in attrs}
        # Ignored tags are counted but never pushed onto hidden_stack, mirroring
        # the early return in handle_endtag below.
        if lowered in IGNORED_TAGS:
            self.ignored_depth += 1
            return
        self.hidden_stack.append(self._is_hidden(attrs_dict))
        if lowered in VISIBLE_TEXT_TAGS and not self.ignored_depth and not any(self.hidden_stack):
            self.visible_tag_stack.append(lowered)

    def handle_endtag(self, tag: str) -> None:
        """Unwind tag nesting; flush the pending line when a visible tag closes."""
        lowered = tag.lower()
        if lowered in IGNORED_TAGS and self.ignored_depth:
            self.ignored_depth -= 1
            return
        if lowered in VISIBLE_TEXT_TAGS and self.visible_tag_stack:
            self.visible_tag_stack.pop()
            self._flush_line()
        if self.hidden_stack:
            self.hidden_stack.pop()

    def handle_data(self, data: str) -> None:
        """Accumulate character data when currently inside a visible element."""
        if self.ignored_depth or any(self.hidden_stack) or not self.visible_tag_stack:
            return
        normalized = normalize_text(data)
        if normalized:
            self.current_chunks.append(normalized)

    def handle_comment(self, data: str) -> None:
        """Comments contribute no visible text."""
        return

    def close(self) -> None:
        """Finish parsing and flush any text still pending in current_chunks."""
        super().close()
        self._flush_line()

    def _flush_line(self) -> None:
        """Join the pending chunks into one normalized line and reset the buffer."""
        if not self.current_chunks:
            return
        line = normalize_text(" ".join(self.current_chunks))
        if line:
            self.lines.append(line)
        self.current_chunks = []

    @staticmethod
    def _is_hidden(attrs: dict[str, str]) -> bool:
        """True when the element is hidden via attribute or inline style."""
        if "hidden" in attrs:
            return True
        if attrs.get("aria-hidden", "").lower() == "true":
            return True
        # Whitespace is stripped so "display : none" variants still match.
        style = attrs.get("style", "").replace(" ", "").lower()
        return "display:none" in style or "visibility:hidden" in style
def extract_visible_rendered_text(body: str) -> str:
    """Extract an HTML document's visible text, one line per visible element."""
    extractor = VisibleTextExtractor()
    extractor.feed(body)
    extractor.close()
    return "\n".join(extractor.lines)

View File

@@ -0,0 +1,95 @@
from __future__ import annotations
from django.contrib import messages
from django.http import HttpResponseRedirect
from wagtail import hooks
from .types import format_issue, split_issues
from .validators.multilingual import validate_page, validate_posted_snippet, validate_snippet_instance
def _flash_issues(request, level, prefix: str, issues):
    """Flash up to six issues as Django messages, plus a summary for the rest."""
    shown = issues[:6]
    for issue in shown:
        messages.add_message(request, level, f"{prefix}: {format_issue(issue)}")
    hidden = len(issues) - len(shown)
    if hidden > 0:
        messages.add_message(request, level, f"{prefix}: {hidden} more issue(s) not shown.")
@hooks.register("before_publish_page")
def prevent_corrupt_multilingual_publish(request, page):
    """Block publishing a page that has blocking multilingual issues.

    Warnings are always flashed; when blocking issues exist, a redirect is
    returned, which Wagtail treats as "abort the publish and show this".
    """
    issues = validate_page(page)
    blocking, warnings = split_issues(issues)
    if warnings:
        _flash_issues(request, messages.WARNING, "Content guard warning", warnings)
    if not blocking:
        # Returning None lets Wagtail continue with the publish.
        return None
    _flash_issues(request, messages.ERROR, "Publishing blocked", blocking)
    return HttpResponseRedirect(request.path)
@hooks.register("after_edit_page")
def warn_on_corrupt_multilingual_draft(request, page):
    """Flash (but never block) integrity issues after a draft edit.

    Both blocking and non-blocking issues are reported at WARNING level
    because the page has already been saved at this point.
    """
    blocking, warnings = split_issues(validate_page(page))
    if blocking:
        _flash_issues(request, messages.WARNING, "Draft warning", blocking)
    if warnings:
        _flash_issues(request, messages.WARNING, "Draft warning", warnings)
def _snippet_locale_code(instance, request) -> str:
posted_locale = request.POST.get("locale") if request.method == "POST" else None
if posted_locale:
return posted_locale
locale = getattr(instance, "locale", None)
if locale is not None and getattr(locale, "language_code", None):
return locale.language_code
return "nl"
def _validate_snippet_request(request, instance):
    """Validate POSTed snippet form data; return a redirect to block the save.

    Returns None for non-POST requests or when no blocking issue is found.
    """
    if request.method != "POST":
        return None
    issues = validate_posted_snippet(_snippet_locale_code(instance, request), request.POST.dict())
    blocking, warnings = split_issues(issues)
    if warnings:
        _flash_issues(request, messages.WARNING, "Snippet warning", warnings)
    if not blocking:
        return None
    _flash_issues(request, messages.ERROR, "Snippet save blocked", blocking)
    return HttpResponseRedirect(request.path)
@hooks.register("before_create_snippet")
def prevent_corrupt_snippet_create(request, model):
    """Validate snippet creation; a fresh instance stands in for the new object."""
    instance = model()
    # Locale may arrive as a query parameter or in the POST body.
    posted_locale = request.GET.get("locale") or request.POST.get("locale")
    if posted_locale and hasattr(instance, "locale_id"):
        from wagtail.models import Locale
        # NOTE(review): Locale.objects.get raises DoesNotExist for an unknown
        # code — confirm whether that can be reached with user-supplied input.
        instance.locale = Locale.objects.get(language_code=posted_locale)
    return _validate_snippet_request(request, instance)
@hooks.register("before_edit_snippet")
def prevent_corrupt_snippet_edit(request, instance):
    """Validate POSTed snippet edits; returns a redirect to block the save."""
    return _validate_snippet_request(request, instance)
def _warn_saved_snippet(request, instance):
    """Flash integrity issues for an already-saved snippet (never blocks)."""
    blocking, warnings = split_issues(validate_snippet_instance(instance))
    # The snippet is already persisted, so even blocking issues only warn.
    for group in (blocking, warnings):
        if group:
            _flash_issues(request, messages.WARNING, "Snippet integrity warning", group)
@hooks.register("after_create_snippet")
def warn_on_saved_snippet_create(request, instance):
    """Flash integrity warnings after a snippet has been created."""
    _warn_saved_snippet(request, instance)


@hooks.register("after_edit_snippet")
def warn_on_saved_snippet_edit(request, instance):
    """Flash integrity warnings after a snippet has been edited."""
    _warn_saved_snippet(request, instance)

View File

@@ -0,0 +1,163 @@
from __future__ import annotations
import json
from collections import defaultdict
from django.core.management.base import BaseCommand
from ...settings import audit_default_locales
from ...validators.multilingual import audit_locales
class Command(BaseCommand):
    """Management command that audits locale pages and reports integrity issues.

    Runs audit_locales over the selected locales/URLs, optionally applying
    fixes or agent rewrites, then prints a per-locale report (text or JSON).
    """

    help = "Audit all public locale pages for multilingual integrity issues."

    def add_arguments(self, parser):
        """Register CLI flags: locale/url filters, fix/rewrite modes, output format."""
        parser.add_argument(
            "--locale",
            action="append",
            dest="locales",
            help="Limit the audit to one or more locale codes. Repeat the flag for multiple locales.",
        )
        parser.add_argument(
            "--url",
            action="append",
            dest="urls",
            help="Limit the audit to one or more public page URLs. Repeat the flag for multiple URLs.",
        )
        parser.add_argument(
            "--fix",
            action="store_true",
            help="Apply known safe replacements and republish changed content.",
        )
        parser.add_argument(
            "--rewrite",
            action="store_true",
            help="Rewrite flagged content through the locale agent system.",
        )
        parser.add_argument(
            "--dry-run",
            action="store_true",
            help="Preview rewrite changes without saving content.",
        )
        parser.add_argument(
            "--format",
            choices=["text", "json"],
            default="text",
            help="Output format.",
        )

    def handle(self, *args, **options):
        """Run the audit, deduplicate its issues, and print the report."""
        locale_codes = options["locales"] or audit_default_locales()
        run = audit_locales(
            locale_codes,
            fix=options["fix"],
            rewrite=options["rewrite"],
            dry_run=options["dry_run"],
            url_filters=options["urls"],
        )
        # Bucket the persisted issues by locale, in a stable display order.
        grouped = defaultdict(list)
        for issue in run.issues.all().order_by("locale_code", "url", "field_path"):
            grouped[issue.locale_code].append(issue)
        # Collapse duplicates: issues sharing (url, type, bad value, replacement)
        # become a single entry with merged field paths, sources, and counts.
        grouped_compact = defaultdict(list)
        for locale_code, issues in grouped.items():
            bucket = {}
            for issue in issues:
                key = (
                    issue.url,
                    issue.issue_type,
                    issue.bad_value,
                    issue.replacement,
                )
                extra = issue.extra or {}
                if key not in bucket:
                    bucket[key] = {
                        "url": issue.url,
                        "title": issue.title,
                        "severity": issue.severity,
                        "issue_type": issue.issue_type,
                        "field_paths": set([issue.field_path] if issue.field_path else []),
                        "bad_value": issue.bad_value,
                        "replacement": issue.replacement,
                        "fixed": issue.fixed,
                        "sources": set([extra.get("source")] if extra.get("source") else []),
                        "count": extra.get("count", 1),
                    }
                else:
                    if issue.field_path:
                        bucket[key]["field_paths"].add(issue.field_path)
                    if extra.get("source"):
                        bucket[key]["sources"].add(extra["source"])
                    bucket[key]["count"] += extra.get("count", 1)
            # Sets become sorted lists so the output (and JSON) is deterministic.
            grouped_compact[locale_code] = [
                {
                    **entry,
                    "field_paths": sorted(entry["field_paths"]),
                    "sources": sorted(entry["sources"]),
                }
                for entry in bucket.values()
            ]
        if options["format"] == "json":
            payload = {
                "run_id": run.pk,
                "total_urls_checked": run.total_urls_checked,
                "issues_found": run.issues_found,
                "summary": run.summary,
                "issues": {
                    locale_code: grouped_compact.get(locale_code, [])
                    for locale_code in locale_codes
                },
            }
            self.stdout.write(json.dumps(payload, indent=2, ensure_ascii=False))
            return
        # Text output: one section per requested locale.
        for locale_code in locale_codes:
            locale_summary = run.summary.get(locale_code, {})
            self.stdout.write(f"Locale: {locale_code}")
            self.stdout.write(
                f"URLs checked: {locale_summary.get('total_urls_checked', 0)}"
            )
            self.stdout.write(
                f"Issues found: {locale_summary.get('issues_found', 0)}"
            )
            self.stdout.write(
                f"Severity: {locale_summary.get('by_severity', {})}"
            )
            if options["fix"]:
                self.stdout.write(
                    f"Issues auto-fixed: {locale_summary.get('issues_fixed', 0)}"
                )
            if options["rewrite"]:
                self.stdout.write(
                    f"Rewrite mode: {'dry-run' if options['dry_run'] else 'apply'}"
                )
            for issue in grouped_compact.get(locale_code, []):
                target = issue["url"] or issue["title"] or "object"
                self.stdout.write(
                    f"- {target} -> {issue['issue_type']}: {issue['bad_value']}"
                )
                if issue.get("replacement"):
                    self.stdout.write(f" after: {issue['replacement']}")
                if issue.get("field_paths"):
                    self.stdout.write(f" fields: {', '.join(issue['field_paths'][:5])}")
                if issue.get("sources"):
                    self.stdout.write(f" sources: {', '.join(issue['sources'])}")
                if issue.get("count"):
                    self.stdout.write(f" count: {issue['count']}")
            if not grouped_compact.get(locale_code):
                self.stdout.write("- no issues found")
            self.stdout.write("")
        # Snippet issues are reported globally, not per locale.
        snippet_summary = run.summary.get("snippets") or {}
        if snippet_summary:
            self.stdout.write("Snippet issues:")
            for model_name, count in snippet_summary.items():
                self.stdout.write(f"- {model_name}: {count}")
        self.stdout.write(
            self.style.SUCCESS(
                f"Audit run {run.pk} completed. Total URLs checked: {run.total_urls_checked}. Issues found: {run.issues_found}."
            )
        )

View File

@@ -0,0 +1,19 @@
from __future__ import annotations
from django.core.exceptions import ValidationError
class MultilingualValidationMixin:
    """Opt-in mixin for project models that want explicit clean()-time enforcement."""

    def clean(self):
        """Run the parent clean() (when defined) and then the content-guard checks.

        Raises:
            ValidationError: keyed on "content_guard", listing every blocking
                issue found on the instance.
        """
        # Imported locally, presumably to avoid circular imports at load time.
        from .types import format_issue
        from .validators.multilingual import validate_snippet_instance

        # getattr guards against mixin orders where no parent defines clean().
        super_clean = getattr(super(), "clean", None)
        if callable(super_clean):
            super_clean()
        issues = validate_snippet_instance(self)
        blocking = [issue for issue in issues if issue.blocks]
        if blocking:
            raise ValidationError({"content_guard": [format_issue(issue) for issue in blocking]})

View File

@@ -0,0 +1,15 @@
from .de import normalize_de_text
from .en import normalize_en_text
from .es import normalize_es_text
from .it import normalize_it_text
from .nl import normalize_nl_text
from .ru import normalize_ru_text
# Per-locale text normalisers re-exported as the package's public API.
__all__ = [
    "normalize_de_text",
    "normalize_en_text",
    "normalize_es_text",
    "normalize_it_text",
    "normalize_nl_text",
    "normalize_ru_text",
]

View File

@@ -0,0 +1,58 @@
from __future__ import annotations
import re
# Exact-match fixes: a full extracted line of German page copy mapped to its
# corrected wording. Applied only when the whole text equals the key.
DE_LINE_REPLACEMENTS = {
    "Häufig gestellte Fragen Transparent über Planung, Vorgehensweise und Management.": "Häufig gestellte Fragen Klarheit über Planung, Vorgehensweise und Management.",
    "Einführungsmeeting planen Projekte anzeigen Unverbindliches Gespräch, klares Angebot Wir entwickeln schnelle Websites und Webshops, die Ihr Team selbst pflegen kann.": "Erstgespräch planen · Projekte ansehen · Unverbindliches Gespräch mit klarem Angebot. Wir entwickeln schnelle Websites und Webshops, die Ihr Team selbst pflegen kann.",
    "Einführungsmeeting planen Dienstleistungen anzeigen Verbindlich und klar Wir entwickeln schnelle Websites und Webshops, die Ihr Team selbst pflegen kann.": "Erstgespräch planen · Dienstleistungen anzeigen · Unverbindliches Gespräch mit klarem Angebot. Wir entwickeln schnelle Websites und Webshops, die Ihr Team selbst pflegen kann.",
    "Steuern 0,00 € Korb ansehen Kasse Kontakt KONTAKT Lass uns dein Projekt konkret machen Einführungsmeeting planen Dienstleistungen anzeigen So können Sie Kontakt aufnehmen Wählen Sie die Route, die zu Ihrer Frage passt.": "Steuern 0,00 € Korb ansehen Kasse Kontakt KONTAKT Lassen Sie uns Ihr Projekt konkret machen Erstgespräch planen Dienstleistungen anzeigen So können Sie Kontakt aufnehmen Wählen Sie den Weg, der zu Ihrer Frage passt.",
    "Steuern 0,00 € Korb ansehen Kasse Starter Website PLAN Starter Website Plan Starter-Gespräch planen Alle Dienstleistungen anzeigen Was du bekommst Startseite + Kernseiten Professionelle Basis, die sofort Vertrauen schafft.": "Steuern 0,00 € Korb ansehen Kasse Starter-Website PLAN Starter-Website Starter-Gespräch planen Alle Dienstleistungen anzeigen Was Sie erhalten Startseite + Kernseiten Professionelle Basis, die sofort Vertrauen schafft.",
    "Steuern 0,00 € Korb ansehen Kasse Business Website PLAN Business Website Plan Beratungsgespräch planen Alle Dienstleistungen anzeigen Was du bekommst Detailliertes Seitenlayout Mehr Platz für Dienstleistungen, Fälle und Lead-Flows.": "Steuern 0,00 € Korb ansehen Kasse Business-Website PLAN Business-Website Beratungsgespräch planen Alle Dienstleistungen anzeigen Was Sie erhalten Detailliertes Seitenlayout Mehr Platz für Dienstleistungen, Referenzen und Lead-Flows.",
}
# Substring/token fixes applied by _apply_boundary_replacements: single
# tokens get word-boundary matching, multi-word phrases are replaced as
# plain substrings. NOTE(review): the "Support & Wachstum" entry maps to
# itself and is currently a no-op.
DE_PHRASE_REPLACEMENTS = {
    "New": "Neu",
    "Einführungsmeeting": "Erstgespräch",
    "Intakegespräch": "Erstgespräch",
    "SEO-ready basis": "SEO-optimierte Basis",
    "Sales-ready mit skalierbarem Stack": "Verkaufsbereit mit skalierbarer Architektur",
    "Continuous Verbesserung": "Kontinuierliche Verbesserung",
    "Was du bekommst": "Was Sie erhalten",
    "Starter Website": "Starter-Website",
    "Business Website": "Business-Website",
    "Support & Wachstum": "Support & Wachstum",
    "Lass uns dein Projekt konkret machen": "Lassen Sie uns Ihr Projekt konkret machen",
    "Wählen Sie die Route, die zu Ihrer Frage passt.": "Wählen Sie den Weg, der zu Ihrer Frage passt.",
    "Verbindlich und klar": "Unverbindliches Gespräch mit klarem Angebot",
    "Unverbindliches Gespräch, klares Angebot": "Unverbindliches Gespräch mit klarem Angebot",
}
def _apply_boundary_replacements(text: str, replacements: dict[str, str]) -> str:
cleaned = text
phrase_replacements = {}
token_replacements = {}
for source, target in replacements.items():
if re.fullmatch(r"[\wÀ-ÿ-]+", source, flags=re.UNICODE):
token_replacements[source] = target
else:
phrase_replacements[source] = target
for source, target in sorted(phrase_replacements.items(), key=lambda item: len(item[0]), reverse=True):
cleaned = cleaned.replace(source, target)
for source, target in sorted(token_replacements.items(), key=lambda item: len(item[0]), reverse=True):
pattern = re.compile(rf"(?<![\wÀ-ÿ-]){re.escape(source)}(?![\wÀ-ÿ-])", re.UNICODE)
cleaned = pattern.sub(target, cleaned)
return cleaned
def normalize_de_text(text: str, field_path: str = "") -> str:
    """Normalise extracted German copy: exact-line fix first, then phrase fixes."""
    exact = DE_LINE_REPLACEMENTS.get(text)
    if exact is not None:
        return exact
    return _apply_boundary_replacements(text, DE_PHRASE_REPLACEMENTS)

View File

@@ -0,0 +1,28 @@
from __future__ import annotations
import re
# Exact-match fixes for full extracted English lines.
EN_LINE_REPLACEMENTS = {
    "Service packages (from) Transparent starting points.": "Service packages (from) Clear starting points.",
    "Frequently Asked Questions Transparent about planning, approach, and management.": "Frequently Asked Questions Clear guidance on planning, approach, and management.",
    "After your intake Clear scope and steps Clear planning Transparent investment Name * E-mail * Company * Project details Book business call Ready to start with Business Website?": "After your intake Clear scope and steps Clear planning Transparent pricing Name * E-mail * Company * Project details Book business call Ready to start with Business Website?",
    "After your intake Clear scope and steps Clear planning Transparent investment Name * E-mail * Company * Project details Book starter call Ready to start with Starter Website?": "After your intake Clear scope and steps Clear planning Transparent pricing Name * E-mail * Company * Project details Book starter call Ready to start with Starter Website?",
    "After your intake Clear scope and steps Clear planning Transparent investment Name * E-mail * Company * Project details Request support plan Ready to start with Support & Growth?": "After your intake Clear scope and steps Clear planning Transparent pricing Name * E-mail * Company * Project details Request support plan Ready to start with Support & Growth?",
    "After your intake Clear scope and steps Clear planning Transparent investment Name * E-mail * Company * Project details Start webshop project Ready to start with Webshop?": "After your intake Clear scope and steps Clear planning Transparent pricing Name * E-mail * Company * Project details Start webshop project Ready to start with Webshop?",
}
# Substring fixes applied longest-first when no exact line matched.
EN_PHRASE_REPLACEMENTS = {
    "Transparent investment": "Transparent pricing",
    "Transparent about planning, approach, and management.": "Clear guidance on planning, approach, and management.",
    "Transparent starting points.": "Clear starting points.",
}
def normalize_en_text(text: str, field_path: str = "") -> str:
    """Normalise extracted English copy: exact line, then phrases, then whitespace."""
    exact = EN_LINE_REPLACEMENTS.get(text)
    if exact is not None:
        return exact
    cleaned = text
    ordered = sorted(EN_PHRASE_REPLACEMENTS.items(), key=lambda item: len(item[0]), reverse=True)
    for source, target in ordered:
        cleaned = cleaned.replace(source, target)
    return re.sub(r"\s+", " ", cleaned).strip()

View File

@@ -0,0 +1,31 @@
from __future__ import annotations
import re
# Exact-match fixes for full extracted Spanish lines (incl. one HTML variant).
ES_LINE_REPLACEMENTS = {
    "Transparente sobre la planificación, el proceso y la gestión.": "Transparencia sobre la planificación, el proceso y la gestión.",
    "<p>Transparente sobre la planificación, el proceso y la gestión.</p>": "<p>Transparencia sobre la planificación, el proceso y la gestión.</p>",
    "Preguntas frecuentes Transparente sobre la planificación, el proceso y la gestión.": "Preguntas frecuentes Transparencia sobre la planificación, el proceso y la gestión.",
    "Preguntas frecuentes Transparenteee sobre la planificación, el proceso y la gestión.": "Preguntas frecuentes Transparencia sobre la planificación, el proceso y la gestión.",
    "Planificar la reunión inicial Mostrar los proyectos Unverbindliches Gespräch, klares Angebot Construimos sitios web y tiendas online rápidas que tu equipo puede gestionar sin complicaciones.": "Planificar la reunión inicial · Mostrar los proyectos · Conversación sin compromiso con propuesta clara. Construimos sitios web y tiendas online rápidas que tu equipo puede gestionar sin complicaciones.",
}
# Substring/token fixes; single tokens get word-boundary matching in
# normalize_es_text, so e.g. "Transparent" will not rewrite "Transparente".
ES_PHRASE_REPLACEMENTS = {
    "Transparenteee": "Transparente",
    "Transparent": "Transparente",
    "Unverbindliches Gespräch, klares Angebot": "Conversación sin compromiso con propuesta clara",
}
def normalize_es_text(text: str, field_path: str = "") -> str:
    """Normalise extracted Spanish copy: exact line, then boundary-aware phrases."""
    exact = ES_LINE_REPLACEMENTS.get(text)
    if exact is not None:
        return exact
    cleaned = text
    ordered = sorted(ES_PHRASE_REPLACEMENTS.items(), key=lambda item: len(item[0]), reverse=True)
    for source, target in ordered:
        # Pure tokens get word boundaries; phrases are plain substring swaps.
        if re.fullmatch(r"[\wÀ-ÿ-]+", source, flags=re.UNICODE):
            boundary = rf"(?<![\wÀ-ÿ-]){re.escape(source)}(?![\wÀ-ÿ-])"
            cleaned = re.sub(boundary, target, cleaned, flags=re.UNICODE)
        else:
            cleaned = cleaned.replace(source, target)
    return re.sub(r"\s+", " ", cleaned).strip()

View File

@@ -0,0 +1,24 @@
from __future__ import annotations
import re
# Exact-match fixes for full extracted Italian lines.
IT_LINE_REPLACEMENTS = {
    "Richiedi un piano di supporto Mostra i progetti Unverbindliches Gespräch, klares Angebot Realizziamo siti web e negozi online veloci che il tuo team può gestire in autonomia.": "Richiedi un piano di supporto · Mostra i progetti · Colloquio senza impegno con proposta chiara. Realizziamo siti web e negozi online veloci che il tuo team può gestire in autonomia.",
    "Dopo il colloquio iniziale Obiettivi chiari e tappe Planificación clara Transparente Investition Nome * Email * Azienda * Dettagli del progetto Richiedi un piano di supporto Pronto a iniziare con supporto e crescita?": "Dopo il colloquio iniziale Obiettivi chiari e tappe Pianificazione chiara Investimento trasparente Nome * Email * Azienda * Dettagli del progetto Richiedi un piano di supporto Pronto a iniziare con supporto e crescita?",
    "Mehrsprachiger Rollout-Plan Anpassung & Integrationen Integrazioni API, flussi di lavoro specifici e blocchi personalizzati adattati alla sua azienda.": "Piano di lancio multilingue Personalizzazioni e integrazioni Integrazioni API, flussi di lavoro specifici e blocchi personalizzati adattati alla sua azienda.",
}
# Substring fixes applied longest-first when no exact line matched.
IT_PHRASE_REPLACEMENTS = {
    "Planificación clara": "Pianificazione chiara",
    "Unverbindliches Gespräch, klares Angebot": "Colloquio senza impegno con proposta chiara",
}
def normalize_it_text(text: str, field_path: str = "") -> str:
    """Normalise extracted Italian copy: exact line, then phrases, then whitespace."""
    exact = IT_LINE_REPLACEMENTS.get(text)
    if exact is not None:
        return exact
    cleaned = text
    ordered = sorted(IT_PHRASE_REPLACEMENTS.items(), key=lambda item: len(item[0]), reverse=True)
    for source, target in ordered:
        cleaned = cleaned.replace(source, target)
    return re.sub(r"\s+", " ", cleaned).strip()

View File

@@ -0,0 +1,15 @@
from __future__ import annotations
import re
# Dutch currently needs no wording fixes; this identity mapping is a
# placeholder that keeps the normaliser's shape consistent with other locales.
NL_PHRASE_REPLACEMENTS = {
    "PLAN": "PLAN",
}
def normalize_nl_text(text: str, field_path: str = "") -> str:
    """Apply the (currently identity) Dutch phrase fixes and collapse whitespace."""
    result = text
    for source, target in NL_PHRASE_REPLACEMENTS.items():
        result = result.replace(source, target)
    return re.sub(r"\s+", " ", result).strip()

View File

@@ -0,0 +1,24 @@
from __future__ import annotations
import re
# Exact-match fixes for full extracted Russian lines.
RU_LINE_REPLACEMENTS = {
    "План многоязычного запуска Anpassung & Integrationen Интеграции API, специфические рабочие процессы и индивидуальные блоки, адаптированные под вашу компанию.": "План многоязычного запуска Настройка и интеграции Интеграции API, специфические рабочие процессы и индивидуальные блоки, адаптированные под вашу компанию.",
    "Запланировать звонок по бизнес-сайту Detailliertes Seitenlayout Разделы, ориентированные на конверсию Base prête pour le SEO Boutique en ligne Для проектов с товарами, оплатой и дальнейшим развитием e-commerce.": "Запланировать звонок по бизнес-сайту Детальная структура страниц Разделы, ориентированные на конверсию Основа, готовая для SEO Интернет-магазин Для проектов с товарами, оплатой и дальнейшим развитием e-commerce.",
    "Связаться с нами Посмотреть проекты Unverbindliches Gespräch, klares Angebot Мы создаём быстрые сайты и интернет-магазины, которыми ваша команда может управлять самостоятельно.": "Связаться с нами · Посмотреть проекты · Без обязательств, понятное предложение. Мы создаём быстрые сайты и интернет-магазины, которыми ваша команда может управлять самостоятельно.",
}
# Substring fixes applied longest-first when no exact line matched.
RU_PHRASE_REPLACEMENTS = {
    "Base prête pour le SEO": "Основа, готовая для SEO",
    "Unverbindliches Gespräch, klares Angebot": "Без обязательств, понятное предложение",
}
def normalize_ru_text(text: str, field_path: str = "") -> str:
    """Normalise extracted Russian copy: exact line, then phrases, then whitespace."""
    exact = RU_LINE_REPLACEMENTS.get(text)
    if exact is not None:
        return exact
    cleaned = text
    ordered = sorted(RU_PHRASE_REPLACEMENTS.items(), key=lambda item: len(item[0]), reverse=True)
    for source, target in ordered:
        cleaned = cleaned.replace(source, target)
    return re.sub(r"\s+", " ", cleaned).strip()

View File

@@ -0,0 +1,79 @@
from __future__ import annotations
"""
Reusable configuration helpers for mandelblog_content_guard.
Supported Django settings:
- CONTENT_GUARD_STRICT: bool
- CONTENT_GUARD_BLOCK_MEDIUM: bool
- CONTENT_GUARD_LOCALES: list[str]
- CONTENT_GUARD_REWRITE_ENABLED: bool
- CONTENT_GUARD_REWRITE_BACKEND: dotted path | None
"""
from django.conf import settings
# Locales audited when CONTENT_GUARD_LOCALES is not configured.
DEFAULT_LOCALES = ["nl", "en", "de", "fr", "es", "it", "pt", "ru"]
# Action taken for each issue level.
SEVERITY = {
    "CRITICAL": "block",
    "HIGH": "block",
    "MEDIUM": "warn",
    "LOW": "log",
}
# Level assigned to each known issue type; unknown types default to "LOW"
# (see classify_issue below).
ISSUE_LEVELS = {
    "known_bad_pattern": "CRITICAL",
    "wrong_language_fragment": "CRITICAL",
    "rendered_bad_pattern": "CRITICAL",
    "rendered_wrong_language": "CRITICAL",
    "render_status": "CRITICAL",
    "language_heuristic": "CRITICAL",
    "cta_language_mismatch": "HIGH",
    "form_language_mismatch": "HIGH",
    "empty_form_copy": "HIGH",
    "placeholder_value": "HIGH",
    "rewrite_candidate": "MEDIUM",
    "weak_marketing_copy": "MEDIUM",
    "foreign_ui_label": "MEDIUM",
    "generic_badge_label": "MEDIUM",
    "mixed_locale_heading": "MEDIUM",
    "cta_tone_check": "MEDIUM",
}
def strict_mode_enabled() -> bool:
    """Read CONTENT_GUARD_STRICT (default True)."""
    return getattr(settings, "CONTENT_GUARD_STRICT", True)


def block_medium_enabled() -> bool:
    """Read CONTENT_GUARD_BLOCK_MEDIUM (default False)."""
    return getattr(settings, "CONTENT_GUARD_BLOCK_MEDIUM", False)


def audit_default_locales() -> list[str]:
    """Read CONTENT_GUARD_LOCALES (default DEFAULT_LOCALES) as a fresh list."""
    return list(getattr(settings, "CONTENT_GUARD_LOCALES", DEFAULT_LOCALES))


def rewrite_enabled() -> bool:
    """Read CONTENT_GUARD_REWRITE_ENABLED (default True)."""
    return getattr(settings, "CONTENT_GUARD_REWRITE_ENABLED", True)


def get_rewrite_backend() -> str | None:
    """Read CONTENT_GUARD_REWRITE_BACKEND, a dotted path or None (default None)."""
    return getattr(settings, "CONTENT_GUARD_REWRITE_BACKEND", None)
def classify_issue(issue_type: str) -> str:
    """Return the level for an issue type; unknown types are "LOW"."""
    return ISSUE_LEVELS.get(issue_type, "LOW")


def severity_for_issue(issue_type: str) -> str:
    """Return the action ("block"/"warn"/"log") for an issue type."""
    return SEVERITY[classify_issue(issue_type)]


def should_block_issue(issue_type: str) -> bool:
    """Whether an issue of this type should block saving/publishing.

    CRITICAL and HIGH always block; MEDIUM blocks only when both
    CONTENT_GUARD_BLOCK_MEDIUM and CONTENT_GUARD_STRICT are enabled.
    """
    level = classify_issue(issue_type)
    if level in {"CRITICAL", "HIGH"}:
        return True
    if level == "MEDIUM":
        return block_medium_enabled() and strict_mode_enabled()
    return False

View File

@@ -0,0 +1,26 @@
from __future__ import annotations
from functools import lru_cache
from django.db.models.signals import pre_save
from django.dispatch import receiver
from wagtail.models import Page
from wagtail.snippets.models import get_snippet_models
from .validators.multilingual import validate_instance_or_raise
@lru_cache(maxsize=1)
def _snippet_models():
    """Return the registered Wagtail snippet models as a tuple.

    NOTE(review): cached once per process; snippet models registered after
    the first call will not be seen until restart.
    """
    return tuple(get_snippet_models())
def _is_snippet_instance(instance) -> bool:
    """True when ``instance``'s exact class is a registered snippet model.

    Tuple membership uses ``==``, matching the original ``any(model == ...)``
    loop, so subclasses of a snippet model do not count.
    """
    return instance.__class__ in _snippet_models()
@receiver(pre_save)
def enforce_multilingual_integrity(sender, instance, **kwargs):
    """Pre-save guard: validate every Wagtail page and snippet before saving.

    Connected without a sender filter, so it fires for every model save and
    filters by type here; validate_instance_or_raise aborts the save by
    raising when the instance fails the checks.
    """
    if isinstance(instance, Page) or _is_snippet_instance(instance):
        validate_instance_or_raise(instance)

View File

@@ -0,0 +1,368 @@
from __future__ import annotations
from collections.abc import Iterable
# Registry of known system strings that leak into non-source locales.
# Each spec supports:
#   sources: the literal strings to detect (first entry is the primary key
#       used by build_contextual_system_vocabulary),
#   issue_type: the issue category reported when a source string is found,
#   translations: per-locale default replacement,
#   canonical_by_locale: locales where a listed source string is already the
#       correct wording (no replacement needed),
#   contexts: per-locale, per-context (badge/label/metric/stat/title/heading/
#       rendered) replacements that override the default translation.
SYSTEM_STRING_SPECS = {
    "plan_badge": {
        "sources": ("PLAN",),
        "issue_type": "generic_badge_label",
        "translations": {
            "en": "Package",
            "fr": "FORFAIT",
            "es": "Paquete",
            "ru": "Пакет",
        },
        "canonical_by_locale": {
            "de": ("PLAN",),
            "nl": ("PLAN",),
            "it": ("PIANO",),
        },
        "contexts": {
            "en": {
                "badge": "Package",
                "label": "Package",
                "title": "Package",
                "heading": "Package",
                "rendered": "Package",
            },
            "fr": {
                "badge": "FORFAIT",
                "label": "FORFAIT",
                "title": "FORFAIT",
                "heading": "FORFAIT",
                "rendered": "FORFAIT",
            },
            "es": {
                "badge": "Paquete",
                "label": "Paquete",
                "title": "Paquete",
                "heading": "Paquete",
                "rendered": "Paquete",
            },
            "ru": {
                "badge": "Пакет",
                "label": "Пакет",
                "title": "Пакет",
                "heading": "Пакет",
                "rendered": "Пакет",
            },
        },
    },
    "services_badge": {
        "sources": ("SERVICES",),
        "issue_type": "generic_badge_label",
        "translations": {
            "en": "Services",
            "fr": "PRESTATIONS",
            "pt": "SERVIÇOS",
        },
        "contexts": {
            "en": {
                "badge": "Services",
                "label": "Services",
                "title": "Services",
                "heading": "Services",
                "rendered": "Services",
            },
            "fr": {
                "badge": "PRESTATIONS",
                "label": "PRESTATIONS",
                "title": "PRESTATIONS",
                "heading": "PRESTATIONS",
                "rendered": "PRESTATIONS",
            },
            "pt": {
                "badge": "SERVIÇOS",
                "label": "SERVIÇOS",
                "title": "SERVIÇOS",
                "heading": "SERVIÇOS",
                "rendered": "SERVIÇOS",
            },
        },
    },
    "response_time": {
        "sources": ("Reaktionszeit",),
        "issue_type": "foreign_ui_label",
        "translations": {
            "en": "Response time",
            "fr": "Temps de réponse",
            "es": "Tiempo de respuesta",
            "it": "Tempo di risposta",
            "ru": "Время ответа",
        },
    },
    "average_delivery": {
        "sources": ("Durchschnittliche Lieferung",),
        "issue_type": "foreign_ui_label",
        "translations": {
            "en": "Average delivery time",
            "fr": "Délai moyen de livraison",
            "es": "Plazo medio de entrega",
            "it": "Tempo medio di consegna",
            "ru": "Средний срок запуска",
        },
    },
    "without_commitment": {
        "sources": ("Unverbindlich",),
        "issue_type": "foreign_ui_label",
        "translations": {
            "en": "No obligation",
            "fr": "Sans engagement",
            "es": "Sin compromiso",
            "it": "Senza impegno",
            "pt": "Sem compromisso",
            "ru": "Без обязательств",
        },
    },
    "transparent_label": {
        "sources": ("Transparent",),
        "issue_type": "foreign_ui_label",
        "translations": {
            "en": "Clear",
            "fr": "Clair",
            "es": "Transparente",
            "it": "Chiaro",
            "pt": "Transparente",
            "ru": "Прозрачно",
        },
        "contexts": {
            "en": {
                "badge": "Clear",
                "label": "Clear",
                "metric": "Clear",
                "stat": "Clear",
                "rendered": "Clear",
            },
            "fr": {
                "badge": "Clair",
                "label": "Clair",
                "metric": "Clair",
                "stat": "Clair",
                "rendered": "Clair",
            },
            "es": {
                "badge": "Transparente",
                "label": "Transparente",
                "metric": "Transparente",
                "stat": "Transparente",
                "rendered": "Transparente",
            },
            "it": {
                "badge": "Chiaro",
                "label": "Chiaro",
                "metric": "Chiaro",
                "stat": "Chiaro",
                "rendered": "Chiaro",
            },
            "pt": {
                "badge": "Clara",
                "label": "Clara",
                "metric": "Investimento claro",
                "stat": "Investimento claro",
                "rendered": "Investimento claro",
            },
            "ru": {
                "badge": "Прозрачно",
                "label": "Прозрачно",
                "metric": "Прозрачно",
                "stat": "Прозрачно",
                "rendered": "Прозрачно",
            },
        },
    },
    "weeks_1_2": {
        "sources": ("1-2 Wochen",),
        "issue_type": "weak_marketing_copy",
        "translations": {
            "fr": "1 à 2 semaines",
            "es": "1-2 semanas",
            "it": "1-2 settimane",
            "pt": "1 a 2 semanas",
        },
        "contexts": {
            "fr": {
                "metric": "1 à 2 semaines",
                "stat": "1 à 2 semaines",
            },
            "es": {
                "metric": "1-2 semanas",
                "stat": "1-2 semanas",
            },
            "it": {
                "metric": "1-2 settimane",
                "stat": "1-2 settimane",
            },
            "pt": {
                "metric": "1 a 2 semanas",
                "stat": "1 a 2 semanas",
            },
        },
    },
    "weeks_2_4": {
        "sources": ("2-4 Wochen",),
        "issue_type": "foreign_ui_label",
        "translations": {
            "fr": "2 à 4 semaines",
        },
        "contexts": {
            "fr": {
                "metric": "2 à 4 semaines",
                "stat": "2 à 4 semaines",
            },
        },
    },
    "days_label": {
        "sources": ("Tages",),
        "issue_type": "weak_marketing_copy",
        "translations": {
            "fr": "jours",
            "pt": "dias",
        },
    },
    "customer_reviews": {
        "sources": ("Kundenschätzung",),
        "issue_type": "foreign_ui_label",
        "translations": {
            "en": "Customer rating",
            "fr": "Avis clients",
            "es": "Valoración de clientes",
            "it": "Valutazione clienti",
            "pt": "Avaliação dos clientes",
            "ru": "Оценка клиентов",
        },
    },
    "editable_label": {
        "sources": ("Bearbeitbar",),
        "issue_type": "foreign_ui_label",
        "translations": {
            "en": "Editable",
            "fr": "Modifiable",
            "es": "Editable",
            "it": "Modificabile",
            "pt": "Editável",
            "ru": "Редактируемо",
        },
    },
    "core_pages_label": {
        "sources": ("Startseite + Kernseiten",),
        "issue_type": "foreign_ui_label",
        "translations": {
            "pt": "Página inicial + páginas essenciais",
        },
    },
    "detailed_page_structure": {
        "sources": ("Detailliertes Seitenlayout",),
        "issue_type": "foreign_ui_label",
        "translations": {
            "fr": "Structure détaillée des pages",
            "es": "Estructura detallada de páginas",
            "it": "Struttura dettagliata delle pagine",
            "pt": "Estrutura detalhada das páginas",
            "ru": "Детальная структура страниц",
        },
    },
    "business_process_cta": {
        "sources": ("Geschäftsprozess besprechen",),
        "issue_type": "foreign_ui_label",
        "translations": {
            "fr": "Échanger sur votre processus métier",
            "es": "Hablar sobre el proceso del negocio",
            "pt": "Falar sobre o processo do negócio",
        },
    },
    "multilingual_rollout": {
        "sources": ("Mehrsprachige Einführung", "Mehrsprachiger Rollout-Plan"),
        "issue_type": "foreign_ui_label",
        "translations": {
            "fr": "Déploiement multilingue",
            "it": "Lancio multilingue",
            "ru": "Многоязычный запуск",
        },
    },
    "customization_integrations": {
        "sources": ("Anpassung & Integrationen",),
        "issue_type": "foreign_ui_label",
        "translations": {
            "fr": "Personnalisation & intégrations",
            "es": "Personalización e integraciones",
            "it": "Personalizzazioni e integrazioni",
            "pt": "Personalização e integrações",
            "ru": "Настройка и интеграции",
        },
    },
    "transparent_investment": {
        "sources": ("Transparente Investition",),
        "issue_type": "foreign_ui_label",
        "translations": {
            "de": "Transparente Investition",
            "en": "Transparent pricing",
            "fr": "Investissement transparent",
            "es": "Inversión transparente",
            "it": "Investimento trasparente",
            "pt": "Investimento transparente",
            "ru": "Прозрачный бюджет",
        },
    },
}
def build_system_vocabulary(locale_code: str, keys: Iterable[str] | None = None) -> dict[str, str]:
    """Return a source-string -> translation map for ``locale_code``.

    Only specs that carry a translation for the locale contribute entries;
    when ``keys`` is falsy the whole catalogue is consulted.
    """
    mapping: dict[str, str] = {}
    for spec_key in tuple(keys or SYSTEM_STRING_SPECS.keys()):
        spec = SYSTEM_STRING_SPECS[spec_key]
        translated = spec.get("translations", {}).get(locale_code)
        if not translated:
            continue
        mapping.update((source, translated) for source in spec["sources"])
    return mapping
def build_contextual_system_vocabulary(locale_code: str, keys: Iterable[str] | None = None) -> dict[str, str]:
    """Collect per-context replacement maps for ``locale_code``.

    Result is keyed by context name, then by each spec's primary source
    string (``sources[0]``). Specs without contexts for the locale are
    skipped.
    """
    by_context: dict[str, dict[str, str]] = {}
    for spec_key in tuple(keys or SYSTEM_STRING_SPECS.keys()):
        spec = SYSTEM_STRING_SPECS[spec_key]
        contexts = spec.get("contexts", {}).get(locale_code, {})
        if not contexts:
            continue
        primary_source = spec["sources"][0]
        for context_name, override in contexts.items():
            bucket = by_context.setdefault(context_name, {})
            bucket[primary_source] = override
    return by_context
def build_system_rewrite_candidates(keys: Iterable[str] | None = None) -> dict[str, str]:
    """Map each catalogue source string to the issue type it is flagged with."""
    flagged: dict[str, str] = {}
    for spec_key in tuple(keys or SYSTEM_STRING_SPECS.keys()):
        spec = SYSTEM_STRING_SPECS[spec_key]
        flagged.update((source, spec["issue_type"]) for source in spec["sources"])
    return flagged
def all_system_sources() -> set[str]:
    """Return the union of every source string in the catalogue."""
    return {
        source
        for spec in SYSTEM_STRING_SPECS.values()
        for source in spec["sources"]
    }
def is_canonical_system_string(locale_code: str, source: str) -> bool:
    """Report whether ``source`` is already the accepted wording for the locale.

    A string is canonical when a spec explicitly whitelists it for the
    locale, when the locale is German (the catalogue's source language),
    or when the configured replacement equals the string itself.
    """
    if any(
        source in spec.get("canonical_by_locale", {}).get(locale_code, ())
        for spec in SYSTEM_STRING_SPECS.values()
    ):
        return True
    if locale_code == "de":
        return source in all_system_sources()
    replacement = system_string_replacement(locale_code, source)
    return bool(replacement) and replacement == source
def system_string_replacement(locale_code: str, source: str) -> str:
    """Return the locale replacement for ``source`` ("" when none is known)."""
    matching_spec = next(
        (spec for spec in SYSTEM_STRING_SPECS.values() if source in spec["sources"]),
        None,
    )
    if matching_spec is None:
        return ""
    return matching_spec.get("translations", {}).get(locale_code, "")

View File

@@ -0,0 +1,56 @@
from __future__ import annotations
import json
from django.test import SimpleTestCase
from mandelblog_content_guard.agents import get_language_agent
from mandelblog_content_guard.extractors.visible_text import extract_visible_rendered_text
from mandelblog_content_guard.system_strings import build_system_rewrite_candidates, build_system_vocabulary
from mandelblog_content_guard.validators.multilingual import validate_text_nodes
class PackageLevelContentGuardTests(SimpleTestCase):
    """Smoke tests for the public surface of the content-guard package."""

    def test_system_string_replacement_catalog(self):
        # Vocabulary maps source -> translation; candidates map source -> issue type.
        fr_vocab = build_system_vocabulary("fr")
        pt_vocab = build_system_vocabulary("pt")
        candidates = build_system_rewrite_candidates()
        self.assertEqual(fr_vocab["PLAN"], "FORFAIT")
        self.assertEqual(pt_vocab["Unverbindlich"], "Sem compromisso")
        self.assertEqual(candidates["PLAN"], "generic_badge_label")

    def test_canonical_source_suppression(self):
        # A badge that is canonical for the locale must not be flagged.
        for locale_code, badge in (("nl", "PLAN"), ("it", "PIANO")):
            issues = validate_text_nodes(locale_code, [("body.badge", badge)])
            self.assertFalse(any(issue.bad_value == badge for issue in issues))

    def test_visible_text_extraction(self):
        html = """
        <html><body>
        <script>var x = 1;</script>
        <style>.hidden{display:none}</style>
        <h1>Visible heading</h1>
        <p aria-hidden="true">Invisible text</p>
        <a href="#">Visible link</a>
        </body></html>
        """
        extracted = extract_visible_rendered_text(html)
        for expected in ("Visible heading", "Visible link"):
            self.assertIn(expected, extracted)
        for hidden in ("Invisible text", "var x"):
            self.assertNotIn(hidden, extracted)

    def test_locale_normalizers(self):
        self.assertEqual(
            get_language_agent("de").rewrite("Was du bekommst", "body.heading"),
            "Was Sie erhalten",
        )
        self.assertEqual(
            get_language_agent("en").rewrite("PLAN", "body.badge"),
            "Package",
        )

    def test_audit_json_contract_shape(self):
        payload = {
            "run_id": 1,
            "summary": {"en": {"total_urls_checked": 1, "issues_found": 0, "issues_fixed": 0, "remaining_issues": 0, "by_severity": {"block": 0, "warn": 0, "log": 0}}},
            "issues": {"en": []},
        }
        # Round-trip through JSON to prove the contract is serialisable.
        parsed = json.loads(json.dumps(payload))
        self.assertEqual(sorted(parsed.keys()), ["issues", "run_id", "summary"])
        self.assertIn("by_severity", parsed["summary"]["en"])

View File

@@ -0,0 +1,65 @@
from __future__ import annotations
from dataclasses import asdict, dataclass
from typing import Any
from .settings import classify_issue, severity_for_issue, should_block_issue
@dataclass
class AuditIssue:
    """A single localisation finding on one field of one object."""

    severity: str  # "block" | "warn" | "log"
    issue_type: str  # machine-readable rule identifier
    field_path: str  # dotted path of the offending field
    bad_value: str  # offending text as found
    replacement: str = ""  # suggested fix, empty when unknown
    extra: dict[str, Any] | None = None  # free-form reporting context

    @property
    def level(self) -> str:
        # Level is resolved through settings so deployments can re-classify rules.
        return classify_issue(self.issue_type)

    @property
    def blocks(self) -> bool:
        # Either the recorded severity or the settings table may block publication.
        return self.severity == "block" or should_block_issue(self.issue_type)

    def asdict(self) -> dict[str, Any]:
        """Serialise to a plain dict, normalising ``extra`` and adding ``level``."""
        serialised = asdict(self)
        if not serialised.get("extra"):
            serialised["extra"] = {}
        serialised["level"] = self.level
        return serialised
def make_issue(issue_type: str, field_path: str, bad_value: str, replacement: str = "", extra: dict[str, Any] | None = None) -> AuditIssue:
    """Build an :class:`AuditIssue`, resolving the severity from settings."""
    return AuditIssue(
        severity_for_issue(issue_type),
        issue_type,
        field_path,
        bad_value,
        replacement=replacement,
        extra={} if extra is None else extra,
    )
def dedupe_issues(issues: list[AuditIssue]) -> list[AuditIssue]:
    """Drop repeat findings, keeping the first occurrence in original order."""
    # An insertion-ordered dict keyed by the identity tuple does the work.
    unique: dict[tuple[str, str, str, str], AuditIssue] = {}
    for issue in issues:
        fingerprint = (issue.severity, issue.issue_type, issue.field_path, issue.bad_value)
        unique.setdefault(fingerprint, issue)
    return list(unique.values())
def split_issues(issues: list[AuditIssue]) -> tuple[list[AuditIssue], list[AuditIssue]]:
    """Partition into ``(blocking, non-blocking)`` by each issue's ``blocks`` flag."""
    blocking: list[AuditIssue] = []
    advisory: list[AuditIssue] = []
    for issue in issues:
        (blocking if issue.blocks else advisory).append(issue)
    return blocking, advisory
def format_issue(issue: AuditIssue) -> str:
    """One-line rendering: ``[level] path: bad_value -> replacement``."""
    rendered = f"[{issue.level}] {issue.field_path}: {issue.bad_value}"
    if issue.replacement:
        rendered += f" -> {issue.replacement}"
    return rendered

View File

@@ -0,0 +1,452 @@
from __future__ import annotations
import logging
import re
from collections import Counter
from typing import Any
from urllib.error import HTTPError, URLError
from urllib.request import Request, urlopen
from django.core.exceptions import ValidationError
from django.utils import timezone
from wagtail.models import Page, Site
from wagtail.snippets.models import get_snippet_models
from ..agents import get_language_agent
from ..extractors.visible_text import extract_visible_rendered_text, normalize_text
from ..settings import audit_default_locales, rewrite_enabled
from ..types import dedupe_issues, format_issue, make_issue
from .rules.cta import validate_cta
from .rules.forms import validate_form_copy
from .rules.language import detect_language_mismatch
from .rules.patterns import (
GLOBAL_BAD_PATTERNS,
KNOWN_REPLACEMENTS,
LOCALE_FORBIDDEN,
validate_patterns,
)
from mandelstudio.models import LocaleAuditIssue, LocaleAuditRun
logger = logging.getLogger("mandelstudio.multilingual")
def expected_locale(instance: Any) -> str:
    """Language code of the instance's locale, defaulting to Dutch ("nl")."""
    locale = getattr(instance, "locale", None)
    code = getattr(locale, "language_code", None) if locale is not None else None
    return code if code else "nl"
def iter_text_nodes(value: Any, path: str = ""):
    """Yield ``(path, text)`` pairs for every string nested inside ``value``.

    Handles plain strings, lists (indexed as ``path[i]``), dicts (joined as
    ``path.key``) and anything exposing ``raw_data`` (StreamField-like
    containers, walked as a list). ``None`` yields nothing.
    """
    if value is None:
        return
    if isinstance(value, str):
        yield path, value
    elif hasattr(value, "raw_data"):
        # Stream values: walk their raw block data instead of the wrapper.
        yield from iter_text_nodes(list(value.raw_data), path)
    elif isinstance(value, list):
        for position, element in enumerate(value):
            yield from iter_text_nodes(element, f"{path}[{position}]")
    elif isinstance(value, dict):
        for dict_key, element in value.items():
            nested_path = f"{path}.{dict_key}" if path else str(dict_key)
            yield from iter_text_nodes(element, nested_path)
def extract_instance_text(instance: Any) -> list[tuple[str, str]]:
    """Gather ``(field_path, text)`` pairs from the instance's audited fields.

    Plain string metadata fields are taken as-is when non-blank; stream and
    body-style fields are walked recursively via ``iter_text_nodes``.
    """
    collected: list[tuple[str, str]] = []
    for meta_field in ("title", "seo_title", "search_description"):
        text = getattr(instance, meta_field, None)
        if isinstance(text, str) and text.strip():
            collected.append((meta_field, text))
    for rich_field in ("body", "content", "footer", "mini_footer"):
        if hasattr(instance, rich_field):
            collected.extend(iter_text_nodes(getattr(instance, rich_field), rich_field))
    return collected
def validate_text_nodes(locale_code: str, nodes: list[tuple[str, str]]):
    """Run every rule set over the given ``(field_path, text)`` pairs.

    Pattern, CTA and form-copy rules apply to all nodes; the statistical
    language heuristic only fires on texts of 80+ characters, where the
    stopword signal is meaningful. Findings are deduplicated before return.
    """
    findings = []
    for field_path, raw_text in nodes:
        text = normalize_text(raw_text)
        if not text:
            continue
        for rule in (validate_patterns, validate_cta, validate_form_copy):
            findings.extend(rule(locale_code, field_path, text))
        if len(text) >= 80:
            mismatch = detect_language_mismatch(locale_code, text)
            if mismatch:
                findings.append(make_issue("language_heuristic", field_path, mismatch["message"]))
    return dedupe_issues(findings)
# Issue types that qualify for an agent rewrite preview: when an audit runs
# with rewrite enabled, findings of these types get a suggested replacement
# attached for human review (see annotate_rewrite_previews). All other
# issue types are report-only.
REWRITE_REVIEW_TYPES = {
    "known_bad_pattern",
    "wrong_language_fragment",
    "rendered_bad_pattern",
    "rendered_wrong_language",
    "rewrite_candidate",
    "weak_marketing_copy",
    "foreign_ui_label",
    "generic_badge_label",
    "mixed_locale_heading",
    "cta_language_mismatch",
}
def validate_page(page: Page):
    """Validate a Wagtail page in its own locale, using its specific subclass."""
    return validate_text_nodes(expected_locale(page), extract_instance_text(page.specific))


def validate_snippet_instance(instance: Any):
    """Validate any snippet-like object in its own locale."""
    nodes = extract_instance_text(instance)
    return validate_text_nodes(expected_locale(instance), nodes)


def validate_posted_snippet(locale_code: str, payload: dict[str, Any]):
    """Validate the string values of a posted payload for ``locale_code``."""
    string_nodes = [(field, value) for field, value in payload.items() if isinstance(value, str)]
    return validate_text_nodes(locale_code, string_nodes)
def _replace_known_strings(value: Any, locale_code: str):
changes = []
if isinstance(value, str):
new = value
for bad, replacements in KNOWN_REPLACEMENTS.items():
replacement = replacements.get(locale_code)
if replacement and bad in new:
new = new.replace(bad, replacement)
changes.append({"bad": bad, "replacement": replacement})
return new, changes, new != value
if isinstance(value, list):
out = []
changed = False
for item in value:
new_item, item_changes, item_changed = _replace_known_strings(item, locale_code)
out.append(new_item)
changes.extend(item_changes)
changed = changed or item_changed
return out, changes, changed
if isinstance(value, dict):
out = {}
changed = False
for key, item in value.items():
new_item, item_changes, item_changed = _replace_known_strings(item, locale_code)
out[key] = new_item
changes.extend(item_changes)
changed = changed or item_changed
return out, changes, changed
return value, changes, False
def apply_known_replacements(instance: Any, locale_code: str):
    """Apply the static KNOWN_REPLACEMENTS table to an instance's fields.

    Rewrites plain metadata strings and stream-field raw data in place and
    then persists: pages get a new revision (published when the page is
    live), anything else is saved directly. Returns the list of applied
    change records (each tagged with the originating field name); when
    nothing matched, returns [] and the instance is not saved at all.
    """
    changes = []
    # Plain string metadata fields.
    for field_name in ["title", "seo_title", "search_description"]:
        value = getattr(instance, field_name, None)
        if not isinstance(value, str):
            continue
        new_value, field_changes, changed = _replace_known_strings(value, locale_code)
        if changed:
            setattr(instance, field_name, new_value)
            changes.extend({"field": field_name, **change} for change in field_changes)
    # Stream/body fields: rewrite raw block data when available, else the string.
    for field_name in ["body", "content", "footer", "mini_footer"]:
        if not hasattr(instance, field_name):
            continue
        field_value = getattr(instance, field_name)
        if hasattr(field_value, "raw_data"):
            new_raw, field_changes, changed = _replace_known_strings(list(field_value.raw_data), locale_code)
            if changed:
                setattr(instance, field_name, new_raw)
                changes.extend({"field": field_name, **change} for change in field_changes)
        elif isinstance(field_value, str):
            new_value, field_changes, changed = _replace_known_strings(field_value, locale_code)
            if changed:
                setattr(instance, field_name, new_value)
                changes.extend({"field": field_name, **change} for change in field_changes)
    if not changes:
        return []
    if isinstance(instance, Page):
        # Pages go through the revision workflow so edit history is preserved.
        revision = instance.save_revision()
        if instance.live:
            revision.publish()
        return changes
    instance.save()
    return changes
def rewrite_with_agent(instance: Any, locale_code: str, issues, *, dry_run: bool = False):
    """Let the locale's language agent rewrite flagged fields on ``instance``.

    No-op (returns []) when rewriting is disabled in settings. Issues are
    grouped per field so the agent can target its edits. With
    ``dry_run=True`` the proposed changes are returned but nothing is
    persisted. Returns a list of change records describing each rewrite.
    """
    if not rewrite_enabled():
        return []
    agent = get_language_agent(locale_code)
    issue_map = agent.build_issue_map(issues)
    changes = []
    # Plain string metadata fields: rewrite whole values.
    for field_name in ["title", "seo_title", "search_description"]:
        value = getattr(instance, field_name, None)
        if not isinstance(value, str):
            continue
        field_issues = issue_map.get(field_name, [])
        rewritten = agent.rewrite(value, field_path=field_name, issues=field_issues)
        if rewritten != value:
            setattr(instance, field_name, rewritten)
            changes.append({"field": field_name, "before": value, "after": rewritten, "method": "agent"})
    # Stream/body fields: delegate block traversal to the agent.
    for field_name in ["body", "content", "footer", "mini_footer"]:
        if not hasattr(instance, field_name):
            continue
        field_value = getattr(instance, field_name)
        if hasattr(field_value, "raw_data"):
            rewritten, changed = agent.process_block(list(field_value.raw_data), field_name, issue_map)
            if changed:
                setattr(instance, field_name, rewritten)
                # Block-level rewrites don't carry before/after snapshots.
                changes.append({"field": field_name, "method": "agent"})
        elif isinstance(field_value, str):
            rewritten = agent.rewrite(field_value, field_path=field_name, issues=issue_map.get(field_name, []))
            if rewritten != field_value:
                setattr(instance, field_name, rewritten)
                changes.append({"field": field_name, "before": field_value, "after": rewritten, "method": "agent"})
    if not changes or dry_run:
        return changes
    if isinstance(instance, Page):
        # Pages are persisted through the revision workflow.
        revision = instance.save_revision()
        if instance.live:
            revision.publish()
        return changes
    instance.save()
    return changes
def enumerate_public_pages(locale_codes: list[str] | None = None, url_filters: list[str] | None = None):
    """List the live, public pages to audit, grouped by locale code.

    For each locale the first site's root page is resolved to its
    translation (falling back to the untranslated root) and only pages
    under that root are kept, so locales don't leak into each other's
    audit. ``url_filters``, when given, restricts results to exactly those
    page URLs. Returns ``{locale_code: [Page, ...]}``.
    """
    result = {}
    site = Site.objects.order_by("id").first()
    site_root = getattr(site, "root_page", None)
    normalized_filters = set(url_filters or [])
    for locale_code in (locale_codes or audit_default_locales()):
        locale_root_path = None
        if site_root is not None:
            # Find this locale's translation of the site root, if any.
            translated_root = (
                Page.objects.filter(
                    translation_key=site_root.translation_key,
                    locale__language_code=locale_code,
                )
                .specific()
                .first()
            )
            chosen_root = translated_root or site_root
            locale_root_path = getattr(chosen_root, "path", None)
        qs = (
            Page.objects.filter(locale__language_code=locale_code)
            .live()
            .public()
            .specific()
            .order_by("path")
        )
        pages = []
        for page in qs:
            page_url = getattr(page, "url", None)
            if not page_url:
                # A page without a routable URL cannot be fetched or audited.
                continue
            if locale_root_path and not page.path.startswith(locale_root_path):
                # Outside this locale's subtree (treebeard path-prefix check).
                continue
            if normalized_filters and page_url not in normalized_filters:
                continue
            pages.append(page)
        result[locale_code] = pages
    return result
def fetch_rendered_text(page: Page):
    """Fetch the page over HTTP and return ``(status_code, visible_text)``.

    Synthetic status codes: 598 for configuration gaps (no page URL / no
    site root_url), 599 for network-level failures (URLError). HTTP error
    pages still have their body extracted, since they may contain copy.
    """
    relative_url = getattr(page, "url", None)
    if not relative_url:
        return 598, "missing page URL"
    if str(relative_url).startswith("http"):
        target = relative_url
    else:
        try:
            owning_site = page.get_site()
        except Site.DoesNotExist:
            owning_site = None
        owning_site = owning_site or Site.objects.order_by("id").first()
        if owning_site is None or not getattr(owning_site, "root_url", None):
            return 598, "missing site root_url"
        target = f"{owning_site.root_url}{relative_url}"
    http_request = Request(target, headers={"User-Agent": "mandelstudio-audit/1.0"})
    try:
        with urlopen(http_request, timeout=30) as response:
            status = response.getcode()
            raw_body = response.read().decode("utf-8", errors="replace")
    except HTTPError as exc:
        status = exc.code
        raw_body = exc.read().decode("utf-8", errors="replace")
    except URLError as exc:
        status = 599
        raw_body = str(exc)
    return status, extract_visible_rendered_text(raw_body)
def iter_rendered_lines(rendered_text: str) -> list[str]:
    """Split rendered text into normalised sentence-ish chunks.

    Splits after sentence punctuation or on runs of two-plus whitespace
    characters, dropping chunks that normalise to nothing.
    """
    chunks = re.split(r"(?<=[\.\!\?])\s+|\s{2,}", rendered_text)
    return [line for chunk in chunks if (line := normalize_text(chunk))]
def validate_rendered_output(locale_code: str, rendered_text: str, status_code: int):
    """Validate the rendered (visible) text of a fetched page.

    Adds a ``render_status`` issue for any non-200 fetch, runs the pattern
    rules per sentence-ish line (rewriting each finding to surface the
    whole line and an occurrence count), then scans the full text for
    globally bad and locale-forbidden fragments. Returns the deduplicated
    issue list.
    """
    issues = []
    if status_code != 200:
        issues.append(make_issue("render_status", "rendered", str(status_code)))
    # Count identical findings across lines so reports can aggregate them.
    source_counter = Counter()
    for line in iter_rendered_lines(rendered_text):
        line_issues = validate_patterns(locale_code, "rendered", line)
        for issue in line_issues:
            # Surface the whole offending line, not just the fragment.
            issue.bad_value = line
            issue.extra = {**(issue.extra or {}), "source": "rendered"}
            source_counter[(issue.issue_type, issue.bad_value)] += 1
        issues.extend(line_issues)
    # Annotate counts after all lines have been tallied (issues appended
    # below get their count set explicitly, so this must run first).
    for issue in issues:
        if issue.extra is not None:
            issue.extra["count"] = source_counter.get((issue.issue_type, issue.bad_value), 1)
    # Whole-text scans catch fragments that span line boundaries.
    for fragment in GLOBAL_BAD_PATTERNS:
        if fragment in rendered_text:
            issue = make_issue("rendered_bad_pattern", "rendered", fragment, KNOWN_REPLACEMENTS.get(fragment, {}).get(locale_code, ""))
            issue.extra = {"source": "rendered", "count": 1}
            issues.append(issue)
    for fragment in LOCALE_FORBIDDEN.get(locale_code, ()):
        if fragment in rendered_text:
            issue = make_issue("rendered_wrong_language", "rendered", fragment, KNOWN_REPLACEMENTS.get(fragment, {}).get(locale_code, ""))
            issue.extra = {"source": "rendered", "count": 1}
            issues.append(issue)
    return dedupe_issues(issues)
def annotate_rewrite_previews(locale_code: str, issues):
    """Attach agent rewrite previews to reviewable issues lacking a fix.

    Mutates the issues in place: sets ``replacement`` to the preview and
    marks ``extra["review_candidate"]``. Returns the same list.
    """
    agent = get_language_agent(locale_code)
    for issue in issues:
        eligible = issue.issue_type in REWRITE_REVIEW_TYPES and not issue.replacement
        if not eligible:
            continue
        preview = agent.rewrite(issue.bad_value, field_path=issue.field_path, issues=[issue])
        if preview and preview != issue.bad_value:
            issue.replacement = preview
            issue.extra = {**(issue.extra or {}), "review_candidate": True}
    return issues
def _raise_on_blocking(issues):
    """Return ``issues`` unchanged, raising when any finding blocks publication."""
    blocking = [issue for issue in issues if issue.blocks]
    if blocking:
        raise ValidationError({"content_guard": [format_issue(issue) for issue in blocking]})
    return issues


def validate_instance_or_raise(instance: Any):
    """Validate a page or snippet; raise ``ValidationError`` on blocking issues."""
    if isinstance(instance, Page):
        return _raise_on_blocking(validate_page(instance))
    return _raise_on_blocking(validate_snippet_instance(instance))


def validate_ai_text_or_raise(locale_code: str, field_path: str, value: str):
    """Validate one AI-produced text value; raise if any finding blocks."""
    return _raise_on_blocking(validate_text_nodes(locale_code, [(field_path, value)]))
def record_issues(run: LocaleAuditRun, locale_code: str, obj: Any, issues, *, fixed: bool = False) -> None:
    """Persist one ``LocaleAuditIssue`` row per finding against ``run``."""
    # Object metadata is identical for every row, so compute it once.
    object_title = getattr(obj, "title", str(obj))[:255]
    object_url = getattr(obj, "url", "") or ""
    object_pk = getattr(obj, "pk", None)
    for issue in issues:
        LocaleAuditIssue.objects.create(
            run=run,
            locale_code=locale_code,
            object_id=object_pk,
            object_type=obj.__class__.__name__,
            url=object_url,
            title=object_title,
            severity=issue.severity,
            issue_type=issue.issue_type,
            field_path=issue.field_path,
            bad_value=issue.bad_value,
            replacement=issue.replacement,
            fixed=fixed,
            extra=issue.extra or {},
        )
def audit_locales(locale_codes: list[str], fix: bool = False, rewrite: bool = False, dry_run: bool = False, url_filters: list[str] | None = None) -> LocaleAuditRun:
    """Run a full multilingual audit over pages and snippets.

    For every public page per locale: validate stored content plus the
    rendered output, optionally apply known replacements (``fix``) and/or
    agent rewrites (``rewrite``), re-auditing after each mutation. With
    ``dry_run`` the rewrite step proposes changes without persisting.
    Snippets get the same treatment (without the fix step). All findings
    are recorded against a new ``LocaleAuditRun``, which is returned with
    totals and a per-locale summary filled in.
    """
    run = LocaleAuditRun.objects.create(locale_codes=locale_codes, fix_enabled=fix or rewrite)
    pages_by_locale = enumerate_public_pages(locale_codes, url_filters=url_filters)
    summary: dict[str, Any] = {}
    total_checked = 0
    total_issues = 0
    pages_with_issues = 0
    for locale_code, pages in pages_by_locale.items():
        locale_summary = {
            "total_urls_checked": len(pages),
            "issues_found": 0,
            "issues_fixed": 0,
            "remaining_issues": 0,
            "by_severity": {"block": 0, "warn": 0, "log": 0},
        }
        for page in pages:
            total_checked += 1
            status_code, rendered = fetch_rendered_text(page)
            issues = dedupe_issues(validate_page(page) + validate_rendered_output(locale_code, rendered, status_code))
            if rewrite:
                issues = annotate_rewrite_previews(locale_code, issues)
            # Snapshot the pre-fix count: "issues_fixed" below is the delta
            # between this and whatever remains after fix/rewrite passes.
            initial_issue_count = len(issues)
            fixed_changes = []
            if issues and fix:
                fixed_changes = apply_known_replacements(page.specific, locale_code)
                if fixed_changes:
                    # Record what was fixed, then re-audit the mutated page.
                    record_issues(run, locale_code, page, issues, fixed=True)
                    status_code, rendered = fetch_rendered_text(page.specific)
                    issues = dedupe_issues(validate_page(page.specific) + validate_rendered_output(locale_code, rendered, status_code))
                    if rewrite:
                        issues = annotate_rewrite_previews(locale_code, issues)
            if issues and rewrite:
                rewrite_changes = rewrite_with_agent(page.specific, locale_code, issues, dry_run=dry_run)
                if rewrite_changes:
                    record_issues(run, locale_code, page, issues, fixed=not dry_run)
                    if not dry_run:
                        # Re-audit after the agent actually saved changes.
                        status_code, rendered = fetch_rendered_text(page.specific)
                        issues = dedupe_issues(validate_page(page.specific) + validate_rendered_output(locale_code, rendered, status_code))
                        issues = annotate_rewrite_previews(locale_code, issues)
            if issues:
                pages_with_issues += 1
                record_issues(run, locale_code, page, issues)
            locale_summary["issues_found"] += initial_issue_count
            locale_summary["issues_fixed"] += initial_issue_count - len(issues)
            locale_summary["remaining_issues"] += len(issues)
            for issue in issues:
                locale_summary["by_severity"][issue.severity] = locale_summary["by_severity"].get(issue.severity, 0) + 1
            total_issues += initial_issue_count
        summary[locale_code] = locale_summary
    # Snippets: validate (and optionally rewrite) every instance of every
    # registered snippet model; there is no known-replacement fix step here.
    snippet_summary = {}
    for model in get_snippet_models():
        count = 0
        for instance in model.objects.all():
            issues = validate_snippet_instance(instance)
            if rewrite:
                issues = annotate_rewrite_previews(expected_locale(instance), issues)
            if issues and rewrite:
                rewrite_changes = rewrite_with_agent(instance, expected_locale(instance), issues, dry_run=dry_run)
                if rewrite_changes and not dry_run:
                    issues = validate_snippet_instance(instance)
            if not issues:
                continue
            count += len(issues)
            record_issues(run, expected_locale(instance), instance, issues)
        if count:
            snippet_summary[model.__name__] = count
            total_issues += count
    summary["snippets"] = snippet_summary
    run.total_urls_checked = total_checked
    run.issues_found = total_issues
    run.pages_with_issues = pages_with_issues
    run.summary = summary
    run.finished_at = timezone.now()
    run.save(update_fields=["total_urls_checked", "issues_found", "pages_with_issues", "summary", "finished_at"])
    logger.info("Completed multilingual audit run %s", run.pk)
    return run

View File

@@ -0,0 +1,146 @@
from __future__ import annotations
import re
from ...types import make_issue
# Per-locale regexes describing approved CTA openings. A CTA is accepted
# as soon as any pattern matches; locales missing from this table accept
# nothing, so every CTA there gets flagged.
CTA_RULES = {
    "nl": (
        r"^Plan ",
        r"^Bekijk ",
        r"^Vraag ",
        r"^Bespreek ",
        r"^Contact$",
        r"^Start ",
        r"^Meer ",
        r"^Verstuur ",
        r"^Neem ",
    ),
    "en": (
        r"^Book ",
        r"^View ",
        r"^Schedule ",
        r"^Start ",
        r"^Talk ",
        r"^Discuss ",
        r"^Contact$",
        r"^Explore ",
        r"^Learn ",
        r"^Request ",
        r"^Send ",
    ),
    "de": (
        r"^Plan",
        r"^Mehr",
        r"^Support",
        r"^Start",
        r"^Kontakt",
        r"^Gespr",
        r"^Kostenlose",
        r"^Anfrage",
        r"^Projekte",
        r"^Verein",
        r"^Besprech",
        r"^Anzeig",
        r"^Ansehen",
        r"^Technisch",
        r"^Unterst",
        r"^Unsere",
        r"^Service",
        r"^Dienstleistungen",
        r"^Erstgespräch",
        r"^Einführ",
        r"^Anpassung",
        r"^Ansichts",
        r"^Prozess",
        r"^Pakete",
        r"^Demo",
        r"^Alle ",
        r"^Ein ",
        r"^Webshop",
    ),
    "fr": (
        r"^Planifier",
        r"^Voir",
        r"^Découvrir",
        r"^Demander",
        r"^Lancer",
        r"^Démarrer",
        r"^Contacter",
        r"^Contact$",
        r"^Parler",
        r"^Lancez",
        r"^Prendre",
        r"^Envoyer",
        r"^Afficher",
    ),
    "es": (
        r"^Reservar",
        r"^Ver",
        r"^Solicitar",
        r"^Inicia",
        r"^Hablar",
        r"^Descubrir",
        r"^Contactar",
        r"^Planificar",
        r"^Programe",
        r"^Concertar",
        r"^Enviar",
        r"^Mostrar",
        r"^Comenta",
    ),
    "it": (
        r"^Prenota",
        r"^Vedi",
        r"^Avvia",
        r"^Richiedi",
        r"^Contatta",
        r"^Contatto$",
        r"^Scopri",
        r"^Pianifica",
        r"^Invia",
        r"^Mostra",
        r"^Parla",
        r"^Parliamo",
    ),
    "pt": (
        r"^Agendar",
        r"^Ver",
        r"^Iniciar",
        r"^Pedir",
        r"^Contactar",
        r"^Falar",
        r"^Explorar",
        r"^Marcar",
        r"^Solicitar",
        r"^Enviar",
        r"^Mostrar",
    ),
    "ru": (
        r"^Заплан",
        r"^Посмотр",
        r"^Запуст",
        r"^Связ",
        r"^Подробнее",
        r"^Показать",
        r"^Отправ",
        r"^Получ",
        r"^Запрос",
    ),
}
# Field names (last path segment) treated as call-to-action copy.
CTA_FIELDS = {
    "cta_text",
    "primary_cta_text",
    "secondary_cta_text",
    "submit_button_text",
}


def validate_cta(locale_code: str, field_path: str, normalized: str):
    """Flag CTA fields whose text doesn't open with a locale-approved verb."""
    if field_path.rsplit(".", 1)[-1] not in CTA_FIELDS:
        return []
    for pattern in CTA_RULES.get(locale_code, ()):
        if re.search(pattern, normalized):
            return []
    return [make_issue("cta_language_mismatch", field_path, normalized)]

View File

@@ -0,0 +1,21 @@
from __future__ import annotations
from ...types import make_issue
from .patterns import PLACEHOLDER_VALUES
from .language import detect_language_mismatch
# Field names (last path segment) treated as form-widget copy.
FORM_FIELDS = {"label", "placeholder", "help_text"}


def validate_form_copy(locale_code: str, field_path: str, normalized: str):
    """Check form-widget copy: flag placeholder/empty text and wrong-language copy."""
    if field_path.rsplit(".", 1)[-1] not in FORM_FIELDS:
        return []
    findings = []
    if normalized in PLACEHOLDER_VALUES or normalized == "":
        findings.append(make_issue("empty_form_copy", field_path, normalized))
    mismatch = detect_language_mismatch(locale_code, normalized)
    if mismatch:
        findings.append(make_issue("form_language_mismatch", field_path, mismatch["message"]))
    return findings

View File

@@ -0,0 +1,43 @@
from __future__ import annotations
import re
STOPWORDS = {
"nl": {"de", "het", "een", "en", "voor", "met", "van", "je", "wij", "niet"},
"en": {"the", "and", "for", "with", "your", "you", "from", "that", "this", "not"},
"de": {"der", "die", "das", "und", "mit", "für", "nicht", "eine", "ist", "sie"},
"fr": {"le", "la", "les", "et", "avec", "pour", "vous", "une", "pas", "des"},
"es": {"el", "la", "los", "las", "con", "para", "una", "que", "del", "por"},
"it": {"il", "la", "con", "per", "una", "che", "del", "non", "gli", "dei"},
"pt": {"o", "a", "os", "as", "com", "para", "uma", "que", "não", "dos"},
"ru": {"и", "в", "на", "с", "для", "что", "это", "как", "по", "не"},
}
def _tokenize(text: str) -> list[str]:
text = re.sub(r"<[^>]+>", " ", text)
return re.findall(r"[\w\u0400-\u04FF']+", text.lower())
def detect_language_mismatch(locale_code: str, text: str):
tokens = _tokenize(text)
if len(tokens) < 12:
return None
scores = {code: sum(1 for token in tokens if token in words) for code, words in STOPWORDS.items()}
expected = scores.get(locale_code, 0)
foreign_locale, foreign_score = max(scores.items(), key=lambda item: item[1])
if foreign_locale == locale_code:
return None
if expected >= foreign_score:
return None
if foreign_score >= 6 and foreign_score >= expected + 4:
return {
"severity": "block",
"message": f"expected={locale_code}, detected={foreign_locale}, score={foreign_score}, expected_score={expected}",
}
if expected == 0 and foreign_score >= 5:
return {
"severity": "warn",
"message": f"expected={locale_code}, detected={foreign_locale}, score={foreign_score}, expected_score={expected}",
}
return None

View File

@@ -0,0 +1,269 @@
from __future__ import annotations
import re
from ...types import make_issue
from ...system_strings import (
build_system_rewrite_candidates,
is_canonical_system_string,
system_string_replacement,
)
# Fragments that are wrong in ANY locale: leaked translator/LLM meta-output
# ("The Spanish translation …", "Poiché l'input …"), untranslated system
# labels, and the literal "None" leaking from a null value.
GLOBAL_BAD_PATTERNS = (
    "The Spanish translation",
    "The Spanish translation of",
    "As the input",
    "The input",
    "Poiché l'input",
    'Unternehmen" è tedesco',
    "Support anzeigen",
    "Starter intake",
    "Business intake",
    "Plan Starter intake",
    "Plan Business intake",
    "Plan de admisión",
    "None",
)
# Per-locale fragments that indicate copy from another language leaked in.
LOCALE_FORBIDDEN = {
    "nl": ("Starter intake", "Business intake", "Poiché", "Correo electrónico", "Mostrar los servicios", "Plan de admisión"),
    "en": ("Starter intake", "Business intake", "Poiché", "Correo electrónico", "Mostrar los servicios", "Questions fréquemment posées", "Plan de admisión"),
    "de": ("Starter intake", "Business intake", "Poiché", "Correo electrónico", "Mostrar los servicios", "Questions fréquemment posées", "Plan de admisión"),
    "fr": ("Starter intake", "Business intake", "Poiché", "Correo electrónico", "Mostrar los servicios", "Plan de admisión", "Support anzeigen"),
    "es": ("Poiché", 'Unternehmen" è tedesco', "Support anzeigen", "Questions fréquemment posées"),
    "it": ("Poiché l'input", "Consulta inicial sin compromiso", "Mostrar los servicios", "Questions fréquentes", "Plan de admisión", "Correo electrónico"),
    "pt": ("Poiché l'input", "Consulta inicial sin compromiso", "Mostrar los servicios", "Correo electrónico", 'Unternehmen" è tedesco', "Questions fréquemment posées"),
    "ru": ("Poiché l'input", "Consulta inicial sin compromiso", "Correo electrónico", 'Unternehmen" è tedesco', "Mostrar los servicios"),
}
# Values that mean "nobody wrote real copy here" (see validate_form_copy).
PLACEHOLDER_VALUES = {"None", "-", "N/A", "null"}
# Badge texts too generic to ship; flagged unless canonical for the locale.
GENERIC_BADGE_LABELS = {
    "New",
    "Popular",
    "PLAN",
    "PIANO",
    "SERVICES",
}
# Source strings (from the shared system-string catalogue) offered to the
# rewrite agent in every locale, mapped source -> issue type.
GLOBAL_REWRITE_CANDIDATES = {
    **build_system_rewrite_candidates(
        (
            "days_label",
            "average_delivery",
            "response_time",
            "without_commitment",
            "transparent_label",
            "weeks_1_2",
            "customer_reviews",
            "editable_label",
            "core_pages_label",
            "detailed_page_structure",
            "business_process_cta",
            "multilingual_rollout",
            "customization_integrations",
            "transparent_investment",
        )
    ),
}
# Locale-specific strings to hand to the rewrite agent, mapped
# string -> issue type. Many entries are deliberately in the WRONG
# language for their locale key: they are known leaks observed on that
# locale's pages (e.g. German copy on the Portuguese site).
LOCALE_REWRITE_CANDIDATES = {
    "en": {
        "Service packages (from) Transparent starting points.": "foreign_ui_label",
        "Frequently Asked Questions Transparent about planning, approach, and management.": "foreign_ui_label",
        "Transparent investment": "foreign_ui_label",
    },
    "de": {
        "New": "weak_marketing_copy",
        "Intakegespräch": "weak_marketing_copy",
        "SEO-ready basis": "foreign_ui_label",
        "Sales-ready mit skalierbarem Stack": "foreign_ui_label",
        "Continuous Verbesserung": "foreign_ui_label",
        "Was du bekommst": "weak_marketing_copy",
        "Einführungsmeeting": "weak_marketing_copy",
        "Starter Website": "weak_marketing_copy",
        "Business Website": "weak_marketing_copy",
        "Häufig gestellte Fragen Transparent über Planung, Vorgehensweise und Management.": "foreign_ui_label",
    },
    "es": {
        "Preguntas frecuentes Transparente sobre la planificación, el proceso y la gestión.": "foreign_ui_label",
        "Unverbindliches Gespräch, klares Angebot": "foreign_ui_label",
    },
    "pt": {
        "Siti web e negozi online": "mixed_locale_heading",
        "Caso de cliente en directo": "weak_marketing_copy",
        "El primer proyecto de producción finalizado con éxito.": "weak_marketing_copy",
        "Más sobre el proceso": "foreign_ui_label",
        "Modifiez simplement vous-même.": "foreign_ui_label",
        "Opciones de la tienda web": "foreign_ui_label",
        "Planes de soporte": "foreign_ui_label",
        "Multilingüe": "foreign_ui_label",
        "Unsere Serviços": "mixed_locale_heading",
        "Elija el camino": "mixed_locale_heading",
        "Début en direct": "foreign_ui_label",
        "Demande d'admission initiale": "foreign_ui_label",
        "Site Web d'Entreprise": "foreign_ui_label",
        "Hablar sobre el proceso empresarial": "foreign_ui_label",
        "Mise en place de boutique en ligne": "foreign_ui_label",
        "Maintenance & gestion": "foreign_ui_label",
        "Afficher le plan de soutien": "foreign_ui_label",
        "Introducción multilingüe": "foreign_ui_label",
        "Forfaits de services (à partir de)": "mixed_locale_heading",
        "Kundenschätzung": "foreign_ui_label",
        "Gestisca lei stesso il contenuto": "foreign_ui_label",
        "Optimizado para móviles": "foreign_ui_label",
        "Schnell online mit einer starken Basis": "weak_marketing_copy",
        "La entrada \"Unterstützung oder Erweiterung\"": "foreign_ui_label",
        "Suivi + corrections": "foreign_ui_label",
        "Mejoras mensuales": "foreign_ui_label",
        "¿A qué velocidad puede comenzar?": "foreign_ui_label",
        "¿Puedo editar textos e imágenes yo mismo?": "foreign_ui_label",
        "Transparente sobre o planejamento, o processo e a gestão.": "foreign_ui_label",
        "Ab 2.250 €": "foreign_ui_label",
        "Boutique en ligne": "foreign_ui_label",
        "Sales-ready mit skalierbarem Stack": "foreign_ui_label",
    },
    "fr": {
        "Erstes Produktionsprojekt erfolgreich abgeschlossen.": "weak_marketing_copy",
        "Von Kickoff bis zum Launch mit einem klaren Umfang.": "foreign_ui_label",
        "Demande d'admission initiale": "weak_marketing_copy",
        "Entretien d'accueil": "weak_marketing_copy",
        "Vraag over diensten": "foreign_ui_label",
        "Konkrete erste Schätzung": "foreign_ui_label",
        "Ansatz, der zu Ihrem Budget passt": "foreign_ui_label",
        # French also re-flags one shared catalogue entry in addition to
        # the globals above.
        **build_system_rewrite_candidates(("weeks_2_4",)),
        "Bereit, mit der Business-Website zu starten?": "foreign_ui_label",
    },
    "it": {
        "Planificación clara": "foreign_ui_label",
        "Mehrsprachiger Rollout-Plan": "foreign_ui_label",
        "Unverbindliches Gespräch, klares Angebot": "foreign_ui_label",
    },
    "ru": {
        "Base prête pour le SEO": "foreign_ui_label",
        "Unverbindliches Gespräch, klares Angebot": "foreign_ui_label",
    },
}
# Deterministic substitutions applied by the audit's "fix" mode:
# known-bad source fragment -> per-locale replacement. An entry mapping a
# string to itself (e.g. "Mostrar los servicios" for "es") marks it as the
# accepted wording in that locale.
#
# Fix: the four French replacements previously read "Planifier lentretien …"
# — the elision apostrophe in "l'entretien" was missing.
KNOWN_REPLACEMENTS = {
    "Starter intake": {
        "nl": "Plan startergesprek",
        "en": "Book starter call",
        "de": "Starter-Gespräch planen",
        "fr": "Planifier l'entretien de départ",
        "es": "Reservar llamada inicial",
        "it": "Prenota una chiamata iniziale",
        "pt": "Agendar chamada inicial",
        "ru": "Запланировать стартовый звонок",
    },
    "Business intake": {
        "nl": "Plan zakelijk gesprek",
        "en": "Book business call",
        "de": "Beratungsgespräch planen",
        "fr": "Planifier l'entretien commercial",
        "es": "Reservar llamada comercial",
        "it": "Prenota una chiamata commerciale",
        "pt": "Agendar chamada comercial",
        "ru": "Запланировать деловой звонок",
    },
    "Plan Starter intake": {
        "nl": "Plan startergesprek",
        "en": "Book starter call",
        "de": "Starter-Gespräch planen",
        "fr": "Planifier l'entretien de départ",
        "es": "Reservar llamada inicial",
        "it": "Prenota una chiamata iniziale",
        "pt": "Agendar chamada inicial",
        "ru": "Запланировать стартовый звонок",
    },
    "Plan Business intake": {
        "nl": "Plan zakelijk gesprek",
        "en": "Book business call",
        "de": "Beratungsgespräch planen",
        "fr": "Planifier l'entretien commercial",
        "es": "Reservar llamada comercial",
        "it": "Prenota una chiamata commerciale",
        "pt": "Agendar chamada comercial",
        "ru": "Запланировать деловой звонок",
    },
    "Mostrar los servicios": {
        "es": "Mostrar los servicios",
        "it": "Vedi servizi",
        "pt": "Ver serviços",
        "ru": "Показать услуги",
    },
    "Correo electrónico": {"pt": "E-mail", "ru": "Электронная почта"},
    'Unternehmen" è tedesco, non olandese. La traduzione spagnola di "Unternehmen" è "empresa".': {
        "pt": "Empresa",
        "ru": "Компания",
    },
    'Poiché l\'input "Unverbindliche Erstberatung" è in tedesco (non in olandese), la traduzione in spagnolo è: "Consulta inicial sin compromiso".': {
        "it": "Senza impegno",
        "pt": "Sem compromisso",
        "ru": "Без обязательств",
        "es": "Consulta inicial sin compromiso",
    },
}
def _contains_fragment(text: str, fragment: str) -> bool:
if re.fullmatch(r"[\wÀ-ÿ-]+", fragment, flags=re.UNICODE):
pattern = re.compile(rf"(?<![\wÀ-ÿ-]){re.escape(fragment)}(?![\wÀ-ÿ-])", re.UNICODE)
return bool(pattern.search(text))
return fragment in text
def validate_patterns(locale_code: str, field_path: str, normalized: str):
    """Collect localization issues found in one normalized field value.

    Scans ``normalized`` (the already-normalized text of ``field_path``) for
    globally bad fragments, locale-forbidden fragments, generic badge labels,
    and rewrite candidates. Returns a list of issue records built by
    ``make_issue``. The pattern tables (GLOBAL_BAD_PATTERNS, LOCALE_FORBIDDEN,
    GENERIC_BADGE_LABELS, GLOBAL_REWRITE_CANDIDATES, LOCALE_REWRITE_CANDIDATES)
    and the helpers are module-level definitions outside this function.
    """
    issues = []
    # Fragments that are wrong in every locale.
    for fragment in GLOBAL_BAD_PATTERNS:
        if _contains_fragment(normalized, fragment):
            issues.append(
                make_issue(
                    "known_bad_pattern",
                    field_path,
                    fragment,
                    # Suggested replacement when one is known for this locale.
                    KNOWN_REPLACEMENTS.get(fragment, {}).get(locale_code, ""),
                )
            )
    # Fragments that indicate text in the wrong language for this locale.
    for fragment in LOCALE_FORBIDDEN.get(locale_code, ()):
        if _contains_fragment(normalized, fragment):
            issues.append(
                make_issue(
                    "wrong_language_fragment",
                    field_path,
                    fragment,
                    KNOWN_REPLACEMENTS.get(fragment, {}).get(locale_code, ""),
                )
            )
    # Whole-value check: generic badge labels are flagged unless they are the
    # canonical system string for this locale.
    if normalized in GENERIC_BADGE_LABELS and not is_canonical_system_string(locale_code, normalized):
        issues.append(
            make_issue(
                "generic_badge_label",
                field_path,
                normalized,
                system_string_replacement(locale_code, normalized),
            )
        )
    # Global rewrite candidates map fragment -> issue type; canonical system
    # strings are exempt here.
    for fragment, issue_type in GLOBAL_REWRITE_CANDIDATES.items():
        if _contains_fragment(normalized, fragment):
            if is_canonical_system_string(locale_code, fragment):
                continue
            issues.append(
                make_issue(
                    issue_type,
                    field_path,
                    fragment,
                    system_string_replacement(locale_code, fragment),
                )
            )
    # Locale-specific rewrite candidates (no canonical-string exemption).
    for fragment, issue_type in LOCALE_REWRITE_CANDIDATES.get(locale_code, {}).items():
        if _contains_fragment(normalized, fragment):
            issues.append(
                make_issue(
                    issue_type,
                    field_path,
                    fragment,
                    system_string_replacement(locale_code, fragment),
                )
            )
    return issues

7
mandelstudio/apps.py Normal file
View File

@@ -0,0 +1,7 @@
from django.apps import AppConfig
class MandelstudioConfig(AppConfig):
    """Django application configuration for the ``mandelstudio`` app."""

    # 64-bit auto primary keys for models defined in this app.
    default_auto_field = "django.db.models.BigAutoField"
    name = "mandelstudio"
    verbose_name = "Mandelstudio"

View File

@@ -0,0 +1 @@
from mandelblog_content_guard import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.agents import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.agents.base import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.agents.de import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.agents.en import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.agents.es import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.agents.fr import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.agents.it import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.agents.nl import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.agents.pt import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.agents.ru import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.ai import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.hooks import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.mixins import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.normalizers import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.normalizers.de import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.normalizers.en import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.normalizers.es import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.normalizers.it import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.normalizers.nl import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.normalizers.ru import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.settings import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.signals import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.system_strings import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.types import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.validators import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.validators.multilingual import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.validators.rules import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.validators.rules.cta import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.validators.rules.forms import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.validators.rules.language import * # noqa: F401,F403

View File

@@ -0,0 +1 @@
from mandelblog_content_guard.validators.rules.patterns import * # noqa: F401,F403

View File

@@ -0,0 +1,38 @@
from __future__ import annotations
from typing import Iterable
# Case-insensitive substrings that identify demo/placeholder content.
DEMO_MARKERS: tuple[str, ...] = (
    "demo",
    "dummy",
    "sample",
    "lorem",
    "placeholder",
    "sandbox",
    "staging",
    "prototype",
    "template-only",
)
# Known legacy/demo pages that should never surface on production.
BLOCKED_DEMO_PAGE_SLUGS: tuple[str, ...] = (
    "starter-website-2",
    "business-website-2",
)
def contains_demo_marker(values: Iterable[str | None]) -> bool:
    """Return True when any non-empty value contains a demo marker (case-insensitive)."""
    lowered_values = (value.lower() for value in values if value)
    return any(
        marker in lowered
        for lowered in lowered_values
        for marker in DEMO_MARKERS
    )
def is_blocked_demo_slug(value: str | None) -> bool:
    """Return True when ``value`` matches a blocked legacy demo slug (case-insensitive)."""
    return bool(value) and value.lower() in BLOCKED_DEMO_PAGE_SLUGS

View File

@@ -0,0 +1,465 @@
from __future__ import annotations
from dataclasses import dataclass
from decimal import Decimal
from django.conf import settings
from django.db.models import Q
from django.utils.text import slugify
from oscar.core.loading import get_model
from mandelstudio.content_hygiene import DEMO_MARKERS
# Oscar product class used for sellable "idea" products (digital: no shipping,
# no stock tracking — see _ensure_product_class).
IDEA_PRODUCT_CLASS_NAME = "Idea Product"
# Category all idea products are grouped under.
DIGITAL_IDEAS_CATEGORY_NAME = "Digital Ideas"
# Attribute codes holding the free preview and the paid full blueprint.
SHORT_DESCRIPTION_ATTRIBUTE_CODE = "short_description"
FULL_DESCRIPTION_ATTRIBUTE_CODE = "full_description"
# Fulfilment partner that owns the idea stock records.
IDEA_PARTNER_NAME = "Mandel Blog Studio"
@dataclass(frozen=True)
class IdeaSeedItem:
    """One canonical idea product seeded into the catalogue."""

    # Product title; also the identity key used when re-seeding (products are
    # matched by title in seed_idea_marketplace_products).
    title: str
    # Free preview copy shown before purchase.
    short_description: str
    # Full blueprint text unlocked after purchase.
    full_description: str
    # Unit price in EUR (excl. tax) written to the stock record.
    price_eur: Decimal
IDEA_PRODUCTS: tuple[IdeaSeedItem, ...] = (
IdeaSeedItem(
title="B2B Webshop Starter Blueprint",
short_description=(
"Launch a B2B webshop with a quote-first buying flow and enterprise-ready trust structure. "
"Get a clear execution path from positioning to first qualified orders."
),
full_description=(
"Introduction\n"
"A practical B2B ecommerce blueprint for teams that need to sell complex offers with confidence.\n\n"
"Problem it solves\n"
"- Generic webshop setups underperform in B2B because they ignore quote-first journeys and multi-stakeholder buying.\n"
"- Sales and marketing handoff is often unclear, which slows deal velocity.\n\n"
"Step-by-step concept\n"
"1. Define ICP and buying committee signals.\n"
"2. Map quote-first vs direct checkout decision rules.\n"
"3. Build page architecture for trust, proof, and qualification.\n"
"4. Implement lead-to-order routing between website and sales ops.\n"
"5. Run a 90-day optimization loop with conversion checkpoints.\n\n"
"Tech stack\n"
"- Django + Oscar commerce core\n"
"- Wagtail CMS for structured sales content\n"
"- Analytics and event tracking for funnel visibility\n\n"
"Business value\n"
"- Faster sales-qualified lead capture\n"
"- Lower friction for enterprise buyers\n"
"- Higher conversion from product page to qualified pipeline\n\n"
"Who it is for\n"
"Founders, growth teams, and B2B operators launching or rebuilding a serious ecommerce motion."
),
price_eur=Decimal("99.00"),
),
IdeaSeedItem(
title="AI Product Description System",
short_description=(
"Scale product copy with AI while preserving brand tone, SEO intent, and quality control. "
"Turn catalog chaos into a repeatable content engine your team can trust."
),
full_description=(
"Introduction\n"
"A production content system for generating and governing high-quality product descriptions at scale.\n\n"
"Problem it solves\n"
"- Manual copywriting does not scale across growing catalogs.\n"
"- Uncontrolled AI output introduces inconsistency and factual risk.\n\n"
"Step-by-step concept\n"
"1. Define attribute schema and content rules per category.\n"
"2. Build prompt templates linked to taxonomy fields.\n"
"3. Add QA gates for accuracy, tone, and compliance.\n"
"4. Localize with multilingual adaptation rules.\n"
"5. Monitor quality with an editorial review workflow.\n\n"
"Tech stack\n"
"- Django/Wagtail content governance\n"
"- AI model orchestration with prompt templates\n"
"- Validation layer for quality and policy checks\n\n"
"Business value\n"
"- Faster time-to-publish for new products\n"
"- Consistent conversion-focused copy\n"
"- Reduced editorial costs with better control\n\n"
"Who it is for\n"
"Ecommerce teams, marketplaces, and catalog-heavy brands that need reliable AI-assisted copy operations."
),
price_eur=Decimal("49.00"),
),
IdeaSeedItem(
title="High-Converting Landing Page Framework",
short_description=(
"Build landing pages that convert with a proven structure for message clarity, proof, and CTA flow. "
"Stop guessing and launch with a repeatable conversion framework."
),
full_description=(
"Introduction\n"
"A practical landing-page framework focused on conversion, not visual noise.\n\n"
"Problem it solves\n"
"- Teams often launch pages without a clear conversion narrative.\n"
"- Weak proof and CTA sequencing create drop-off before action.\n\n"
"Step-by-step concept\n"
"1. Align offer with one core audience intent.\n"
"2. Build headline and subheadline hierarchy.\n"
"3. Add objection-handling proof blocks and trust signals.\n"
"4. Design CTA progression for low and high intent visitors.\n"
"5. Define test plan for copy, layout, and offer variants.\n\n"
"Tech stack\n"
"- Wagtail page composition\n"
"- Bootstrap 5 component patterns\n"
"- Event tracking for funnel diagnostics\n\n"
"Business value\n"
"- Higher lead quality from the same traffic\n"
"- Faster launch cycles with reusable page logic\n"
"- Better conversion through structured experimentation\n\n"
"Who it is for\n"
"Service businesses, SaaS teams, and agencies that rely on landing pages for growth."
),
price_eur=Decimal("29.00"),
),
IdeaSeedItem(
title="Subscription-Based Service Website Model",
short_description=(
"Design a subscription service website that improves activation, retention, and recurring revenue. "
"Package offers clearly and reduce churn with lifecycle-aware UX."
),
full_description=(
"Introduction\n"
"A complete website model for subscription-first service businesses.\n\n"
"Problem it solves\n"
"- Subscription sites often sell features, not ongoing outcomes.\n"
"- Poor onboarding and renewal communication increases churn risk.\n\n"
"Step-by-step concept\n"
"1. Structure offer tiers by business outcome and support level.\n"
"2. Build onboarding pages for fast activation.\n"
"3. Add lifecycle messaging for renewal and expansion.\n"
"4. Map churn-risk touchpoints and intervention moments.\n"
"5. Track retention metrics and optimize plan positioning.\n\n"
"Tech stack\n"
"- Django + Oscar for billing-ready commerce foundations\n"
"- Wagtail for lifecycle content and onboarding assets\n"
"- Event instrumentation for retention analytics\n\n"
"Business value\n"
"- Improved activation-to-retention conversion\n"
"- More predictable recurring revenue\n"
"- Clearer upgrade path across plan tiers\n\n"
"Who it is for\n"
"Founders and operators running service subscriptions with monthly or annual plans."
),
price_eur=Decimal("69.00"),
),
IdeaSeedItem(
title="Marketplace Platform Architecture (Django)",
short_description=(
"Get a scalable marketplace architecture for Django from MVP to multi-vendor growth. "
"Includes domain boundaries, payments, moderation, and operations blueprint."
),
full_description=(
"Introduction\n"
"A technical blueprint for launching and scaling a marketplace platform on Django.\n\n"
"Problem it solves\n"
"- Marketplace projects fail when core domains and workflows are not separated early.\n"
"- Teams underestimate moderation, payout, and operational complexity.\n\n"
"Step-by-step concept\n"
"1. Define bounded domains for buyers, sellers, listings, and transactions.\n"
"2. Design catalog and search architecture for growth.\n"
"3. Implement payment orchestration and settlement flow.\n"
"4. Add moderation, permissions, and abuse controls.\n"
"5. Plan observability and phased scaling from MVP to expansion.\n\n"
"Tech stack\n"
"- Django service layer and domain modules\n"
"- Oscar commerce primitives where applicable\n"
"- Queue/events for async marketplace operations\n"
"- Monitoring and operational alerting baseline\n\n"
"Business value\n"
"- Lower re-architecture risk at scale\n"
"- Faster delivery of revenue-critical flows\n"
"- Better reliability for multi-sided operations\n\n"
"Who it is for\n"
"Technical founders, CTOs, and product teams building marketplace businesses with Django."
),
price_eur=Decimal("149.00"),
),
)
def _get_attribute_text(product, code: str) -> str:
value = (
product.attribute_values.select_related("attribute")
.filter(attribute__code=code)
.first()
)
if value is None:
return ""
for field_name in (
"value_text",
"value_richtext",
"value_option",
"value_file",
"value_image",
):
field_value = getattr(value, field_name, None)
if field_value:
return str(field_value)
return ""
def _set_attribute_text(product, attribute, text: str) -> None:
    """Store ``text`` on ``product`` for ``attribute``, saving only on change.

    Richtext attributes are written to ``value_richtext``; everything else
    goes to ``value_text``.
    """
    ProductAttributeValue = get_model("catalogue", "ProductAttributeValue")
    is_richtext = getattr(attribute, "type", "text") == "richtext"
    value_field = "value_richtext" if is_richtext else "value_text"
    attribute_value, _created = ProductAttributeValue.objects.get_or_create(
        product=product,
        attribute=attribute,
    )
    if getattr(attribute_value, value_field, "") == text:
        return
    setattr(attribute_value, value_field, text)
    attribute_value.save(update_fields=[value_field])
def is_idea_product(product) -> bool:
    """True when ``product`` belongs to the idea-product class."""
    product_class = getattr(product, "product_class", None)
    if not product_class:
        return False
    return product_class.name == IDEA_PRODUCT_CLASS_NAME
def get_idea_short_description(product) -> str:
    """Preview text: the short-description attribute, else the model description."""
    attribute_text = _get_attribute_text(product, SHORT_DESCRIPTION_ATTRIBUTE_CODE)
    if attribute_text:
        return attribute_text
    return getattr(product, "description", "") or ""
def get_idea_full_description(product) -> str:
    """Return the unlockable blueprint text stored on the full-description attribute."""
    return _get_attribute_text(product, FULL_DESCRIPTION_ATTRIBUTE_CODE)
def get_unlockable_description(product, user) -> tuple[str, bool]:
    """Return ``(description, unlocked)`` for ``user`` viewing ``product``.

    Unlocked users see the full description (falling back to the short one
    when the full text is empty); everyone else sees the short description.
    """
    if user_has_unlocked_idea(user, product):
        full_text = get_idea_full_description(product)
        return (full_text or get_idea_short_description(product), True)
    return (get_idea_short_description(product), False)
def user_has_unlocked_idea(user, product) -> bool:
    """Return True when ``user`` may read the full description of ``product``.

    Anonymous users never unlock ideas; non-idea products are always
    considered unlocked. For idea products we look for an order line whose
    order status is a known paid/complete status, then fall back to
    payment-event evidence so unlocking still works when status names differ
    per provider.
    """
    if not getattr(user, "is_authenticated", False):
        return False
    if not is_idea_product(product):
        return True
    Line = get_model("order", "Line")
    PaymentEventQuantity = get_model("order", "PaymentEventQuantity")
    paid_statuses = {
        getattr(settings, "OSCAR_PAID_ORDER_STATUS", None),
        getattr(settings, "OSCAR_COMPLETE_ORDER_STATUS", None),
        "paid",
        "complete",
        "payment-complete",
        "delayed-payment",
    }
    paid_statuses = {
        status.strip().lower() for status in paid_statuses if isinstance(status, str) and status.strip()
    }
    # Match each known status case-insensitively. The previous implementation
    # compared the lowercased set with a case-sensitive ``__in`` lookup, so
    # capitalized statuses such as "Delayed-Payment" slipped through (the
    # icontains fallbacks only cover "paid"/"complete" substrings).
    status_query = Q(order__status__icontains="paid") | Q(order__status__icontains="complete")
    for status in paid_statuses:
        status_query |= Q(order__status__iexact=status)
    status_match = Line.objects.filter(
        order__user=user,
        product_id=product.id,
    ).filter(status_query)
    if status_match.exists():
        return True
    # Fallback to payment event evidence so unlocking still works when status
    # names differ per provider.
    return PaymentEventQuantity.objects.filter(
        line__order__user=user,
        line__product_id=product.id,
        quantity__gt=0,
    ).exists()
def _ensure_digital_ideas_category():
    """Return the "Digital Ideas" category, creating it if missing.

    Creation handles several Category implementations in order: a
    treebeard-style tree (add as child of the first root, or as a new root),
    then a plain-model fallback where only ``name``/``slug`` are set.
    """
    Category = get_model("catalogue", "Category")
    existing = Category.objects.filter(name=DIGITAL_IDEAS_CATEGORY_NAME).first()
    if existing:
        return existing
    # Treebeard-backed categories expose depth/path; pick the first root node.
    root = (
        Category.objects.filter(depth=1).order_by("path").first()
        if hasattr(Category, "depth")
        else None
    )
    if root and hasattr(root, "add_child"):
        return root.add_child(name=DIGITAL_IDEAS_CATEGORY_NAME)
    if hasattr(Category, "add_root"):
        return Category.add_root(name=DIGITAL_IDEAS_CATEGORY_NAME)
    # Plain-model fallback (no tree API available).
    category = Category(name=DIGITAL_IDEAS_CATEGORY_NAME)
    if hasattr(category, "slug"):
        category.slug = slugify(DIGITAL_IDEAS_CATEGORY_NAME)
    category.save()
    return category
def _ensure_product_class():
    """Return the "Idea Product" class, enforcing digital-product semantics.

    Idea products are digital-only: they must not require shipping and should
    not track stock. ``get_or_create`` defaults only apply on creation, so an
    existing class is normalized here as well. The previous implementation
    only corrected ``requires_shipping`` and left a pre-existing class with
    ``track_stock=True`` inconsistent with the creation defaults.
    """
    ProductClass = get_model("catalogue", "ProductClass")
    product_class, _created = ProductClass.objects.get_or_create(
        name=IDEA_PRODUCT_CLASS_NAME,
        defaults={
            "requires_shipping": False,
            "track_stock": False,
        },
    )
    dirty_fields: list[str] = []
    if product_class.requires_shipping:
        product_class.requires_shipping = False
        dirty_fields.append("requires_shipping")
    if product_class.track_stock:
        product_class.track_stock = False
        dirty_fields.append("track_stock")
    if dirty_fields:
        product_class.save(update_fields=dirty_fields)
    return product_class
def _ensure_product_attributes(product_class):
    """Ensure the short/full description attributes exist for the idea class.

    Returns ``(short_attr, full_attr)``; existing attributes are reused,
    missing ones are created with the given defaults.
    """
    ProductAttribute = get_model("catalogue", "ProductAttribute")
    text_type = getattr(ProductAttribute, "TEXT", "text")
    richtext_type = getattr(ProductAttribute, "RICHTEXT", "richtext")
    specs = (
        (SHORT_DESCRIPTION_ATTRIBUTE_CODE, "Short description", text_type),
        (FULL_DESCRIPTION_ATTRIBUTE_CODE, "Full description", richtext_type),
    )
    attributes = []
    for code, display_name, attribute_type in specs:
        attribute, _created = ProductAttribute.objects.get_or_create(
            product_class=product_class,
            code=code,
            defaults={
                "name": display_name,
                "type": attribute_type,
                "required": False,
            },
        )
        attributes.append(attribute)
    return tuple(attributes)
def _ensure_partner():
    """Return the fulfilment partner owning idea stock records, creating it if needed."""
    Partner = get_model("partner", "Partner")
    partner, _ = Partner.objects.get_or_create(name=IDEA_PARTNER_NAME)
    return partner
def _upsert_stockrecord(product, partner, price_eur: Decimal):
    """Create or refresh the EUR stock record backing an idea product.

    Every field is forced back to the canonical value; the record is only
    saved when at least one field actually drifted.
    """
    StockRecord = get_model("partner", "StockRecord")
    desired = {
        "partner_sku": f"idea-{product.id}",
        "price_currency": "EUR",
        "price_excl_tax": price_eur,
        "num_in_stock": 99999,
    }
    stockrecord, _created = StockRecord.objects.get_or_create(
        product=product,
        partner=partner,
        defaults=desired,
    )
    changed = [
        field_name
        for field_name, desired_value in desired.items()
        if getattr(stockrecord, field_name, None) != desired_value
    ]
    for field_name in changed:
        setattr(stockrecord, field_name, desired[field_name])
    if changed:
        stockrecord.save(update_fields=changed)
def seed_idea_marketplace_products(
    *, purge_demo_products: bool = True, retire_non_idea_products: bool = True
) -> dict[str, int]:
    """Create/refresh the canonical idea products and clean up the catalogue.

    For each entry in IDEA_PRODUCTS the product is matched by title and
    created or updated in place (class, description, slug, visibility,
    category membership, description attributes, EUR stock record).

    When ``purge_demo_products`` is True, products whose title/slug contains
    a demo marker — plus any non-canonical product in the idea class or the
    Digital Ideas category — are deleted. When ``retire_non_idea_products``
    is True, every remaining public non-idea product is unpublished.

    Returns counts: ``created``, ``updated``, ``deleted_demo``,
    ``retired_non_idea``.
    """
    Product = get_model("catalogue", "Product")
    product_class = _ensure_product_class()
    category = _ensure_digital_ideas_category()
    short_attr, full_attr = _ensure_product_attributes(product_class)
    partner = _ensure_partner()
    created = 0
    updated = 0
    for item in IDEA_PRODUCTS:
        # The title is the stable identity key used across re-seeds.
        product = Product.objects.filter(title=item.title).first()
        if product is None:
            product = Product(
                title=item.title,
                slug=slugify(item.title),
                product_class=product_class,
                description=item.short_description,
            )
            # hasattr guards keep this working across Oscar model variants.
            if hasattr(Product, "STANDALONE") and hasattr(product, "structure"):
                product.structure = Product.STANDALONE
            if hasattr(product, "is_public") and not getattr(product, "is_public", False):
                product.is_public = True
            product.save()
            created += 1
        else:
            # Update only fields that drifted from the canonical seed data.
            dirty_fields: list[str] = []
            if product.product_class_id != product_class.id:
                product.product_class = product_class
                dirty_fields.append("product_class")
            if product.description != item.short_description:
                product.description = item.short_description
                dirty_fields.append("description")
            if hasattr(product, "slug") and product.slug != slugify(item.title):
                product.slug = slugify(item.title)
                dirty_fields.append("slug")
            if hasattr(product, "is_public") and not getattr(product, "is_public", False):
                product.is_public = True
                dirty_fields.append("is_public")
            if dirty_fields:
                product.save(update_fields=dirty_fields)
            # Counted as updated even when nothing drifted.
            updated += 1
        product.categories.add(category)
        _set_attribute_text(product, short_attr, item.short_description)
        _set_attribute_text(product, full_attr, item.full_description)
        _upsert_stockrecord(product, partner, item.price_eur)
    deleted_demo = 0
    if purge_demo_products:
        keep_titles = {item.title for item in IDEA_PRODUCTS}
        demo_filter = Q()
        for marker in DEMO_MARKERS:
            demo_filter |= Q(title__icontains=marker) | Q(slug__icontains=marker)
        demo_queryset = Product.objects.filter(demo_filter).exclude(title__in=keep_titles)
        # Also purge any non-canonical products lingering in the Idea Product class
        # or explicitly grouped under the Digital Ideas category.
        non_canonical_ideas_queryset = (
            Product.objects.filter(
                Q(product_class=product_class)
                | Q(categories__name__iexact=DIGITAL_IDEAS_CATEGORY_NAME)
            )
            .exclude(title__in=keep_titles)
            .distinct()
        )
        delete_ids = set(demo_queryset.values_list("id", flat=True)) | set(
            non_canonical_ideas_queryset.values_list("id", flat=True)
        )
        deleted_demo = len(delete_ids)
        if deleted_demo:
            Product.objects.filter(id__in=delete_ids).delete()
    retired_non_idea = 0
    if retire_non_idea_products:
        keep_titles = {item.title for item in IDEA_PRODUCTS}
        # Hide rather than delete: non-idea products stay in the database.
        non_idea_public_qs = Product.objects.exclude(title__in=keep_titles).filter(
            is_public=True
        )
        retired_non_idea = non_idea_public_qs.update(is_public=False)
    return {
        "created": created,
        "updated": updated,
        "deleted_demo": deleted_demo,
        "retired_non_idea": retired_non_idea,
    }

View File

@@ -1,5 +1,10 @@
import os
import sys
from pathlib import Path
PROJECT_ROOT = Path(__file__).resolve().parent.parent
if str(PROJECT_ROOT) not in sys.path:
sys.path.insert(0, str(PROJECT_ROOT))
def _patch_legacy_django_translation_aliases():

View File

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,3 @@
from mandelblog_content_guard.management.commands.audit_locales import (
Command, # noqa: F401
)

View File

@@ -0,0 +1,226 @@
from __future__ import annotations
from django.core.management.base import BaseCommand
from django.db import transaction
from django.db.models import Q
from wagtail.blocks import StreamValue
from wagtail.models import Page
from mandelstudio.content_hygiene import BLOCKED_DEMO_PAGE_SLUGS, DEMO_MARKERS
from mandelstudio.idea_marketplace import seed_idea_marketplace_products
HOME_COPY = {
"nl": {
"badge": "IDEA MARKETPLACE",
"headline": "Premium ideeën die je direct kunt uitvoeren",
"sub_headline": "<p>Ontdek bewezen plannen, koop de strategie en ontgrendel het volledige implementatieplan.</p>",
"features_title": "Idea Marketplace",
"features_subtitle": "<p>Preview eerst. Koop alleen wat past. Ontgrendel daarna de complete blueprint.</p>",
"footer_headline": "Klaar om een premium idee te ontgrendelen?",
"footer_subheadline": "<p>Kies een plan, rond checkout af en krijg direct toegang tot de volledige strategie.</p>",
"cta_explore": "Explore Ideas",
"cta_buy": "Buy Strategy",
"cta_unlock": "Unlock Full Plan",
},
"en": {
"badge": "IDEA MARKETPLACE",
"headline": "Premium ideas you can execute immediately",
"sub_headline": "<p>Explore proven plans, buy the strategy, and unlock the full implementation blueprint.</p>",
"features_title": "Idea Marketplace",
"features_subtitle": "<p>Preview first. Buy what fits. Unlock complete execution plans after checkout.</p>",
"footer_headline": "Ready to unlock a premium idea?",
"footer_subheadline": "<p>Select a plan, complete checkout, and get full strategy access instantly.</p>",
"cta_explore": "Explore Ideas",
"cta_buy": "Buy Strategy",
"cta_unlock": "Unlock Full Plan",
},
}
# Locales the site serves; anything outside this set falls back to "nl".
SUPPORTED_LANGUAGES = {"nl", "en", "de", "fr", "es", "it", "pt", "ru"}
def _copy_for(language_code: str) -> dict[str, str]:
    """Pick homepage copy for a locale: Dutch for nl/unknown, English otherwise."""
    base_language = (language_code or "nl").split("-")[0].lower()
    if base_language not in SUPPORTED_LANGUAGES:
        base_language = "nl"
    if base_language == "nl":
        return HOME_COPY["nl"]
    return HOME_COPY["en"]
def _shop_url_for(language_code: str) -> str:
normalized = (language_code or "nl").split("-")[0].lower()
if normalized == "nl":
return "/shop/"
return f"/{normalized}/shop/"
def _update_homepage_stream(page) -> bool:
    """Rewrite a homepage's marketplace stream blocks; save only on change.

    Updates hero-banner, features, and CTA-footer blocks with locale copy from
    HOME_COPY and the locale-specific shop URL, then refreshes the page's
    search description. Returns True when the page was modified and saved.

    NOTE(review): this reads ``body.stream_data``, which is deprecated in
    newer Wagtail releases in favour of ``raw_data`` — confirm against the
    project's Wagtail version.
    """
    if not hasattr(page, "body"):
        return False
    body = page.body
    if not body:
        return False
    copy = _copy_for(getattr(page.locale, "language_code", "nl"))
    shop_url = _shop_url_for(getattr(page.locale, "language_code", "nl"))
    # Mutate the raw block dicts in place; ``changed`` tracks any difference.
    stream_data = list(body.stream_data)
    changed = False
    for block in stream_data:
        block_type = block.get("type")
        value = block.get("value", {})
        if not isinstance(value, dict):
            continue
        if block_type == "saas_hero_banner":
            updates = {
                "badge_text": copy["badge"],
                "headline": copy["headline"],
                "sub_headline": copy["sub_headline"],
                "primary_cta_text": copy["cta_explore"],
                "primary_cta_url": shop_url,
                "secondary_cta_text": copy["cta_buy"],
                "secondary_cta_url": shop_url,
            }
            for key, new_value in updates.items():
                if value.get(key) != new_value:
                    value[key] = new_value
                    changed = True
        if block_type == "saas_features":
            updates = {
                "section_title": copy["features_title"],
                "section_subtitle": copy["features_subtitle"],
            }
            for key, new_value in updates.items():
                if value.get(key) != new_value:
                    value[key] = new_value
                    changed = True
        if block_type == "saas_cta_footer":
            updates = {
                "headline": copy["footer_headline"],
                "subheadline": copy["footer_subheadline"],
                "primary_cta_text": copy["cta_unlock"],
                "primary_cta_url": shop_url,
                "secondary_cta_text": copy["cta_explore"],
                "secondary_cta_url": shop_url,
            }
            for key, new_value in updates.items():
                if value.get(key) != new_value:
                    value[key] = new_value
                    changed = True
    if not changed:
        return False
    # Rebuild the StreamValue lazily from the mutated raw block data.
    page.body = StreamValue(page.body.stream_block, stream_data, is_lazy=True)
    page.search_description = (
        "Idea marketplace with premium plans. Preview each strategy and unlock full implementation after purchase."
    )
    page.save()
    return True
def _purge_demo_pages() -> int:
    """Delete non-root pages that look like demo content; return how many.

    Candidates are pages whose title/slug/search_description contains a demo
    marker, plus the explicitly blocked legacy slugs. Root and locale-root
    pages (depth <= 2) are never touched.

    Pages are re-fetched one id at a time before deleting: deleting a page
    also removes its descendants, so a candidate that was already removed by
    an ancestor's cascade is skipped instead of being deleted twice (the
    previous implementation iterated a pre-fetched queryset and could call
    delete() on stale instances).
    """
    marker_filter = Q()
    for marker in DEMO_MARKERS:
        marker_filter |= (
            Q(title__icontains=marker)
            | Q(slug__icontains=marker)
            | Q(search_description__icontains=marker)
        )
    candidate_ids = set(
        Page.objects.exclude(depth__lte=2).filter(marker_filter).values_list("id", flat=True)
    )
    candidate_ids.update(
        Page.objects.exclude(depth__lte=2)
        .filter(slug__in=BLOCKED_DEMO_PAGE_SLUGS)
        .values_list("id", flat=True)
    )
    deleted = 0
    for page_id in candidate_ids:
        page = Page.objects.filter(id=page_id).first()
        if page is None:
            # Already removed as a descendant of an earlier candidate.
            continue
        page.specific.delete()
        deleted += 1
    return deleted
def _update_homepages() -> int:
    """Refresh marketplace copy on each localized homepage; return how many changed."""
    # In this architecture localized homepages are expected at depth=2.
    homepages = Page.objects.filter(depth=2).specific()
    return sum(1 for homepage in homepages if _update_homepage_stream(homepage))
class Command(BaseCommand):
    """One-shot launch prep: seed idea products, purge demo pages, refresh homepage copy."""

    help = (
        "Prepare production idea marketplace launch: seed idea products, purge obvious demo pages, "
        "and refresh homepage sections/CTAs to marketplace messaging."
    )
    def add_arguments(self, parser):
        # Seeding, demo-page purge, and homepage copy refresh are all ON by
        # default; each step has an opt-out flag. NOTE(review): the positive
        # --purge-demo-pages / --apply-homepage-copy flags only matter when
        # combined with their --skip-* counterpart — on their own they are
        # redundant with the defaults.
        parser.add_argument(
            "--no-seed",
            action="store_true",
            help="Skip idea product seeding.",
        )
        parser.add_argument(
            "--purge-demo-pages",
            action="store_true",
            help="Delete pages with obvious demo/lorem/sample markers.",
        )
        parser.add_argument(
            "--skip-purge-demo-pages",
            action="store_true",
            help="Skip deleting obvious demo pages (enabled by default).",
        )
        parser.add_argument(
            "--apply-homepage-copy",
            action="store_true",
            help="Update homepage stream blocks to idea marketplace messaging and CTAs.",
        )
        parser.add_argument(
            "--skip-apply-homepage-copy",
            action="store_true",
            help="Skip homepage marketplace copy refresh (enabled by default).",
        )
    @transaction.atomic
    def handle(self, *args, **options):
        """Run the selected launch-prep steps inside one database transaction."""
        if not options["no_seed"]:
            seed_stats = seed_idea_marketplace_products(
                purge_demo_products=True,
                retire_non_idea_products=True,
            )
            self.stdout.write(
                self.style.SUCCESS(
                    "Seeded idea products: "
                    f"created={seed_stats['created']}, "
                    f"updated={seed_stats['updated']}, "
                    f"deleted_demo_products={seed_stats['deleted_demo']}, "
                    f"retired_non_idea_products={seed_stats['retired_non_idea']}"
                )
            )
        should_purge_demo_pages = (
            options["purge_demo_pages"] or not options["skip_purge_demo_pages"]
        )
        if should_purge_demo_pages:
            deleted_pages = _purge_demo_pages()
            self.stdout.write(
                self.style.SUCCESS(f"Deleted demo pages: {deleted_pages}")
            )
        should_apply_homepage_copy = (
            options["apply_homepage_copy"] or not options["skip_apply_homepage_copy"]
        )
        if should_apply_homepage_copy:
            updated_pages = _update_homepages()
            self.stdout.write(
                self.style.SUCCESS(
                    f"Updated homepages with marketplace copy: {updated_pages}"
                )
            )

View File

@@ -0,0 +1,120 @@
from __future__ import annotations
from typing import Iterable
from django.core.management.base import BaseCommand
from django.db.models import Q
from oscar.core.loading import get_model
from wagtail.models import Page
# The five canonical launch idea products; these must survive every purge.
IDEA_PRODUCT_TITLES = {
    "B2B Webshop Starter Blueprint",
    "AI Product Description System",
    "High-Converting Landing Page Framework",
    "Subscription-Based Service Website Model",
    "Marketplace Platform Architecture (Django)",
}
# Slugs of known demo/legacy pages (also matched against product slugs).
DEMO_PAGE_SLUGS = {
    "starter-website-2",
    "business-website-2",
    "starter-website",
    "business-website",
}
# Case-insensitive substrings that flag a title/slug as demo content.
DEMO_MARKERS = (
    "demo",
    "dummy",
    "sample",
    "placeholder",
    "starter website",
    "business website",
    "lorem ipsum",
)
def _build_demo_text_filter(fields: Iterable[str]) -> Q:
    """Build a Q that ORs a case-insensitive match of every demo marker over ``fields``."""
    clauses = (
        Q(**{f"{field}__icontains": marker})
        for field in fields
        for marker in DEMO_MARKERS
    )
    combined = Q()
    for clause in clauses:
        combined |= clause
    return combined
class Command(BaseCommand):
    """Manual demo-content purge across Wagtail pages and the Oscar catalogue."""

    help = (
        "Remove demo content from Wagtail pages and Oscar catalogue. "
        "Use --keep-only-idea-products to retain only the five launch idea products."
    )
    def add_arguments(self, parser):
        parser.add_argument(
            "--dry-run",
            action="store_true",
            default=False,
            help="Show what would be deleted without applying changes.",
        )
        parser.add_argument(
            "--keep-only-idea-products",
            action="store_true",
            default=False,
            help="Delete every top-level product except the five launch idea products.",
        )
    def handle(self, *args, **options):
        """Select demo products/pages, print a preview, then delete unless --dry-run."""
        dry_run: bool = options["dry_run"]
        keep_only_ideas: bool = options["keep_only_idea_products"]
        Product = get_model("catalogue", "Product")
        # A product is demo when a marker appears in its title or its slug is
        # a known demo slug; canonical idea products are always excluded.
        product_filter = _build_demo_text_filter(("title",))
        product_filter |= Q(slug__in=DEMO_PAGE_SLUGS)
        top_level_products = Product.objects.filter(parent__isnull=True)
        if keep_only_ideas:
            products_to_delete = top_level_products.exclude(title__in=IDEA_PRODUCT_TITLES)
        else:
            products_to_delete = top_level_products.filter(product_filter).exclude(
                title__in=IDEA_PRODUCT_TITLES
            )
        # Only live, public, non-root pages are candidates.
        pages_to_delete = (
            Page.objects.live()
            .public()
            .filter(depth__gt=2)
            .filter(Q(slug__in=DEMO_PAGE_SLUGS) | _build_demo_text_filter(("title", "slug")))
        )
        # Preview is capped at 30 entries per kind.
        product_preview = list(products_to_delete.values_list("id", "title")[:30])
        page_preview = list(pages_to_delete.values_list("id", "slug", "title")[:30])
        # NOTE(review): every .count() below issues its own query; the counts
        # could be cached in locals.
        self.stdout.write(f"Products matched for deletion: {products_to_delete.count()}")
        for item in product_preview:
            self.stdout.write(f" - product#{item[0]}: {item[1]}")
        if products_to_delete.count() > len(product_preview):
            self.stdout.write(" - ...")
        self.stdout.write(f"Pages matched for deletion: {pages_to_delete.count()}")
        for item in page_preview:
            self.stdout.write(f" - page#{item[0]}: /{item[1]}/ ({item[2]})")
        if pages_to_delete.count() > len(page_preview):
            self.stdout.write(" - ...")
        if dry_run:
            self.stdout.write(self.style.WARNING("Dry run completed. No data was deleted."))
            return
        deleted_products = products_to_delete.count()
        deleted_pages = pages_to_delete.count()
        products_to_delete.delete()
        # NOTE(review): deleting a page also removes its descendants; when one
        # matched page is a descendant of another matched page, the later
        # delete() targets an already-removed row — confirm Wagtail tolerates
        # this before running on a deep demo tree.
        for page in pages_to_delete:
            # Use Wagtail's delete to remove descendants and revisions safely.
            page.delete()
        self.stdout.write(
            self.style.SUCCESS(
                f"Demo purge complete. Deleted products={deleted_products}, pages={deleted_pages}."
            )
        )

View File

@@ -0,0 +1,44 @@
from __future__ import annotations
from django.core.management.base import BaseCommand
from django.db import transaction
from mandelstudio.idea_marketplace import seed_idea_marketplace_products
class Command(BaseCommand):
    """Seed the idea marketplace catalogue inside a single transaction."""

    help = (
        "Seed production-ready Oscar idea products and remove obvious demo products "
        "from the catalogue. By default, this also retires non-idea products from public "
        "listing."
    )
    def add_arguments(self, parser):
        # Both clean-up behaviours default to ON; the flags opt out.
        parser.add_argument(
            "--keep-demo-products",
            action="store_true",
            help="Do not delete demo/sample products.",
        )
        parser.add_argument(
            "--keep-non-idea-products-public",
            action="store_true",
            help="Do not retire non-idea products from public listing.",
        )
    @transaction.atomic
    def handle(self, *args, **options):
        """Delegate to seed_idea_marketplace_products and report the counts."""
        purge_demo = not options["keep_demo_products"]
        retire_non_idea = not options["keep_non_idea_products_public"]
        stats = seed_idea_marketplace_products(
            purge_demo_products=purge_demo,
            retire_non_idea_products=retire_non_idea,
        )
        self.stdout.write(
            self.style.SUCCESS(
                "Idea marketplace seeded: "
                f"created={stats['created']}, "
                f"updated={stats['updated']}, "
                f"deleted_demo={stats['deleted_demo']}, "
                f"retired_non_idea={stats['retired_non_idea']}"
            )
        )

View File

@@ -0,0 +1,232 @@
from __future__ import annotations
import json
import os
from pathlib import Path
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
from oscar.core.loading import get_model
from mandelstudio.content_hygiene import BLOCKED_DEMO_PAGE_SLUGS, DEMO_MARKERS
from mandelstudio.idea_marketplace import (
FULL_DESCRIPTION_ATTRIBUTE_CODE,
IDEA_PRODUCT_CLASS_NAME,
IDEA_PRODUCTS,
SHORT_DESCRIPTION_ATTRIBUTE_CODE,
)
class Command(BaseCommand):
    """Pre-launch smoke test for the idea marketplace.

    Each check raises CommandError on the first failure (fail-fast), so the
    command can gate a deploy pipeline: a zero exit code means payment
    configuration, catalogue integrity, digital-delivery flags and EUR
    pricing all passed.
    """

    help = (
        "Fail-fast launch validation for idea marketplace: payment provider, "
        "catalog integrity, digital/non-shipping behavior, and EUR pricing."
    )

    def handle(self, *args, **options):
        # Resolve Oscar/Wagtail models lazily so the command works with
        # whichever overrides of these apps the project has loaded.
        Product = get_model("catalogue", "Product")
        ProductClass = get_model("catalogue", "ProductClass")
        ProductAttribute = get_model("catalogue", "ProductAttribute")
        StockRecord = get_model("partner", "StockRecord")
        Page = get_model("wagtailcore", "Page")

        # --- App-level checks: payment/checkout plugins must be present. ---
        installed_apps = list(settings.INSTALLED_APPS)
        payment_apps = [app for app in installed_apps if "payment" in app.lower()]
        checkout_apps = [app for app in installed_apps if "checkout" in app.lower()]
        if not payment_apps:
            raise CommandError("No payment app found in INSTALLED_APPS.")
        if not checkout_apps:
            raise CommandError("No checkout app found in INSTALLED_APPS.")
        if not any("oscar_checkout" in app.lower() for app in checkout_apps):
            raise CommandError("Oscar checkout app is not active.")

        def _is_demo_data(value: str) -> bool:
            # Normalize to lowercase alphanumerics so "demo_data", "demo-data"
            # and "DemoData" all match the same marker.
            normalized = "".join(ch for ch in str(value).lower() if ch.isalnum())
            return "demodata" in normalized

        if any(_is_demo_data(app) for app in installed_apps):
            raise CommandError(
                "Demo data plugin detected in INSTALLED_APPS. Remove all demodata plugins before launch."
            )
        if any("dummy" in app.lower() for app in payment_apps):
            raise CommandError(
                "Dummy payment app detected in INSTALLED_APPS. Use a real provider plugin before production launch."
            )

        # --- Mollie configuration: require a real live API key. ---
        if any("mollie" in app.lower() for app in payment_apps):
            mollie_settings = (
                getattr(settings, "PAYMENT_MOLLIE", None)
                or getattr(settings, "payment_mollie", None)
                or {}
            )
            config_key = str(mollie_settings.get("api_key", "")).strip()
            env_key = str(os.environ.get("MOLLIE_API_KEY", "")).strip()
            # The environment variable wins over the settings file.
            effective_key = env_key or config_key
            if not effective_key or effective_key.upper() == "CHANGE_ME":
                raise CommandError(
                    "Mollie payment provider is enabled but no valid API key is configured. "
                    "Set MOLLIE_API_KEY or settings.payment_mollie.api_key to a real key."
                )
            if not effective_key.startswith("live_"):
                raise CommandError(
                    "Mollie key must be a live key for production launch (expected prefix 'live_')."
                )

        # --- ocyan.json sanity: no demo plugin, real domain/base URL. ---
        # NOTE(review): parents[2] assumes this module sits exactly two
        # package levels below the directory holding ocyan.json — confirm.
        config_path = Path(__file__).resolve().parents[2] / "ocyan.json"
        if config_path.exists():
            with config_path.open("r", encoding="utf-8") as handle:
                config_payload = json.load(handle)
            config_plugins = [str(plugin) for plugin in config_payload.get("ocyan_plugins", [])]
            if any(_is_demo_data(plugin) for plugin in config_plugins):
                raise CommandError(
                    "Demo data plugin detected in ocyan.json. Remove it before launch."
                )
            settings_payload = config_payload.get("settings", {})
            domain = str(settings_payload.get("django", {}).get("domain", "")).strip()
            shop_base_url = str(
                settings_payload.get("oscar", {}).get("shop_base_url", "")
            ).strip("/")
            if not domain or domain.upper() == "CHANGE_ME":
                raise CommandError(
                    "settings.django.domain is missing/placeholder in ocyan.json."
                )
            if not shop_base_url:
                raise CommandError(
                    "settings.oscar.shop_base_url is missing in ocyan.json."
                )

        # --- Currency and product-class invariants. ---
        currency = getattr(settings, "OSCAR_DEFAULT_CURRENCY", "EUR")
        if currency != "EUR":
            raise CommandError(f"OSCAR_DEFAULT_CURRENCY must be EUR, got '{currency}'.")
        product_class = ProductClass.objects.filter(name=IDEA_PRODUCT_CLASS_NAME).first()
        if product_class is None:
            raise CommandError(f"Missing ProductClass '{IDEA_PRODUCT_CLASS_NAME}'.")
        # Ideas are digital goods: the class must not demand shipping.
        if product_class.requires_shipping:
            raise CommandError("Idea Product class requires_shipping must be False.")
        short_attr_exists = ProductAttribute.objects.filter(
            product_class=product_class, code=SHORT_DESCRIPTION_ATTRIBUTE_CODE
        ).exists()
        full_attr_exists = ProductAttribute.objects.filter(
            product_class=product_class, code=FULL_DESCRIPTION_ATTRIBUTE_CODE
        ).exists()
        if not short_attr_exists or not full_attr_exists:
            raise CommandError(
                "Missing required idea product attributes: short_description and/or full_description."
            )

        # --- Seeded catalogue: every expected idea product exists, is public,
        # --- and does not require shipping. ---
        expected_titles = {item.title for item in IDEA_PRODUCTS}
        expected_prices = {item.title: item.price_eur for item in IDEA_PRODUCTS}
        found_products = Product.objects.filter(product_class=product_class)
        found_titles = set(found_products.values_list("title", flat=True))
        missing_titles = sorted(expected_titles - found_titles)
        if missing_titles:
            raise CommandError(f"Missing seeded idea products: {', '.join(missing_titles)}.")
        non_public_idea_titles = list(
            found_products.filter(title__in=expected_titles, is_public=False).values_list(
                "title", flat=True
            )
        )
        if non_public_idea_titles:
            raise CommandError(
                "Seeded idea products must be public to appear in the storefront. "
                f"Examples: {', '.join(sorted(non_public_idea_titles))}"
            )
        invalid_shipping_products = [
            product.title
            for product in found_products
            if getattr(product, "is_shipping_required", False)
        ]
        if invalid_shipping_products:
            raise CommandError(
                "Some idea products still require shipping; expected digital-only products: "
                + ", ".join(invalid_shipping_products)
            )

        # Validate each seeded idea has EUR stockrecord pricing in the expected range.
        invalid_stockrecords: list[str] = []
        missing_stockrecords: list[str] = []
        for product in found_products.filter(title__in=expected_titles):
            # Use the first (lowest-id) stockrecord as the canonical price.
            stockrecord = (
                StockRecord.objects.filter(product=product)
                .order_by("id")
                .first()
            )
            if stockrecord is None:
                missing_stockrecords.append(product.title)
                continue
            if stockrecord.price_currency != "EUR":
                invalid_stockrecords.append(
                    f"{product.title} (currency={stockrecord.price_currency})"
                )
                continue
            expected = expected_prices[product.title]
            actual = stockrecord.price_excl_tax
            if actual is None or actual != expected:
                invalid_stockrecords.append(
                    f"{product.title} (price_excl_tax={actual}, expected={expected})"
                )
                continue
            # NOTE(review): reachable only when actual == expected, so this
            # range check effectively guards the seed data itself.
            if actual < 29 or actual > 149:
                invalid_stockrecords.append(
                    f"{product.title} (out-of-range price_excl_tax={actual})"
                )
        if missing_stockrecords:
            raise CommandError(
                "Missing stockrecords for seeded idea products: "
                + ", ".join(sorted(missing_stockrecords))
            )
        if invalid_stockrecords:
            raise CommandError(
                "Invalid stockrecord pricing for seeded idea products: "
                + "; ".join(sorted(invalid_stockrecords))
            )

        # --- No non-idea products may remain publicly listed. ---
        # NOTE(review): matching is by title only; a non-idea product sharing
        # a title with a seeded idea would slip through — confirm acceptable.
        non_idea_public_titles = list(
            Product.objects.exclude(title__in=expected_titles)
            .filter(is_public=True)
            .values_list("title", flat=True)[:10]
        )
        if non_idea_public_titles:
            raise CommandError(
                "Non-idea products are still public. Retire them before launch. "
                f"Examples: {', '.join(non_idea_public_titles)}"
            )

        # --- No live demo-like CMS pages below the site roots (depth > 2). ---
        demo_page_filter = Q()
        for marker in DEMO_MARKERS:
            demo_page_filter |= (
                Q(title__icontains=marker)
                | Q(slug__icontains=marker)
                | Q(search_description__icontains=marker)
            )
        live_demo_pages = (
            Page.objects.live()
            .public()
            .exclude(depth__lte=2)
            .filter(demo_page_filter | Q(slug__in=BLOCKED_DEMO_PAGE_SLUGS))
            .values_list("title", "slug")[:10]
        )
        if live_demo_pages:
            formatted = ", ".join(f"{title} ({slug})" for title, slug in live_demo_pages)
            raise CommandError(
                "Demo-like pages are still live/public. Purge them before launch. "
                f"Examples: {formatted}"
            )

        self.stdout.write(
            self.style.SUCCESS(
                "Idea marketplace launch validation passed: "
                f"{len(found_titles)} products, EUR currency, checkout apps={checkout_apps}, "
                f"payment apps={payment_apps}."
            )
        )

View File

@@ -0,0 +1,60 @@
# Generated by Django 5.2.11 on 2026-03-25 16:37
import django.db.models.deletion
import uuid
import wagtail.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial schema: the LocalizedFooterContent snippet.

    The StreamField block_lookup tables below are serialized from the model's
    block declarations by makemigrations — do not edit them by hand.
    """

    initial = True

    dependencies = [
        ("wagtailcore", "0097_alter_page_locale_alter_page_translation_key"),
    ]

    operations = [
        migrations.CreateModel(
            name="LocalizedFooterContent",
            fields=[
                ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("title", models.CharField(default="Footer content", max_length=120)),
                ("translation_key", models.UUIDField(default=uuid.uuid4, editable=False)),
                (
                    "footer",
                    wagtail.fields.StreamField(
                        [("about_us", 2), ("text", 2), ("page_list", 4), ("SubscriptionBlock", 7)],
                        block_lookup={
                            0: ("wagtail.blocks.CharBlock", (), {"help_text": "Heading of the content block.", "label": "Heading", "required": False}),
                            1: ("wagtail.blocks.RichTextBlock", (), {}),
                            2: ("wagtail.blocks.StructBlock", [[("heading", 0), ("content", 1)]], {}),
                            3: ("wagtail.blocks.PageChooserBlock", (), {"help_text": "List pages below this page", "label": "Page"}),
                            4: ("wagtail.blocks.StructBlock", [[("heading", 0), ("page", 3)]], {}),
                            5: ("wagtail.blocks.CharBlock", (), {"label": "Title", "required": False}),
                            6: ("wagtail.blocks.TextBlock", (), {"label": "Description", "required": False}),
                            7: ("wagtail.blocks.StructBlock", [[("title", 5), ("description", 6)]], {}),
                        },
                        default=list,
                    ),
                ),
                (
                    "mini_footer",
                    wagtail.fields.StreamField(
                        [("text", 0)],
                        block_lookup={0: ("wagtail.blocks.RichTextBlock", (), {})},
                        default=list,
                    ),
                ),
                ("locale", models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name="+", to="wagtailcore.locale")),
                ("site", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name="localized_footer_contents", to="wagtailcore.site")),
            ],
            options={
                "verbose_name": "Localized footer content",
                "verbose_name_plural": "Localized footer contents",
                "abstract": False,
                "constraints": [models.UniqueConstraint(fields=("site", "locale"), name="unique_localized_footer_per_site_locale")],
                "unique_together": {("translation_key", "locale")},
            },
        ),
    ]

View File

@@ -0,0 +1,236 @@
from __future__ import annotations
import uuid
from django.db import migrations
# Hand-maintained footer copy per locale. Keys are language codes; each entry
# carries the about-us blurb, the two column headings, the link labels, and
# the mini-footer HTML template whose {placeholders} are filled from the URL
# map produced by build_urls().
CONTENT = {
    "nl": {
        "about": "<p>Wij bouwen snelle websites en webshops die je team zelf kan beheren. Van eerste lancering tot doorontwikkeling: helder, schaalbaar en zonder ruis.</p>",
        "links_heading": "Snelle links",
        "support_heading": "Help & support",
        "link_labels": {
            "about": "Over ons",
            "services": "Diensten",
            "projects": "Projecten",
            "contact": "Contact",
            "capabilities": "Mogelijkheden",
            "ai_search": "AI Search",
            "book_call": "Plan een gesprek",
        },
        "mini": "<p><a href=\"{contact}\">Contact</a> - <a href=\"{services}\">Diensten</a> - <a href=\"{projects}\">Projecten</a> - Copyright 2026 - MandelBlog Studio</p>",
    },
    "en": {
        "about": "<p>We build fast websites and webshops your team can manage without friction. From launch to growth, the setup stays clear, scalable, and easy to extend.</p>",
        "links_heading": "Quick links",
        "support_heading": "Help & support",
        "link_labels": {
            "about": "About us",
            "services": "Services",
            "projects": "Projects",
            "contact": "Contact",
            "capabilities": "Capabilities",
            "ai_search": "AI Search",
            "book_call": "Book a call",
        },
        "mini": "<p><a href=\"{contact}\">Contact</a> - <a href=\"{services}\">Services</a> - <a href=\"{projects}\">Projects</a> - Copyright 2026 - MandelBlog Studio</p>",
    },
    "de": {
        "about": "<p>Wir entwickeln schnelle Websites und Webshops, die Ihr Team selbst pflegen kann. Von der ersten Veröffentlichung bis zur Weiterentwicklung bleibt alles klar, skalierbar und wartbar.</p>",
        "links_heading": "Schnellzugriff",
        "support_heading": "Hilfe & Support",
        "link_labels": {
            "about": "Über uns",
            "services": "Dienstleistungen",
            "projects": "Projekte",
            "contact": "Kontakt",
            "capabilities": "Möglichkeiten",
            "ai_search": "KI-Suche",
            "book_call": "Gespräch planen",
        },
        "mini": "<p><a href=\"{contact}\">Kontakt</a> - <a href=\"{services}\">Dienstleistungen</a> - <a href=\"{projects}\">Projekte</a> - Copyright 2026 - MandelBlog Studio</p>",
    },
    "fr": {
        "about": "<p>Nous créons des sites web et des boutiques en ligne rapides que votre équipe peut gérer facilement. Du lancement à la croissance, tout reste clair, évolutif et simple à maintenir.</p>",
        "links_heading": "Accès rapide",
        "support_heading": "Aide & support",
        "link_labels": {
            "about": "À propos",
            "services": "Services",
            "projects": "Projets",
            "contact": "Contact",
            "capabilities": "Possibilités",
            "ai_search": "Recherche IA",
            "book_call": "Planifier un échange",
        },
        "mini": "<p><a href=\"{contact}\">Contact</a> - <a href=\"{services}\">Services</a> - <a href=\"{projects}\">Projets</a> - Copyright 2026 - MandelBlog Studio</p>",
    },
    "es": {
        "about": "<p>Construimos sitios web y tiendas online rápidas que tu equipo puede gestionar sin complicaciones. Desde el lanzamiento hasta el crecimiento, todo se mantiene claro, escalable y fácil de ampliar.</p>",
        "links_heading": "Accesos rápidos",
        "support_heading": "Ayuda y soporte",
        "link_labels": {
            "about": "Sobre nosotros",
            "services": "Servicios",
            "projects": "Proyectos",
            "contact": "Contacto",
            "capabilities": "Posibilidades",
            "ai_search": "Búsqueda con IA",
            "book_call": "Planificar una llamada",
        },
        "mini": "<p><a href=\"{contact}\">Contacto</a> - <a href=\"{services}\">Servicios</a> - <a href=\"{projects}\">Proyectos</a> - Copyright 2026 - MandelBlog Studio</p>",
    },
    "it": {
        "about": "<p>Realizziamo siti web e negozi online veloci che il tuo team può gestire in autonomia. Dal lancio alla crescita, tutto rimane chiaro, scalabile e semplice da estendere.</p>",
        "links_heading": "Link rapidi",
        "support_heading": "Aiuto e supporto",
        "link_labels": {
            "about": "Chi siamo",
            "services": "Servizi",
            "projects": "Progetti",
            "contact": "Contatto",
            "capabilities": "Possibilità",
            "ai_search": "Ricerca AI",
            "book_call": "Prenota una call",
        },
        "mini": "<p><a href=\"{contact}\">Contatto</a> - <a href=\"{services}\">Servizi</a> - <a href=\"{projects}\">Progetti</a> - Copyright 2026 - MandelBlog Studio</p>",
    },
    "pt": {
        "about": "<p>Criamos sites e lojas online rápidos que a sua equipa consegue gerir com autonomia. Do lançamento ao crescimento, tudo permanece claro, escalável e simples de evoluir.</p>",
        "links_heading": "Acesso rápido",
        "support_heading": "Ajuda e suporte",
        "link_labels": {
            "about": "Sobre nós",
            "services": "Serviços",
            "projects": "Projetos",
            "contact": "Contacto",
            "capabilities": "Possibilidades",
            "ai_search": "Pesquisa IA",
            "book_call": "Marcar conversa",
        },
        "mini": "<p><a href=\"{contact}\">Contacto</a> - <a href=\"{services}\">Serviços</a> - <a href=\"{projects}\">Projetos</a> - Copyright 2026 - MandelBlog Studio</p>",
    },
    "ru": {
        "about": "<p>Мы создаём быстрые сайты и интернет-магазины, которыми ваша команда может управлять самостоятельно. От запуска до развития всё остаётся понятным, масштабируемым и удобным для роста.</p>",
        "links_heading": "Быстрые ссылки",
        "support_heading": "Помощь и поддержка",
        "link_labels": {
            "about": "О нас",
            "services": "Услуги",
            "projects": "Проекты",
            "contact": "Контакт",
            "capabilities": "Возможности",
            "ai_search": "AI Search",
            "book_call": "Запланировать звонок",
        },
        "mini": "<p><a href=\"{contact}\">Контакт</a> - <a href=\"{services}\">Услуги</a> - <a href=\"{projects}\">Проекты</a> - Copyright 2026 - MandelBlog Studio</p>",
    },
}

# Canonical slugs of the Dutch (source-locale) pages the footer links to;
# build_urls() resolves each one to a translation via its translation_key.
SOURCE_SLUGS = {
    "about": "over-ons",
    "services": "diensten",
    "projects": "projecten",
    "contact": "contact",
    "capabilities": "mogelijkheden",
    "ai_search": "ai-search",
}
def build_urls(Page, code):
    """Resolve one URL per SOURCE_SLUGS key for the given language code.

    Each canonical Dutch page is looked up by slug; when a translation in
    ``code`` exists (shared translation_key) its URL is preferred. Missing
    pages or unusable URLs fall back to "/".
    """
    resolved = {}
    for key, slug in SOURCE_SLUGS.items():
        base_page = Page.objects.filter(
            locale__language_code="nl", slug=slug
        ).first()
        if not base_page:
            resolved[key] = "/"
            continue
        localized = Page.objects.filter(
            translation_key=base_page.translation_key,
            locale__language_code=code,
        ).first()
        target = localized or base_page
        resolved[key] = getattr(target, "url", None) or "/"
    return resolved
def make_footer_raw(code, urls):
    """Build the raw StreamField payload for the main footer in one locale.

    Returns three blocks: an about-us blurb, a quick-links column, and a
    help/support column. Hrefs come from ``urls``; labels and copy come from
    CONTENT[code].
    """
    locale_content = CONTENT[code]
    labels = locale_content["link_labels"]

    def _block(block_type, value):
        # Every StreamField block carries a fresh unique id.
        return {"type": block_type, "id": str(uuid.uuid4()), "value": value}

    quick_links = (
        f'<p><a href="{urls["about"]}">{labels["about"]}</a><br/>'
        f'<a href="{urls["services"]}">{labels["services"]}</a><br/>'
        f'<a href="{urls["projects"]}">{labels["projects"]}</a><br/>'
        f'<a href="{urls["contact"]}">{labels["contact"]}</a></p>'
    )
    support_links = (
        f'<p><a href="{urls["capabilities"]}">{labels["capabilities"]}</a><br/>'
        f'<a href="{urls["ai_search"]}">{labels["ai_search"]}</a><br/>'
        f'<a href="{urls["contact"]}">{labels["book_call"]}</a><br/>'
        f'<a href="mailto:info@mandelblog.com">info@mandelblog.com</a></p>'
    )
    return [
        _block("about_us", {"heading": "MandelBlog Studio", "content": locale_content["about"]}),
        _block("text", {"heading": locale_content["links_heading"], "content": quick_links}),
        _block("text", {"heading": locale_content["support_heading"], "content": support_links}),
    ]
def make_mini_raw(code, urls):
    """Build the raw StreamField payload for the one-block mini footer."""
    mini_html = CONTENT[code]["mini"].format(**urls)
    return [{"type": "text", "id": str(uuid.uuid4()), "value": mini_html}]
def seed_footer_content(apps, schema_editor):
    """Create or refresh one LocalizedFooterContent row per locale in CONTENT.

    No-op when no Wagtail Site exists yet (fresh databases may migrate before
    a site row is created).
    """
    LocalizedFooterContent = apps.get_model("mandelstudio", "LocalizedFooterContent")
    Site = apps.get_model("wagtailcore", "Site")
    Locale = apps.get_model("wagtailcore", "Locale")
    site = Site.objects.order_by("id").first()
    if site is None:
        return
    # NOTE(review): deliberately imports the real Page model instead of the
    # historical apps.get_model() version — build_urls() relies on live model
    # behavior (translation lookup, .url). This couples the migration to the
    # installed Wagtail version; confirm that is acceptable.
    from wagtail.models import Page
    # A single shared translation_key marks all locale rows as translations
    # of one another.
    translation_key = uuid.uuid4()
    for code in CONTENT.keys():
        locale, _ = Locale.objects.get_or_create(language_code=code)
        urls = build_urls(Page, code)
        LocalizedFooterContent.objects.update_or_create(
            site=site,
            locale=locale,
            defaults={
                "title": f"Footer content ({code})",
                "translation_key": translation_key,
                "footer": make_footer_raw(code, urls),
                "mini_footer": make_mini_raw(code, urls),
            },
        )
def reverse_seed(apps, schema_editor):
    """Reverse operation: drop every seeded footer snippet."""
    footer_model = apps.get_model("mandelstudio", "LocalizedFooterContent")
    footer_model.objects.all().delete()
class Migration(migrations.Migration):
    """Data migration: seed per-locale footer content for the default site."""

    dependencies = [("mandelstudio", "0001_initial")]

    operations = [migrations.RunPython(seed_footer_content, reverse_seed)]

View File

@@ -0,0 +1,51 @@
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add the locale-audit bookkeeping tables (LocaleAuditRun/-Issue)."""

    dependencies = [("mandelstudio", "0002_seed_localized_footer_content")]

    operations = [
        migrations.CreateModel(
            name="LocaleAuditRun",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("started_at", models.DateTimeField(auto_now_add=True)),
                ("finished_at", models.DateTimeField(blank=True, null=True)),
                ("locale_codes", models.JSONField(blank=True, default=list)),
                ("fix_enabled", models.BooleanField(default=False)),
                ("total_urls_checked", models.PositiveIntegerField(default=0)),
                ("issues_found", models.PositiveIntegerField(default=0)),
                ("pages_with_issues", models.PositiveIntegerField(default=0)),
                ("summary", models.JSONField(blank=True, default=dict)),
            ],
            options={"ordering": ["-started_at"]},
        ),
        migrations.CreateModel(
            name="LocaleAuditIssue",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("locale_code", models.CharField(max_length=12)),
                ("object_id", models.PositiveIntegerField(blank=True, null=True)),
                ("object_type", models.CharField(blank=True, max_length=128)),
                ("url", models.TextField(blank=True)),
                ("title", models.CharField(blank=True, max_length=255)),
                ("severity", models.CharField(max_length=16)),
                ("issue_type", models.CharField(max_length=64)),
                ("field_path", models.CharField(blank=True, max_length=512)),
                ("bad_value", models.TextField(blank=True)),
                ("replacement", models.TextField(blank=True)),
                ("fixed", models.BooleanField(default=False)),
                ("extra", models.JSONField(blank=True, default=dict)),
                (
                    "run",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="issues",
                        to="mandelstudio.localeauditrun",
                    ),
                ),
            ],
            options={"ordering": ["locale_code", "url", "field_path"]},
        ),
    ]

View File

104
mandelstudio/models.py Normal file
View File

@@ -0,0 +1,104 @@
import uuid
from django.db import models
from django.utils.translation import gettext_lazy as _
from wagtail.admin.panels import FieldPanel
from wagtail.blocks import RichTextBlock
from wagtail.contrib.settings.models import BaseSiteSetting
from wagtail.fields import StreamField
from wagtail.models import Locale, Site, TranslatableMixin
from wagtail.snippets.models import register_snippet
from ocyan.plugin.wagtail.block_plugin import get_extra_ocyan_settings_blocks
from ocyan.plugin.wagtail.blocks import (
AboutUsBlock,
HeadedPagelistBlock,
HeadedRichTextBlock,
)
from mandelblog_content_guard.mixins import MultilingualValidationMixin
@register_snippet
class LocalizedFooterContent(
    MultilingualValidationMixin, TranslatableMixin, models.Model
):
    """Per-site, per-locale footer content, editable as a Wagtail snippet."""

    # Admin-facing label for the snippet row.
    title = models.CharField(max_length=120, default="Footer content")
    # The Wagtail site this footer belongs to.
    site = models.ForeignKey(
        Site, on_delete=models.CASCADE, related_name="localized_footer_contents"
    )
    # NOTE(review): TranslatableMixin already declares `locale` and
    # `translation_key`; redefining them here risks a field clash with the
    # mixin — confirm this model loads cleanly under the current Django.
    locale = models.ForeignKey(Locale, on_delete=models.PROTECT, related_name="+")
    translation_key = models.UUIDField(default=uuid.uuid4, editable=False)
    # Main footer columns; other Ocyan plugins may contribute extra blocks.
    footer = StreamField(
        [
            ("about_us", AboutUsBlock()),
            ("text", HeadedRichTextBlock()),
            ("page_list", HeadedPagelistBlock()),
        ]
        + get_extra_ocyan_settings_blocks(),
        default=list,
        use_json_field=True,
    )
    # Slim bottom bar (copyright line, legal links).
    mini_footer = StreamField(
        [("text", RichTextBlock())],
        default=list,
        use_json_field=True,
    )

    panels = [
        FieldPanel("title"),
        FieldPanel("site"),
        FieldPanel("locale"),
        FieldPanel("footer"),
        FieldPanel("mini_footer"),
    ]

    class Meta(TranslatableMixin.Meta):
        verbose_name = _("Localized footer content")
        verbose_name_plural = _("Localized footer contents")
        # At most one footer snippet per (site, locale) pair.
        constraints = [
            models.UniqueConstraint(
                fields=["site", "locale"],
                name="unique_localized_footer_per_site_locale",
            ),
        ]

    def __str__(self):
        return f"{self.site.hostname} [{self.locale.language_code}]"
class LocaleAuditRun(models.Model):
    """One execution of the locale audit; aggregates counts over its issues."""

    started_at = models.DateTimeField(auto_now_add=True)
    # Null while the run is still in progress.
    finished_at = models.DateTimeField(null=True, blank=True)
    # Language codes this run covered.
    locale_codes = models.JSONField(default=list, blank=True)
    # True when the run was allowed to apply fixes rather than only report.
    fix_enabled = models.BooleanField(default=False)
    total_urls_checked = models.PositiveIntegerField(default=0)
    issues_found = models.PositiveIntegerField(default=0)
    pages_with_issues = models.PositiveIntegerField(default=0)
    # Free-form per-run aggregates.
    summary = models.JSONField(default=dict, blank=True)

    class Meta:
        # Newest runs first.
        ordering = ["-started_at"]
class LocaleAuditIssue(models.Model):
    """A single finding produced by a LocaleAuditRun."""

    run = models.ForeignKey(
        LocaleAuditRun, related_name="issues", on_delete=models.CASCADE
    )
    locale_code = models.CharField(max_length=12)
    # Loose reference to the audited object (no FK: the target may be any
    # model, or may have been deleted since the run).
    object_id = models.PositiveIntegerField(null=True, blank=True)
    object_type = models.CharField(max_length=128, blank=True)
    url = models.TextField(blank=True)
    title = models.CharField(max_length=255, blank=True)
    severity = models.CharField(max_length=16)
    issue_type = models.CharField(max_length=64)
    # Dotted/indexed path to the offending field within the object.
    field_path = models.CharField(max_length=512, blank=True)
    # The value that triggered the issue and its proposed replacement.
    bad_value = models.TextField(blank=True)
    replacement = models.TextField(blank=True)
    # True once the replacement has been applied.
    fixed = models.BooleanField(default=False)
    extra = models.JSONField(default=dict, blank=True)

    class Meta:
        ordering = ["locale_code", "url", "field_path"]

View File

@@ -2,7 +2,6 @@
"ocyan_plugins": [
"ocyan.plugin.contact_form",
"ocyan.plugin.cookie_jar",
"ocyan.plugin.demo_data",
"ocyan.plugin.django",
"ocyan.plugin.newsletter",
"ocyan.plugin.oscar",
@@ -15,7 +14,7 @@
"ocyan.plugin.oscar_partner",
"ocyan.plugin.oscar_shipping",
"ocyan.plugin.oscar_sequential_order_numbers",
"ocyan.plugin.payment_dummy",
"ocyan.plugin.payment_mollie",
"ocyan.plugin.roadrunner_bs5",
"ocyan.plugin.template_engine",
"ocyan.plugin.roadrunner_productchooser",
@@ -64,8 +63,23 @@
"en"
]
},
"ocyan_dummy_payment_plugin": {
"help_text": "Hit pay, to simulate payment."
"payment_mollie": {
"api_key": "CHANGE_ME",
"ideal": true,
"creditcard": true,
"paypal": true,
"bancontact": true,
"sofort": true,
"banktransfer": false,
"belfius": false,
"bitcoin": false,
"directdebit": false,
"eps": false,
"giftcard": false,
"giropay": false,
"inghomepay": false,
"kbc": false,
"mistercash": false
},
"oscar": {
"allow_anon_checkout": true,

View File

@@ -8,6 +8,8 @@ For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import importlib.util
import sys
from pathlib import Path
from configtype.jsonconfig import setup_search_paths
@@ -20,7 +22,53 @@ setup_search_paths("/etc/ocyan/", str(_project_app_path))
from ocyan.main.settings import * # pylint:disable=W0401,W0614
INSTALLED_APPS = ["mandelstudio"] + INSTALLED_APPS
INSTALLED_APPS = [
"mandelblog_content_guard.apps.MandelblogContentGuardConfig",
"mandelstudio",
] + INSTALLED_APPS
# Route through the project URL layer so MandelStudio can override
# sitemap/robots behavior while still delegating the main Ocyan routes.
ROOT_URLCONF = "mandelstudio.urls"
def _ensure_required_app(*candidates):
    """Ensure required plugin apps remain enabled when /etc/ocyan config omits them.

    Each candidate is a dotted app path. If any candidate is already in
    INSTALLED_APPS the requirement is satisfied; otherwise the first
    importable candidate is appended. Candidates that cannot even be probed
    (e.g. their parent package is not installed) are skipped instead of
    crashing settings import.
    """
    if any(app in INSTALLED_APPS for app in candidates):
        return
    for app in candidates:
        try:
            spec = importlib.util.find_spec(app)
        except (ImportError, ValueError):
            # find_spec() raises ModuleNotFoundError (an ImportError) when a
            # parent package is missing, and ValueError for a module whose
            # __spec__ is None; neither should abort settings loading.
            continue
        if spec:
            INSTALLED_APPS.append(app)
            return
# Carbasa (header/navigation) and Coyote (theme/compressor) must stay
# installed even when the deployment config omits them. Two candidates per
# plugin cover both dotted paths — presumably old and new package layouts;
# TODO confirm which one current releases use.
_ensure_required_app(
    "ocyan.plugin.carbasa.carbasa",
    "ocyan.plugin.carbasa",
)
_ensure_required_app(
    "ocyan.plugin.coyote.coyote",
    "ocyan.plugin.coyote",
)
# Keep Carbasa/Coyote defaults stable even when plugin settings are not
# injected early enough during startup on this deployment.
# Each of these uses globals().get(name, default): a value already injected
# by the plugin settings machinery wins; the literal here is only a fallback.
# Header style choices offered in the admin.
OXYAN_HEADER_OPTIONS = globals().get(
    "OXYAN_HEADER_OPTIONS",
    [
        ("basic", "Basic Header"),
        ("big", "Big Header"),
        ("mega", "Mega Header"),
    ],
)
# Dotted path of the compressor cache-key function provided by Coyote.
COMPRESS_CACHE_KEY_FUNCTION = globals().get(
    "COMPRESS_CACHE_KEY_FUNCTION",
    "ocyan.plugin.coyote.utils.get_compressor_cache_key",
)
# Dotted path resolved lazily to obtain Coyote theme definitions.
OXYAN_LAZY_THEME_DEFINITIONS = globals().get(
    "OXYAN_LAZY_THEME_DEFINITIONS",
    "ocyan.plugin.coyote.definitions.get_coyote_definitions",
)
# Enable request language negotiation.
if "django.middleware.locale.LocaleMiddleware" not in MIDDLEWARE:
@@ -64,3 +112,16 @@ ACTIVE_VERTICAL = "agency"
# Wagtail content internationalization in admin
WAGTAIL_I18N_ENABLED = True
WAGTAIL_CONTENT_LANGUAGES = LANGUAGES
# mandelblog_content_guard configuration: strict checking across every
# configured language, with rewriting enabled. Backend is None — presumably
# the guard then falls back to its default backend; verify against the
# mandelblog_content_guard docs.
CONTENT_GUARD_STRICT = True
CONTENT_GUARD_BLOCK_MEDIUM = False
CONTENT_GUARD_LOCALES = [code for code, _label in LANGUAGES]
CONTENT_GUARD_REWRITE_ENABLED = True
CONTENT_GUARD_REWRITE_BACKEND = None

# Under `manage.py test`, point template_engine at a local stub migration
# package so the suite does not depend on that plugin's migration history.
if "test" in sys.argv:
    MIGRATION_MODULES = globals().get("MIGRATION_MODULES", {}).copy()
    MIGRATION_MODULES["template_engine"] = (
        "mandelstudio.test_migrations.template_engine"
    )
    TEST_RUNNER = "django.test.runner.DiscoverRunner"

81
mandelstudio/sitemaps.py Normal file
View File

@@ -0,0 +1,81 @@
from django.contrib.sitemaps.views import index as sitemap_index_view
from django.contrib.sitemaps.views import sitemap as sitemap_section_view
from django.http import HttpResponse
from wagtail.models import Locale, Page
from ocyan.plugin.wagtail_oscar_integration.constants import CACHE_DURATION
from ocyan.plugin.wagtail_oscar_integration.sitemap import CategorySitemap
from ocyan.plugin.wagtail_oscar_integration.sitemap import ProductSitemap
from ocyan.plugin.wagtail_oscar_integration.sitemap import ShopSitemap
from ocyan.plugin.wagtail_oscar_integration.sitemap import WagtailSitemap as BaseWagtailSitemap
class WagtailSitemap(BaseWagtailSitemap):
    """Sitemap over the live, public pages of every locale's page tree.

    Unlike the base implementation, items() walks each locale's translated
    root page so all translated trees are represented in the sitemap.
    """

    def items(self):
        # The site's root page does not change per locale, so resolve it once
        # instead of once per Locale row (it was previously re-fetched inside
        # the loop).
        root_page = self.get_wagtail_site().root_page
        page_ids = []
        for locale in Locale.objects.all():
            translated_root_page = root_page.get_translation_or_none(locale)
            if translated_root_page is None:
                # No translated tree for this locale; skip it.
                continue
            locale_page_ids = (
                translated_root_page.get_descendants(inclusive=True)
                .live()
                .public()
                .order_by()
                .values_list("pk", flat=True)
            )
            page_ids.extend(locale_page_ids)
        if not page_ids:
            return []
        # Re-query so ordering, streamfield deferral and .specific() apply
        # uniformly across all locales' pages.
        return (
            Page.objects.filter(pk__in=page_ids)
            .live()
            .public()
            .defer_streamfields()
            .order_by("path")
            .specific()
        )
def gather_sitemaps():
    """Map sitemap section names to their Sitemap classes."""
    sections = {}
    sections["pages"] = WagtailSitemap
    sections["shop"] = ShopSitemap
    sections["products"] = ProductSitemap
    sections["categories"] = CategorySitemap
    return sections
def sitemap_index(request):
    """Serve the sitemap index listing every registered section."""
    sections = gather_sitemaps()
    return sitemap_index_view(
        request,
        sitemaps=sections,
        sitemap_url_name="sitemaps",
    )
def sitemap_section(request, section=None):
    """Serve one sitemap section (pages/shop/products/categories)."""
    sections = gather_sitemaps()
    return sitemap_section_view(
        request,
        sitemaps=sections,
        section=section,
    )
def robots_txt(request):
    """Serve a permissive robots.txt that points crawlers at the sitemap."""
    sitemap_url = request.build_absolute_uri("/sitemap.xml")
    body = f"User-agent: *\nAllow: /\nSitemap: {sitemap_url}\n"
    return HttpResponse(body, content_type="text/plain; charset=utf-8")

View File

@@ -0,0 +1,16 @@
{% extends "carbasa/headers/header.html" %}
{% load i18n oxyan category_tags ocyan_main ocyanjson wagtailsettings_tags %}
{# Carbasa header variant whose nav is built from the Wagtail page tree: brand block plus a dropdown menu two levels deep. #}
{% block nav %}
{# NOTE(review): menu_depth is assigned but not referenced below — confirm the base template consumes it. #}
{% ocyanjson "theme" "menu_depth" 1 as menu_depth %}
<div class="collapse navbar-collapse menu-bar page-menu-bar" id="navbarSupportedContent">
    <div class="brand-wrapper">
        {% include 'partials/brand.html' with big=True %}
    </div>
    <ul class="navbar-nav">
        {# Treat the site root as a pseudo-category and render its children. #}
        {% rootpage_as_category as page_tree_root %}
        {% category_tree 2 page_tree_root as page_tree_items %}
        {% include "partials/dropdown.html" with menu_items=page_tree_items limit=2 %}
    </ul>
</div>
{% endblock %}

View File

@@ -1,5 +1,5 @@
{% extends "layout.html" %}
{% load wagtailcore_tags oxyan static string_filters %}
{% load wagtailcore_tags oxyan static string_filters mandelstudio_i18n %}
{% block extrahead %}
{{ block.super }}
@@ -19,7 +19,7 @@
{% block layout %}
<a class="btn btn-secondary hidelink" id="main_content_link" href="#skip_header" tabindex="2">
Ga naar inhoud
{% skip_to_content_text %}
</a>
{% include_header header_template|default:"engine/partials/header.html" %}
<div id="main_content" tabindex="-1">

View File

@@ -1,5 +1,5 @@
{% extends "layout.html" %}
{% load wagtailcore_tags oxyan static string_filters %}
{% load wagtailcore_tags oxyan static string_filters mandelstudio_i18n %}
{% block extrahead %}
{{ block.super }}
@@ -19,7 +19,7 @@
{% block layout %}
<a class="btn btn-secondary hidelink" id="main_content_link" href="#skip_header" tabindex="2">
Ga naar inhoud
{% skip_to_content_text %}
</a>
{% include_header header_template|default:"engine/partials/header.html" %}
<div id="main_content" tabindex="-1">

View File

@@ -1,5 +1,5 @@
{% extends "layout.html" %}
{% load wagtailcore_tags oxyan static string_filters %}
{% load wagtailcore_tags oxyan static string_filters mandelstudio_i18n %}
{% block extrahead %}
{{ block.super }}
@@ -19,7 +19,7 @@
{% block layout %}
<a class="btn btn-secondary hidelink" id="main_content_link" href="#skip_header" tabindex="2">
Ga naar inhoud
{% skip_to_content_text %}
</a>
{% include_header header_template|default:"engine/partials/header.html" %}
<div id="main_content" tabindex="-1">

View File

@@ -0,0 +1 @@
{# Thin alias template: reuse the Carbasa base header unchanged. #}
{% include "carbasa/headers/header.html" %}

Some files were not shown because too many files have changed in this diff Show More