Compare commits

..

10 Commits

Author SHA1 Message Date
Serreau Jovann
47209eac41 🔥 chore(navbar): Retire l'entrée e-page du menu de navigation.
All checks were successful
Symfony CI - Install, Test, Build, Attest & Deploy / 🚀 Deploy to Production (push) Successful in 54s
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-10 16:04:20 +02:00
Serreau Jovann
293c5f2fa3 🔥 chore(ansible): Retire la tâche ecosplay:abonement du playbook.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-10 14:28:46 +02:00
Serreau Jovann
66fdd5250f ♻️ refactor(migration): Retire l'usage de docker dans backup.sh et restore.sh, utilise pg_dump/psql et tar directement sur l'hôte avec DATABASE_URL.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-10 14:25:44 +02:00
Serreau Jovann
8b4ae992f8 feat(migration): Ajoute les scripts backup.sh et restore.sh pour migrer la stack vers un autre serveur.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-10 14:17:10 +02:00
Serreau Jovann
dbf422a543 feat(nelmio_security): Ajoute cdnjs.cloudflare.com aux sources de contenu autorisées. 2026-03-30 13:43:32 +02:00
Serreau Jovann
d5563aa4db feat(nelmio_security): Ajoute cdnjs.cloudflare.com aux sources de contenu autorisées. 2026-03-30 13:42:42 +02:00
Serreau Jovann
73e4d40b4d feat(nelmio_security): Ajoute cdnjs.cloudflare.com aux sources de contenu autorisées. 2026-03-30 13:38:26 +02:00
Serreau Jovann
0233338224 🔧 fix(cloudflare): Corrige l'authentification API Cloudflare avec Global API Key et nouvelle zone ID.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-30 11:59:20 +02:00
Serreau Jovann
b0a725dd37 feat(nelmio_security): Ajoute cdnjs.cloudflare.com aux sources de contenu autorisées. 2026-03-30 11:43:44 +02:00
Serreau Jovann
767c533023 feat(nelmio_security): Ajoute cdnjs.cloudflare.com aux sources de contenu autorisées. 2026-03-30 11:39:19 +02:00
7 changed files with 343 additions and 21 deletions

5
.env
View File

@@ -56,8 +56,9 @@ PROD_URL=https://www.e-cosplay.fr
VAPID_PK=DsOg7jToRSD-VpNSV1Gt3YAhSwz4l-nqeu7yFvzbSxg
VAPID_PC=BKz0kdcsG6kk9KxciPpkfP8kEDAd408inZecij5kBDbQ1ZGZSNwS4KZ8FerC28LFXvgSqpDXtor3ePo0zBCdNqo
CLOUDFLARE_ZONE_ID=a26d2ecd33d18c984f348eeb060ed5b3
CLOUDFLARE_API_TOKEN=Kq_hpaH_ng-hAeGsJo6KhQb2TxYW1v6lRGE84aOR
CLOUDFLARE_ZONE_ID=f9e2a44aa512b455c99286d2ae845862
CLOUDFLARE_API_EMAIL=contact@e-cosplay.fr
CLOUDFLARE_API_KEY=cfk_wXHshV0AmOoKbWE19CIPoWJBeUCtSDUXo2mgkGuNe99eeac8
MARCHAND_ID=5685183792
###> google/apiclient ###
GOOGLE_API_KEY=

View File

@@ -240,11 +240,6 @@
- "{{ path }}/public/pwa"
- "{{ path }}/public/idb"
- "{{ path }}/public/workbox"
- name: Exécuter ecosplay:abonement dans le répertoire de l application
ansible.builtin.command: php bin/console ecosplay:abonement
become: false
args:
chdir: "{{ path }}"
- name: Exécuter app:cloudflare:purge dans le répertoire de l application
ansible.builtin.command: php bin/console app:cloudflare:purge
become: false

View File

@@ -1,6 +1,6 @@
www.e-cosplay.fr {
tls {
dns cloudflare cfut_2WvRdf8MffLcQxNKuLbikhvJVAPzfqNAoWhgWCBS289868ec
dns cloudflare cfat_rIHZqzCm9GKK3xVnQDNGfu6J91TseIDdTKeuWSFUdf6ccd31
}
root * {{ path }}/public
encode zstd gzip
@@ -39,10 +39,8 @@ www.e-cosplay.fr {
}
}
datas.e-cosplay.fr {
tls {
dns cloudflare Kq_hpaH_ng-hAeGsJo6KhQb2TxYW1v6lRGE84aOR
}
tls {
dns cloudflare cfat_rIHZqzCm9GKK3xVnQDNGfu6J91TseIDdTKeuWSFUdf6ccd31
}
reverse_proxy localhost:27502
}

144
backup.sh Executable file
View File

@@ -0,0 +1,144 @@
#!/bin/sh
set -eu
# ============================================================================
# E-COSPLAY - Backup script for migrating the stack to another server
# ----------------------------------------------------------------------------
# Does NOT use Docker. Relies on binaries installed on the host
# (pg_dump, psql, tar, gzip) and reads DATABASE_URL from .env.local.
#
# Backs up:
#   - the remote PostgreSQL database via DATABASE_URL (logical pg_dump)
#   - uploaded files (public/storage)
#   - environment files and credentials (.env.local, google.json, ...)
#
# Everything is packed into a timestamped tar.gz archive.
# ============================================================================

# Embed real ESC bytes via printf so a plain `echo` renders the colors in any
# POSIX shell (bash's builtin echo does not expand '\033' without -e).
RED="$(printf '\033[0;31m')"
ORANGE="$(printf '\033[0;33m')"
GREEN="$(printf '\033[0;32m')"
CYAN="$(printf '\033[0;36m')"
RESET="$(printf '\033[0m')"

PROJECT_DIR="$(cd "$(dirname "$0")" && pwd)"
cd "$PROJECT_DIR"
TIMESTAMP="$(date +%Y%m%d_%H%M%S)"
BACKUP_DIR="${BACKUP_DIR:-$PROJECT_DIR/var/backups}"
WORK_DIR="$BACKUP_DIR/tmp_$TIMESTAMP"
ARCHIVE="$BACKUP_DIR/e-cosplay_backup_$TIMESTAMP.tar.gz"
STORAGE_DIR="${STORAGE_DIR:-public/storage}"

echo "${CYAN}############################${RESET}"
echo "${CYAN}# E-COSPLAY BACKUP START #${RESET}"
echo "${CYAN}############################${RESET}"
echo "${CYAN}Destination : ${ARCHIVE}${RESET}"

mkdir -p "$WORK_DIR"
# Always clean the work dir, even on failure (restore.sh does the same);
# harmless after success since the dir is already removed by then.
trap 'rm -rf "$WORK_DIR"' EXIT

# ---------------------------------------------------------------------------
# 1) Sanity checks: required host binaries
# ---------------------------------------------------------------------------
for bin in pg_dump tar gzip; do
    if ! command -v "$bin" >/dev/null 2>&1; then
        echo "${RED}Binaire introuvable : $bin${RESET}" >&2
        exit 1
    fi
done

# ---------------------------------------------------------------------------
# 2) Resolve DATABASE_URL
#    Priority: existing env var > .env.local > .env.prod.local > .env
# ---------------------------------------------------------------------------
get_database_url() {
    if [ -n "${DATABASE_URL:-}" ]; then
        echo "$DATABASE_URL"
        return 0
    fi
    for f in .env.local .env.prod.local .env.prod .env; do
        if [ -f "$PROJECT_DIR/$f" ]; then
            # Take the last DATABASE_URL= line and strip surrounding quotes.
            url="$(grep -E '^DATABASE_URL=' "$PROJECT_DIR/$f" | tail -n1 | sed -E 's/^DATABASE_URL=//; s/^"(.*)"$/\1/; s/^'"'"'(.*)'"'"'$/\1/')"
            if [ -n "$url" ]; then
                echo "$url"
                return 0
            fi
        fi
    done
    return 1
}

DATABASE_URL_RAW="$(get_database_url || true)"
if [ -z "$DATABASE_URL_RAW" ]; then
    echo "${RED}DATABASE_URL introuvable (ni en env, ni dans .env.local/.env)${RESET}" >&2
    exit 1
fi
# Strip Doctrine-only query params (serverVersion, charset, ...) libpq ignores.
DATABASE_URL_CLEAN="$(echo "$DATABASE_URL_RAW" | sed -E 's/\?.*$//')"
# Masked URL for display (hides the password).
DATABASE_URL_MASKED="$(echo "$DATABASE_URL_CLEAN" | sed -E 's#(://[^:]+:)[^@]+(@)#\1***\2#')"
echo "${CYAN}> DATABASE_URL : ${DATABASE_URL_MASKED}${RESET}"

# ---------------------------------------------------------------------------
# 3) PostgreSQL dump
# ---------------------------------------------------------------------------
echo "${CYAN}> Dump PostgreSQL...${RESET}"
# Dump to a plain file first, then compress: in POSIX sh, `set -e` only sees
# the exit status of the LAST command of a pipeline, so `pg_dump | gzip`
# would mask a pg_dump failure and silently produce a corrupt backup.
pg_dump "$DATABASE_URL_CLEAN" \
    --clean --if-exists --no-owner --no-privileges \
    --file "$WORK_DIR/database.sql"
gzip -9 "$WORK_DIR/database.sql"
echo "${GREEN} ✓ database.sql.gz ($(du -h "$WORK_DIR/database.sql.gz" | cut -f1))${RESET}"

# ---------------------------------------------------------------------------
# 4) Uploaded files
# ---------------------------------------------------------------------------
if [ -d "$PROJECT_DIR/$STORAGE_DIR" ]; then
    echo "${CYAN}> Sauvegarde de ${STORAGE_DIR}...${RESET}"
    tar -C "$PROJECT_DIR" -czf "$WORK_DIR/storage.tar.gz" "$STORAGE_DIR"
    echo "${GREEN} ✓ storage.tar.gz ($(du -h "$WORK_DIR/storage.tar.gz" | cut -f1))${RESET}"
else
    echo "${ORANGE} ! ${STORAGE_DIR} introuvable, étape ignorée${RESET}"
fi

# ---------------------------------------------------------------------------
# 5) Environment files and credentials
# ---------------------------------------------------------------------------
echo "${CYAN}> Copie des fichiers d'environnement et credentials...${RESET}"
mkdir -p "$WORK_DIR/env"
for f in .env .env.local .env.prod .env.prod.local google.json account.json; do
    if [ -f "$PROJECT_DIR/$f" ]; then
        cp -a "$PROJECT_DIR/$f" "$WORK_DIR/env/$f"
        echo "${GREEN}$f${RESET}"
    fi
done

# ---------------------------------------------------------------------------
# 6) Metadata manifest (lets restore.sh validate and trace the archive)
# ---------------------------------------------------------------------------
{
    echo "timestamp=$TIMESTAMP"
    echo "hostname=$(hostname)"
    echo "git_commit=$(git -C "$PROJECT_DIR" rev-parse HEAD 2>/dev/null || echo unknown)"
    echo "git_branch=$(git -C "$PROJECT_DIR" rev-parse --abbrev-ref HEAD 2>/dev/null || echo unknown)"
    echo "database_url=$DATABASE_URL_MASKED"
    echo "storage_dir=$STORAGE_DIR"
} > "$WORK_DIR/manifest.txt"

# ---------------------------------------------------------------------------
# 7) Final packaging
# ---------------------------------------------------------------------------
echo "${CYAN}> Empaquetage de l'archive finale...${RESET}"
tar -C "$BACKUP_DIR" -czf "$ARCHIVE" "tmp_$TIMESTAMP"
rm -rf "$WORK_DIR"

echo "${GREEN}############################${RESET}"
echo "${GREEN}# BACKUP TERMINÉ #${RESET}"
echo "${GREEN}############################${RESET}"
echo "${GREEN}Archive : ${ARCHIVE}${RESET}"
echo "${GREEN}Taille : $(du -h "$ARCHIVE" | cut -f1)${RESET}"
echo
echo "${CYAN}Pour transférer vers le nouveau serveur :${RESET}"
echo " scp \"$ARCHIVE\" user@nouveau-serveur:/chemin/vers/e-cosplay/var/backups/"
echo " ssh user@nouveau-serveur 'cd /chemin/vers/e-cosplay && ./restore.sh \"var/backups/$(basename "$ARCHIVE")\"'"

184
restore.sh Executable file
View File

@@ -0,0 +1,184 @@
#!/bin/sh
set -eu
# ============================================================================
# E-COSPLAY - Restore script for a new server
# ----------------------------------------------------------------------------
# Does NOT use Docker. Relies on binaries installed on the host
# (psql, tar, gunzip) and reads DATABASE_URL from .env.local after having
# restored it from the archive.
#
# Usage: ./restore.sh <path/to/e-cosplay_backup_YYYYMMDD_HHMMSS.tar.gz>
#
# Steps:
#   1. Unpack the archive
#   2. Restore environment files (.env.local, etc.)
#   3. Restore the PostgreSQL database via psql + DATABASE_URL
#   4. Restore uploaded files (public/storage)
#   5. Clear the Symfony cache and apply Doctrine migrations
#
# WARNING: restoring OVERWRITES existing data (DB + storage).
# ============================================================================

# Embed real ESC bytes via printf so a plain `echo` renders the colors in any
# POSIX shell (bash's builtin echo does not expand '\033' without -e).
RED="$(printf '\033[0;31m')"
ORANGE="$(printf '\033[0;33m')"
GREEN="$(printf '\033[0;32m')"
CYAN="$(printf '\033[0;36m')"
RESET="$(printf '\033[0m')"

if [ $# -lt 1 ]; then
    echo "${RED}Usage : $0 <archive.tar.gz>${RESET}" >&2
    exit 1
fi
ARCHIVE="$1"
if [ ! -f "$ARCHIVE" ]; then
    echo "${RED}Archive introuvable : $ARCHIVE${RESET}" >&2
    exit 1
fi

PROJECT_DIR="$(cd "$(dirname "$0")" && pwd)"
cd "$PROJECT_DIR"
# Resolve the archive to an absolute path before any further cd.
case "$ARCHIVE" in
    /*) ARCHIVE_ABS="$ARCHIVE" ;;
    *)  ARCHIVE_ABS="$(pwd)/$ARCHIVE" ;;
esac

STORAGE_DIR="${STORAGE_DIR:-public/storage}"
WORK_DIR="$(mktemp -d /tmp/e-cosplay_restore.XXXXXX)"
trap 'rm -rf "$WORK_DIR"' EXIT

echo "${CYAN}#############################${RESET}"
echo "${CYAN}# E-COSPLAY RESTORE START #${RESET}"
echo "${CYAN}#############################${RESET}"
echo "${CYAN}Archive : ${ARCHIVE_ABS}${RESET}"

# ---------------------------------------------------------------------------
# Sanity checks: required host binaries
# ---------------------------------------------------------------------------
for bin in psql tar gunzip; do
    if ! command -v "$bin" >/dev/null 2>&1; then
        echo "${RED}Binaire introuvable : $bin${RESET}" >&2
        exit 1
    fi
done

# ---------------------------------------------------------------------------
# Interactive confirmation (skip with FORCE=1)
# ---------------------------------------------------------------------------
if [ "${FORCE:-0}" != "1" ]; then
    # '%s' keeps the message out of printf's format string (safe idiom when
    # interpolating variables).
    printf '%s' "${ORANGE}La restauration va ÉCRASER la base et les fichiers existants. Continuer ? [y/N] ${RESET}"
    read -r answer
    case "$answer" in
        y|Y|yes|YES) ;;
        *) echo "Annulé."; exit 0 ;;
    esac
fi

# ---------------------------------------------------------------------------
# 1) Unpack the archive
# ---------------------------------------------------------------------------
echo "${CYAN}> Décompression de l'archive...${RESET}"
tar -xzf "$ARCHIVE_ABS" -C "$WORK_DIR"
# The archive contains a single tmp_<timestamp>/ directory at its root.
SRC="$(find "$WORK_DIR" -mindepth 1 -maxdepth 1 -type d | head -n1)"
if [ -z "$SRC" ] || [ ! -f "$SRC/manifest.txt" ]; then
    echo "${RED}Archive invalide : manifest.txt introuvable${RESET}" >&2
    exit 1
fi
echo "${GREEN} ✓ Manifest :${RESET}"
sed 's/^/ /' "$SRC/manifest.txt"

# ---------------------------------------------------------------------------
# 2) Restore environment files (backs up any existing ones first)
# ---------------------------------------------------------------------------
if [ -d "$SRC/env" ]; then
    echo "${CYAN}> Restauration des fichiers d'environnement...${RESET}"
    for f in "$SRC"/env/*; do
        [ -e "$f" ] || continue
        name="$(basename "$f")"
        if [ -f "$PROJECT_DIR/$name" ]; then
            cp -a "$PROJECT_DIR/$name" "$PROJECT_DIR/$name.bak.$(date +%s)"
        fi
        cp -a "$f" "$PROJECT_DIR/$name"
        echo "${GREEN}$name${RESET}"
    done
fi

# ---------------------------------------------------------------------------
# 3) Resolve DATABASE_URL (after the env files have been restored)
# ---------------------------------------------------------------------------
get_database_url() {
    if [ -n "${DATABASE_URL:-}" ]; then
        echo "$DATABASE_URL"
        return 0
    fi
    for f in .env.local .env.prod.local .env.prod .env; do
        if [ -f "$PROJECT_DIR/$f" ]; then
            # Take the last DATABASE_URL= line and strip surrounding quotes.
            url="$(grep -E '^DATABASE_URL=' "$PROJECT_DIR/$f" | tail -n1 | sed -E 's/^DATABASE_URL=//; s/^"(.*)"$/\1/; s/^'"'"'(.*)'"'"'$/\1/')"
            if [ -n "$url" ]; then
                echo "$url"
                return 0
            fi
        fi
    done
    return 1
}

DATABASE_URL_RAW="$(get_database_url || true)"
if [ -z "$DATABASE_URL_RAW" ]; then
    echo "${RED}DATABASE_URL introuvable${RESET}" >&2
    exit 1
fi
# Strip Doctrine-only query params; mask the password for display.
DATABASE_URL_CLEAN="$(echo "$DATABASE_URL_RAW" | sed -E 's/\?.*$//')"
DATABASE_URL_MASKED="$(echo "$DATABASE_URL_CLEAN" | sed -E 's#(://[^:]+:)[^@]+(@)#\1***\2#')"
echo "${CYAN}> DATABASE_URL : ${DATABASE_URL_MASKED}${RESET}"

# ---------------------------------------------------------------------------
# 4) Restore PostgreSQL
# ---------------------------------------------------------------------------
if [ -f "$SRC/database.sql.gz" ]; then
    echo "${CYAN}> Restauration de la base PostgreSQL...${RESET}"
    # Decompress to a file first: in a `gunzip -c | psql` pipeline, `set -e`
    # only checks psql's exit status, so a truncated/corrupt archive could
    # be fed partially into the database without the script failing.
    gunzip -c "$SRC/database.sql.gz" > "$WORK_DIR/database.sql"
    psql "$DATABASE_URL_CLEAN" -v ON_ERROR_STOP=1 -f "$WORK_DIR/database.sql" >/dev/null
    echo "${GREEN} ✓ Base restaurée${RESET}"
else
    echo "${ORANGE} ! database.sql.gz absent, étape ignorée${RESET}"
fi

# ---------------------------------------------------------------------------
# 5) Restore uploaded files (keeps the old directory as a .bak)
# ---------------------------------------------------------------------------
if [ -f "$SRC/storage.tar.gz" ]; then
    echo "${CYAN}> Restauration de ${STORAGE_DIR}...${RESET}"
    if [ -d "$PROJECT_DIR/$STORAGE_DIR" ]; then
        backup_path="$PROJECT_DIR/${STORAGE_DIR}.bak.$(date +%s)"
        mv "$PROJECT_DIR/$STORAGE_DIR" "$backup_path"
        echo "${ORANGE} ! ancien dossier déplacé vers $backup_path${RESET}"
    fi
    tar -xzf "$SRC/storage.tar.gz" -C "$PROJECT_DIR"
    echo "${GREEN}${STORAGE_DIR} restauré${RESET}"
else
    echo "${ORANGE} ! storage.tar.gz absent, étape ignorée${RESET}"
fi

# ---------------------------------------------------------------------------
# 6) Symfony cache + Doctrine migrations (best-effort: `|| true` keeps the
#    restore usable even if the PHP toolchain is not fully set up yet)
# ---------------------------------------------------------------------------
if [ -x "$PROJECT_DIR/bin/console" ] && command -v php >/dev/null 2>&1; then
    echo "${CYAN}> Vidage du cache Symfony...${RESET}"
    php "$PROJECT_DIR/bin/console" cache:clear --no-warmup || true
    php "$PROJECT_DIR/bin/console" cache:warmup || true
    echo "${CYAN}> Application des migrations Doctrine éventuelles...${RESET}"
    php "$PROJECT_DIR/bin/console" doctrine:migrations:migrate --no-interaction --allow-no-migration || true
fi

echo "${GREEN}#############################${RESET}"
echo "${GREEN}# RESTORE TERMINÉ #${RESET}"
echo "${GREEN}#############################${RESET}"
echo "${GREEN}Pense à vérifier :${RESET}"
echo " - les variables d'environnement spécifiques au nouveau serveur (PATH_URL, DEV_URL, ...)"
echo " - les permissions du dossier $STORAGE_DIR (utilisateur du serveur web)"
echo " - la conf Caddy / Nginx / Apache et les certificats TLS"

View File

@@ -21,14 +21,15 @@ class CloudflarePurgeCommand extends Command
private const CLOUDFLARE_API_URL = 'https://api.cloudflare.com/client/v4/zones/%s/purge_cache';
private string $zoneId;
private string $apiToken;
private string $apiEmail;
private string $apiKey;
private HttpClientInterface $httpClient;
public function __construct(HttpClientInterface $httpClient)
{
// Les variables sont injectées via le conteneur de services (services.yaml)
$this->zoneId = $_ENV['CLOUDFLARE_ZONE_ID'];
$this->apiToken = $_ENV['CLOUDFLARE_API_TOKEN'];
$this->apiEmail = $_ENV['CLOUDFLARE_API_EMAIL'];
$this->apiKey = $_ENV['CLOUDFLARE_API_KEY'];
$this->httpClient = $httpClient;
parent::__construct();
@@ -38,8 +39,8 @@ class CloudflarePurgeCommand extends Command
{
$io = new SymfonyStyle($input, $output);
if (empty($this->zoneId) || empty($this->apiToken)) {
$io->error('CLOUDFLARE_ZONE_ID ou CLOUDFLARE_API_TOKEN ne sont pas définis. Veuillez vérifier votre fichier .env.');
if (empty($this->zoneId) || empty($this->apiEmail) || empty($this->apiKey)) {
$io->error('CLOUDFLARE_ZONE_ID, CLOUDFLARE_API_EMAIL ou CLOUDFLARE_API_KEY ne sont pas définis. Veuillez vérifier votre fichier .env.');
return Command::FAILURE;
}
@@ -52,8 +53,8 @@ class CloudflarePurgeCommand extends Command
sprintf(self::CLOUDFLARE_API_URL, $this->zoneId),
[
'headers' => [
// Utilisation du Token API (méthode recommandée par Cloudflare)
'Authorization' => 'Bearer ' . $this->apiToken,
'X-Auth-Email' => $this->apiEmail,
'X-Auth-Key' => $this->apiKey,
'Content-Type' => 'application/json',
],
// Le payload pour purger tout le cache

View File

@@ -62,7 +62,6 @@
{ 'name': 'Accueil'|trans, 'route': 'app_home' },
{ 'name': 'Qui sommes-nous'|trans, 'route': 'app_about' },
{ 'name': 'Nos membres'|trans, 'route': 'app_members' },
{ 'name': 'epage_cosplay'|trans, 'route': 'app_pages' },
{ 'name': 'Nos événements'|trans, 'route': 'app_events' },
{ 'name': 'Boutiques'|trans, 'route': 'app_shop' },
{ 'name': 'Documents'|trans, 'route': 'app_doc' },