From 2953b898b50ef826cafc013cebc0903446d036ab Mon Sep 17 00:00:00 2001
From: Edward <9326037+edwardoliveira@users.noreply.github.com>
Date: Tue, 2 Sep 2025 13:00:53 -0300
Subject: [PATCH] =?UTF-8?q?Atualiza=C3=A7=C3=A3o=20da=20imagem=20base=20Do?=
=?UTF-8?q?cker=20(#3787)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Update Docker base image and Python libraries
Co-authored-by: Edward <9326037+edwardoliveira@users.noreply.github.com>
---
docker/Dockerfile | 174 +++++++----
docker/config/nginx/nginx.conf | 2 +-
docker/docker-compose.yaml | 16 +-
docker/gunicorn_start.sh | 50 ---
docker/simple_gunicorn.sh | 3 +-
docker/start.sh | 147 ---------
docker/{ => startup_scripts}/create_admin.py | 0
docker/{ => startup_scripts}/genkey.py | 0
docker/startup_scripts/gunicorn.conf.py | 80 +++++
docker/{ => startup_scripts}/solr_cli.py | 18 +-
docker/startup_scripts/start.sh | 289 ++++++++++++++++++
docker/{ => startup_scripts}/wait-for-pg.sh | 0
docker/{ => startup_scripts}/wait-for-solr.sh | 0
release.sh | 3 -
requirements/dev-requirements.txt | 16 +-
requirements/requirements.txt | 12 +-
requirements/test-requirements.txt | 19 +-
sapl/base/email_utils.py | 2 +-
sapl/base/search_indexes.py | 2 +-
sapl/base/templatetags/common_tags.py | 4 +-
sapl/compilacao/forms.py | 2 +-
sapl/compilacao/models.py | 4 +-
sapl/lexml/forms.py | 2 +-
sapl/lexml/models.py | 2 +-
sapl/materia/forms.py | 2 +-
sapl/materia/models.py | 28 +-
sapl/materia/urls.py | 2 +-
sapl/norma/forms.py | 2 +-
sapl/norma/models.py | 4 +-
sapl/norma/views.py | 2 +-
sapl/parlamentares/models.py | 1 -
sapl/protocoloadm/forms.py | 4 +-
sapl/protocoloadm/views.py | 4 +-
sapl/relatorios/views.py | 24 +-
sapl/settings.py | 123 ++++++--
sapl/utils.py | 2 +-
36 files changed, 666 insertions(+), 379 deletions(-)
delete mode 100755 docker/gunicorn_start.sh
delete mode 100755 docker/start.sh
rename docker/{ => startup_scripts}/create_admin.py (100%)
rename docker/{ => startup_scripts}/genkey.py (100%)
create mode 100644 docker/startup_scripts/gunicorn.conf.py
rename docker/{ => startup_scripts}/solr_cli.py (95%)
create mode 100755 docker/startup_scripts/start.sh
rename docker/{ => startup_scripts}/wait-for-pg.sh (100%)
rename docker/{ => startup_scripts}/wait-for-solr.sh (100%)
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 6e5f27864..9fe3d6b75 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,73 +1,123 @@
-FROM python:3.9-slim-buster
-
-# Setup env
-ENV LANG C.UTF-8
-ENV LC_ALL C.UTF-8
-ENV PYTHONDONTWRITEBYTECODE 1
-ENV PYTHONUNBUFFERED=1
-ENV DEBIAN_FRONTEND noninteractive
-
-ENV BUILD_PACKAGES apt-utils apt-file libpq-dev graphviz-dev build-essential git pkg-config \
- python3-dev libxml2-dev libjpeg-dev libssl-dev libffi-dev libxslt1-dev \
- libcairo2-dev software-properties-common python3-setuptools python3-pip
-
-## NAO EH PRA TIRAR O vim DA LISTA DE COMANDOS INSTALADOS!!!
-ENV RUN_PACKAGES graphviz python3-lxml python3-magic postgresql-client python3-psycopg2 \
- poppler-utils curl jq bash vim python3-venv tzdata nodejs \
- fontconfig ttf-dejavu python nginx
-
-RUN mkdir -p /var/interlegis/sapl
+# ---------- 1) BUILDER ----------
+FROM python:3.12-slim-bookworm AS builder
+
+ENV LANG=C.UTF-8 LC_ALL=C.UTF-8 PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1 \
+ DEBIAN_FRONTEND=noninteractive \
+ VENV_DIR=/opt/venv \
+ PIP_NO_CACHE_DIR=on
+
+# Dev headers e toolchain só no builder
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ build-essential git pkg-config \
+ libpq-dev libxml2-dev libjpeg-dev libssl-dev libffi-dev libxslt1-dev \
+ libcairo2-dev libpango1.0-dev libgdk-pixbuf-2.0-dev libharfbuzz-dev \
+ libfreetype6-dev zlib1g-dev \
+ && rm -rf /var/lib/apt/lists/*
+
+# Venv independente do sistema
+RUN python -m venv "${VENV_DIR}" \
+ && "${VENV_DIR}/bin/pip" install --upgrade pip setuptools wheel
+
+WORKDIR /build
+
+# Copie APENAS os requirements primeiro para maximizar cache
+COPY requirements/ ./requirements/
+
+# Instale os requisitos de produção
+# ATENÇÃO: se seu código importa prompt_toolkit em runtime, inclua em requirements.txt:
+# prompt_toolkit>=3,<4
+RUN "${VENV_DIR}/bin/pip" install -r requirements/requirements.txt
+
+# Opcional: verificação de conflitos (falha cedo se faltar algo)
+RUN "${VENV_DIR}/bin/pip" check || true
+
+
+# ---------- 2) RUNTIME ----------
+FROM python:3.12-slim-bookworm AS runtime
+
+ARG WITH_GRAPHVIZ=1
+ARG WITH_POPPLER=1
+ARG WITH_PSQL_CLIENT=1
+ARG WITH_NGINX=1
+
+ENV LANG=C.UTF-8 LC_ALL=C.UTF-8 PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1 \
+ DEBIAN_FRONTEND=noninteractive \
+ VENV_DIR=/opt/venv \
+ PATH=/opt/venv/bin:$PATH \
+ PIP_NO_CACHE_DIR=on
+
+# Pacotes de runtime (sem *-dev)
+# Removi python3/python3-venv (já temos o Python da base)
+RUN set -eux; \
+ apt-get update; \
+ apt-get install -y --no-install-recommends \
+ curl jq bash tzdata fontconfig tini libmagic1 \
+ libcairo2 libpango-1.0-0 libpangocairo-1.0-0 libgdk-pixbuf-2.0-0 \
+ libharfbuzz0b libfreetype6 libjpeg62-turbo zlib1g fonts-dejavu-core; \
+ if [ "$WITH_GRAPHVIZ" = "1" ]; then apt-get install -y --no-install-recommends graphviz; fi; \
+ if [ "$WITH_POPPLER" = "1" ]; then apt-get install -y --no-install-recommends poppler-utils; fi; \
+ if [ "$WITH_PSQL_CLIENT" = "1" ]; then apt-get install -y --no-install-recommends postgresql-client; fi; \
+ if [ "$WITH_NGINX" = "1" ]; then apt-get install -y --no-install-recommends nginx; fi; \
+ rm -rf /var/lib/apt/lists/*
+
+# Usuários/grupos (idempotente)
+RUN useradd --system --no-create-home --shell /usr/sbin/nologin sapl || true \
+ && groupadd -r nginx || true \
+ && usermod -aG nginx www-data || true \
+ && usermod -aG nginx sapl || true
+
+# Estrutura de diretórios
+RUN mkdir -p /var/interlegis/sapl /var/interlegis/sapl/data /var/interlegis/sapl/media /var/interlegis/sapl/run \
+ && chown -R root:nginx /var/interlegis/sapl /var/interlegis/sapl/run \
+ && chmod -R g+rwX /var/interlegis/sapl \
+ && chmod 2775 /var/interlegis/sapl /var/interlegis/sapl/run \
+ && find /var/interlegis/sapl -type d -exec chmod g+s {} +
WORKDIR /var/interlegis/sapl/
-ADD . /var/interlegis/sapl/
-
-RUN apt-get update && \
- apt-get upgrade -y && \
- apt-get install -y --no-install-recommends $BUILD_PACKAGES $RUN_PACKAGES && \
- fc-cache -fv && \
- pip3 install --no-cache-dir --upgrade pip setuptools && \
- rm -f /etc/nginx/conf.d/* && \
- pip install --no-cache-dir -r /var/interlegis/sapl/requirements/dev-requirements.txt --upgrade setuptools && \
- SUDO_FORCE_REMOVE=yes apt-get purge -y --auto-remove $BUILD_PACKAGES && \
- apt-get autoremove && apt-get clean && rm -rf /var/lib/apt/lists/*
-
-WORKDIR /var/interlegis/sapl/
-ADD . /var/interlegis/sapl/
-
-COPY docker/start.sh $HOME
-COPY docker/solr_cli.py $HOME
-COPY docker/wait-for-pg.sh $HOME
-COPY docker/wait-for-solr.sh $HOME
-COPY docker/create_admin.py $HOME
-COPY docker/genkey.py $HOME
-COPY docker/gunicorn_start.sh $HOME
-
-COPY docker/config/nginx/sapl.conf /etc/nginx/conf.d
-COPY docker/config/nginx/nginx.conf /etc/nginx/nginx.conf
+# Traga o venv pré-instalado
+COPY --from=builder ${VENV_DIR} ${VENV_DIR}
+
+# Código da aplicação (depois do venv para aproveitar cache)
+COPY . /var/interlegis/sapl/
+
+# Nginx (somente se instalado)
+RUN if [ "$WITH_NGINX" = "1" ]; then \
+ rm -f /etc/nginx/conf.d/*; \
+ cp docker/config/nginx/sapl.conf /etc/nginx/conf.d/sapl.conf; \
+ cp docker/config/nginx/nginx.conf /etc/nginx/nginx.conf; \
+ fi
+
+# Scripts + gunicorn.conf no diretório da app
+RUN install -m 755 docker/startup_scripts/start.sh /var/interlegis/sapl/start.sh \
+ && install -m 755 docker/startup_scripts/wait-for-pg.sh /var/interlegis/sapl/wait-for-pg.sh \
+ && install -m 755 docker/startup_scripts/wait-for-solr.sh /var/interlegis/sapl/wait-for-solr.sh \
+ && install -m 644 docker/startup_scripts/solr_cli.py /var/interlegis/sapl/solr_cli.py \
+ && install -m 644 docker/startup_scripts/create_admin.py /var/interlegis/sapl/create_admin.py \
+ && install -m 644 docker/startup_scripts/genkey.py /var/interlegis/sapl/genkey.py \
+ && install -m 644 docker/startup_scripts/gunicorn.conf.py /var/interlegis/sapl/gunicorn.conf.py
+
+# (Se possível, evite copiar .env no build. Use secrets/variáveis em runtime.)
COPY docker/config/env_dockerfile /var/interlegis/sapl/sapl/.env
-RUN python3 manage.py collectstatic --noinput --clear
-
-# Remove .env(fake) e sapl.db da imagem
-RUN rm -rf /var/interlegis/sapl/sapl/.env && \
- rm -rf /var/interlegis/sapl/sapl.db
-
-RUN chmod +x /var/interlegis/sapl/start.sh && \
- chmod +x /var/interlegis/sapl/wait-for-solr.sh && \
- chmod +x /var/interlegis/sapl/wait-for-pg.sh && \
- ln -sf /dev/stdout /var/log/nginx/access.log && \
- ln -sf /dev/stderr /var/log/nginx/error.log && \
- mkdir /var/log/sapl/ && touch /var/interlegis/sapl/sapl.log && \
- ln -s /var/interlegis/sapl/sapl.log /var/log/sapl/sapl.log
-
-# Debian não possui usuário 'nginx' necessário para o Debian
-RUN useradd --no-create-home nginx
+# Logs (só se nginx estiver presente)
+RUN if [ "$WITH_NGINX" = "1" ]; then \
+ ln -sf /dev/stdout /var/log/nginx/access.log; \
+ ln -sf /dev/stderr /var/log/nginx/error.log; \
+ fi \
+ && mkdir -p /var/log/sapl/ \
+ && ln -sf /var/interlegis/sapl/sapl.log /var/log/sapl/sapl.log
-ENV DEBIAN_FRONTEND teletype
+# Cache de fontes e collectstatic
+# NÃO atualizamos pip aqui (já veio pronto do builder)
+RUN fc-cache -fv \
+ && python manage.py collectstatic --noinput --clear \
+ && rm -f /var/interlegis/sapl/sapl/.env /var/interlegis/sapl/sapl.db || true
-EXPOSE 80/tcp 443/tcp
+ENV DEBIAN_FRONTEND=teletype
+EXPOSE 80 443
VOLUME ["/var/interlegis/sapl/data", "/var/interlegis/sapl/media", "/var/log/sapl/"]
+ENTRYPOINT ["/usr/bin/tini","--"]
CMD ["/var/interlegis/sapl/start.sh"]
diff --git a/docker/config/nginx/nginx.conf b/docker/config/nginx/nginx.conf
index 29b9e805c..e002a6905 100644
--- a/docker/config/nginx/nginx.conf
+++ b/docker/config/nginx/nginx.conf
@@ -1,4 +1,4 @@
-user nginx;
+user www-data nginx;
worker_processes 1;
error_log /var/log/nginx/error.log warn;
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index b1832a241..83d2e6821 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -9,7 +9,9 @@ services:
POSTGRES_PASSWORD: sapl
POSTGRES_USER: sapl
POSTGRES_DB: sapl
- PGDATA : /var/lib/postgresql/data/
+ PGDATA: /var/lib/postgresql/data/
+ TZ: UTC
+ PG_TZ: UTC
volumes:
- sapldb_data:/var/lib/postgresql/data/
ports:
@@ -31,10 +33,10 @@ services:
networks:
- sapl-net
sapl:
- image: interlegis/sapl:3.1.164-RC1
-# build:
-# context: ../
-# dockerfile: ./docker/Dockerfile
+# image: interlegis/sapl:3.1.164-RC1
+ build:
+ context: ../
+ dockerfile: ./docker/Dockerfile
container_name: sapl
labels:
NAME: "sapl"
@@ -51,7 +53,9 @@ services:
EMAIL_HOST_PASSWORD: senhasmtp
USE_SOLR: 'True'
SOLR_COLLECTION: sapl
- SOLR_URL: http://solr:solr@saplsolr:8983
+ SOLR_URL: http://admin:solr@saplsolr:8983
+ SOLR_USER: solr
+ SOLR_PASSWORD: solr
IS_ZK_EMBEDDED: 'True'
ENABLE_SAPN: 'False'
TZ: America/Sao_Paulo
diff --git a/docker/gunicorn_start.sh b/docker/gunicorn_start.sh
deleted file mode 100755
index 9ef4b7982..000000000
--- a/docker/gunicorn_start.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env bash
-
-##
-##
-## PARA USO EXCLUSIVO DO CONTAINER DOCKER DO SAPL!!!
-## EVITE USAR PARA CHAMADA DIRETAS
-##
-##
-
-# As seen in http://tutos.readthedocs.org/en/latest/source/ndg.html
-
-SAPL_DIR="/var/interlegis/sapl"
-
-# Seta um novo diretório foi passado como raiz para o SAPL
-# caso esse tenha sido passado como parâmetro
-if [ "$1" ]
-then
- SAPL_DIR="$1"
-fi
-
-NAME="SAPL" # Name of the application (*)
-DJANGODIR=/var/interlegis/sapl/ # Django project directory (*)
-SOCKFILE=/var/interlegis/sapl/run/gunicorn.sock # we will communicate using this unix socket (*)
-USER=`whoami` # the user to run as (*)
-GROUP=`whoami` # the group to run as (*)
-NUM_WORKERS=3 # how many worker processes should Gunicorn spawn (*)
- # NUM_WORKERS = 2 * CPUS + 1
-TIMEOUT=300
-MAX_REQUESTS=100 # number of requests before restarting worker
-DJANGO_SETTINGS_MODULE=sapl.settings # which settings file should Django use (*)
-DJANGO_WSGI_MODULE=sapl.wsgi # WSGI module name (*)
-
-echo "Starting $NAME as `whoami` on base dir $SAPL_DIR"
-
-# Create the run directory if it doesn't exist
-RUNDIR=$(dirname $SOCKFILE)
-test -d $RUNDIR || mkdir -p $RUNDIR
-
-# Start your Django Unicorn
-# Programs meant to be run under supervisor should not daemonize themselves (do not use --daemon)
-exec gunicorn ${DJANGO_WSGI_MODULE}:application \
- --name $NAME \
- --log-level debug \
- --timeout $TIMEOUT \
- --workers $NUM_WORKERS \
- --max-requests $MAX_REQUESTS \
- --user $USER \
- --access-logfile /var/log/sapl/access.log \
- --error-logfile /var/log/sapl/error.log \
- --bind=unix:$SOCKFILE
diff --git a/docker/simple_gunicorn.sh b/docker/simple_gunicorn.sh
index 977d44288..196e19dec 100755
--- a/docker/simple_gunicorn.sh
+++ b/docker/simple_gunicorn.sh
@@ -12,4 +12,5 @@ export PYTHONPATH=$DJANGODIR:$PYTHONPATH
# Get eth0 IP and filter out the netmask portion (/24, e.g.)
IP=`ip addr | grep 'inet .* eth0' | awk '{print $2}' | sed 's/\/[0-9]*//'`
-gunicorn --bind $IP:8000 sapl.wsgi:application
+#gunicorn --bind $IP:8000 sapl.wsgi:application
+gunicorn -c gunicorn.conf.py sapl.wsgi:application
diff --git a/docker/start.sh b/docker/start.sh
deleted file mode 100755
index 7a9345fbb..000000000
--- a/docker/start.sh
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/usr/bin/env bash
-
-create_env() {
- echo "[ENV FILE] creating .env file..."
- # check if file exists
- if [ -f "/var/interlegis/sapl/data/secret.key" ]; then
- KEY=`cat /var/interlegis/sapl/data/secret.key`
- else
- KEY=`python3 genkey.py`
- echo $KEY > data/secret.key
- fi
-
- FILENAME="/var/interlegis/sapl/sapl/.env"
-
- if [ -z "${DATABASE_URL:-}" ]; then
- DATABASE_URL="postgresql://sapl:sapl@sapldb:5432/sapl"
- fi
-
- # ALWAYS replace the content of .env variable
- # If want to conditionally create only if absent then use IF below
- # if [ ! -f $FILENAME ]; then
-
- touch $FILENAME
-
- # explicitly use '>' to erase any previous content
- echo "SECRET_KEY="$KEY > $FILENAME
- # now only appends
- echo "DATABASE_URL = "$DATABASE_URL >> $FILENAME
- echo "DEBUG = ""${DEBUG-False}" >> $FILENAME
- echo "EMAIL_USE_TLS = ""${USE_TLS-True}" >> $FILENAME
- echo "EMAIL_PORT = ""${EMAIL_PORT-587}" >> $FILENAME
- echo "EMAIL_HOST = ""${EMAIL_HOST-''}" >> $FILENAME
- echo "EMAIL_HOST_USER = ""${EMAIL_HOST_USER-''}" >> $FILENAME
- echo "EMAIL_HOST_PASSWORD = ""${EMAIL_HOST_PASSWORD-''}" >> $FILENAME
- echo "EMAIL_SEND_USER = ""${EMAIL_HOST_USER-''}" >> $FILENAME
- echo "DEFAULT_FROM_EMAIL = ""${EMAIL_HOST_USER-''}" >> $FILENAME
- echo "SERVER_EMAIL = ""${EMAIL_HOST_USER-''}" >> $FILENAME
- echo "USE_SOLR = ""${USE_SOLR-False}" >> $FILENAME
- echo "SOLR_COLLECTION = ""${SOLR_COLLECTION-sapl}" >> $FILENAME
- echo "SOLR_URL = ""${SOLR_URL-http://localhost:8983}" >> $FILENAME
- echo "IS_ZK_EMBEDDED = ""${IS_ZK_EMBEDDED-False}" >> $FILENAME
- echo "ENABLE_SAPN = ""${ENABLE_SAPN-False}" >> $FILENAME
-
- echo "[ENV FILE] done."
-}
-
-create_env
-
-/bin/bash wait-for-pg.sh $DATABASE_URL
-
-yes yes | python3 manage.py migrate
-
-
-## SOLR
-USE_SOLR="${USE_SOLR:=False}"
-SOLR_URL="${SOLR_URL:=http://localhost:8983}"
-SOLR_COLLECTION="${SOLR_COLLECTION:=sapl}"
-NUM_SHARDS=${NUM_SHARDS:=1}
-RF=${RF:=1}
-MAX_SHARDS_PER_NODE=${MAX_SHARDS_PER_NODE:=1}
-IS_ZK_EMBEDDED="${IS_ZK_EMBEDDED:=False}"
-
-if [ "${USE_SOLR-False}" == "True" ] || [ "${USE_SOLR-False}" == "true" ]; then
-
- echo "Solr configurations"
- echo "==================="
- echo "URL: $SOLR_URL"
- echo "COLLECTION: $SOLR_COLLECTION"
- echo "NUM_SHARDS: $NUM_SHARDS"
- echo "REPLICATION FACTOR: $RF"
- echo "MAX SHARDS PER NODE: $MAX_SHARDS_PER_NODE"
- echo "ASSUME ZK EMBEDDED: $IS_ZK_EMBEDDED"
- echo "========================================="
-
- echo "running Solr script"
- /bin/bash wait-for-solr.sh $SOLR_URL
- CHECK_SOLR_RETURN=$?
-
- if [ $CHECK_SOLR_RETURN == 1 ]; then
- echo "Connecting to Solr..."
-
-
- if [ "${IS_ZK_EMBEDDED-False}" == "True" ] || [ "${IS_ZK_EMBEDDED-False}" == "true" ]; then
- ZK_EMBEDDED="--embedded_zk"
- echo "Assuming embedded ZooKeeper instalation..."
- fi
-
- python3 solr_cli.py -u $SOLR_URL -c $SOLR_COLLECTION -s $NUM_SHARDS -rf $RF -ms $MAX_SHARDS_PER_NODE $ZK_EMBEDDED &
- # Enable SOLR switch on, creating if it doesn't exist on database
- ./manage.py waffle_switch SOLR_SWITCH on --create
- else
- echo "Solr is offline, not possible to connect."
- # Disable Solr switch off, creating if it doesn't exist on database
- ./manage.py waffle_switch SOLR_SWITCH off --create
- fi
-
-else
- echo "Solr support is not initialized."
- # Disable Solr switch off, creating if it doesn't exist on database
- ./manage.py waffle_switch SOLR_SWITCH off --create
-fi
-
-## Enable/Disable SAPN
-if [ "${ENABLE_SAPN-False}" == "True" ] || [ "${ENABLE_SAPN-False}" == "true" ]; then
- echo "Enabling SAPN"
- ./manage.py waffle_switch SAPLN_SWITCH on --create
-else
- echo "Enabling SAPL"
- ./manage.py waffle_switch SAPLN_SWITCH off --create
-fi
-
-
-echo "Creating admin user..."
-
-user_created=$(python3 create_admin.py 2>&1)
-
-echo $user_created
-
-cmd=$(echo $user_created | grep 'ADMIN_USER_EXISTS')
-user_exists=$?
-
-cmd=$(echo $user_created | grep 'MISSING_ADMIN_PASSWORD')
-lack_pwd=$?
-
-if [ $user_exists -eq 0 ]; then
- echo "[SUPERUSER CREATION] User admin already exists. Not creating"
-fi
-
-if [ $lack_pwd -eq 0 ]; then
- echo "[SUPERUSER] Environment variable $ADMIN_PASSWORD for superuser admin was not set. Leaving container"
- # return -1
-fi
-
-# Backfilling AuditLog's JSON field
-time ./manage.py backfill_auditlog &
-
-echo "-------------------------------------"
-echo "| ███████╗ █████╗ ██████╗ ██╗ |"
-echo "| ██╔════╝██╔══██╗██╔══██╗██║ |"
-echo "| ███████╗███████║██████╔╝██║ |"
-echo "| ╚════██║██╔══██║██╔═══╝ ██║ |"
-echo "| ███████║██║ ██║██║ ███████╗ |"
-echo "| ╚══════╝╚═╝ ╚═╝╚═╝ ╚══════╝ |"
-echo "-------------------------------------"
-
-/bin/sh gunicorn_start.sh &
-/usr/sbin/nginx -g "daemon off;"
diff --git a/docker/create_admin.py b/docker/startup_scripts/create_admin.py
similarity index 100%
rename from docker/create_admin.py
rename to docker/startup_scripts/create_admin.py
diff --git a/docker/genkey.py b/docker/startup_scripts/genkey.py
similarity index 100%
rename from docker/genkey.py
rename to docker/startup_scripts/genkey.py
diff --git a/docker/startup_scripts/gunicorn.conf.py b/docker/startup_scripts/gunicorn.conf.py
new file mode 100644
index 000000000..95d2f0256
--- /dev/null
+++ b/docker/startup_scripts/gunicorn.conf.py
@@ -0,0 +1,80 @@
+# /var/interlegis/sapl/gunicorn.conf.py
+
+import os
+import pathlib
+import multiprocessing
+
+# ---- SAPL app configuration ----
+NAME = "SAPL"
+DJANGODIR = "/var/interlegis/sapl"
+SOCKFILE = f"{DJANGODIR}/run/gunicorn.sock"
+USER = "sapl"
+GROUP = "nginx"
+NUM_WORKERS = int(os.getenv("WEB_CONCURRENCY", "3"))
+THREADS = int(os.getenv("GUNICORN_THREADS", "8"))
+TIMEOUT = int(os.getenv("GUNICORN_TIMEOUT", "300"))
+MAX_REQUESTS = 1000
+WORKER_CLASS = "gthread"
+DJANGO_SETTINGS = "sapl.settings"
+WSGI_APP = "sapl.wsgi:application"
+
+# ---- gunicorn settings ----
+# Equivalent of: --name
+proc_name = NAME
+
+# Equivalent of: --bind=unix:...
+# For quick testing via browser, you can switch to: bind = "0.0.0.0:8000"
+bind = f"unix:{SOCKFILE}"
+umask = 0o007
+user = USER
+group = GROUP
+
+# Ensure imports work like in your script’s working dir
+chdir = DJANGODIR
+
+# Allow starting with just: gunicorn -c gunicorn.conf.py
+wsgi_app = WSGI_APP
+
+# Logs
+loglevel = "debug"
+accesslog = "/var/log/sapl/access.log"
+errorlog = "/var/log/sapl/error.log"
+# errorlog = "-" # send to stderr (so you see it in docker logs or terminal)
+# accesslog = "-" # send to stdout
+capture_output = True # capture print/tracebacks from app
+
+# Worker/process lifecycle
+workers = NUM_WORKERS
+worker_class = WORKER_CLASS
+threads = THREADS
+timeout = TIMEOUT
+graceful_timeout = 30
+keepalive = 10
+backlog = 2048
+max_requests = MAX_REQUESTS
+max_requests_jitter = 100
+
+# Environment (same as exporting before running)
+raw_env = [
+ f"DJANGO_SETTINGS_MODULE={DJANGO_SETTINGS}",
+ # If you’re using ReportLab and seeing segfaults with PDFs, keep this:
+ # "RL_NOACCEL=1",
+]
+
+# If you previously enabled preload and saw segfaults with native libs, keep it off:
+preload_app = False
+
+
+# Create the run/ directory for the UNIX socket (your script did this)
+def on_starting(server):
+ pathlib.Path(SOCKFILE).parent.mkdir(parents=True, exist_ok=True)
+
+
+# Close DB connections after fork (safer when using preload or certain DB drivers)
+def post_fork(server, worker):
+ try:
+ from django import db
+ db.connections.close_all()
+ except Exception:
+ # Django not initialized yet or not available
+ pass
diff --git a/docker/solr_cli.py b/docker/startup_scripts/solr_cli.py
similarity index 95%
rename from docker/solr_cli.py
rename to docker/startup_scripts/solr_cli.py
index d452d1fe9..e7600914a 100755
--- a/docker/solr_cli.py
+++ b/docker/startup_scripts/solr_cli.py
@@ -20,6 +20,8 @@ from kazoo.client import KazooClient
#
logging.basicConfig()
+logging.captureWarnings(True)
+logger = logging.getLogger(__name__)
SECURITY_FILE_TEMPLATE = """
{
@@ -49,6 +51,7 @@ def solr_hash_password(password: str, salt: str = None):
salt (optional): base64 salt string
returns: sha256 hash of password and salt (both base64 strings)
"""
+ logger.debug("Generating Solr password")
m = sha256()
if salt is None:
salt = secrets.token_bytes(32)
@@ -67,32 +70,32 @@ def solr_hash_password(password: str, salt: str = None):
def create_security_file(username, password):
- print("Creating security.json file...")
+ logger.info("Creating security.json file...")
with open("security.json", "w") as f:
cypher, salt = solr_hash_password(password)
f.write(SECURITY_FILE_TEMPLATE % (username, cypher, salt, username))
- print("file created!")
+ logger.info("file created!")
def upload_security_file(zk_host):
zk_port = 9983 # embedded ZK port
- print(f"Uploading security file to Solr, ZK server={zk_host}:{zk_port}...")
+ logger.info(f"Uploading security file to Solr, ZK server={zk_host}:{zk_port}...")
try:
with open('security.json', 'r') as f:
data = f.read()
zk = KazooClient(hosts=f"{zk_host}:{zk_port}")
zk.start()
- print("Uploading security.json file...")
+ logger.info("Uploading security.json file...")
if zk.exists('/security.json'):
zk.set("/security.json", str.encode(data))
else:
zk.create("/security.json", str.encode(data))
data, stat = zk.get('/security.json')
- print("file uploaded!")
- print(data.decode('utf-8'))
+ logger.info("file uploaded!")
+ logger.info(data.decode('utf-8'))
zk.stop()
except Exception as e:
- print(e)
+ logger.error(e)
sys.exit(-1)
@@ -250,6 +253,7 @@ def setup_embedded_zk(solr_url):
_, solr_user, solr_pwd, solr_host, solr_port = match.groups()
if solr_user and solr_pwd and solr_host:
+        print(f"Creating Solr user {solr_user}")
create_security_file(solr_user, solr_pwd)
upload_security_file(solr_host)
else:
diff --git a/docker/startup_scripts/start.sh b/docker/startup_scripts/start.sh
new file mode 100755
index 000000000..b612532bc
--- /dev/null
+++ b/docker/startup_scripts/start.sh
@@ -0,0 +1,289 @@
+#!/usr/bin/env bash
+set -Eeuo pipefail
+IFS=$'\n\t'
+
+DATA_DIR="/var/interlegis/sapl/data"
+APP_DIR="/var/interlegis/sapl/sapl"
+ENV_FILE="$APP_DIR/.env"
+SECRET_FILE="$DATA_DIR/secret.key"
+
+mkdir -p "$DATA_DIR" "$APP_DIR"
+
+log() { printf '[%s] %s\n' "$(date -Is)" "$*"; }
+err() { printf '[%s] ERROR: %s\n' "$(date -Is)" "$*" >&2; }
+
+cleanup() { jobs -p | xargs -r kill 2>/dev/null || true; }
+trap cleanup TERM INT EXIT
+
+# --- new function ---
+configure_pg_timezone() {
+ : "${DATABASE_URL:=postgresql://sapl:sapl@sapldb:5432/sapl}"
+ : "${DB_TIMEZONE:=America/Sao_Paulo}"
+ : "${DB_NAME:=}"
+ : "${DB_ROLE:=}"
+
+ log "Checking database/role timezone defaults…"
+
+ # Detect DB and role if not provided
+ if [[ -z "$DB_NAME" ]]; then
+ DB_NAME="$(psql "$DATABASE_URL" -At -v ON_ERROR_STOP=1 -c 'select current_database();')"
+ fi
+ if [[ -z "$DB_ROLE" ]]; then
+ DB_ROLE="$(psql "$DATABASE_URL" -At -v ON_ERROR_STOP=1 -c 'select current_user;')"
+ fi
+
+ # What is the effective timezone for this DB/role right now?
+ current_tz="$(psql "$DATABASE_URL" -At -v ON_ERROR_STOP=1 -c 'show time zone;')"
+ current_tz_lower="${current_tz,,}"
+
+ # Consider these as already UTC
+ if [[ "$current_tz_lower" == "utc" || "$current_tz_lower" == "etc/utc" ]]; then
+ log "Timezone already UTC for DB='$DB_NAME' ROLE='$DB_ROLE' (SHOW TIME ZONE => $current_tz). Skipping ALTERs."
+ return
+ fi
+
+ log "Timezone is '$current_tz' (not UTC). Applying persistent defaults…"
+
+ # Persist at database level (requires DB owner or superuser)
+ if psql "$DATABASE_URL" -v ON_ERROR_STOP=1 -q \
+ -c "ALTER DATABASE \"$DB_NAME\" SET timezone TO '$DB_TIMEZONE';"; then
+ log "ALTER DATABASE \"$DB_NAME\" SET timezone TO '$DB_TIMEZONE' applied."
+ else
+ err "ALTER DATABASE \"$DB_NAME\" failed. Need DB owner or superuser."
+ exit 1
+ fi
+
+ # Persist at role level (requires superuser)
+ if psql "$DATABASE_URL" -v ON_ERROR_STOP=1 -q \
+ -c "ALTER ROLE \"$DB_ROLE\" SET timezone TO '$DB_TIMEZONE';"; then
+ log "ALTER ROLE \"$DB_ROLE\" SET timezone TO '$DB_TIMEZONE' applied."
+ else
+ err "ALTER ROLE \"$DB_ROLE\" failed. Need superuser privileges."
+ exit 1
+ fi
+
+ # Re-check (new session shows the new default)
+ verify_tz="$(psql "$DATABASE_URL" -At -v ON_ERROR_STOP=1 -c 'show time zone;')"
+ log "SHOW TIME ZONE now => $verify_tz (new sessions will inherit the defaults)."
+}
+
+
+create_secret() {
+ if [[ -f "$SECRET_FILE" ]]; then
+ SECRET_KEY="$(<"$SECRET_FILE")"
+ else
+ log "Generating SECRET_KEY..."
+ SECRET_KEY="$(python3 genkey.py)"
+ umask 177
+ printf '%s\n' "$SECRET_KEY" > "$SECRET_FILE"
+ chmod 600 "$SECRET_FILE"
+ fi
+ export SECRET_KEY
+}
+
+write_env_file() {
+ : "${DATABASE_URL:=postgresql://sapl:sapl@sapldb:5432/sapl}"
+ : "${DEBUG:=False}"
+ : "${EMAIL_USE_TLS:=True}"
+ : "${EMAIL_PORT:=587}"
+ : "${EMAIL_HOST:=}"
+ : "${EMAIL_HOST_USER:=}"
+ : "${EMAIL_HOST_PASSWORD:=}"
+ : "${DEFAULT_FROM_EMAIL:=$EMAIL_HOST_USER}"
+ : "${SERVER_EMAIL:=$EMAIL_HOST_USER}"
+ : "${USE_SOLR:=False}"
+ : "${SOLR_COLLECTION:=sapl}"
+ : "${SOLR_URL:=http://localhost:8983}"
+ : "${IS_ZK_EMBEDDED:=False}"
+ : "${NUM_SHARDS:=1}"
+ : "${RF:=1}"
+ : "${MAX_SHARDS_PER_NODE:=1}"
+ : "${ENABLE_SAPN:=False}"
+
+ tmp="$(mktemp)"
+ {
+ printf 'SECRET_KEY=%s\n' "$SECRET_KEY"
+ printf 'DATABASE_URL=%s\n' "$DATABASE_URL"
+ printf 'DEBUG=%s\n' "$DEBUG"
+ printf 'EMAIL_USE_TLS=%s\n' "$EMAIL_USE_TLS"
+ printf 'EMAIL_PORT=%s\n' "$EMAIL_PORT"
+ printf 'EMAIL_HOST=%s\n' "$EMAIL_HOST"
+ printf 'EMAIL_HOST_USER=%s\n' "$EMAIL_HOST_USER"
+ printf 'EMAIL_HOST_PASSWORD=%s\n' "$EMAIL_HOST_PASSWORD"
+ printf 'EMAIL_SEND_USER=%s\n' "$EMAIL_HOST_USER"
+ printf 'DEFAULT_FROM_EMAIL=%s\n' "$DEFAULT_FROM_EMAIL"
+ printf 'SERVER_EMAIL=%s\n' "$SERVER_EMAIL"
+ printf 'USE_SOLR=%s\n' "$USE_SOLR"
+ printf 'SOLR_COLLECTION=%s\n' "$SOLR_COLLECTION"
+ printf 'SOLR_URL=%s\n' "$SOLR_URL"
+ printf 'IS_ZK_EMBEDDED=%s\n' "$IS_ZK_EMBEDDED"
+ printf 'NUM_SHARDS=%s\n' "$NUM_SHARDS"
+ printf 'RF=%s\n' "$RF"
+ printf 'MAX_SHARDS_PER_NODE=%s\n' "$MAX_SHARDS_PER_NODE"
+ printf 'ENABLE_SAPN=%s\n' "$ENABLE_SAPN"
+ } > "$tmp"
+
+ chmod 600 "$tmp"
+ mv -f "$tmp" "$ENV_FILE"
+ log "[ENV] wrote $ENV_FILE"
+}
+
+wait_for_pg() {
+ : "${DATABASE_URL:=postgresql://sapl:sapl@sapldb:5432/sapl}"
+ log "Waiting for Postgres..."
+ /bin/bash wait-for-pg.sh "$DATABASE_URL"
+}
+
+migrate_db() {
+ log "Running Django migrations..."
+ python3 manage.py migrate --noinput
+}
+
+# In start.sh (near your other helpers)
+configure_solr() {
+ # respect envs, with sane defaults
+ local USE="${USE_SOLR:-False}"
+ local URL="${SOLR_URL:-http://admin:solr@localhost:8983}"
+ local COL="${SOLR_COLLECTION:-sapl}"
+ local SHARDS="${NUM_SHARDS:-1}"
+ local RF="${RF:-1}"
+ local MS="${MAX_SHARDS_PER_NODE:-1}"
+ local IS_ZK="${IS_ZK_EMBEDDED:-False}"
+
+ # total wait time before we give up (seconds)
+ local WAIT_TIMEOUT="${SOLR_WAIT_TIMEOUT:-30}"
+ # per probe max seconds
+ local PROBE_TIMEOUT="${SOLR_PROBE_TIMEOUT:-3}"
+ # sleep between probes
+ local SLEEP_SECS="${SOLR_WAIT_INTERVAL:-2}"
+
+ # feature flag OFF by default unless we confirm Solr
+ ./manage.py waffle_switch SOLR_SWITCH off --create || true
+
+ # Fast exit if disabled
+ if [[ "${USE,,}" != "true" ]]; then
+ echo "[SOLR] USE_SOLR=$USE → skipping Solr initialization."
+ return 0
+ fi
+
+ echo "[SOLR] Best-effort wait (<= ${WAIT_TIMEOUT}s): $URL, collection=$COL"
+
+ local deadline=$((SECONDS + WAIT_TIMEOUT))
+ while (( SECONDS < deadline )); do
+ # Try a cheap SolrCloud endpoint; swap for /solr/admin/info/system if you prefer
+ if curl -fsS --max-time "${PROBE_TIMEOUT}" \
+ "${URL%/}/solr/admin/collections?action=LIST" >/dev/null; then
+ echo "[SOLR] Reachable. Kicking off background configuration…"
+
+ # optional flag if ZK is embedded
+ local ZK_FLAG=""
+ if [[ "${IS_ZK,,}" == "true" ]]; then
+ ZK_FLAG="--embedded_zk"
+ fi
+
+ (
+ set -Eeuo pipefail
+ python3 solr_cli.py \
+ -u "$URL" -c "$COL" -s "$SHARDS" -rf "$RF" -ms "$MS" $ZK_FLAG
+ ./manage.py waffle_switch SOLR_SWITCH on --create
+ echo "[SOLR] Configuration done, SOLR_SWITCH=on."
+ ) >/var/log/sapl/solr_init.log 2>&1 & disown
+
+ return 0
+ fi
+ sleep "${SLEEP_SECS}"
+ done
+
+ echo "[SOLR] Not reachable within ${WAIT_TIMEOUT}s. Proceeding without Solr (SOLR_SWITCH=off)."
+ return 0
+}
+
+configure_sapn() {
+ if [[ "${ENABLE_SAPN,,}" == "true" ]]; then
+ log "Enabling SAPN"
+        python3 manage.py waffle_switch SAPLN_SWITCH on --create
+    else
+        log "Disabling SAPN"
+        python3 manage.py waffle_switch SAPLN_SWITCH off --create
+ fi
+}
+
+create_admin() {
+ log "Creating admin user..."
+ out="$(python3 create_admin.py 2>&1 || true)"
+ printf '%s\n' "$out"
+
+ if grep -q 'MISSING_ADMIN_PASSWORD' <<<"$out"; then
+ err "[SUPERUSER] ADMIN_PASSWORD not set. Exiting."
+ exit 1
+ fi
+}
+
+fix_logging_and_socket_perms() {
+ local APP_DIR="/var/interlegis/sapl"
+ local LOG_FILE="$APP_DIR/sapl.log"
+
+ # dirs
+ mkdir -p "$APP_DIR/run"
+ chown -R root:nginx "$APP_DIR"
+ chmod 2775 "$APP_DIR" "$APP_DIR/run"
+ chmod -R g+rwX "$APP_DIR"
+
+ # new files/sockets → 660
+ umask 0007
+
+ # ensure log file is owned by sapl and writable
+ install -Dm0660 /dev/null "$LOG_FILE"
+ chown sapl:nginx "$LOG_FILE"
+
+ # stale socket cleanup (if any)
+ rm -f "$APP_DIR/run/gunicorn.sock" 2>/dev/null || true
+}
+
+setup_cache_dir() {
+ # if you later move cache under /var/interlegis/sapl/cache, this line can read an env var
+ local CACHE_DIR="${DJANGO_CACHE_DIR:-/var/tmp/django_cache}"
+
+ mkdir -p "$CACHE_DIR"
+ chown -R sapl:nginx "$CACHE_DIR"
+ chmod -R 2775 "$CACHE_DIR"
+ find "$CACHE_DIR" -type d -exec chmod g+s {} +
+
+ # keep your global umask; 0007 ensures new files are rw for owner+group
+ umask 0007
+}
+
+start_services() {
+ log "Starting gunicorn..."
+ gunicorn -c gunicorn.conf.py &
+ log "Starting nginx..."
+ exec /usr/sbin/nginx -g "daemon off;"
+}
+
+main() {
+ create_secret
+ write_env_file
+ wait_for_pg
+ configure_pg_timezone
+ migrate_db
+ configure_solr || true
+ configure_sapn
+ create_admin
+ setup_cache_dir
+ fix_logging_and_socket_perms
+
+ cat <<'BANNER'
+-------------------------------------
+| ███████╗ █████╗ ██████╗ ██╗ |
+| ██╔════╝██╔══██╗██╔══██╗██║ |
+| ███████╗███████║██████╔╝██║ |
+| ╚════██║██╔══██║██╔═══╝ ██║ |
+| ███████║██║ ██║██║ ███████╗ |
+| ╚══════╝╚═╝ ╚═╝╚═╝ ╚══════╝ |
+-------------------------------------
+BANNER
+
+ start_services
+}
+
+main "$@"
diff --git a/docker/wait-for-pg.sh b/docker/startup_scripts/wait-for-pg.sh
similarity index 100%
rename from docker/wait-for-pg.sh
rename to docker/startup_scripts/wait-for-pg.sh
diff --git a/docker/wait-for-solr.sh b/docker/startup_scripts/wait-for-solr.sh
similarity index 100%
rename from docker/wait-for-solr.sh
rename to docker/startup_scripts/wait-for-solr.sh
diff --git a/release.sh b/release.sh
index 3ee43b85a..e2928980b 100755
--- a/release.sh
+++ b/release.sh
@@ -76,9 +76,6 @@ function set_rc_version {
fi
FINAL_VERSION=$NEXT_RC_VERSION
-## DEBUG
-# echo "OLD_VERSION: $OLD_VERSION"
-# echo "FINAL_VERSION: $FINAL_VERSION"
}
# Function to display Yes/No prompt with colored message
diff --git a/requirements/dev-requirements.txt b/requirements/dev-requirements.txt
index 56cdb1bb1..b7b662d5e 100644
--- a/requirements/dev-requirements.txt
+++ b/requirements/dev-requirements.txt
@@ -1,11 +1,11 @@
-r test-requirements.txt
-autopep8==1.2.4
-beautifulsoup4==4.9.1
-django-debug-toolbar==1.11.1
-ipdb==0.13.3
-fancycompleter==0.9.1
-pdbpp==0.10.3
-pip-review==0.4
-pipdeptree==0.10.1
+autopep8==2.3.2
+beautifulsoup4==4.13.5
+django-debug-toolbar==3.2.4
+ipdb==0.13.13
+fancycompleter==0.11.1
+pdbpp==0.11.7
+pip-review==1.3.0
+pipdeptree==2.28.0
pydevd-pycharm~=203.7148.7
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index a53ec010a..56c2459b4 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -2,7 +2,6 @@ django==2.2.28
django-haystack==3.1.1
django-filter==2.4.0
djangorestframework==3.12.4
-dj-database-url==0.5.0
django-braces==1.14.0
django-crispy-forms==1.7.2
django-contrib-postgres==0.0.1
@@ -16,14 +15,18 @@ drf-spectacular==0.18.2
django-ratelimit==3.0.1
easy-thumbnails==2.8.5
python-decouple==3.1
-psycopg2-binary==2.8.6
+dj-database-url==0.5.0
+psycopg2-binary==2.9.9
pyyaml==6.0.1
pytz==2019.3
python-magic==0.4.15
unipath==1.1
-WeasyPrint==51
Pillow==10.3.0
-gunicorn==22.0.0
+rlPyCairo==0.3.0
+reportlab==4.2.0
+WeasyPrint==66
+trml2pdf==0.6
+gunicorn==23.0.0
more-itertools==8.2.0
pysolr==3.6.0
PyPDF4==1.27.0
@@ -37,5 +40,4 @@ django-prometheus==2.2.0
asn1crypto==1.5.1
XlsxWriter==3.2.0
-git+https://github.com/interlegis/trml2pdf
git+https://github.com/interlegis/django-admin-bootstrapped
diff --git a/requirements/test-requirements.txt b/requirements/test-requirements.txt
index 5116d1fe7..2da40408f 100644
--- a/requirements/test-requirements.txt
+++ b/requirements/test-requirements.txt
@@ -1,11 +1,10 @@
-r requirements.txt
-coverage==4.4
-django-webtest==1.9.7
-flake8==2.6.2
-isort==4.2.5
-model-bakery==1.1.0
-pep8==1.7.0
-pytest==5.4.3
-pytest-cov==2.10.0
-pytest-django==3.8.0
-webtest==2.0.21
+coverage==7.6.1
+django-webtest==1.9.8
+flake8==7.1.1
+isort==5.13.2
+model-bakery==1.5.0
+pycodestyle==2.12.1
+pytest==8.3.3
+pytest-cov==5.0.0
+WebTest==3.0.6
diff --git a/sapl/base/email_utils.py b/sapl/base/email_utils.py
index 3ccf64690..2781e9876 100644
--- a/sapl/base/email_utils.py
+++ b/sapl/base/email_utils.py
@@ -21,7 +21,7 @@ def load_email_templates(templates, context={}):
tpl = loader.get_template(t)
email = tpl.render(context)
if t.endswith(".html"):
- email = email.replace('\n', '').replace('\r', '')
+ email = email.replace('\n', '').replace('\r', '')
emails.append(email)
return emails
diff --git a/sapl/base/search_indexes.py b/sapl/base/search_indexes.py
index e76f00168..d0432aded 100644
--- a/sapl/base/search_indexes.py
+++ b/sapl/base/search_indexes.py
@@ -108,7 +108,7 @@ class TextExtractField(CharField):
continue
data += getattr(self, func)(value) + ' '
- data = data.replace('\n', ' ')
+ data = data.replace('\n', ' ')
return data
diff --git a/sapl/base/templatetags/common_tags.py b/sapl/base/templatetags/common_tags.py
index 9a60bf85e..84f61b64f 100644
--- a/sapl/base/templatetags/common_tags.py
+++ b/sapl/base/templatetags/common_tags.py
@@ -300,7 +300,7 @@ def youtube_url(value):
# Test if YouTube video
# tested on https://pythex.org/
value = value.lower()
- youtube_pattern = "^((https?://)?(www\.)?youtube\.com\/watch\?v=)"
+ youtube_pattern = r"^((https?://)?(www\.)?youtube\.com\/watch\?v=)"
r = re.findall(youtube_pattern, value)
return True if r else False
@@ -308,7 +308,7 @@ def youtube_url(value):
@register.filter
def facebook_url(value):
value = value.lower()
- facebook_pattern = "^((https?://)?((www|pt-br)\.)?facebook\.com(\/.+)?\/videos(\/.*)?)"
+ facebook_pattern = r"^((https?://)?((www|pt-br)\.)?facebook\.com(\/.+)?\/videos(\/.*)?)"
r = re.findall(facebook_pattern, value)
return True if r else False
diff --git a/sapl/compilacao/forms.py b/sapl/compilacao/forms.py
index 18c24cf1f..c4154ec00 100644
--- a/sapl/compilacao/forms.py
+++ b/sapl/compilacao/forms.py
@@ -987,7 +987,7 @@ class DispositivoEdicaoVigenciaForm(ModelForm):
p.pk, _('%s realizada em %s. %s') % (
p.tipo_publicacao,
defaultfilters.date(
- p.data, "d \d\e F \d\e Y"),
+ p.data, r"d \d\e F \d\e Y"),
str(p.ta))) for p in pubs]
dvs = Dispositivo.objects.order_by('ordem').filter(
diff --git a/sapl/compilacao/models.py b/sapl/compilacao/models.py
index 7b54fcb67..e8a32615e 100644
--- a/sapl/compilacao/models.py
+++ b/sapl/compilacao/models.py
@@ -287,7 +287,7 @@ class TextoArticulado(TimestampedMixin):
return _('%(tipo)s nº %(numero)s, de %(data)s') % {
'tipo': self.tipo_ta,
'numero': numero,
- 'data': defaultfilters.date(self.data, "d \d\e F \d\e Y").lower()}
+ 'data': defaultfilters.date(self.data, r"d \d\e F \d\e Y").lower()}
def hash(self):
from django.core import serializers
@@ -943,7 +943,7 @@ class Publicacao(TimestampedMixin):
def __str__(self):
return _('%s realizada em %s \n %s') % (
self.tipo_publicacao,
- defaultfilters.date(self.data, "d \d\e F \d\e Y"),
+ defaultfilters.date(self.data, r"d \d\e F \d\e Y"),
self.ta)
diff --git a/sapl/lexml/forms.py b/sapl/lexml/forms.py
index 7904508af..8fd59b6ac 100644
--- a/sapl/lexml/forms.py
+++ b/sapl/lexml/forms.py
@@ -31,7 +31,7 @@ class LexmlProvedorForm(ModelForm):
return cd
if cd["xml"]:
- xml = re.sub("\n|\t", "", cd["xml"].strip())
+ xml = re.sub(r"\n|\t", "", cd["xml"].strip())
validar_xml(xml)
validar_schema(xml)
diff --git a/sapl/lexml/models.py b/sapl/lexml/models.py
index 1a5d27144..e892877fe 100644
--- a/sapl/lexml/models.py
+++ b/sapl/lexml/models.py
@@ -25,7 +25,7 @@ class LexmlProvedor(models.Model): # LexmlRegistroProvedor
def pretty_xml(self):
import html
safe_xml = html.escape(self.xml)
- return safe_xml.replace('\n', '
').replace(' ', ' ')
+ return safe_xml.replace('\n', '
').replace(' ', ' ')
class Meta:
verbose_name = _('Provedor Lexml')
diff --git a/sapl/materia/forms.py b/sapl/materia/forms.py
index 7f3207214..c685109ae 100644
--- a/sapl/materia/forms.py
+++ b/sapl/materia/forms.py
@@ -1716,7 +1716,7 @@ class TramitacaoEmLoteForm(ModelForm):
('texto', 12)
])
- documentos_checkbox_HTML = '''
+ documentos_checkbox_HTML = r'''