diff --git a/docker/Dockerfile b/docker/Dockerfile index cb2fc1150..8b1cf12a1 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -15,62 +15,78 @@ ENV BUILD_PACKAGES="apt-utils apt-file libpq-dev graphviz-dev build-essential gi ## NAO EH PRA TIRAR O vim DA LISTA DE COMANDOS INSTALADOS!!! ENV RUN_PACKAGES="graphviz python3-lxml python3-magic postgresql-client \ poppler-utils curl jq bash vim python3-venv tzdata nodejs \ - fontconfig python3 nginx \ + fontconfig python3 nginx tini git \ libcairo2 libpango-1.0-0 libpangocairo-1.0-0 libgdk-pixbuf-2.0-0 \ libharfbuzz0b libfreetype6 libjpeg62-turbo zlib1g fonts-dejavu-core" - -RUN mkdir -p /var/interlegis/sapl -WORKDIR /var/interlegis/sapl/ +## Update apt-get +RUN apt-get update \ + && apt-get install -y --no-install-recommends $BUILD_PACKAGES $RUN_PACKAGES -ADD . /var/interlegis/sapl/ +# Create users and groups +RUN useradd --system --no-create-home --shell /usr/sbin/nologin sapl || true \ + && groupadd -r nginx || true \ + && usermod -aG nginx www-data || true \ + && usermod -aG nginx sapl || true -RUN apt-get update && \ - apt-get upgrade -y && \ - apt-get install -y --no-install-recommends $BUILD_PACKAGES $RUN_PACKAGES && \ - fc-cache -fv && \ - pip3 install --no-cache-dir --upgrade pip setuptools && \ - rm -f /etc/nginx/conf.d/* && \ - pip install --no-cache-dir -r /var/interlegis/sapl/requirements/dev-requirements.txt --upgrade setuptools && \ - SUDO_FORCE_REMOVE=yes apt-get purge -y --auto-remove $BUILD_PACKAGES && \ - apt-get autoremove && apt-get clean && rm -rf /var/lib/apt/lists/* +# create base directories and setup access +RUN mkdir -p /var/interlegis/sapl /var/interlegis/sapl/data /var/interlegis/sapl/media /var/interlegis/sapl/run/ \ + && chown -R root:nginx /var/interlegis/sapl /var/interlegis/sapl/run/ \ + && chmod -R g+rwX /var/interlegis/sapl \ + && chmod 2775 /var/interlegis/sapl /var/interlegis/sapl/run/ \ + && find /var/interlegis/sapl -type d -exec chmod g+s {} + +# Copy app code WORKDIR 
/var/interlegis/sapl/ -ADD . /var/interlegis/sapl/ +COPY . /var/interlegis/sapl/ + +RUN rm -f /etc/nginx/conf.d/* +COPY docker/config/nginx/sapl.conf /etc/nginx/conf.d/ +COPY docker/config/nginx/nginx.conf /etc/nginx/nginx.conf -COPY docker/start.sh $HOME -COPY docker/solr_cli.py $HOME -COPY docker/wait-for-pg.sh $HOME -COPY docker/wait-for-solr.sh $HOME -COPY docker/create_admin.py $HOME -COPY docker/genkey.py $HOME -COPY docker/gunicorn.conf.py $HOME +COPY docker/startup_scripts/start.sh $HOME +COPY docker/startup_scripts/solr_cli.py $HOME +COPY docker/startup_scripts/wait-for-pg.sh $HOME +COPY docker/startup_scripts/wait-for-solr.sh $HOME +COPY docker/startup_scripts/create_admin.py $HOME +COPY docker/startup_scripts/genkey.py $HOME +COPY docker/startup_scripts/gunicorn.conf.py $HOME COPY docker/config/nginx/sapl.conf /etc/nginx/conf.d COPY docker/config/nginx/nginx.conf /etc/nginx/nginx.conf COPY docker/config/env_dockerfile /var/interlegis/sapl/sapl/.env -RUN python3 manage.py collectstatic --noinput --clear +RUN chmod +x /var/interlegis/sapl/start.sh \ + && chmod +x /var/interlegis/sapl/wait-for-solr.sh \ + && chmod +x /var/interlegis/sapl/wait-for-pg.sh \ + && chmod +x /var/interlegis/sapl/start.sh \ + && chmod +x /var/interlegis/sapl/wait-for-solr.sh \ + && chmod +x /var/interlegis/sapl/wait-for-pg.sh + +RUN ln -sf /dev/stdout /var/log/nginx/access.log \ + && ln -sf /dev/stderr /var/log/nginx/error.log \ + && mkdir -p /var/log/sapl/ \ + && ln -sf /var/interlegis/sapl/sapl.log /var/log/sapl/sapl.log + +RUN fc-cache -fv \ + && python -m pip install --no-cache-dir --upgrade pip setuptools \ + && python -m pip install --no-cache-dir -r requirements/dev-requirements.txt -# Remove .env(fake) e sapl.db da imagem -RUN rm -rf /var/interlegis/sapl/sapl/.env && \ - rm -rf /var/interlegis/sapl/sapl.db -RUN chmod +x /var/interlegis/sapl/start.sh && \ - chmod +x /var/interlegis/sapl/wait-for-solr.sh && \ - chmod +x /var/interlegis/sapl/wait-for-pg.sh && \ - ln -sf 
/dev/stdout /var/log/nginx/access.log && \ - ln -sf /dev/stderr /var/log/nginx/error.log && \ - mkdir /var/log/sapl/ && touch /var/interlegis/sapl/sapl.log && \ - ln -s /var/interlegis/sapl/sapl.log /var/log/sapl/sapl.log +RUN apt-get purge -y --auto-remove $BUILD_PACKAGES \ + && apt-get autoremove -y \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* -# Debian não possui usuário 'nginx' necessário para o Debian -RUN useradd --no-create-home nginx +RUN python manage.py collectstatic --noinput --clear -ENV DEBIAN_FRONTEND teletype +RUN rm -f /var/interlegis/sapl/sapl/.env /var/interlegis/sapl/sapl.db || true -EXPOSE 80/tcp 443/tcp +ENV DEBIAN_FRONTEND=teletype +EXPOSE 80 443 VOLUME ["/var/interlegis/sapl/data", "/var/interlegis/sapl/media", "/var/log/sapl/"] +# tini as PID 1 so signals/zombies are handled +ENTRYPOINT ["/usr/bin/tini","--"] CMD ["/var/interlegis/sapl/start.sh"] diff --git a/docker/config/nginx/nginx.conf b/docker/config/nginx/nginx.conf index 29b9e805c..cd978e4e1 100644 --- a/docker/config/nginx/nginx.conf +++ b/docker/config/nginx/nginx.conf @@ -1,5 +1,5 @@ -user nginx; -worker_processes 1; +user www-data nginx; +worker_processes auto; error_log /var/log/nginx/error.log warn; pid /var/run/nginx.pid; diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 095ba6e40..32417f3d7 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -53,7 +53,9 @@ services: EMAIL_HOST_PASSWORD: senhasmtp USE_SOLR: 'True' SOLR_COLLECTION: sapl - SOLR_URL: http://solr:solr@saplsolr:8983 + SOLR_URL: http://saplsolr:8983 + SOLR_USER: solr + SOLR_PASSWORD: solr IS_ZK_EMBEDDED: 'True' ENABLE_SAPN: 'False' TZ: America/Sao_Paulo diff --git a/docker/start.sh b/docker/start.sh deleted file mode 100755 index 7a502db82..000000000 --- a/docker/start.sh +++ /dev/null @@ -1,151 +0,0 @@ -#!/usr/bin/env bash - -create_env() { - echo "[ENV FILE] creating .env file..." 
- # check if file exists - if [ -f "/var/interlegis/sapl/data/secret.key" ]; then - KEY=`cat /var/interlegis/sapl/data/secret.key` - else - KEY=`python3 genkey.py` - echo $KEY > data/secret.key - fi - - FILENAME="/var/interlegis/sapl/sapl/.env" - - if [ -z "${DATABASE_URL:-}" ]; then - DATABASE_URL="postgresql://sapl:sapl@sapldb:5432/sapl" - fi - - # ALWAYS replace the content of .env variable - # If want to conditionally create only if absent then use IF below - # if [ ! -f $FILENAME ]; then - - touch $FILENAME - - # explicitly use '>' to erase any previous content - echo "SECRET_KEY="$KEY > $FILENAME - # now only appends - echo "DATABASE_URL = "$DATABASE_URL >> $FILENAME - echo "DEBUG = ""${DEBUG-False}" >> $FILENAME - echo "EMAIL_USE_TLS = ""${USE_TLS-True}" >> $FILENAME - echo "EMAIL_PORT = ""${EMAIL_PORT-587}" >> $FILENAME - echo "EMAIL_HOST = ""${EMAIL_HOST-''}" >> $FILENAME - echo "EMAIL_HOST_USER = ""${EMAIL_HOST_USER-''}" >> $FILENAME - echo "EMAIL_HOST_PASSWORD = ""${EMAIL_HOST_PASSWORD-''}" >> $FILENAME - echo "EMAIL_SEND_USER = ""${EMAIL_HOST_USER-''}" >> $FILENAME - echo "DEFAULT_FROM_EMAIL = ""${EMAIL_HOST_USER-''}" >> $FILENAME - echo "SERVER_EMAIL = ""${EMAIL_HOST_USER-''}" >> $FILENAME - echo "USE_SOLR = ""${USE_SOLR-False}" >> $FILENAME - echo "SOLR_COLLECTION = ""${SOLR_COLLECTION-sapl}" >> $FILENAME - echo "SOLR_URL = ""${SOLR_URL-http://localhost:8983}" >> $FILENAME - echo "IS_ZK_EMBEDDED = ""${IS_ZK_EMBEDDED-False}" >> $FILENAME - echo "ENABLE_SAPN = ""${ENABLE_SAPN-False}" >> $FILENAME - - echo "[ENV FILE] done." 
-} - -create_env - -/bin/bash wait-for-pg.sh $DATABASE_URL - -### -### This is required for compability with newer versions of psycopg2 lib -### -echo "Setting database timezone to UTC" -psql $DATABASE_URL -c 'SET TIME ZONE UTC;' - -yes yes | python3 manage.py migrate - - -## SOLR -USE_SOLR="${USE_SOLR:=False}" -SOLR_URL="${SOLR_URL:=http://admin:solr@localhost:8983}" -SOLR_COLLECTION="${SOLR_COLLECTION:=sapl}" -NUM_SHARDS=${NUM_SHARDS:=1} -RF=${RF:=1} -MAX_SHARDS_PER_NODE=${MAX_SHARDS_PER_NODE:=1} -IS_ZK_EMBEDDED="${IS_ZK_EMBEDDED:=False}" - -if [ "${USE_SOLR-False}" == "True" ] || [ "${USE_SOLR-False}" == "true" ]; then - - echo "Solr configurations" - echo "===================" - echo "URL: $SOLR_URL" - echo "COLLECTION: $SOLR_COLLECTION" - echo "NUM_SHARDS: $NUM_SHARDS" - echo "REPLICATION FACTOR: $RF" - echo "MAX SHARDS PER NODE: $MAX_SHARDS_PER_NODE" - echo "ASSUME ZK EMBEDDED: $IS_ZK_EMBEDDED" - echo "=========================================" - - echo "running Solr script" - /bin/bash wait-for-solr.sh $SOLR_URL - CHECK_SOLR_RETURN=$? - - if [ $CHECK_SOLR_RETURN == 1 ]; then - echo "Connecting to Solr..." - - - if [ "${IS_ZK_EMBEDDED-False}" == "True" ] || [ "${IS_ZK_EMBEDDED-False}" == "true" ]; then - ZK_EMBEDDED="--embedded_zk" - echo "Assuming embedded ZooKeeper instalation..." - fi - - python3 solr_cli.py -u $SOLR_URL -c $SOLR_COLLECTION -s $NUM_SHARDS -rf $RF -ms $MAX_SHARDS_PER_NODE $ZK_EMBEDDED & - # Enable SOLR switch on, creating if it doesn't exist on database - ./manage.py waffle_switch SOLR_SWITCH on --create - else - echo "Solr is offline, not possible to connect." - # Disable Solr switch off, creating if it doesn't exist on database - ./manage.py waffle_switch SOLR_SWITCH off --create - fi - -else - echo "Solr support is not initialized." 
- # Disable Solr switch off, creating if it doesn't exist on database - ./manage.py waffle_switch SOLR_SWITCH off --create -fi - -## Enable/Disable SAPN -if [ "${ENABLE_SAPN-False}" == "True" ] || [ "${ENABLE_SAPN-False}" == "true" ]; then - echo "Enabling SAPN" - ./manage.py waffle_switch SAPLN_SWITCH on --create -else - echo "Enabling SAPL" - ./manage.py waffle_switch SAPLN_SWITCH off --create -fi - - -echo "Creating admin user..." - -user_created=$(python3 create_admin.py 2>&1) - -echo $user_created - -cmd=$(echo $user_created | grep 'ADMIN_USER_EXISTS') -user_exists=$? - -cmd=$(echo $user_created | grep 'MISSING_ADMIN_PASSWORD') -lack_pwd=$? - -if [ $user_exists -eq 0 ]; then - echo "[SUPERUSER CREATION] User admin already exists. Not creating" -fi - -if [ $lack_pwd -eq 0 ]; then - echo "[SUPERUSER] Environment variable $ADMIN_PASSWORD for superuser admin was not set. Leaving container" - # return -1 -fi - - -echo "-------------------------------------" -echo "| ███████╗ █████╗ ██████╗ ██╗ |" -echo "| ██╔════╝██╔══██╗██╔══██╗██║ |" -echo "| ███████╗███████║██████╔╝██║ |" -echo "| ╚════██║██╔══██║██╔═══╝ ██║ |" -echo "| ███████║██║ ██║██║ ███████╗ |" -echo "| ╚══════╝╚═╝ ╚═╝╚═╝ ╚══════╝ |" -echo "-------------------------------------" - -gunicorn -c gunicorn.conf.py & -/usr/sbin/nginx -g "daemon off;" diff --git a/docker/create_admin.py b/docker/startup_scripts/create_admin.py similarity index 100% rename from docker/create_admin.py rename to docker/startup_scripts/create_admin.py diff --git a/docker/genkey.py b/docker/startup_scripts/genkey.py similarity index 100% rename from docker/genkey.py rename to docker/startup_scripts/genkey.py diff --git a/docker/gunicorn.conf.py b/docker/startup_scripts/gunicorn.conf.py similarity index 78% rename from docker/gunicorn.conf.py rename to docker/startup_scripts/gunicorn.conf.py index 217b54638..2272eb15e 100644 --- a/docker/gunicorn.conf.py +++ b/docker/startup_scripts/gunicorn.conf.py @@ -6,11 +6,11 @@ import 
multiprocessing # ---- SAPL app configuration ---- NAME = "SAPL" -DJANGODIR = "/var/interlegis/sapl/" -SOCKFILE = "/var/interlegis/sapl/run/gunicorn.sock" -# USER = os.getenv("RUN_AS_USER", os.getenv("USER", "nginx")) -# GROUP = os.getenv("RUN_AS_GROUP", USER) -NUM_WORKERS = 11 # keep your explicit value +DJANGODIR = "/var/interlegis/sapl" +SOCKFILE = f"{DJANGODIR}/run/gunicorn.sock" +USER = "sapl" +GROUP = "nginx" +NUM_WORKERS = 2 * multiprocessing.cpu_count() + 1 # keep your explicit value TIMEOUT = 300 MAX_REQUESTS = 100 DJANGO_SETTINGS = "sapl.settings" @@ -23,6 +23,9 @@ proc_name = NAME # Equivalent of: --bind=unix:... # For quick testing via browser, you can switch to: bind = "0.0.0.0:8000" bind = f"unix:{SOCKFILE}" +umask = 0o007 +user = "sapl" +group = "nginx" # Ensure imports work like in your script’s working dir chdir = DJANGODIR @@ -32,8 +35,9 @@ wsgi_app = WSGI_APP # Logs loglevel = "debug" -errorlog = "-" # send to stderr (so you see it in docker logs or terminal) -accesslog = "-" # send to stdout +errorlog = "-" # send to stderr (so you see it in docker logs or terminal) +accesslog = "-" # send to stdout +capture_output = True # capture print/tracebacks from app # accesslog = "/var/log/sapl/access.log" # errorlog = "/var/log/sapl/error.log" @@ -44,10 +48,6 @@ graceful_timeout = 30 max_requests = MAX_REQUESTS max_requests_jitter = 0 -# Drop privileges (only applies if started as root) -# user = USER -# group = GROUP - # Environment (same as exporting before running) raw_env = [ f"DJANGO_SETTINGS_MODULE={DJANGO_SETTINGS}", diff --git a/docker/solr_cli.py b/docker/startup_scripts/solr_cli.py similarity index 100% rename from docker/solr_cli.py rename to docker/startup_scripts/solr_cli.py diff --git a/docker/startup_scripts/start.sh b/docker/startup_scripts/start.sh new file mode 100755 index 000000000..dfdcc2646 --- /dev/null +++ b/docker/startup_scripts/start.sh @@ -0,0 +1,290 @@ +#!/usr/bin/env bash +set -Eeuo pipefail +IFS=$'\n\t' + 
+DATA_DIR="/var/interlegis/sapl/data" +APP_DIR="/var/interlegis/sapl/sapl" +ENV_FILE="$APP_DIR/.env" +SECRET_FILE="$DATA_DIR/secret.key" + +mkdir -p "$DATA_DIR" "$APP_DIR" + +log() { printf '[%s] %s\n' "$(date -Is)" "$*"; } +err() { printf '[%s] ERROR: %s\n' "$(date -Is)" "$*" >&2; } + +cleanup() { jobs -p | xargs -r kill 2>/dev/null || true; } +trap cleanup TERM INT EXIT + +# --- new function --- +configure_pg_timezone() { + : "${DATABASE_URL:=postgresql://sapl:sapl@sapldb:5432/sapl}" + : "${DB_TIMEZONE:=America/Sao_Paulo}" + : "${DB_NAME:=}" + : "${DB_ROLE:=}" + + log "Checking database/role timezone defaults…" + /bin/bash wait-for-pg.sh "$DATABASE_URL" + + # Detect DB and role if not provided + if [[ -z "$DB_NAME" ]]; then + DB_NAME="$(psql "$DATABASE_URL" -At -v ON_ERROR_STOP=1 -c 'select current_database();')" + fi + if [[ -z "$DB_ROLE" ]]; then + DB_ROLE="$(psql "$DATABASE_URL" -At -v ON_ERROR_STOP=1 -c 'select current_user;')" + fi + + # What is the effective timezone for this DB/role right now? + current_tz="$(psql "$DATABASE_URL" -At -v ON_ERROR_STOP=1 -c 'show time zone;')" + current_tz_lower="${current_tz,,}" + + # Consider these as already UTC + if [[ "$current_tz_lower" == "utc" || "$current_tz_lower" == "etc/utc" ]]; then + log "Timezone already UTC for DB='$DB_NAME' ROLE='$DB_ROLE' (SHOW TIME ZONE => $current_tz). Skipping ALTERs." + return + fi + + log "Timezone is '$current_tz' (not UTC). Applying persistent defaults…" + + # Persist at database level (requires DB owner or superuser) + if psql "$DATABASE_URL" -v ON_ERROR_STOP=1 -q \ + -c "ALTER DATABASE \"$DB_NAME\" SET timezone TO '$DB_TIMEZONE';"; then + log "ALTER DATABASE \"$DB_NAME\" SET timezone TO '$DB_TIMEZONE' applied." + else + err "ALTER DATABASE \"$DB_NAME\" failed. Need DB owner or superuser." 
+ exit 1 + fi + + # Persist at role level (requires superuser) + if psql "$DATABASE_URL" -v ON_ERROR_STOP=1 -q \ + -c "ALTER ROLE \"$DB_ROLE\" SET timezone TO '$DB_TIMEZONE';"; then + log "ALTER ROLE \"$DB_ROLE\" SET timezone TO '$DB_TIMEZONE' applied." + else + err "ALTER ROLE \"$DB_ROLE\" failed. Need superuser privileges." + exit 1 + fi + + # Re-check (new session shows the new default) + verify_tz="$(psql "$DATABASE_URL" -At -v ON_ERROR_STOP=1 -c 'show time zone;')" + log "SHOW TIME ZONE now => $verify_tz (new sessions will inherit the defaults)." +} + + +create_secret() { + if [[ -f "$SECRET_FILE" ]]; then + SECRET_KEY="$(<"$SECRET_FILE")" + else + log "Generating SECRET_KEY..." + SECRET_KEY="$(python3 genkey.py)" + umask 177 + printf '%s\n' "$SECRET_KEY" > "$SECRET_FILE" + chmod 600 "$SECRET_FILE" + fi + export SECRET_KEY +} + +write_env_file() { + : "${DATABASE_URL:=postgresql://sapl:sapl@sapldb:5432/sapl}" + : "${DEBUG:=False}" + : "${EMAIL_USE_TLS:=True}" + : "${EMAIL_PORT:=587}" + : "${EMAIL_HOST:=}" + : "${EMAIL_HOST_USER:=}" + : "${EMAIL_HOST_PASSWORD:=}" + : "${DEFAULT_FROM_EMAIL:=$EMAIL_HOST_USER}" + : "${SERVER_EMAIL:=$EMAIL_HOST_USER}" + : "${USE_SOLR:=False}" + : "${SOLR_COLLECTION:=sapl}" + : "${SOLR_URL:=http://localhost:8983}" + : "${IS_ZK_EMBEDDED:=False}" + : "${NUM_SHARDS:=1}" + : "${RF:=1}" + : "${MAX_SHARDS_PER_NODE:=1}" + : "${ENABLE_SAPN:=False}" + + tmp="$(mktemp)" + { + printf 'SECRET_KEY=%s\n' "$SECRET_KEY" + printf 'DATABASE_URL=%s\n' "$DATABASE_URL" + printf 'DEBUG=%s\n' "$DEBUG" + printf 'EMAIL_USE_TLS=%s\n' "$EMAIL_USE_TLS" + printf 'EMAIL_PORT=%s\n' "$EMAIL_PORT" + printf 'EMAIL_HOST=%s\n' "$EMAIL_HOST" + printf 'EMAIL_HOST_USER=%s\n' "$EMAIL_HOST_USER" + printf 'EMAIL_HOST_PASSWORD=%s\n' "$EMAIL_HOST_PASSWORD" + printf 'EMAIL_SEND_USER=%s\n' "$EMAIL_HOST_USER" + printf 'DEFAULT_FROM_EMAIL=%s\n' "$DEFAULT_FROM_EMAIL" + printf 'SERVER_EMAIL=%s\n' "$SERVER_EMAIL" + printf 'USE_SOLR=%s\n' "$USE_SOLR" + printf 'SOLR_COLLECTION=%s\n' 
"$SOLR_COLLECTION" + printf 'SOLR_URL=%s\n' "$SOLR_URL" + printf 'IS_ZK_EMBEDDED=%s\n' "$IS_ZK_EMBEDDED" + printf 'NUM_SHARDS=%s\n' "$NUM_SHARDS" + printf 'RF=%s\n' "$RF" + printf 'MAX_SHARDS_PER_NODE=%s\n' "$MAX_SHARDS_PER_NODE" + printf 'ENABLE_SAPN=%s\n' "$ENABLE_SAPN" + } > "$tmp" + + chmod 600 "$tmp" + mv -f "$tmp" "$ENV_FILE" + log "[ENV] wrote $ENV_FILE" +} + +wait_for_pg() { + : "${DATABASE_URL:=postgresql://sapl:sapl@sapldb:5432/sapl}" + log "Waiting for Postgres..." + /bin/bash wait-for-pg.sh "$DATABASE_URL" +} + +migrate_db() { + log "Running Django migrations..." + python3 manage.py migrate --noinput +} + +# In start.sh (near your other helpers) +configure_solr() { + # respect envs, with sane defaults + local USE="${USE_SOLR:-False}" + local URL="${SOLR_URL:-http://admin:solr@localhost:8983}" + local COL="${SOLR_COLLECTION:-sapl}" + local SHARDS="${NUM_SHARDS:-1}" + local RF="${RF:-1}" + local MS="${MAX_SHARDS_PER_NODE:-1}" + local IS_ZK="${IS_ZK_EMBEDDED:-False}" + + # total wait time before we give up (seconds) + local WAIT_TIMEOUT="${SOLR_WAIT_TIMEOUT:-30}" + # per probe max seconds + local PROBE_TIMEOUT="${SOLR_PROBE_TIMEOUT:-3}" + # sleep between probes + local SLEEP_SECS="${SOLR_WAIT_INTERVAL:-2}" + + # feature flag OFF by default unless we confirm Solr + ./manage.py waffle_switch SOLR_SWITCH off --create || true + + # Fast exit if disabled + if [[ "${USE,,}" != "true" ]]; then + echo "[SOLR] USE_SOLR=$USE → skipping Solr initialization." + return 0 + fi + + echo "[SOLR] Best-effort wait (<= ${WAIT_TIMEOUT}s): $URL, collection=$COL" + + local deadline=$((SECONDS + WAIT_TIMEOUT)) + while (( SECONDS < deadline )); do + # Try a cheap SolrCloud endpoint; swap for /solr/admin/info/system if you prefer + if curl -fsS --max-time "${PROBE_TIMEOUT}" \ + "${URL%/}/solr/admin/collections?action=LIST" >/dev/null; then + echo "[SOLR] Reachable. 
Kicking off background configuration…" + + # optional flag if ZK is embedded + local ZK_FLAG="" + if [[ "${IS_ZK,,}" == "true" ]]; then + ZK_FLAG="--embedded_zk" + fi + + ( + set -Eeuo pipefail + python3 solr_cli.py \ + -u "$URL" -c "$COL" -s "$SHARDS" -rf "$RF" -ms "$MS" $ZK_FLAG + ./manage.py waffle_switch SOLR_SWITCH on --create + echo "[SOLR] Configuration done, SOLR_SWITCH=on." + ) >/var/log/sapl/solr_init.log 2>&1 & disown + + return 0 + fi + sleep "${SLEEP_SECS}" + done + + echo "[SOLR] Not reachable within ${WAIT_TIMEOUT}s. Proceeding without Solr (SOLR_SWITCH=off)." + return 0 +} + +configure_sapn() { + if [[ "${ENABLE_SAPN,,}" == "true" ]]; then + log "Enabling SAPN" + python3 manage.py waffle_switch SAPN_SWITCH on --create + else + log "Disabling SAPN" + python3 manage.py waffle_switch SAPN_SWITCH off --create + fi +} + +create_admin() { + log "Creating admin user..." + out="$(python3 create_admin.py 2>&1 || true)" + printf '%s\n' "$out" + + if grep -q 'MISSING_ADMIN_PASSWORD' <<<"$out"; then + err "[SUPERUSER] ADMIN_PASSWORD not set. Exiting." 
+ exit 1 + fi +} + +fix_logging_and_socket_perms() { + local APP_DIR="/var/interlegis/sapl" + local LOG_FILE="$APP_DIR/sapl.log" + + # dirs + mkdir -p "$APP_DIR/run" + chown -R root:nginx "$APP_DIR" + chmod 2775 "$APP_DIR" "$APP_DIR/run" + chmod -R g+rwX "$APP_DIR" + + # new files/sockets → 660 + umask 0007 + + # ensure log file is owned by sapl and writable + install -Dm0660 /dev/null "$LOG_FILE" + chown sapl:nginx "$LOG_FILE" + + # stale socket cleanup (if any) + rm -f "$APP_DIR/run/gunicorn.sock" 2>/dev/null || true +} + +setup_cache_dir() { + # if you later move cache under /var/interlegis/sapl/cache, this line can read an env var + local CACHE_DIR="${DJANGO_CACHE_DIR:-/var/tmp/django_cache}" + + mkdir -p "$CACHE_DIR" + chown -R sapl:nginx "$CACHE_DIR" + chmod -R 2775 "$CACHE_DIR" + find "$CACHE_DIR" -type d -exec chmod g+s {} + + + # keep your global umask; 0007 ensures new files are rw for owner+group + umask 0007 +} + +start_services() { + log "Starting gunicorn..." + gunicorn -c gunicorn.conf.py & + log "Starting nginx..." 
+ exec /usr/sbin/nginx -g "daemon off;" +} + +main() { + create_secret + write_env_file + wait_for_pg + configure_pg_timezone + migrate_db + configure_solr || true + configure_sapn + create_admin + setup_cache_dir + fix_logging_and_socket_perms + + cat <<'BANNER' +------------------------------------- +| ███████╗ █████╗ ██████╗ ██╗ | +| ██╔════╝██╔══██╗██╔══██╗██║ | +| ███████╗███████║██████╔╝██║ | +| ╚════██║██╔══██║██╔═══╝ ██║ | +| ███████║██║ ██║██║ ███████╗ | +| ╚══════╝╚═╝ ╚═╝╚═╝ ╚══════╝ | +------------------------------------- +BANNER + + start_services +} + +main "$@" diff --git a/docker/wait-for-pg.sh b/docker/startup_scripts/wait-for-pg.sh similarity index 100% rename from docker/wait-for-pg.sh rename to docker/startup_scripts/wait-for-pg.sh diff --git a/docker/wait-for-solr.sh b/docker/startup_scripts/wait-for-solr.sh similarity index 100% rename from docker/wait-for-solr.sh rename to docker/startup_scripts/wait-for-solr.sh diff --git a/release.sh b/release.sh index 3ee43b85a..e2928980b 100755 --- a/release.sh +++ b/release.sh @@ -76,9 +76,6 @@ function set_rc_version { fi FINAL_VERSION=$NEXT_RC_VERSION -## DEBUG -# echo "OLD_VERSION: $OLD_VERSION" -# echo "FINAL_VERSION: $FINAL_VERSION" } # Function to display Yes/No prompt with colored message diff --git a/sapl/base/email_utils.py b/sapl/base/email_utils.py index 3ccf64690..2781e9876 100644 --- a/sapl/base/email_utils.py +++ b/sapl/base/email_utils.py @@ -21,7 +21,7 @@ def load_email_templates(templates, context={}): tpl = loader.get_template(t) email = tpl.render(context) if t.endswith(".html"): - email = email.replace('\n', '').replace('\r', '') + email = email.replace('\\n', '').replace('\r', '') emails.append(email) return emails diff --git a/sapl/base/search_indexes.py b/sapl/base/search_indexes.py index e76f00168..d0432aded 100644 --- a/sapl/base/search_indexes.py +++ b/sapl/base/search_indexes.py @@ -108,7 +108,7 @@ class TextExtractField(CharField): continue data += getattr(self, func)(value) + 
' ' - data = data.replace('\n', ' ') + data = data.replace('\n', ' ') return data diff --git a/sapl/base/templatetags/common_tags.py index 9a60bf85e..84f61b64f 100644 --- a/sapl/base/templatetags/common_tags.py +++ b/sapl/base/templatetags/common_tags.py @@ -300,7 +300,7 @@ def youtube_url(value): # Test if YouTube video # tested on https://pythex.org/ value = value.lower() - youtube_pattern = "^((https?://)?(www\.)?youtube\.com\/watch\?v=)" + youtube_pattern = r"^((https?://)?(www\.)?youtube\.com\/watch\?v=)" r = re.findall(youtube_pattern, value) return True if r else False @@ -308,7 +308,7 @@ def youtube_url(value): @register.filter def facebook_url(value): value = value.lower() - facebook_pattern = "^((https?://)?((www|pt-br)\.)?facebook\.com(\/.+)?\/videos(\/.*)?)" + facebook_pattern = r"^((https?://)?((www|pt-br)\.)?facebook\.com(\/.+)?\/videos(\/.*)?)" r = re.findall(facebook_pattern, value) return True if r else False diff --git a/sapl/compilacao/forms.py index 18c24cf1f..c4154ec00 100644 --- a/sapl/compilacao/forms.py +++ b/sapl/compilacao/forms.py @@ -987,7 +987,7 @@ class DispositivoEdicaoVigenciaForm(ModelForm): p.pk, _('%s realizada em %s. 
%s') % ( p.tipo_publicacao, defaultfilters.date( - p.data, "d \d\e F \d\e Y"), + p.data, r"d \d\e F \d\e Y"), str(p.ta))) for p in pubs] dvs = Dispositivo.objects.order_by('ordem').filter( diff --git a/sapl/compilacao/models.py b/sapl/compilacao/models.py index 7b54fcb67..e8a32615e 100644 --- a/sapl/compilacao/models.py +++ b/sapl/compilacao/models.py @@ -287,7 +287,7 @@ class TextoArticulado(TimestampedMixin): return _('%(tipo)s nº %(numero)s, de %(data)s') % { 'tipo': self.tipo_ta, 'numero': numero, - 'data': defaultfilters.date(self.data, "d \d\e F \d\e Y").lower()} + 'data': defaultfilters.date(self.data, r"d \d\e F \d\e Y").lower()} def hash(self): from django.core import serializers @@ -943,7 +943,7 @@ class Publicacao(TimestampedMixin): def __str__(self): return _('%s realizada em %s \n %s') % ( self.tipo_publicacao, - defaultfilters.date(self.data, "d \d\e F \d\e Y"), + defaultfilters.date(self.data, r"d \d\e F \d\e Y"), self.ta) diff --git a/sapl/lexml/forms.py b/sapl/lexml/forms.py index 7904508af..8fd59b6ac 100644 --- a/sapl/lexml/forms.py +++ b/sapl/lexml/forms.py @@ -31,7 +31,7 @@ class LexmlProvedorForm(ModelForm): return cd if cd["xml"]: - xml = re.sub("\n|\t", "", cd["xml"].strip()) + xml = re.sub(r"\n|\t", "", cd["xml"].strip()) validar_xml(xml) validar_schema(xml) diff --git a/sapl/lexml/models.py b/sapl/lexml/models.py index 1a5d27144..e892877fe 100644 --- a/sapl/lexml/models.py +++ b/sapl/lexml/models.py @@ -25,7 +25,7 @@ class LexmlProvedor(models.Model): # LexmlRegistroProvedor def pretty_xml(self): import html safe_xml = html.escape(self.xml) - return safe_xml.replace('\n', '
').replace(' ', ' ') + return safe_xml.replace('\n', '
').replace(' ', ' ') class Meta: verbose_name = _('Provedor Lexml') diff --git a/sapl/materia/forms.py b/sapl/materia/forms.py index 7f3207214..c685109ae 100644 --- a/sapl/materia/forms.py +++ b/sapl/materia/forms.py @@ -1716,7 +1716,7 @@ class TramitacaoEmLoteForm(ModelForm): ('texto', 12) ]) - documentos_checkbox_HTML = ''' + documentos_checkbox_HTML = r'''
Selecione as matérias para tramitação: diff --git a/sapl/materia/models.py b/sapl/materia/models.py index 56c56d645..da2e0ac91 100644 --- a/sapl/materia/models.py +++ b/sapl/materia/models.py @@ -1,4 +1,3 @@ - from datetime import datetime from django.contrib.auth.models import Group @@ -21,8 +20,7 @@ from sapl.utils import (RANGE_ANOS, YES_NO_CHOICES, SaplGenericForeignKey, texto_upload_path, get_settings_auth_user_model, OverwriteStorage) - -#from sapl.protocoloadm.models import Protocolo +# from sapl.protocoloadm.models import Protocolo EM_TRAMITACAO = [(1, 'Sim'), (0, 'Não')] @@ -185,7 +183,6 @@ def anexo_upload_path(instance, filename): class MateriaLegislativa(models.Model): - tipo = models.ForeignKey( TipoMateriaLegislativa, on_delete=models.PROTECT, @@ -282,7 +279,7 @@ class MateriaLegislativa(models.Model): Autor, through='Autoria', through_fields=('materia', 'autor'), - symmetrical=False,) + symmetrical=False, ) data_ultima_atualizacao = models.DateTimeField( blank=True, null=True, @@ -325,7 +322,7 @@ class MateriaLegislativa(models.Model): 'numero': self.numero, 'data': defaultfilters.date( self.data_apresentacao, - "d \d\e F \d\e Y" + r"d \d\e F \d\e Y" )} def data_entrada_protocolo(self): @@ -400,7 +397,7 @@ class Autoria(models.Model): class Meta: verbose_name = _('Autoria') verbose_name_plural = _('Autorias') - unique_together = (('autor', 'materia'), ) + unique_together = (('autor', 'materia'),) ordering = ('-primeiro_autor', 'autor__nome') def __str__(self): @@ -456,9 +453,9 @@ class PautaReuniao(models.Model): def __str__(self): return _('Reunião: %(reuniao)s' ' - Matéria: %(materia)s') % { - 'reuniao': self.reuniao, - 'materia': self.materia - } + 'reuniao': self.reuniao, + 'materia': self.materia + } class Anexada(models.Model): @@ -482,8 +479,8 @@ class Anexada(models.Model): def __str__(self): return _('Principal: %(materia_principal)s' ' - Anexada: %(materia_anexada)s') % { - 'materia_principal': self.materia_principal, - 'materia_anexada': 
self.materia_anexada} + 'materia_principal': self.materia_principal, + 'materia_anexada': self.materia_anexada} class AssuntoMateria(models.Model): @@ -755,7 +752,6 @@ class Parecer(models.Model): class Proposicao(models.Model): - autor = models.ForeignKey( Autor, null=True, @@ -978,13 +974,13 @@ class Proposicao(models.Model): return '%s nº _____ %s' % ( self.tipo, formats.date_format( self.data_envio if self.data_envio else timezone.now(), - "\d\e d \d\e F \d\e Y")) + r"\d\e d \d\e F \d\e Y")) class Meta: ordering = ['-data_recebimento'] verbose_name = _('Proposição') verbose_name_plural = _('Proposições') - unique_together = (('content_type', 'object_id'), ) + unique_together = (('content_type', 'object_id'),) permissions = ( ('detail_proposicao_enviada', _('Pode acessar detalhes de uma proposição enviada.')), @@ -1016,7 +1012,7 @@ class Proposicao(models.Model): 'numero': self.numero_proposicao, 'data': defaultfilters.date( self.data_envio if self.data_envio else timezone.now(), - "d \d\e F \d\e Y" + r"d \d\e F \d\e Y" )} def delete(self, using=None, keep_parents=False): diff --git a/sapl/materia/urls.py b/sapl/materia/urls.py index 272970b82..80d909e79 100644 --- a/sapl/materia/urls.py +++ b/sapl/materia/urls.py @@ -143,7 +143,7 @@ urlpatterns_proposicao = [ url(r'^proposicao/devolvida/', ProposicaoDevolvida.as_view(), name='proposicao-devolvida'), url(r'^proposicao/confirmar/P(?P[0-9A-Fa-f]+)/' - '(?P\d+)', ConfirmarProposicao.as_view(), + r'(?P\d+)', ConfirmarProposicao.as_view(), name='proposicao-confirmar'), url(r'^sistema/proposicao/tipo/', include(TipoProposicaoCrud.get_urls())), diff --git a/sapl/norma/forms.py b/sapl/norma/forms.py index 5380b2993..3116f96c1 100644 --- a/sapl/norma/forms.py +++ b/sapl/norma/forms.py @@ -224,7 +224,7 @@ class NormaJuridicaForm(FileFieldCheckMixin, ModelForm): return cleaned_data import re - has_digits = re.sub('[^0-9]', '', cleaned_data['numero']) + has_digits = re.sub(r'[^0-9]', '', cleaned_data['numero']) if not 
has_digits: self.logger.error("Número de norma ({}) não pode conter somente letras.".format( cleaned_data['numero'])) diff --git a/sapl/norma/models.py b/sapl/norma/models.py index df672294d..668f93347 100644 --- a/sapl/norma/models.py +++ b/sapl/norma/models.py @@ -263,7 +263,7 @@ class NormaJuridica(models.Model): 'tipo': self.tipo, 'orgao_sigla': f'-{self.orgao.sigla}' if self.orgao else '', 'numero': numero_norma, - 'data': defaultfilters.date(self.data, "d \d\e F \d\e Y").lower()} + 'data': defaultfilters.date(self.data, r"d \d\e F \d\e Y").lower()} @property def epigrafe(self): @@ -278,7 +278,7 @@ class NormaJuridica(models.Model): return _('%(tipo)s nº %(numero)s, de %(data)s') % { 'tipo': self.tipo, 'numero': numero_norma, - 'data': defaultfilters.date(self.data, "d \d\e F \d\e Y").lower()} + 'data': defaultfilters.date(self.data, r"d \d\e F \d\e Y").lower()} def delete(self, using=None, keep_parents=False): texto_integral = self.texto_integral diff --git a/sapl/norma/views.py b/sapl/norma/views.py index d746483e9..e0e8e923c 100644 --- a/sapl/norma/views.py +++ b/sapl/norma/views.py @@ -494,7 +494,7 @@ def recuperar_numero_norma(request): norma = NormaJuridica.objects.filter(**param).order_by( 'tipo', 'ano', 'numero').values_list('numero', flat=True) if norma: - numeros = sorted([int(re.sub("[^0-9].*", '', n)) for n in norma]) + numeros = sorted([int(re.sub(r"[^0-9].*", '', n)) for n in norma]) next_num = numeros.pop() + 1 response = JsonResponse({'numero': next_num, 'ano': param['ano']}) diff --git a/sapl/protocoloadm/forms.py b/sapl/protocoloadm/forms.py index 58223f21c..58cd07568 100644 --- a/sapl/protocoloadm/forms.py +++ b/sapl/protocoloadm/forms.py @@ -1110,7 +1110,7 @@ class DocumentoAdministrativoForm(FileFieldCheckMixin, ModelForm): numero_protocolo = self.data['numero_protocolo'] ano_protocolo = self.data['ano_protocolo'] - complemento = re.sub('\s+', '', self.data['complemento']).upper() + complemento = re.sub(r'\s+', '', 
self.data['complemento']).upper() numero_documento = int(self.cleaned_data['numero']) tipo_documento = int(self.data['tipo']) ano_documento = int(self.data['ano']) @@ -1558,7 +1558,7 @@ class TramitacaoEmLoteAdmForm(ModelForm): ('texto', 12) ]) - documentos_checkbox_HTML = ''' + documentos_checkbox_HTML = r'''
Selecione os documentos para tramitação: diff --git a/sapl/protocoloadm/views.py b/sapl/protocoloadm/views.py index 1b572d792..d2fbcb08e 100755 --- a/sapl/protocoloadm/views.py +++ b/sapl/protocoloadm/views.py @@ -448,7 +448,7 @@ class DocumentoAdministrativoCrud(Crud): def form_valid(self, form): form.instance.complemento = re.sub( - '\s+', '', form.instance.complemento).upper() + r'\s+', '', form.instance.complemento).upper() return super().form_valid(form) class UpdateView(Crud.UpdateView): @@ -481,7 +481,7 @@ class DocumentoAdministrativoCrud(Crud): break form.instance.complemento = re.sub( - '\s+', '', form.instance.complemento).upper() + r'\s+', '', form.instance.complemento).upper() return super().form_valid(form) diff --git a/sapl/relatorios/views.py b/sapl/relatorios/views.py index 99a299abb..ad7e794c6 100755 --- a/sapl/relatorios/views.py +++ b/sapl/relatorios/views.py @@ -612,13 +612,13 @@ def get_sessao_plenaria(sessao, casa, user): if not is_empty(conteudo): # unescape HTML codes # https://github.com/interlegis/sapl/issues/1046 - conteudo = re.sub('style=".*?"', '', conteudo) - conteudo = re.sub('class=".*?"', '', conteudo) + conteudo = re.sub(r'style=".*?"', '', conteudo) + conteudo = re.sub(r'class=".*?"', '', conteudo) # OSTicket Ticket #796450 - conteudo = re.sub('align=".*?"', '', conteudo) - conteudo = re.sub('', '

', conteudo) + conteudo = re.sub(r'align=".*?"', '', conteudo) + conteudo = re.sub(r'', '

', conteudo) # OSTicket Ticket #796450 - conteudo = re.sub('', '
', conteudo) + conteudo = re.sub(r'', '
', conteudo) conteudo = html.unescape(conteudo) # escape special character '&' @@ -874,7 +874,7 @@ def get_sessao_plenaria(sessao, casa, user): # unescape HTML codes # https://github.com/interlegis/sapl/issues/1046 - conteudo = re.sub('style=".*?"', '', conteudo) + conteudo = re.sub(r'style=".*?"', '', conteudo) conteudo = html.unescape(conteudo) # escape special character '&' @@ -894,7 +894,7 @@ def get_sessao_plenaria(sessao, casa, user): # unescape HTML codes # https://github.com/interlegis/sapl/issues/1046 - conteudo = re.sub('style=".*?"', '', conteudo) + conteudo = re.sub(r'style=".*?"', '', conteudo) conteudo = html.unescape(conteudo) # escape special character '&' @@ -1321,13 +1321,13 @@ def get_pauta_sessao(sessao, casa): if not is_empty(conteudo): # unescape HTML codes # https://github.com/interlegis/sapl/issues/1046 - conteudo = re.sub('style=".*?"', '', conteudo) - conteudo = re.sub('class=".*?"', '', conteudo) + conteudo = re.sub(r'style=".*?"', '', conteudo) + conteudo = re.sub(r'class=".*?"', '', conteudo) # OSTicket Ticket #796450 - conteudo = re.sub('align=".*?"', '', conteudo) - conteudo = re.sub('', '

', conteudo) + conteudo = re.sub(r'align=".*?"', '', conteudo) + conteudo = re.sub(r'', '

', conteudo) # OSTicket Ticket #796450 - conteudo = re.sub('', '
', conteudo) + conteudo = re.sub(r'', '
', conteudo) conteudo = html.unescape(conteudo) # escape special character '&' diff --git a/sapl/settings.py b/sapl/settings.py index 1bd8b715b..4b749790b 100644 --- a/sapl/settings.py +++ b/sapl/settings.py @@ -114,6 +114,10 @@ USE_SOLR = config('USE_SOLR', cast=bool, default=False) SOLR_URL = config('SOLR_URL', cast=str, default='http://localhost:8983') SOLR_COLLECTION = config('SOLR_COLLECTION', cast=str, default='sapl') +# FOR HAYSTACK 3.3.1 (Django >= 3) +# SOLR_USER = config('SOLR_USER', cast=str) +# SOLR_PASSWORD = config('SOLR_PASSWORD', cast=str) + if USE_SOLR: HAYSTACK_SIGNAL_PROCESSOR = 'haystack.signals.RealtimeSignalProcessor' # enable auto-index SEARCH_BACKEND = 'haystack.backends.solr_backend.SolrEngine' @@ -126,6 +130,10 @@ HAYSTACK_CONNECTIONS = { SEARCH_URL[0]: SEARCH_URL[1], 'BATCH_SIZE': 1000, 'TIMEOUT': 20, + # 'KWARGS': { + # 'timeout': 60, + # 'auth': (SOLR_USER, SOLR_PASSWORD), # <-- for basic auth + # }, }, } @@ -196,7 +204,7 @@ CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', 'LOCATION': '/var/tmp/django_cache', - 'OPTIONS': {"MAX_ENTRIES": 1000}, + 'OPTIONS': {"MAX_ENTRIES": 10000}, } } @@ -401,59 +409,22 @@ LOGGING_CONSOLE_VERBOSE = config( 'LOGGING_CONSOLE_VERBOSE', cast=bool, default=False) LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - - 'filters': { - 'require_debug_false': { - '()': 'django.utils.log.RequireDebugFalse', - }, - 'require_debug_true': { - '()': 'django.utils.log.RequireDebugTrue', - }, + "version": 1, + "disable_existing_loggers": False, + "handlers": { + "console": {"class": "logging.StreamHandler"}, }, - 'formatters': { - 'verbose': { - 'format': '%(levelname)s %(asctime)s ' + host + ' %(pathname)s %(name)s:%(funcName)s:%(lineno)d %(message)s' - }, - 'simple': { - 'format': '%(levelname)s %(asctime)s - %(message)s' - }, + "root": { # everything falls back here + "handlers": ["console"], + "level": 'DEBUG', }, - 'handlers': { - 'console': { - 'level': 
'INFO', - 'class': 'logging.StreamHandler', - 'filters': ['require_debug_true'], - 'formatter': 'simple', - }, - 'console_verbose': { - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'filters': ['require_debug_true'], - 'formatter': 'verbose', - }, - 'applogfile': { - 'level': 'INFO', - 'class': 'logging.handlers.RotatingFileHandler', - 'filename': 'sapl.log', - 'maxBytes': 1024 * 1024 * 15, # 15MB - 'backupCount': 10, - 'formatter': 'verbose', + "loggers": { + "django.request": { # 500s go here + "handlers": ["console"], + "level": "ERROR", + "propagate": False, }, }, - 'loggers': { - 'sapl': { - 'handlers': ['applogfile'] + (['console_verbose'] if LOGGING_CONSOLE_VERBOSE else []), - 'level': 'DEBUG' if LOGGING_CONSOLE_VERBOSE else 'INFO', - 'propagate': True, - }, - 'django': { - 'handlers': ['applogfile'] + (['console_verbose'] if LOGGING_CONSOLE_VERBOSE else []), - 'level': 'ERROR', - 'propagate': True, - }, - } } PASSWORD_HASHERS = [ diff --git a/sapl/utils.py b/sapl/utils.py index cddeb1f49..09a334f72 100644 --- a/sapl/utils.py +++ b/sapl/utils.py @@ -851,7 +851,7 @@ def texto_upload_path(instance, filename, subpath='', pk_first=False): seguida para armazenar o arquivo. """ - filename = re.sub('\s', '_', normalize(filename.strip()).lower()) + filename = re.sub(r'\s', '_', normalize(filename.strip()).lower()) from sapl.materia.models import Proposicao from sapl.protocoloadm.models import DocumentoAdministrativo