Browse Source

Upgrade of Docker image and bump of libs

upgrade-sapl
Edward Ribeiro 2 months ago
parent
commit
cea06d8967
  1. 15
      docker/Dockerfile
  2. 10
      docker/docker-compose.yaml
  3. 74
      docker/gunicorn.conf.py
  4. 50
      docker/gunicorn_start.sh
  5. 3
      docker/simple_gunicorn.sh
  6. 18
      docker/solr_cli.py
  7. 12
      docker/start.sh
  8. 2
      requirements/dev-requirements.txt
  9. 12
      requirements/requirements.txt
  10. 65
      sapl/settings.py

15
docker/Dockerfile

@ -1,4 +1,4 @@
FROM python:3.9-slim-buster
FROM python:3.12-slim-bookworm
# Setup env
ENV LANG C.UTF-8
@ -7,14 +7,17 @@ ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED=1
ENV DEBIAN_FRONTEND noninteractive
ENV BUILD_PACKAGES apt-utils apt-file libpq-dev graphviz-dev build-essential git pkg-config \
ENV BUILD_PACKAGES="apt-utils apt-file libpq-dev graphviz-dev build-essential git pkg-config \
python3-dev libxml2-dev libjpeg-dev libssl-dev libffi-dev libxslt1-dev \
libcairo2-dev software-properties-common python3-setuptools python3-pip
libcairo2-dev libpango1.0-dev libgdk-pixbuf-2.0-dev libharfbuzz-dev \
libfreetype6-dev zlib1g-dev software-properties-common python3-setuptools python3-pip"
## NAO EH PRA TIRAR O vim DA LISTA DE COMANDOS INSTALADOS!!!
ENV RUN_PACKAGES graphviz python3-lxml python3-magic postgresql-client python3-psycopg2 \
ENV RUN_PACKAGES="graphviz python3-lxml python3-magic postgresql-client \
poppler-utils curl jq bash vim python3-venv tzdata nodejs \
fontconfig ttf-dejavu python nginx
fontconfig python3 nginx \
libcairo2 libpango-1.0-0 libpangocairo-1.0-0 libgdk-pixbuf-2.0-0 \
libharfbuzz0b libfreetype6 libjpeg62-turbo zlib1g fonts-dejavu-core"
RUN mkdir -p /var/interlegis/sapl
@ -41,7 +44,7 @@ COPY docker/wait-for-pg.sh $HOME
COPY docker/wait-for-solr.sh $HOME
COPY docker/create_admin.py $HOME
COPY docker/genkey.py $HOME
COPY docker/gunicorn_start.sh $HOME
COPY docker/gunicorn.conf.py $HOME
COPY docker/config/nginx/sapl.conf /etc/nginx/conf.d
COPY docker/config/nginx/nginx.conf /etc/nginx/nginx.conf

10
docker/docker-compose.yaml

@ -10,6 +10,8 @@ services:
POSTGRES_USER: sapl
POSTGRES_DB: sapl
PGDATA: /var/lib/postgresql/data/
TZ: UTC
PG_TZ: UTC
volumes:
- sapldb_data:/var/lib/postgresql/data/
ports:
@ -31,10 +33,10 @@ services:
networks:
- sapl-net
sapl:
image: interlegis/sapl:3.1.164-RC1
# build:
# context: ../
# dockerfile: ./docker/Dockerfile
# image: interlegis/sapl:3.1.164-RC1
build:
context: ../
dockerfile: ./docker/Dockerfile
container_name: sapl
labels:
NAME: "sapl"

74
docker/gunicorn.conf.py

@ -0,0 +1,74 @@
# /var/interlegis/sapl/gunicorn.conf.py
# Gunicorn configuration for the SAPL container; start with:
#   gunicorn -c gunicorn.conf.py
import os
import pathlib
import multiprocessing  # NOTE(review): unused here; presumably kept for sizing workers from CPU count
# ---- SAPL app configuration ----
NAME = "SAPL"
DJANGODIR = "/var/interlegis/sapl/"  # Django project directory
SOCKFILE = "/var/interlegis/sapl/run/gunicorn.sock"  # UNIX socket the app is served on
# USER = os.getenv("RUN_AS_USER", os.getenv("USER", "nginx"))
# GROUP = os.getenv("RUN_AS_GROUP", USER)
NUM_WORKERS = 11  # explicit worker count (deliberately not derived from CPU count)
TIMEOUT = 300  # seconds of silence before a worker is killed and restarted
MAX_REQUESTS = 100  # recycle each worker after this many requests
DJANGO_SETTINGS = "sapl.settings"
WSGI_APP = "sapl.wsgi:application"
# ---- gunicorn settings ----
# Equivalent of: --name
proc_name = NAME
# Equivalent of: --bind=unix:...
# For quick testing via browser, you can switch to: bind = "0.0.0.0:8000"
bind = f"unix:{SOCKFILE}"
# Ensure imports work like in the old start script's working dir
chdir = DJANGODIR
# Allow starting with just: gunicorn -c gunicorn.conf.py
wsgi_app = WSGI_APP
# Logs
loglevel = "debug"
errorlog = "-" # send to stderr (so you see it in docker logs or terminal)
accesslog = "-" # send to stdout
# accesslog = "/var/log/sapl/access.log"
# errorlog = "/var/log/sapl/error.log"
# Worker/process lifecycle
workers = NUM_WORKERS
timeout = TIMEOUT
graceful_timeout = 30
max_requests = MAX_REQUESTS
max_requests_jitter = 0  # no jitter: workers recycle at exactly MAX_REQUESTS
# Drop privileges (only applies if started as root)
# user = USER
# group = GROUP
# Environment (same as exporting before running)
raw_env = [
f"DJANGO_SETTINGS_MODULE={DJANGO_SETTINGS}",
# If you're using ReportLab and seeing segfaults with PDFs, keep this:
# "RL_NOACCEL=1",
]
# If you previously enabled preload and saw segfaults with native libs, keep it off:
preload_app = False
# Make sure the socket's run/ directory exists before the master starts.
def on_starting(server):
    """Gunicorn server hook: create the parent directory of SOCKFILE."""
    run_dir = pathlib.Path(SOCKFILE).parent
    run_dir.mkdir(parents=True, exist_ok=True)
# Close DB connections after fork (safer when using preload or certain DB drivers)
def post_fork(server, worker):
    """Gunicorn worker hook: drop DB connections inherited from the master."""
    try:
        from django import db as django_db
        django_db.connections.close_all()
    except Exception:
        # Django not initialized yet (or not importable) — nothing to close.
        pass

50
docker/gunicorn_start.sh

@ -1,50 +0,0 @@
#!/usr/bin/env bash
##
##
## FOR EXCLUSIVE USE BY THE SAPL DOCKER CONTAINER!!!
## AVOID CALLING IT DIRECTLY
##
##
# Launches gunicorn serving the SAPL Django app on a UNIX socket.
# As seen in http://tutos.readthedocs.org/en/latest/source/ndg.html
SAPL_DIR="/var/interlegis/sapl"
# Use a different directory as the SAPL root
# if one was passed as the first parameter
if [ "$1" ]
then
SAPL_DIR="$1"
fi
# NOTE(review): SAPL_DIR is only used in the echo below; the gunicorn
# paths (DJANGODIR, SOCKFILE) are hard-coded and ignore it.
NAME="SAPL" # Name of the application (*)
DJANGODIR=/var/interlegis/sapl/ # Django project directory (*)
SOCKFILE=/var/interlegis/sapl/run/gunicorn.sock # we will communicate using this unix socket (*)
USER=`whoami` # the user to run as (*)
GROUP=`whoami` # the group to run as (*)
NUM_WORKERS=3 # how many worker processes should Gunicorn spawn (*)
# NUM_WORKERS = 2 * CPUS + 1
TIMEOUT=300
MAX_REQUESTS=100 # number of requests before restarting worker
DJANGO_SETTINGS_MODULE=sapl.settings # which settings file should Django use (*)
DJANGO_WSGI_MODULE=sapl.wsgi # WSGI module name (*)
echo "Starting $NAME as `whoami` on base dir $SAPL_DIR"
# Create the run directory if it doesn't exist
RUNDIR=$(dirname $SOCKFILE)
test -d $RUNDIR || mkdir -p $RUNDIR
# Start your Django Unicorn
# Programs meant to be run under supervisor should not daemonize themselves (do not use --daemon)
exec gunicorn ${DJANGO_WSGI_MODULE}:application \
--name $NAME \
--log-level debug \
--timeout $TIMEOUT \
--workers $NUM_WORKERS \
--max-requests $MAX_REQUESTS \
--user $USER \
--access-logfile /var/log/sapl/access.log \
--error-logfile /var/log/sapl/error.log \
--bind=unix:$SOCKFILE

3
docker/simple_gunicorn.sh

@ -12,4 +12,5 @@ export PYTHONPATH=$DJANGODIR:$PYTHONPATH
# Get eth0 IP and filter out the netmask portion (/24, e.g.)
IP=`ip addr | grep 'inet .* eth0' | awk '{print $2}' | sed 's/\/[0-9]*//'`
gunicorn --bind $IP:8000 sapl.wsgi:application
#gunicorn --bind $IP:8000 sapl.wsgi:application
gunicorn -c gunicorn.conf.py sapl.wsgi:application

18
docker/solr_cli.py

@ -20,6 +20,8 @@ from kazoo.client import KazooClient
#
logging.basicConfig()
logging.captureWarnings(True)
logger = logging.getLogger(__name__)
SECURITY_FILE_TEMPLATE = """
{
@ -49,6 +51,7 @@ def solr_hash_password(password: str, salt: str = None):
salt (optional): base64 salt string
returns: sha256 hash of password and salt (both base64 strings)
"""
logger.debug("Generating Solr password")
m = sha256()
if salt is None:
salt = secrets.token_bytes(32)
@ -67,32 +70,32 @@ def solr_hash_password(password: str, salt: str = None):
def create_security_file(username, password):
print("Creating security.json file...")
logger.info("Creating security.json file...")
with open("security.json", "w") as f:
cypher, salt = solr_hash_password(password)
f.write(SECURITY_FILE_TEMPLATE % (username, cypher, salt, username))
print("file created!")
logger.info("file created!")
def upload_security_file(zk_host):
zk_port = 9983 # embedded ZK port
print(f"Uploading security file to Solr, ZK server={zk_host}:{zk_port}...")
logger.info(f"Uploading security file to Solr, ZK server={zk_host}:{zk_port}...")
try:
with open('security.json', 'r') as f:
data = f.read()
zk = KazooClient(hosts=f"{zk_host}:{zk_port}")
zk.start()
print("Uploading security.json file...")
logger.info("Uploading security.json file...")
if zk.exists('/security.json'):
zk.set("/security.json", str.encode(data))
else:
zk.create("/security.json", str.encode(data))
data, stat = zk.get('/security.json')
print("file uploaded!")
print(data.decode('utf-8'))
logger.info("file uploaded!")
logger.info(data.decode('utf-8'))
zk.stop()
except Exception as e:
print(e)
logger.error(e)
sys.exit(-1)
@ -250,6 +253,7 @@ def setup_embedded_zk(solr_url):
_, solr_user, solr_pwd, solr_host, solr_port = match.groups()
if solr_user and solr_pwd and solr_host:
print(f"Creating Solr user {solr_user} with password {solr_pwd}")
create_security_file(solr_user, solr_pwd)
upload_security_file(solr_host)
else:

12
docker/start.sh

@ -48,12 +48,18 @@ create_env
/bin/bash wait-for-pg.sh $DATABASE_URL
###
### This is required for compatibility with newer versions of the psycopg2 lib
###
echo "Setting database timezone to UTC"
psql $DATABASE_URL -c 'SET TIME ZONE UTC;'
yes yes | python3 manage.py migrate
## SOLR
USE_SOLR="${USE_SOLR:=False}"
SOLR_URL="${SOLR_URL:=http://localhost:8983}"
SOLR_URL="${SOLR_URL:=http://admin:solr@localhost:8983}"
SOLR_COLLECTION="${SOLR_COLLECTION:=sapl}"
NUM_SHARDS=${NUM_SHARDS:=1}
RF=${RF:=1}
@ -131,8 +137,6 @@ if [ $lack_pwd -eq 0 ]; then
# return -1
fi
# Backfilling AuditLog's JSON field
time ./manage.py backfill_auditlog &
echo "-------------------------------------"
echo "| ███████╗ █████╗ ██████╗ ██╗ |"
@ -143,5 +147,5 @@ echo "| ███████║██║ ██║██║ ████
echo "| ╚══════╝╚═╝ ╚═╝╚═╝ ╚══════╝ |"
echo "-------------------------------------"
/bin/sh gunicorn_start.sh &
gunicorn -c gunicorn.conf.py &
/usr/sbin/nginx -g "daemon off;"

2
requirements/dev-requirements.txt

@ -2,7 +2,7 @@
autopep8==1.2.4
beautifulsoup4==4.9.1
django-debug-toolbar==1.11.1
django-debug-toolbar==3.2.1
ipdb==0.13.3
fancycompleter==0.9.1
pdbpp==0.10.3

12
requirements/requirements.txt

@ -2,7 +2,6 @@ django==2.2.28
django-haystack==3.1.1
django-filter==2.4.0
djangorestframework==3.12.4
dj-database-url==0.5.0
django-braces==1.14.0
django-crispy-forms==1.7.2
django-contrib-postgres==0.0.1
@ -16,14 +15,18 @@ drf-spectacular==0.18.2
django-ratelimit==3.0.1
easy-thumbnails==2.8.5
python-decouple==3.1
psycopg2-binary==2.8.6
dj-database-url==0.5.0
psycopg2-binary==2.9.9
pyyaml==6.0.1
pytz==2019.3
python-magic==0.4.15
unipath==1.1
WeasyPrint==51
Pillow==10.3.0
gunicorn==22.0.0
rlPyCairo==0.3.0
reportlab==4.2.0
WeasyPrint==66
trml2pdf==0.6
gunicorn==23.0.0
more-itertools==8.2.0
pysolr==3.6.0
PyPDF4==1.27.0
@ -37,5 +40,4 @@ django-prometheus==2.2.0
asn1crypto==1.5.1
XlsxWriter==3.2.0
git+https://github.com/interlegis/trml2pdf
git+https://github.com/interlegis/django-admin-bootstrapped

65
sapl/settings.py

@ -24,6 +24,8 @@ from unipath import Path
logging.captureWarnings(True)
logger = logging.getLogger(__name__)
host = socket.gethostbyname_ex(socket.gethostname())[0]
BASE_DIR = Path(__file__).ancestor(1)
@ -230,13 +232,43 @@ WSGI_APPLICATION = 'sapl.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
# Parse DATABASE_URL
# dj-database-url==0.5.0 is the latest compatible with Django 2.2, later versions required Django >= 4
# but it doesn't support OPTIONS tag, so we need setup_db_tz
# This should be removed once we are able to upgrade to Django >= 4
DATABASES = {
'default': config(
'DATABASE_URL', default='sqlite://:memory:',
cast=db_url,
)
"default": config("DATABASE_URL", cast=db_url)
}
def setup_db_tz():
    """Post-process the dj-database-url parsed DATABASES['default'] config.

    dj-database-url==0.5.0 (the last release compatible with Django 2.2)
    does not support the OPTIONS key, so the per-connection timezone and
    related Postgres tweaks are applied here instead.
    """
    db = DATABASES["default"]
    # Normalize legacy engine alias returned by old dj-database-url
    if db.get("ENGINE") == "django.db.backends.postgresql_psycopg2":
        db["ENGINE"] = "django.db.backends.postgresql"
    # Force UTC per connection for Postgres (fixes Django's utc_tzinfo_factory assertion)
    if db.get("ENGINE") == "django.db.backends.postgresql":
        opts = db.setdefault("OPTIONS", {})
        existing = (opts.get("options") or "").strip()
        force_utc = "-c timezone=UTC"
        # Append rather than overwrite any pre-existing "options" string
        opts["options"] = f"{existing} {force_utc}".strip() if existing else force_utc
        # ensure default TCP port if you use HOST; leave sockets alone if HOST is empty
        if db.get("HOST") and not db.get("PORT"):
            db["PORT"] = "5432"
    # Add connection lifetime
    # in recent dj-database-url versions, replace by config("DATABASE_URL", conn_max_age=300)
    db["CONN_MAX_AGE"] = 300  # keep connections for 5 minutes
    # Log the final DB config when running in DEBUG mode
    if config("DEBUG", default=False, cast=bool):
        logger.debug("DB config: %r", db)
setup_db_tz()
IMAGE_CROPPING_JQUERY_URL = None
THUMBNAIL_PROCESSORS = (
'image_cropping.thumbnail_processors.crop_corners',
@ -271,7 +303,6 @@ WAFFLE_CREATE_MISSING_SWITCHES = True
WAFFLE_LOG_MISSING_SWITCHES = True
WAFFLE_ENABLE_ADMIN_PAGES = True
MAX_DOC_UPLOAD_SIZE = 150 * 1024 * 1024 # 150MB
MAX_IMAGE_UPLOAD_SIZE = 2 * 1024 * 1024 # 2MB
DATA_UPLOAD_MAX_MEMORY_SIZE = 10 * 1024 * 1024 # 10MB
@ -291,6 +322,30 @@ if not TIME_ZONE:
USE_I18N = True
USE_L10N = True
USE_TZ = True
##
## Monkey patch of the Django 2.2 because latest version of psycopg2 returns DB time zone as UTC,
## but Django 2.2 requires an int! This should be removed once we are able to upgrade to Django >= 4
##
import importlib
from django.utils.timezone import utc
pg_utils = importlib.import_module("django.db.backends.postgresql.utils")
def _compat_utc_tzinfo_factory(offset):
try:
minutes = int(offset.total_seconds() // 60) if hasattr(offset, "total_seconds") else int(offset)
except Exception:
raise AssertionError("database connection isn't set to UTC")
if minutes != 0:
raise AssertionError("database connection isn't set to UTC")
return utc
pg_utils.utc_tzinfo_factory = _compat_utc_tzinfo_factory
# DATE_FORMAT = 'N j, Y'
DATE_FORMAT = 'd/m/Y'
SHORT_DATE_FORMAT = 'd/m/Y'

Loading…
Cancel
Save