diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml index c3873dd69..92ed4c3ce 100644 --- a/.github/workflows/workflow.yml +++ b/.github/workflows/workflow.yml @@ -113,7 +113,7 @@ jobs: az vm create \ --resource-group "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" \ --name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" \ - --size Standard_B2s \ + --size Standard_B2s --public-ip-sku Standard \ --image "Canonical:0001-com-ubuntu-server-jammy:22_04-lts-gen2:latest" \ --ssh-key-values ${{ secrets.DEV_PUB_KEYS }} && \ export NEW_IP=$(az vm list-ip-addresses --name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" --resource-group "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" --query [].virtualMachine.network[].publicIpAddresses[][].ipAddress --output tsv) && \ @@ -186,12 +186,20 @@ jobs: --name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" \ --command-id RunShellScript \ --scripts "\ + docker network create iati-standard-website && \ docker login -u '${{ env.REGISTRY_USERNAME }}' -p '${{ env.REGISTRY_PASSWORD }}' ${{ env.REGISTRY_LOGIN_SERVER }} && \ docker pull '${{ env.REGISTRY_LOGIN_SERVER }}/${{env.STAGE}}-${{env.NAME}}:${{ env.TAG }}' && \ + docker run --restart always --name elasticsearch -d \ + -e ES_SETTING_XPACK_SECURITY_ENABLED=false \ + -e ES_SETTING_DISCOVERY_TYPE=single-node \ + --network iati-standard-website \ + 'docker.elastic.co/elasticsearch/elasticsearch:8.15.3' && \ docker run --restart always --name website -d -p 5000:5000 \ --log-driver 'json-file' \ --log-opt max-size=100m \ --log-opt max-file=3 \ + --network iati-standard-website \ + --link elasticsearch:elasticsearch \ -e DJANGO_SETTINGS_MODULE='iati.settings.dev_public' \ -e SECRET_KEY='${{ secrets.DEV_SECRET_KEY }}' \ -e DATABASE_NAME='${{ secrets.DEV_DATABASE_NAME }}' \ @@ -264,7 +272,7 @@ jobs: deploy_prod: needs: [build] runs-on: ubuntu-latest - if: github.ref == 'refs/heads/master' + if: github.ref == 
'refs/heads/main' env: DOCKER_BUILDKIT: '1' TAG: ${{ github.sha }} @@ -327,7 +335,7 @@ jobs: az vm create \ --resource-group "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" \ --name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" \ - --size Standard_B2s \ + --size Standard_B2s --public-ip-sku Standard \ --image "Canonical:0001-com-ubuntu-server-jammy:22_04-lts-gen2:latest" \ --ssh-key-values ${{ secrets.DEV_PUB_KEYS }} && \ export NEW_IP=$(az vm list-ip-addresses --name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" --resource-group "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" --query [].virtualMachine.network[].publicIpAddresses[][].ipAddress --output tsv) && \ @@ -400,12 +408,20 @@ jobs: --name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" \ --command-id RunShellScript \ --scripts "\ + docker network create iati-standard-website && \ docker login -u '${{ env.REGISTRY_USERNAME }}' -p '${{ env.REGISTRY_PASSWORD }}' ${{ env.REGISTRY_LOGIN_SERVER }} && \ docker pull '${{ env.REGISTRY_LOGIN_SERVER }}/${{env.STAGE}}-${{env.NAME}}:${{ env.TAG }}' && \ + docker run --restart always --name elasticsearch -d \ + -e ES_SETTING_XPACK_SECURITY_ENABLED=false \ + -e ES_SETTING_DISCOVERY_TYPE=single-node \ + --network iati-standard-website \ + 'docker.elastic.co/elasticsearch/elasticsearch:8.15.3' && \ docker run --restart always --name website -d -p 5000:5000 \ --log-driver 'json-file' \ --log-opt max-size=100m \ --log-opt max-file=3 \ + --network iati-standard-website \ + --link elasticsearch:elasticsearch \ -e DJANGO_SETTINGS_MODULE='iati.settings.production' \ -e SECRET_KEY='${{ secrets.PROD_SECRET_KEY }}' \ -e DATABASE_NAME='${{ secrets.PROD_DATABASE_NAME }}' \ diff --git a/Dockerfile b/Dockerfile index 2bab6f60b..3ab3a3f19 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,7 +6,7 @@ ENV PYTHONDONTWRITEBYTECODE 1 ENV PYTHONIOENCODING utf_8 RUN apk update -RUN apk add --no-cache bash +RUN apk add --no-cache bash curl # Init 
engine @@ -40,52 +40,6 @@ RUN apk add --no-cache jpeg-dev zlib-dev RUN apk add --no-cache postgresql-dev RUN apk add --no-cache libmemcached-dev zlib-dev -# Elasticsearch from https://github.com/blacktop/docker-elasticsearch-alpine/blob/master/6.8/Dockerfile - -RUN apk add --no-cache openjdk8-jre su-exec - -ENV VERSION 6.8.23 -ENV DOWNLOAD_URL "https://artifacts.elastic.co/downloads/elasticsearch" -ENV ES_TARBAL "${DOWNLOAD_URL}/elasticsearch-oss-${VERSION}.tar.gz" -# ENV EXPECTED_SHA_URL "${DOWNLOAD_URL}/elasticsearch-oss-${VERSION}.tar.gz.sha512" -ENV ES_TARBALL_SHA "14dbb2809b06499373c3ec5035d829d62255c2c93103618fbfe3d7d03cecf8847f654e83c78f765f23224126ff18ed713b959857e8ecf435c475b11bcd143d3f" -RUN apk add --no-cache -t .build-deps wget ca-certificates gnupg openssl \ - && set -ex \ - && cd /tmp \ - && echo "===> Install Elasticsearch..." \ - && wget --progress=bar:force -O elasticsearch.tar.gz "$ES_TARBAL"; \ - if [ "$ES_TARBALL_SHA" ]; then \ - echo "$ES_TARBALL_SHA *elasticsearch.tar.gz" | sha512sum -c -; \ - fi; \ - tar -xf elasticsearch.tar.gz \ - && ls -lah \ - && mv elasticsearch-$VERSION /usr/share/elasticsearch \ - && adduser -D -h /usr/share/elasticsearch elasticsearch \ - && echo "===> Creating Elasticsearch Paths..." 
\ - && for path in \ - /usr/share/elasticsearch/data \ - /usr/share/elasticsearch/logs \ - /usr/share/elasticsearch/config \ - /usr/share/elasticsearch/config/scripts \ - /usr/share/elasticsearch/tmp \ - /usr/share/elasticsearch/plugins \ - ; do \ - mkdir -p "$path"; \ - chown -R elasticsearch:elasticsearch "$path"; \ - done \ - && rm -rf /tmp/* \ - && apk del --purge .build-deps - -COPY config/elastic/elasticsearch.yml /usr/share/elasticsearch/config/elasticsearch.yml -COPY config/elastic/log4j2.properties /usr/share/elasticsearch/config/log4j2.properties -RUN chown -R elasticsearch:elasticsearch /usr/share/elasticsearch/config - -RUN mkdir -p /var/log/messages -RUN apk add logrotate -COPY config/elastic/logrotate /etc/logrotate.d/elasticsearch -RUN chmod 644 /etc/logrotate.d/elasticsearch -COPY config/elastic/elasticsearch.service /etc/init.d/elasticsearch.service - # Web app dependencies RUN mkdir -p /usr/src/app diff --git a/Dockerfile_deploy b/Dockerfile_deploy index 625ba2601..bd4ec864d 100644 --- a/Dockerfile_deploy +++ b/Dockerfile_deploy @@ -30,6 +30,6 @@ COPY manage.py /usr/src/app/manage.py COPY pytest.ini /usr/src/app/pytest.init COPY setup.cfg /usr/src/app/setup.cfg -ENV ELASTICSEARCH_URL=http://localhost:9200 +ENV ELASTICSEARCH_URL=http://elasticsearch:9200 ENV GUNICORN_WORKERS=5 ENV COMPRESS_ENABLED='True' diff --git a/README.md b/README.md index e69fd7881..d7a39b0d1 100644 --- a/README.md +++ b/README.md @@ -19,6 +19,12 @@ This repository hosts the new IATI website based on Django and Wagtail CMS. A Po - Set a SECRET_KEY +Elasticsearch requires the kernel parameter `vm.max_map_count` to be at least 262144. There are ways to set this permanently, but to set it temporarily: + +``` +sudo sysctl -w vm.max_map_count=262144 +``` + Build the project. The following will build linked `web` and `postgres` containers. 
``` diff --git a/config/elastic/elasticsearch.service b/config/elastic/elasticsearch.service deleted file mode 100755 index 61854c371..000000000 --- a/config/elastic/elasticsearch.service +++ /dev/null @@ -1,58 +0,0 @@ -#! /bin/sh -### BEGIN INIT INFO -# Provides: elasticsearch -# Required-Start: $all -# Required-Stop: $all -# Default-Start: 2 3 4 5 -# Default-Stop: 0 1 6 -# Short-Description: Starts elasticsearch -# Description: Starts elasticsearch using start-stop-daemon -### END INIT INFO - -ES_HOME=/usr/share/elasticsearch -JAVA_HOME=/usr/lib/jvm/default-jvm/ -ES_MIN_MEM=256m -ES_MAX_MEM=1g -ES_JAVA_OPTS="-Xms256m -Xmx1g" -DAEMON=$ES_HOME/bin/elasticsearch -NAME=elasticsearch -DESC=elasticsearch -PID_FILE=$ES_HOME/$NAME.pid -DAEMON_OPTS="--pidfile $PID_FILE" - -test -x $DAEMON || exit 0 - -set -e - -case "$1" in - start) - echo -n "Starting $DESC: " - if start-stop-daemon --start --background --user elasticsearch --pidfile $PID_FILE --exec $DAEMON -- $DAEMON_OPTS - then - echo "started." - else - echo "failed." - fi - ;; - stop) - echo -n "Stopping $DESC: " - if start-stop-daemon --stop --user elasticsearch --pidfile $PID_FILE - then - echo "stopped." - else - echo "failed." 
- fi - ;; - restart|force-reload) - ${0} stop - sleep 0.5 - ${0} start - ;; - *) - N=/etc/init.d/$NAME - echo "Usage: $N {start|stop|restart|force-reload}" >&2 - exit 1 - ;; -esac - -exit 0 diff --git a/config/elastic/elasticsearch.yml b/config/elastic/elasticsearch.yml deleted file mode 100644 index b56b4e97a..000000000 --- a/config/elastic/elasticsearch.yml +++ /dev/null @@ -1,3 +0,0 @@ -cluster.name: "docker-cluster" -network.host: 0.0.0.0 -discovery.type: single-node diff --git a/config/elastic/log4j2.properties b/config/elastic/log4j2.properties deleted file mode 100644 index 9ad290ad8..000000000 --- a/config/elastic/log4j2.properties +++ /dev/null @@ -1,9 +0,0 @@ -status = error - -appender.console.type = Console -appender.console.name = console -appender.console.layout.type = PatternLayout -appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n - -rootLogger.level = info -rootLogger.appenderRef.console.ref = console diff --git a/config/elastic/logrotate b/config/elastic/logrotate deleted file mode 100644 index 5c96e06d8..000000000 --- a/config/elastic/logrotate +++ /dev/null @@ -1,15 +0,0 @@ -/var/log/elasticsearch/*.log { - daily - rotate 50 - size 50M - copytruncate - compress - delaycompress - missingok - notifempty - create 644 elasticsearch elasticsearch -} -/var/log/gunicorn/*.log { - size 10M - copytruncate -} \ No newline at end of file diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index ad85574c4..75de23020 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -1,19 +1,27 @@ -version: "3.3" services: postgres: - image: postgres:12.6 + image: postgres:16 environment: - POSTGRES_HOST_AUTH_METHOD=trust ports: - 5432:5432 + + elasticsearch: + image: docker.elastic.co/elasticsearch/elasticsearch:8.15.3 + environment: + - ES_SETTING_XPACK_SECURITY_ENABLED=false + - ES_SETTING_DISCOVERY_TYPE=single-node + ports: + - 9200:9200 + web: build: context: . 
environment: - DATABASE_URL=postgres://postgres:@postgres:5432/postgres - SECRET_KEY=enter-a-long-unguessable-string-here - - ELASTICSEARCH_URL=http://localhost:9200 + - ELASTICSEARCH_URL=http://elasticsearch:9200 - DJANGO_SETTINGS_MODULE=iati.settings.dev - GUNICORN_WORKERS=1 - GITHUB_TOKEN @@ -21,9 +29,16 @@ services: - RECAPTCHA_PRIVATE_KEY - DEBUG_SERVER - COMPRESS_ENABLED + # NOTE: these links should be redundant (Compose attaches all services to the default + # network, which provides DNS by service name), but web could not resolve the others without them; investigate. links: - - postgres + - postgres:postgres + - elasticsearch:elasticsearch ports: - 5000:5000 volumes: - ./:/usr/src/app + +networks: + default: + name: iati-standard-website diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 5bdf12d74..000000000 --- a/docker-compose.yml +++ /dev/null @@ -1,24 +0,0 @@ -version: "3.3" -services: - - web: - build: - context: . - dockerfile: Dockerfile_deploy - image: 'iati-standard-website_web_deploy' - environment: - - DJANGO_SETTINGS_MODULE=iati.settings.dev - - GUNICORN_WORKERS=1 - - SECRET_KEY - - DATABASE_NAME - - DATABASE_USER - - DATABASE_PASS - - DATABASE_HOST - - DATABASE_PORT - - APPLICATIONINSIGHTS_CONNECTION_STRING - - AZURE_ACCOUNT_NAME - - AZURE_ACCOUNT_KEY - - AZURE_CONTAINER - - GITHUB_TOKEN - ports: - - 5000:5000 diff --git a/entrypoint.sh b/entrypoint.sh index c628b560b..74076a973 100755 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -13,12 +13,10 @@ else done fi -if [[ -z "${ELASTICSEARCH_URL}" ]]; then - >&2 echo "Skipping Elasticsearch" -else - >&2 echo "Starting Elasticsearch" - rc-service elasticsearch.service start -fi +until curl --output /dev/null --silent --head --fail "${ELASTICSEARCH_URL}"; do + >&2 echo "Elasticsearch is unavailable - sleeping" + sleep 10 +done if [[ -z "${DEBUG_SERVER}" ]]; then gunicorn iati.wsgi:application --bind 0.0.0.0:5000 --workers $GUNICORN_WORKERS >> /var/log/gunicorn/gunicorn.log 2>&1 & diff --git a/iati/settings/base.py 
b/iati/settings/base.py index ce1e4af3f..2fc327783 100644 --- a/iati/settings/base.py +++ b/iati/settings/base.py @@ -554,7 +554,7 @@ # Search settings WAGTAILSEARCH_BACKENDS = { 'default': { - 'BACKEND': 'wagtail.search.backends.elasticsearch6', + 'BACKEND': 'wagtail.search.backends.elasticsearch8', 'URLS': [os.getenv('ELASTICSEARCH_URL', 'http://localhost:9200')], 'INDEX': 'iati', }, diff --git a/requirements.in b/requirements.in index 0163ce8b9..0754a81c6 100644 --- a/requirements.in +++ b/requirements.in @@ -14,7 +14,7 @@ django-recaptcha3 @ git+https://github.com/bartsanchez/django-recaptcha3.git@313 django-storages[azure] django-widget-tweaks Django>=4.2,<4.3 -elasticsearch>=6.8,<7 +elasticsearch>=8,<9 gunicorn opencensus-ext-azure opencensus-ext-django diff --git a/requirements.txt b/requirements.txt index ae3774bc2..967eb32d8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -29,7 +29,9 @@ bleach==6.1.0 cachetools==5.4.0 # via google-auth certifi==2024.7.4 - # via requests + # via + # elastic-transport + # requests cffi==1.17.0 # via # cryptography @@ -107,7 +109,9 @@ djangorestframework==3.15.2 # via wagtail draftjs-exporter==2.1.7 # via wagtail -elasticsearch==6.8.2 +elastic-transport==8.15.1 + # via elasticsearch +elasticsearch==8.15.1 # via -r requirements.in et-xmlfile==1.1.0 # via openpyxl @@ -236,7 +240,7 @@ tzdata==2024.1 # via -r requirements.in urllib3==2.2.2 # via - # elasticsearch + # elastic-transport # pygithub # requests wagtail==5.2.6 diff --git a/requirements_dev.txt b/requirements_dev.txt index ec760b291..570154890 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -49,6 +49,7 @@ cachetools==5.4.0 certifi==2024.7.4 # via # -r requirements.txt + # elastic-transport # requests # selenium cffi==1.17.0 @@ -159,7 +160,11 @@ draftjs-exporter==2.1.7 # via # -r requirements.txt # wagtail -elasticsearch==6.8.2 +elastic-transport==8.15.1 + # via + # -r requirements.txt + # elasticsearch +elasticsearch==8.15.1 # via -r 
requirements.txt et-xmlfile==1.1.0 # via @@ -439,7 +444,7 @@ tzdata==2024.1 urllib3[socks]==2.2.2 # via # -r requirements.txt - # elasticsearch + # elastic-transport # pygithub # pytest-splinter # requests