Merge pull request #785 from IATI/rollback-es-upgrade
Deploy work
odscjames authored Oct 31, 2024
2 parents c0483b5 + 72a62b5 commit 07be7eb
Showing 14 changed files with 154 additions and 68 deletions.
28 changes: 6 additions & 22 deletions .github/workflows/workflow.yml
@@ -118,10 +118,10 @@ jobs:
--ssh-key-values ${{ secrets.DEV_PUB_KEYS }} && \
export NEW_IP=$(az vm list-ip-addresses --name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" --resource-group "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" --query [].virtualMachine.network[].publicIpAddresses[][].ipAddress --output tsv) && \
echo "NEW_IP=$NEW_IP" >> $GITHUB_ENV && \
az postgres server firewall-rule update \
az postgres flexible-server firewall-rule update \
--resource-group "${{ secrets.DEV_PSQL_RESOURCE_GROUP }}" \
--server-name "${{ secrets.DEV_PSQL_NAME }}" \
--name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" \
--name "${{ secrets.DEV_PSQL_NAME }}" \
--rule-name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" \
--start-ip-address $NEW_IP \
--end-ip-address $NEW_IP && \
az network nsg rule create \
@@ -186,20 +186,12 @@ jobs:
--name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" \
--command-id RunShellScript \
--scripts "\
docker network create iati-standard-website && \
docker login -u '${{ env.REGISTRY_USERNAME }}' -p '${{ env.REGISTRY_PASSWORD }}' ${{ env.REGISTRY_LOGIN_SERVER }} && \
docker pull '${{ env.REGISTRY_LOGIN_SERVER }}/${{env.STAGE}}-${{env.NAME}}:${{ env.TAG }}' && \
docker run --restart always --name elasticsearch -d \
-e ES_SETTING_XPACK_SECURITY_ENABLED=False \
-e ES_SETTING_DISCOVERY_TYPE=single-node \
--network iati-standard-website \
'docker.elastic.co/elasticsearch/elasticsearch:8.15.3' && \
docker run --restart always --name website -d -p 5000:5000 \
--log-driver 'json-file' \
--log-opt max-size=100m \
--log-opt max-file=3 \
--network iati-standard-website \
--link elasticsearch:elasticsearch \
-e DJANGO_SETTINGS_MODULE='iati.settings.dev_public' \
-e SECRET_KEY='${{ secrets.DEV_SECRET_KEY }}' \
-e DATABASE_NAME='${{ secrets.DEV_DATABASE_NAME }}' \
@@ -340,10 +332,10 @@ jobs:
--ssh-key-values ${{ secrets.DEV_PUB_KEYS }} && \
export NEW_IP=$(az vm list-ip-addresses --name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" --resource-group "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" --query [].virtualMachine.network[].publicIpAddresses[][].ipAddress --output tsv) && \
echo "NEW_IP=$NEW_IP" >> $GITHUB_ENV && \
az postgres server firewall-rule update \
az postgres flexible-server firewall-rule update \
--resource-group "${{ secrets.PROD_PSQL_RESOURCE_GROUP }}" \
--server-name "${{ secrets.PROD_PSQL_NAME }}" \
--name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" \
--name "${{ secrets.PROD_PSQL_NAME }}" \
--rule-name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" \
--start-ip-address $NEW_IP \
--end-ip-address $NEW_IP && \
az network nsg rule create \
@@ -408,20 +400,12 @@ jobs:
--name "${{ env.STAGE }}-${{ env.NAME }}-${{ env.NEW_COLOUR }}" \
--command-id RunShellScript \
--scripts "\
docker network create iati-standard-website && \
docker login -u '${{ env.REGISTRY_USERNAME }}' -p '${{ env.REGISTRY_PASSWORD }}' ${{ env.REGISTRY_LOGIN_SERVER }} && \
docker pull '${{ env.REGISTRY_LOGIN_SERVER }}/${{env.STAGE}}-${{env.NAME}}:${{ env.TAG }}' && \
docker run --restart always --name elasticsearch -d \
-e ES_SETTING_XPACK_SECURITY_ENABLED=False \
-e ES_SETTING_DISCOVERY_TYPE=single-node \
--network iati-standard-website \
'docker.elastic.co/elasticsearch/elasticsearch:8.15.3' && \
docker run --restart always --name website -d -p 5000:5000 \
--log-driver 'json-file' \
--log-opt max-size=100m \
--log-opt max-file=3 \
--network iati-standard-website \
--link elasticsearch:elasticsearch \
-e DJANGO_SETTINGS_MODULE='iati.settings.production' \
-e SECRET_KEY='${{ secrets.PROD_SECRET_KEY }}' \
-e DATABASE_NAME='${{ secrets.PROD_DATABASE_NAME }}' \
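The firewall hunks above switch between the two generations of the Azure Postgres CLI, which name their arguments differently: the legacy `az postgres server firewall-rule` commands take `--server-name` for the server and `--name` for the rule, while `az postgres flexible-server firewall-rule` takes `--name` for the server and `--rule-name` for the rule. The other hunks in this workflow drop the separate elasticsearch container, its Docker network and the `--link` wiring from the `docker run` commands, since Elasticsearch now ships inside the website image (see the Dockerfile changes below). A minimal sketch of the flexible-server form, with placeholder values standing in for the workflow's secrets and env vars:

```
# Sketch only: the resource group, server and rule names below are placeholders,
# not the workflow's real secrets.
RESOURCE_GROUP="example-rg"
SERVER_NAME="example-psql-flexible"
RULE_NAME="dev-website-blue"
NEW_IP="203.0.113.10"

# Flexible server: --name is the server, --rule-name is the firewall rule.
az postgres flexible-server firewall-rule update \
  --resource-group "$RESOURCE_GROUP" \
  --name "$SERVER_NAME" \
  --rule-name "$RULE_NAME" \
  --start-ip-address "$NEW_IP" \
  --end-ip-address "$NEW_IP"
```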
48 changes: 47 additions & 1 deletion Dockerfile
@@ -6,7 +6,7 @@ ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONIOENCODING utf_8

RUN apk update
RUN apk add --no-cache bash curl
RUN apk add --no-cache bash

# Init engine

@@ -40,6 +40,52 @@ RUN apk add --no-cache jpeg-dev zlib-dev
RUN apk add --no-cache postgresql-dev
RUN apk add --no-cache libmemcached-dev zlib-dev

# Elasticsearch from https://github.com/blacktop/docker-elasticsearch-alpine/blob/master/6.8/Dockerfile

RUN apk add --no-cache openjdk8-jre su-exec

ENV VERSION 6.8.23
ENV DOWNLOAD_URL "https://artifacts.elastic.co/downloads/elasticsearch"
ENV ES_TARBAL "${DOWNLOAD_URL}/elasticsearch-oss-${VERSION}.tar.gz"
# ENV EXPECTED_SHA_URL "${DOWNLOAD_URL}/elasticsearch-oss-${VERSION}.tar.gz.sha512"
ENV ES_TARBALL_SHA "14dbb2809b06499373c3ec5035d829d62255c2c93103618fbfe3d7d03cecf8847f654e83c78f765f23224126ff18ed713b959857e8ecf435c475b11bcd143d3f"
RUN apk add --no-cache -t .build-deps wget ca-certificates gnupg openssl \
&& set -ex \
&& cd /tmp \
&& echo "===> Install Elasticsearch..." \
&& wget --progress=bar:force -O elasticsearch.tar.gz "$ES_TARBAL"; \
if [ "$ES_TARBALL_SHA" ]; then \
echo "$ES_TARBALL_SHA *elasticsearch.tar.gz" | sha512sum -c -; \
fi; \
tar -xf elasticsearch.tar.gz \
&& ls -lah \
&& mv elasticsearch-$VERSION /usr/share/elasticsearch \
&& adduser -D -h /usr/share/elasticsearch elasticsearch \
&& echo "===> Creating Elasticsearch Paths..." \
&& for path in \
/usr/share/elasticsearch/data \
/usr/share/elasticsearch/logs \
/usr/share/elasticsearch/config \
/usr/share/elasticsearch/config/scripts \
/usr/share/elasticsearch/tmp \
/usr/share/elasticsearch/plugins \
; do \
mkdir -p "$path"; \
chown -R elasticsearch:elasticsearch "$path"; \
done \
&& rm -rf /tmp/* \
&& apk del --purge .build-deps

COPY config/elastic/elasticsearch.yml /usr/share/elasticsearch/config/elasticsearch.yml
COPY config/elastic/log4j2.properties /usr/share/elasticsearch/config/log4j2.properties
RUN chown -R elasticsearch:elasticsearch /usr/share/elasticsearch/config

RUN mkdir -p /var/log/messages
RUN apk add logrotate
COPY config/elastic/logrotate /etc/logrotate.d/elasticsearch
RUN chmod 644 /etc/logrotate.d/elasticsearch
COPY config/elastic/elasticsearch.service /etc/init.d/elasticsearch.service

# Web app dependencies

RUN mkdir -p /usr/src/app
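The install block mirrors the blacktop alpine image it links to: it downloads the 6.8.23 OSS tarball, verifies its SHA-512, unpacks it to /usr/share/elasticsearch, and hands the directories to a dedicated elasticsearch user. The same verification step can be reproduced outside the build, for example when bumping VERSION, using the values from the Dockerfile; Elastic publishes a matching .sha512 file alongside each tarball (see the commented EXPECTED_SHA_URL line). A sketch:

```
# Sketch: the download-and-verify step from the Dockerfile, run standalone.
VERSION=6.8.23
URL="https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-oss-${VERSION}.tar.gz"
SHA="14dbb2809b06499373c3ec5035d829d62255c2c93103618fbfe3d7d03cecf8847f654e83c78f765f23224126ff18ed713b959857e8ecf435c475b11bcd143d3f"

wget -O elasticsearch.tar.gz "$URL"
echo "$SHA *elasticsearch.tar.gz" | sha512sum -c -   # exits non-zero on mismatch
```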
2 changes: 1 addition & 1 deletion Dockerfile_deploy
@@ -30,6 +30,6 @@ COPY manage.py /usr/src/app/manage.py
COPY pytest.ini /usr/src/app/pytest.init
COPY setup.cfg /usr/src/app/setup.cfg

ENV ELASTICSEARCH_URL=http://elasticsearch:9200
ENV ELASTICSEARCH_URL=http://localhost:9200
ENV GUNICORN_WORKERS=5
ENV COMPRESS_ENABLED='True'
6 changes: 0 additions & 6 deletions README.md
@@ -19,12 +19,6 @@ This repository hosts the new IATI website based on Django and Wagtail CMS. A Po

- Set a SECRET_KEY

A limit on your kernel must be increased. There are ways to do this permanently, but to do so temporarily:

```
sudo sysctl -w vm.max_map_count=262144
```

Build the project. The following will build linked `web` and `postgres` containers.

```
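The kernel-limit step disappears from the README along with the standalone Elasticsearch container. The `vm.max_map_count` requirement is an Elasticsearch bootstrap check, so if the bundled 6.8 instance ever refuses to start with a "max virtual memory areas vm.max_map_count [65530] is too low" error, the removed command still applies on the Docker host. A sketch:

```
# Only needed if Elasticsearch fails its max_map_count bootstrap check on this host.
sudo sysctl -w vm.max_map_count=262144
```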
58 changes: 58 additions & 0 deletions config/elastic/elasticsearch.service
@@ -0,0 +1,58 @@
#! /bin/sh
### BEGIN INIT INFO
# Provides: elasticsearch
# Required-Start: $all
# Required-Stop: $all
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Starts elasticsearch
# Description: Starts elasticsearch using start-stop-daemon
### END INIT INFO

ES_HOME=/usr/share/elasticsearch
JAVA_HOME=/usr/lib/jvm/default-jvm/
ES_MIN_MEM=256m
ES_MAX_MEM=1g
ES_JAVA_OPTS="-Xms256m -Xmx1g"
DAEMON=$ES_HOME/bin/elasticsearch
NAME=elasticsearch
DESC=elasticsearch
PID_FILE=$ES_HOME/$NAME.pid
DAEMON_OPTS="--pidfile $PID_FILE"

test -x $DAEMON || exit 0

set -e

case "$1" in
start)
echo -n "Starting $DESC: "
if start-stop-daemon --start --background --user elasticsearch --pidfile $PID_FILE --exec $DAEMON -- $DAEMON_OPTS
then
echo "started."
else
echo "failed."
fi
;;
stop)
echo -n "Stopping $DESC: "
if start-stop-daemon --stop --user elasticsearch --pidfile $PID_FILE
then
echo "stopped."
else
echo "failed."
fi
;;
restart|force-reload)
${0} stop
sleep 0.5
${0} start
;;
*)
N=/etc/init.d/$NAME
echo "Usage: $N {start|stop|restart|force-reload}" >&2
exit 1
;;
esac

exit 0
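The init script wraps start-stop-daemon so Elasticsearch runs in the background as the elasticsearch user, writing a pid file under ES_HOME. The entrypoint further down starts it with `rc-service elasticsearch.service start`; for manual debugging inside a running container the script can also be driven directly, assuming the start-stop-daemon tooling it depends on is present in the image. A sketch:

```
# Sketch: driving the init script by hand inside the container.
sh /etc/init.d/elasticsearch.service start
sh /etc/init.d/elasticsearch.service restart
sh /etc/init.d/elasticsearch.service stop
```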
3 changes: 3 additions & 0 deletions config/elastic/elasticsearch.yml
@@ -0,0 +1,3 @@
cluster.name: "docker-cluster"
network.host: 0.0.0.0
discovery.type: single-node
9 changes: 9 additions & 0 deletions config/elastic/log4j2.properties
@@ -0,0 +1,9 @@
status = error

appender.console.type = Console
appender.console.name = console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

rootLogger.level = info
rootLogger.appenderRef.console.ref = console
15 changes: 15 additions & 0 deletions config/elastic/logrotate
@@ -0,0 +1,15 @@
/var/log/elasticsearch/*.log {
daily
rotate 50
size 50M
copytruncate
compress
delaycompress
missingok
notifempty
create 644 elasticsearch elasticsearch
}
/var/log/gunicorn/*.log {
size 10M
copytruncate
}
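The logrotate policy rotates Elasticsearch logs daily or at 50 MB and truncates oversized gunicorn logs in place. A debug run checks the rules without rotating anything:

```
# Sketch: dry-run the rotation rules (logrotate -d prints actions without applying them).
logrotate -d /etc/logrotate.d/elasticsearch
```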
20 changes: 2 additions & 18 deletions docker-compose.dev.yml
@@ -6,39 +6,23 @@ services:
- POSTGRES_HOST_AUTH_METHOD=trust
ports:
- 5432:5432

elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:8.15.3
environment:
- ES_SETTING_XPACK_SECURITY_ENABLED=False
- ES_SETTING_DISCOVERY_TYPE=single-node
ports:
- 9200:9200

web:
build:
context: .
environment:
- DATABASE_URL=postgres://postgres:@postgres:5432/postgres
- SECRET_KEY=enter-a-long-unguessable-string-here
- ELASTICSEARCH_URL=http://elasticsearch:9200
- ELASTICSEARCH_URL=http://localhost:9200
- DJANGO_SETTINGS_MODULE=iati.settings.dev
- GUNICORN_WORKERS=1
- GITHUB_TOKEN
- RECAPTCHA_PUBLIC_KEY
- RECAPTCHA_PRIVATE_KEY
- DEBUG_SERVER
- COMPRESS_ENABLED
# I'm not sure why these links are needed; I thought Docker defaults would just do this for us but without it
# the Web container can't see the others.
links:
- postgres:postgres
- elasticsearch:elasticsearch
- postgres
ports:
- 5000:5000
volumes:
- ./:/usr/src/app

networks:
default:
name: iati-standard-website
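With the elasticsearch service and the shared network gone, the compose file is reduced to postgres plus the web container, which now reaches Elasticsearch on localhost inside its own container. A usage sketch, assuming the file keeps this name:

```
# Sketch: bring up the trimmed development stack.
docker compose -f docker-compose.dev.yml up --build
# The site is published on http://localhost:5000 (see the ports mapping above).
```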
10 changes: 6 additions & 4 deletions entrypoint.sh
@@ -13,10 +13,12 @@ else
done
fi

until curl --output /dev/null --silent --head --fail ${ELASTICSEARCH_URL}; do
>&2 echo "Elasticsearch is unavailable - sleeping"
sleep 10
done
if [[ -z "${ELASTICSEARCH_URL}" ]]; then
>&2 echo "Skipping Elasticsearch"
else
>&2 echo "Starting Elasticsearch"
rc-service elasticsearch.service start
fi

if [[ -z "${DEBUG_SERVER}" ]]; then
gunicorn iati.wsgi:application --bind 0.0.0.0:5000 --workers $GUNICORN_WORKERS >> /var/log/gunicorn/gunicorn.log 2>&1 &
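The entrypoint no longer polls a remote Elasticsearch with curl; it starts the bundled service when ELASTICSEARCH_URL is set and skips it otherwise. If a readiness check is still wanted before gunicorn serves search-backed pages, a sketch of one that suits the slimmed-down image (busybox wget instead of the removed curl, which is an assumption about what the image provides):

```
# Sketch: optional wait-until-ready loop after the rc-service call.
until wget -q --spider "${ELASTICSEARCH_URL:-http://localhost:9200}"; do
  >&2 echo "Elasticsearch is unavailable - sleeping"
  sleep 10
done
```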
2 changes: 1 addition & 1 deletion iati/settings/base.py
@@ -554,7 +554,7 @@
# Search settings
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.search.backends.elasticsearch8',
'BACKEND': 'wagtail.search.backends.elasticsearch6',
'URLS': [os.getenv('ELASTICSEARCH_URL', 'http://localhost:9200')],
'INDEX': 'iati',
},
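Moving WAGTAILSEARCH_BACKENDS between the elasticsearch8 and elasticsearch6 backends also changes how the index is built, so existing deployments will generally need the Wagtail search index rebuilt against the 6.8 instance. Wagtail's standard management command does this; a sketch, run from the app directory inside the web container:

```
# Sketch: rebuild the Wagtail search index after switching backends.
python manage.py update_index
```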
2 changes: 1 addition & 1 deletion requirements.in
@@ -14,7 +14,7 @@ django-recaptcha3 @ git+https://github.com/bartsanchez/django-recaptcha3.git@313
django-storages[azure]
django-widget-tweaks
Django>=4.2,<4.3
elasticsearch>=8,<9
elasticsearch>=6.8,<7
gunicorn
opencensus-ext-azure
opencensus-ext-django
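The "# via" annotations in the pinned files below are the pip-compile style, so the loosened `elasticsearch>=6.8,<7` pin is presumably resolved by regenerating the lock files rather than editing them by hand. A sketch, assuming pip-tools and this filename:

```
# Sketch: regenerate the pinned requirements after editing requirements.in.
pip-compile requirements.in
```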
10 changes: 3 additions & 7 deletions requirements.txt
@@ -29,9 +29,7 @@ bleach==6.1.0
cachetools==5.4.0
# via google-auth
certifi==2024.7.4
# via
# elastic-transport
# requests
# via requests
cffi==1.17.0
# via
# cryptography
@@ -109,9 +107,7 @@ djangorestframework==3.15.2
# via wagtail
draftjs-exporter==2.1.7
# via wagtail
elastic-transport==8.15.1
# via elasticsearch
elasticsearch==8.15.1
elasticsearch==6.8.2
# via -r requirements.in
et-xmlfile==1.1.0
# via openpyxl
@@ -240,7 +236,7 @@ tzdata==2024.1
# via -r requirements.in
urllib3==2.2.2
# via
# elastic-transport
# elasticsearch
# pygithub
# requests
wagtail==5.2.6
9 changes: 2 additions & 7 deletions requirements_dev.txt
@@ -49,7 +49,6 @@ cachetools==5.4.0
certifi==2024.7.4
# via
# -r requirements.txt
# elastic-transport
# requests
# selenium
cffi==1.17.0
@@ -160,11 +159,7 @@ draftjs-exporter==2.1.7
# via
# -r requirements.txt
# wagtail
elastic-transport==8.15.1
# via
# -r requirements.txt
# elasticsearch
elasticsearch==8.15.1
elasticsearch==6.8.2
# via -r requirements.txt
et-xmlfile==1.1.0
# via
@@ -444,7 +439,7 @@ tzdata==2024.1
urllib3[socks]==2.2.2
# via
# -r requirements.txt
# elastic-transport
# elasticsearch
# pygithub
# pytest-splinter
# requests
