diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 00000000..da855842 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,70 @@ +workflow: + rules: + - if: '$CI_COMMIT_BRANCH =~ /^(demo|stable|staging|test)$/' + - if: '$CI_PIPELINE_SOURCE == "merge_request_event"' + +stages: + - deploy + - test + - clean_test + - clean_env + - update + - clean_build + +deploy: + stage: deploy + environment: $CI_COMMIT_REF_NAME + script: + - ansible-playbook -i $ANSIBLE_INVENTORY $DEPLOY_PLAYBOOK -e "build_id=$CI_COMMIT_SHORT_SHA repo_version=$CI_COMMIT_REF_NAME" + +integration_test: + stage: test + environment: $CI_COMMIT_REF_NAME + script: + - ansible-playbook -i $ANSIBLE_INVENTORY $TEST_PLAYBOOK -e "build_id=$CI_COMMIT_SHORT_SHA" + +clean_test: + stage: clean_test + environment: + name: $CI_COMMIT_REF_NAME + on_stop: clean_gitlab_env + script: + - ansible-playbook -i $ANSIBLE_INVENTORY $DELETE_PLAYBOOK -e "build_id=$CI_COMMIT_SHORT_SHA" + rules: + - if: '$CI_PIPELINE_SOURCE == "merge_request_event"' + when: always + - when: on_failure + +clean_gitlab_env: + stage: clean_env + variables: + GIT_STRATEGY: none + environment: + name: $CI_COMMIT_REF_NAME + action: stop + script: + - echo "Cleaning deleted branches from environments" + rules: + - if: '$CI_PIPELINE_SOURCE == "merge_request_event"' + when: manual + - when: never + +update_proxy: + stage: update + environment: $CI_COMMIT_REF_NAME + script: + - ansible-playbook -i $ANSIBLE_INVENTORY $UPDATE_PROXY_PLAYBOOK -e "build_id=$CI_COMMIT_SHORT_SHA" + rules: + - if: '$CI_PIPELINE_SOURCE == "merge_request_event"' + when: never + - when: always + +clean_previous_build: + stage: clean_build + environment: $CI_COMMIT_REF_NAME + script: + - ansible-playbook -i $ANSIBLE_INVENTORY $DELETE_PLAYBOOK -e "build_id=${CI_COMMIT_BEFORE_SHA:0:8}" + rules: + - if: '$CI_PIPELINE_SOURCE == "merge_request_event"' + when: never + - when: always diff --git a/.travis.yml b/.travis.yml index d070fca4..0fe692cc 100755 --- a/.travis.yml +++ b/.travis.yml @@ -52,8 +52,8 @@ install: before_script: - psql -U postgres < resources/sql/init_test.sql -- sudo mkdir -p src/log -- sudo chown -R $USER:$USER src/log +- sudo mkdir -p /var/log/metax-api/errors +- sudo chown -R $USER:$USER /var/log/metax-api script: - cd src && python manage.py migrate metax_api diff --git a/Dockerfile b/Dockerfile index 77fedea1..6f23920d 100755 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,10 @@ -FROM python:3.6 +FROM python:3.8 ENV PYTHONUNBUFFERED 1 ENV PYTHONDONTWRITEBYTECODE 1 +RUN mkdir -p /var/log/metax-api/errors && touch /var/log/metax-api/metax-api.json.log + WORKDIR /code COPY requirements.txt /code/ @@ -15,19 +17,4 @@ RUN pip install -r requirements.txt EXPOSE 8008 EXPOSE 8006 -ARG METAX_DATABASE_HOST -ARG REDIS_HOST -ARG RABBITMQ_HOST -ARG RABBIT_MQ_PASSWORD=guest -ARG RABBIT_MQ_USER=guest -ARG ELASTIC_SEARCH_HOST - -ENV METAX_DATABASE_HOST $METAX_DATABASE_HOST -ENV REDIS_HOST $REDIS_HOST -ENV RABBIT_MQ_HOSTS $RABBITMQ_HOST -ENV RABBIT_MQ_PASSWORD $RABBIT_MQ_PASSWORD -ENV RABBIT_MQ_USER $RABBIT_MQ_USER -ENV ELASTIC_SEARCH_HOSTS $ELASTIC_SEARCH_HOST - -# CMD ["python", "/code/manage.py", "runserver", "0.0.0.0:8008"] -CMD ["python", "manage.py", "runsslserver", "--certificate", ".certs/cert.pem","--key", ".certs/key.pem", "0.0.0.0:8008"] \ No newline at end of file +CMD ["python", "manage.py", "runserver", "0.0.0.0:8008"] \ No newline at end of file diff --git a/README.md b/README.md index 03a0516b..cd781101 100755 --- a/README.md +++ b/README.md @@ -1,15 +1,7 @@ This repository contains 
the code for Metax API service. -# Build status +## License -## Test branch -[![Build Status](https://travis-ci.com/CSCfi/metax-api.svg?branch=test)](https://travis-ci.com/CSCfi/metax-api) - -## Stable branch -[![Build Status](https://travis-ci.com/CSCfi/metax-api.svg?branch=stable)](https://travis-ci.com/CSCfi/metax-api) - -License -------- Copyright (c) 2018-2020 Ministry of Education and Culture, Finland Licensed under [GNU GPLv2 License](LICENSE) @@ -17,65 +9,7 @@ Licensed under [GNU GPLv2 License](LICENSE) ## Setting up local development environment -### Prerequisites - -#### Docker-Engine - -Install Docker-Engine either following instructions below or looking up your platform specific instructions [from docs.docker.com][1] - -##### Linux - -`$ curl -fsSL https://get.docker.com -o get-docker.sh` - -`$ sudo sh get-docker.sh` - -`$ sudo usermod -aG docker $USER` - -Log out and back in to activate non-sudo docker capabilities - -##### Mac - -https://docs.docker.com/docker-for-mac/install/ - -#### Portainer (Optional) - -We will use portainer container management tool for monitoring various development dependencies. Command below will start portainer on every system startup. - -`$ docker volume create portainer_data` (optional for mac) - -`$ docker run -d -p 8000:8000 -p 9000:9000 --name=portainer --restart=always -v /var/run/docker.sock:/var/run/docker.sock -v portainer_data:/data portainer/portainer-ce` - -Finish the Portainer setup by logging in at http://localhost:9000, create a local endpoint from the Portainer interface. - -#### Docker commands - -__NOTICE If you want to start the services everytime your computer boots, replace `--restart=unless-stopped` with `--restart=always`__ - -Run the following docker commands to start services: - -##### Redis - -`docker run -d -p 6379:6379 --name metax-redis -v metax-redis:/data --restart=unless-stopped redis` - -##### Postgres - -`docker run -d -p 5432:5432 --name metax-postgres -v metax-postgres:/var/lib/postgresql96/data -e POSTGRES_USER=metax_db_user -e POSTGRES_PASSWORD=YMDLekQMqrVKcs3 -e POSTGRES_DB=metax_db --restart=unless-stopped postgres:9` - -__NOTICE: copy values of `POSTGRES_USER`, `POSTGRES_PASSWORD` and `POSTGRES_DB` into your `.env` files as `METAX_DATABASE_USER`, `METAX_DATABASE_PASSWORD` and `METAX_DATABASE`__ - -##### Elasticsearch - -`docker run -d -p 9200:9200 -p 9300:9300 -v metax-es:/usr/share/elasticsearch/data --name metax-es -e discovery.type=single-node --restart=unless-stopped elasticsearch:7.9.2` - -##### RabbitMQ - -`docker run -d -p 5671:5671 -p 5672:5672 -p 15672:15672 -v metax-rabbitmq:/var/lib/rabbitmq --name metax-rabbitmq --restart=unless-stopped rabbitmq:3-management` - -#### mkcerts - -Install [mkcerts][2] and run `mkcert -install` and after it the following command: -`mkcert -cert-file cert.pem -key-file key.pem 0.0.0.0 localhost 127.0.0.1 ::1 metax.csc.local 20.20.20.20` -Move the `cert.pem` and `key.pem` to `src/.certs` folder (create the folder if not present). +You can also set up the development environment with [Docker-swarm setup](/docs/docker-stack.md) or with [standalone Docker-containers setup](/docs/single-docker-images.md). 
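When running Metax outside containers, logging now goes under `/var/log/metax-api`, which usually has to be created with elevated rights before the server starts. A minimal sketch, mirroring the `before_script` steps that `.travis.yml` uses in this diff:

```bash
# create the new log location and hand it to the current user
# (assumes sudo is available; containers get the same directories from the Dockerfile)
sudo mkdir -p /var/log/metax-api/errors
sudo chown -R "$USER:$USER" /var/log/metax-api
```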
### Required environmental variables @@ -83,42 +17,21 @@ copy `src/metax_api/settings/.env.template` as `src/metax_api/settings/.env` and ### Create log directory -`mkdir -p src/log/errors` - -### Run Metax inside a container (Optional) - -Check the IP addresses of Redis, RabbitMQ, ElasticSearch and Postgres:9 either from Portainer container list (click the link in the container name to see all attributes) or by going to portainer network tab or by typing `docker container ps` followed by `docker network inspect bridge` - -Build new docker image from repository root with this command (change ip-addresses to real ones: - -`docker build -t metax-api:latest --build-arg METAX_DATABASE_HOST=xxx.xx.x.x --build-arg REDIS_HOST=xxx.xx.x.x --build-arg RABBITMQ_HOST=xxx.xx.x.x --build-arg ELASTIC_SEARCH_HOST=xxx.xx.x.x:xxxx .` - -Run the built container with command: - -`docker run -it --name metax-web --mount type=bind,source="$(pwd)"/src,target=/code -p 8008:8008 --rm metax-api:latest` - -You should see metax-server starting at port 8008 with hot reload enabled +`mkdir -p /var/log/metax-api/errors` ### Initial setup commands -docker network inspect bridge -IF you configured metax-container, access the command line of the container with `docker exec -it metax-web bash` -__NOTICE: Skip activating virtualenv and navigating to src folder if you have metax running on container__ +Activate your python 3.8 virtualenv, install requirements with `pip install -r requirements.txt` -Activate your python 3.6 virtualenv, `cd` into `src` folder and run following commands: - -setup the database with migrate command: +`cd` into `src` folder and run following commands: `python manage.py migrate` -__NOTICE: Skip following steps if your running metax on container and have terminal open in the container__ - start the development server with: -`python manage.py runsslserver --certificate .certs/cert.pem --key .certs/key.pem 8008` -Open another terminal and `cd` into `src`, and load the initial data with following commands: +`python manage.py runserver 8008` -__These commands must be run in both setups (container/not-container metax)__ +Open another terminal and `cd` into `src`, and load the initial data with following commands: `python manage.py index_refdata` @@ -126,13 +39,15 @@ __These commands must be run in both setups (container/not-container metax)__ `python manage.py loadinitialdata` -`python manage.py loaddata metax_api/tests/testdata/test_data.json` +`python manage.py loaddata metax_api/tests/testdata/test_data.json` + +Metax api is available from your browser at http://localhost:8008 + +## Running tests + +run the tests with command `DJANGO_ENV=test python manage.py test --parallel --failfast --keepdb -v 0` -run the tests with command `DJANGO_ENV=test python manage.py test --failfast --keepdb -v 0` -Metax api is available from your browser at https://localhost:8008 -[1]: https://docs.docker.com/engine/install/ -[2]: https://github.com/FiloSottile/mkcert diff --git a/config-swap-stack.yml b/config-swap-stack.yml new file mode 100644 index 00000000..590672c6 --- /dev/null +++ b/config-swap-stack.yml @@ -0,0 +1,55 @@ +version: "3.8" + +services: + metax: + image: fairdata-docker.artifactory.ci.csc.fi/fairdata-metax-web + ports: + - 8008:8008 + - 8000:8000 + volumes: + - ./src:/code + environment: + DEBUG: 'true' + METAX_DATABASE: 'metax_db_test' + METAX_DATABASE_PASSWORD: 'YMDLekQMqrVKcs3' + METAX_DATABASE_USER: 'metax_test' + REDIS_HOST: metax-redis + RABBIT_MQ_HOSTS: metax-rabbitmq + ELASTIC_SEARCH_HOSTS: 
metax-elasticsearch + METAX_DATABASE_HOST: metax-db + + metax-redis: + image: redis + volumes: + - metax-redis:/data + metax-db: + image: postgres:9 + environment: + POSTGRES_USER: 'metax_test' + POSTGRES_PASSWORD: 'YMDLekQMqrVKcs3' + POSTGRES_DB: 'metax_db_test' + volumes: + - metax-postgres:/var/lib/postgresql/data + metax-elasticsearch: + image: elasticsearch:7.9.2 + environment: + discovery.type: 'single-node' + volumes: + - metax-es:/usr/share/elasticsearch/data + metax-rabbitmq: + image: rabbitmq:3-management + volumes: + - metax-rabbitmq:/var/lib/rabbitmq + +volumes: + metax-rabbitmq: + external: true + metax-es: + external: true + metax-postgres: + external: true + metax-redis: + external: true + + + diff --git a/containers/nginx-docker.yml b/containers/nginx-docker.yml new file mode 100644 index 00000000..602fc888 --- /dev/null +++ b/containers/nginx-docker.yml @@ -0,0 +1,42 @@ +version: "3.8" + +services: + metax-nginx: + image: nginx:latest + configs: + - source: metax-nginx-config + target: '/etc/nginx/nginx.conf' + - source: fairdata-ssl-certificate + target: '/etc/nginx/ssl_certs/fd-dev.csc.fi.crt.pem' + - source: fairdata-ssl-certificate-key + target: '/etc/nginx/ssl_certs/fd-dev.csc.fi.key.pem' + - source: metax-nginx-elastic-headers-config + target: '/etc/nginx/elastic_headers.conf' + - source: metax-nginx-shared-headers-config + target: '/etc/nginx/shared_headers.conf' + - source: metax-nginx-api-response-headers-config + target: '/etc/nginx/api_response_headers.conf' + - source: metax-nginx-static-file-headers-config + target: '/etc/nginx/static_file_headers.conf' + - source: metax-nginx-dh-param-config + target: '/etc/nginx/ssl_certs/nginx_dhparam.pem' + ports: + - 443:443 + +configs: + fairdata-ssl-certificate: + external: True + fairdata-ssl-certificate-key: + external: True + metax-nginx-config: + external: True + metax-nginx-elastic-headers-config: + external: True + metax-nginx-shared-headers-config: + external: True + metax-nginx-api-response-headers-config: + external: True + metax-nginx-static-file-headers-config: + external: True + metax-nginx-dh-param-config: + external: True \ No newline at end of file diff --git a/containers/portainer/templates.json b/containers/portainer/templates.json deleted file mode 100755 index 7d2d1f33..00000000 --- a/containers/portainer/templates.json +++ /dev/null @@ -1,234 +0,0 @@ -{ - "version": "2", - "templates": [ - { - "type": 1, - "title": "Nginx", - "description": "High performance web server", - "categories": [ - "webserver" - ], - "platform": "linux", - "logo": "https://portainer-io-assets.sfo2.digitaloceanspaces.com/logos/nginx.png", - "image": "nginx:latest", - "ports": [ - "80/tcp", - "443/tcp" - ], - "volumes": [ - { - "container": "/etc/nginx" - }, - { - "container": "/usr/share/nginx/html" - } - ] - }, - { - "type": 1, - "title": "PostgreSQL", - "description": "The most advanced open-source database", - "categories": [ - "database" - ], - "platform": "linux", - "logo": "https://portainer-io-assets.sfo2.digitaloceanspaces.com/logos/postgres.png", - "image": "postgres:latest", - "env": [ - { - "name": "POSTGRES_USER", - "label": "Superuser" - }, - { - "name": "POSTGRES_PASSWORD", - "label": "Superuser password" - } - ], - "ports": [ - "5432/tcp" - ], - "volumes": [ - { - "container": "/var/lib/postgresql/data" - } - ] - }, - { - "type": 1, - "title": "PostgreSQL 9", - "description": "The most advanced open-source database", - "categories": [ - "database" - ], - "platform": "linux", - "logo": 
"https://portainer-io-assets.sfo2.digitaloceanspaces.com/logos/postgres.png", - "image": "postgres:9", - "env": [ - { - "name": "POSTGRES_USER", - "label": "postgres user", - "default": "metax_db_user" - }, - { - "name": "POSTGRES_PASSWORD", - "label": "postgres password", - "default": "YMDLekQMqrVKcs3" - } - ], - "ports": [ - "5432/tcp" - ], - "volumes": [ - { - "container": "/var/lib/postgresql96/data" - } - ] - }, - { - "type": 1, - "title": "Elasticsearch", - "description": "Open-source search and analytics engine", - "categories": [ - "database" - ], - "platform": "linux", - "logo": "https://portainer-io-assets.sfo2.digitaloceanspaces.com/logos/elasticsearch.png", - "image": "elasticsearch:7.9.2", - "ports": [ - "9200/tcp", - "9300/tcp" - ], - "volumes": [ - { - "container": "/usr/share/elasticsearch/data" - } - ], - "env": [ - { - "name": "discovery.type", - "label": "Discovery Type", - "default": "single-node" - } - ] - }, - { - "type": 1, - "title": "Redis", - "description": "Open-source in-memory data structure store", - "categories": [ - "database" - ], - "platform": "linux", - "logo": "https://portainer-io-assets.sfo2.digitaloceanspaces.com/logos/redis.png", - "image": "redis:latest", - "ports": [ - "6379/tcp" - ], - "volumes": [ - { - "container": "/data" - } - ] - }, - { - "type": 1, - "title": "Bitnami-Redis", - "description": "Open-source in-memory data structure store", - "categories": [ - "database" - ], - "platform": "linux", - "logo": "https://portainer-io-assets.sfo2.digitaloceanspaces.com/logos/redis.png", - "image": "bitnami/redis:latest", - "ports": [ - "6379/tcp" - ], - "volumes": [ - { - "container": "/data" - } - ], - "env": [ - { - "name": "ALLOW_EMPTY_PASSWORD", - "label": "Allow empty password", - "default": "no" - }, - { - "name": "REDIS_PASSWORD", - "label": "Redis password", - "default": "NDE6oXov3hpoZV" - } - ] - }, - { - "type": 1, - "title": "Redis Sentinel by Bitnami ", - "description": "Open-source in-memory data structure store", - "categories": [ - "database" - ], - "platform": "linux", - "logo": "https://portainer-io-assets.sfo2.digitaloceanspaces.com/logos/redis.png", - "image": "bitnami/redis-sentinel:latest", - "ports": [ - "26379/tcp" - ], - "volumes": [ - { - "container": "/data" - } - ], - "env": [ - { - "name": "REDIS_MASTER_HOST", - "label": "Host of the Redis master to monitor", - "default": "redis" - }, - { - "name": "REDIS_MASTER_PORT_NUMBER", - "label": "Port of the Redis master to monitor", - "default": "6379" - }, - { - "name": "REDIS_MASTER_SET", - "label": "Name of the set of Redis instances to monitor", - "default": "mymaster" - }, - { - "name": "REDIS_MASTER_PASSWORD", - "label": "Password to authenticate with the master", - "default": "7RewDzhRdXhoYc" - }, - { - "name": "REDIS_MASTER_USER", - "label": "Username to authenticate with when ACL is enabled for the master" - }, - { - "name": "REDIS_SENTINEL_PASSWORD", - "label": "Password to authenticate with this sentinel and to authenticate to other sentinels" - } - ] - }, - { - "type": 1, - "title": "RabbitMQ", - "description": "Highly reliable enterprise messaging system", - "categories": [ - "messaging" - ], - "platform": "linux", - "logo": "https://portainer-io-assets.sfo2.digitaloceanspaces.com/logos/rabbitmq.png", - "image": "rabbitmq:latest", - "ports": [ - "5671/tcp", - "5672/tcp" - ], - "volumes": [ - { - "container": "/var/lib/rabbitmq" - } - ] - } - ] - } \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 82b62f9f..76fc8108 100755 --- 
a/docker-compose.yml +++ b/docker-compose.yml @@ -1,34 +1,62 @@ version: "3.8" services: - db: - image: library/postgres:9 + metax: + image: fairdata-docker.artifactory.ci.csc.fi/fairdata-metax-web + ports: + - 8008:8008 + - 8000:8000 + volumes: + - ./src:/code environment: - - POSTGRES_DB=postgres - - POSTGRES_USER=postgres - - POSTGRES_PASSWORD=postgres + DEBUG: 'true' + METAX_DATABASE: 'metax_db_test' + METAX_DATABASE_PASSWORD: 'YMDLekQMqrVKcs3' + METAX_DATABASE_USER: 'metax_test' + REDIS_HOST: metax-redis + RABBIT_MQ_HOSTS: metax-rabbitmq + ELASTIC_SEARCH_HOSTS: metax-elasticsearch + METAX_DATABASE_HOST: metax-db + configs: + - source: metax-web-config + target: '/code/metax_api/settings/.env' + + metax-redis: + image: redis volumes: - - postgres_data:/var/lib/postgresql/data/ - web: - build: . - command: python manage.py runserver 0.0.0.0:8008 + - metax-redis:/data + + metax-db: + image: postgres:9 + environment: + POSTGRES_USER: 'metax_test' + POSTGRES_PASSWORD: 'YMDLekQMqrVKcs3' + POSTGRES_DB: 'metax_db_test' volumes: - - src:/code - ports: - - "8008:8008" - depends_on: - - db - - redis - # env_file: - # - ./src/metax_api/settings/.env + - metax-postgres:/var/lib/postgresql/data + + metax-elasticsearch: + image: elasticsearch:7.9.2 environment: - - REDIS_HOST=redis - links: - - redis:redis - redis: - image: library/redis - ports: - - "6379:6379" + discovery.type: 'single-node' + volumes: + - metax-es:/usr/share/elasticsearch/data + + metax-rabbitmq: + image: rabbitmq:3-management + volumes: + - metax-rabbitmq:/var/lib/rabbitmq + volumes: - postgres_data: - src: + metax-rabbitmq: + external: true + metax-es: + external: true + metax-postgres: + external: true + metax-redis: + external: true +configs: + metax-web-config: + external: True + diff --git a/docs/docker-prerequisites.md b/docs/docker-prerequisites.md new file mode 100644 index 00000000..3340f706 --- /dev/null +++ b/docs/docker-prerequisites.md @@ -0,0 +1,30 @@ + +# Docker setup prerequisites + +## Docker-Engine + +Install Docker-Engine either following instructions below or looking up your platform specific instructions [from docs.docker.com][1] + +### Linux + +`$ curl -fsSL https://get.docker.com -o get-docker.sh` + +`$ sudo sh get-docker.sh` + +`$ sudo usermod -aG docker $USER` + +Log out and back in to activate non-sudo docker capabilities + +### Mac + +https://docs.docker.com/docker-for-mac/install/ + +## Portainer (Optional) + +You can use portainer container management tool for monitoring various development dependencies. Command below will start portainer on every system startup. + +`$ docker run -d -p 8000:8000 -p 9000:9000 --name=portainer --restart=always -v /var/run/docker.sock:/var/run/docker.sock -v portainer_data:/data portainer/portainer-ce` + +Finish the Portainer setup by logging in at http://localhost:9000, create a local endpoint from the Portainer interface. 
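The `docker-compose.yml` above marks its volumes and the `metax-web-config` settings file as `external`, so they must exist before the stack is deployed. A sketch of creating them on a swarm-enabled host; the config source path is an assumption based on the `.env` location mentioned in the README:

```bash
# volume and config names come from docker-compose.yml in this diff
for vol in metax-redis metax-postgres metax-es metax-rabbitmq; do
  docker volume create "$vol"
done

# docker configs require swarm mode (run `docker swarm init` first)
docker config create metax-web-config src/metax_api/settings/.env
```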
+ +[1]: https://docs.docker.com/engine/install/ \ No newline at end of file diff --git a/docs/docker-stack.md b/docs/docker-stack.md new file mode 100644 index 00000000..889bc898 --- /dev/null +++ b/docs/docker-stack.md @@ -0,0 +1,53 @@ +# Local development with Docker-swarm + +## Building metax-image + +After installing [Docker prerequisites](docker-prerequisites.md), build the metax-web docker image with the following command: + +`docker build -t fairdata-docker.artifactory.ci.csc.fi/fairdata-metax-web .` + +## Building httpd-image + +`docker build -t fairdata-docker.artifactory.ci.csc.fi/fairdata-metax-httpd -f containers/apache-image.Dockerfile .` + +## Pushing metax-image to Artifactory + + `docker push fairdata-docker.artifactory.ci.csc.fi/fairdata-metax-web` + +## Pushing httpd-image to Artifactory + +`docker push fairdata-docker.artifactory.ci.csc.fi/fairdata-metax-httpd` + + +## Running the stack locally + +In the repository root, run + +`docker stack deploy -c docker-compose.yml --resolve-image=always --with-registry-auth metax-dev` + +## Running the stack without predefined docker-configs + +`docker stack deploy -c config-swap-stack.yml --resolve-image=always --with-registry-auth metax-dev` + +## Adding nginx to the stack + +`docker stack deploy -c docker-compose.yml -c containers/nginx-docker.yml --resolve-image=always --with-registry-auth metax-dev` + +## Running all services + +`docker stack deploy --resolve-image=always --with-registry-auth -c docker-compose.yml -c containers/nginx-docker.yml -c containers/apache-docker.yml metax-dev` + +## Running Metax management commands + +To run Metax management commands, locate the running metax-dev_metax container and open terminal inside it with: + +`docker exec -it bash` + +## Adding docker-config to the stack + +`docker service update --config-add source=metax-web-stable-config,target=/code/metax_api/settings/.env metax-dev_metax` + +## Swapping docker-config in the stack + +`docker service update --config-rm --config-add source=,target=/code/metax_api/settings/.env metax-dev_metax` + diff --git a/docs/local_ssl_setup.md b/docs/local_ssl_setup.md new file mode 100644 index 00000000..5ed07119 --- /dev/null +++ b/docs/local_ssl_setup.md @@ -0,0 +1,13 @@ +# Install ssl cert for local https testing + +# Setup + +Install [mkcerts][2] and run `mkcert -install` and after it the following command: +`mkcert -cert-file cert.pem -key-file key.pem 0.0.0.0 localhost 127.0.0.1 ::1 metax.csc.local 20.20.20.20` +Move the `cert.pem` and `key.pem` to `src/.certs` folder (create the folder if not present). 
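The steps above can be collapsed into a few commands. A sketch that assumes mkcert is already installed and writes the files straight into `src/.certs` instead of moving them afterwards:

```bash
# generate a locally trusted certificate directly into src/.certs
mkcert -install
mkdir -p src/.certs
mkcert -cert-file src/.certs/cert.pem -key-file src/.certs/key.pem \
  0.0.0.0 localhost 127.0.0.1 ::1 metax.csc.local 20.20.20.20
```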
+ +## Run SSL enabled development server + +`python manage.py runsslserver --certificate .certs/cert.pem --key .certs/key.pem 8008` + +[2]: https://github.com/FiloSottile/mkcert \ No newline at end of file diff --git a/docs/manage-commands.md b/docs/manage-commands.md new file mode 100644 index 00000000..39ed3160 --- /dev/null +++ b/docs/manage-commands.md @@ -0,0 +1,33 @@ +# Metax management commands + +## Create and migrate database + +`python manage.py migrate` + +## Index the reference data + +`python manage.py index_refdata` + +## Reload reference data to redis cache + +`python manage.py reload_refdata_cache` + +## Add necessary initial data to database + +`python manage.py loadinitialdata` + +## Add some test datasets to database + +`python manage.py loaddata metax_api/tests/testdata/test_data.json` + +## Run all tests + +`DJANGO_ENV=test python manage.py test --parallel --failfast --keepdb -v 0` + +## Inspect current application settings + +`python manage.py diffsettings --output unified --force-color` + +## Execute management commands against docker swarm metax-api container + +`docker exec $(docker ps -q -f name=metax-dev_metax) python manage.py check` diff --git a/docs/single-docker-images.md b/docs/single-docker-images.md new file mode 100644 index 00000000..7d3b0f00 --- /dev/null +++ b/docs/single-docker-images.md @@ -0,0 +1,37 @@ +# Docker setup without swarm + +__NOTICE If you want to start the services everytime your computer boots, replace `--restart=unless-stopped` with `--restart=always`__ + +After installing [Docker prerequisites](docker-prerequisites.md),run the following docker commands to start services: + +## Redis + +`docker run -d -p 6379:6379 --name metax-redis -v metax-redis:/data --restart=unless-stopped redis` + +## Postgres + +`docker run -d -p 5432:5432 --name metax-postgres -v metax-postgres:/var/lib/postgresql96/data -e POSTGRES_USER=metax_db_user -e POSTGRES_PASSWORD=YMDLekQMqrVKcs3 -e POSTGRES_DB=metax_db --restart=unless-stopped postgres:9` + +__NOTICE: copy values of `POSTGRES_USER`, `POSTGRES_PASSWORD` and `POSTGRES_DB` into your `.env` files as `METAX_DATABASE_USER`, `METAX_DATABASE_PASSWORD` and `METAX_DATABASE`__ + +## Elasticsearch + +`docker run -d -p 9200:9200 -p 9300:9300 -v metax-es:/usr/share/elasticsearch/data --name metax-es -e discovery.type=single-node --restart=unless-stopped elasticsearch:7.9.2` + +## RabbitMQ + +`docker run -d -p 5671:5671 -p 5672:5672 -p 15672:15672 -v metax-rabbitmq:/var/lib/rabbitmq --name metax-rabbitmq --restart=unless-stopped rabbitmq:3-management` + +## Metax +Check the IP addresses of Redis, RabbitMQ, ElasticSearch and Postgres:9 either from Portainer container list (click the link in the container name to see all attributes) or by going to portainer network tab or by typing `docker container ps` followed by `docker network inspect bridge` + +Build new docker image from repository root with this command + +`docker build -t fairdata-metax-web:latest .` + +Run the built container with command: + +`docker run -it --name fairdata-metax-web --mount type=bind,source="$(pwd)"/src,target=/code -p 8008:8008 --rm -e METAX_DATABASE_USER= -e METAX_DATABASE_PASSWORD= -e METAX_DATABASE= -e REDIS_HOST= -e RABBIT_MQ_HOSTS= -e ELASTIC_SEARCH_HOSTS= -e METAX_DATABASE_HOST= fairdata-metax-web:latest` + +## Metax management commands +access the command line of the container with `docker exec -it metax-web bash` diff --git a/requirements.in b/requirements.in index 4326b1cb..006d948f 100755 --- a/requirements.in +++ b/requirements.in 
@@ -29,12 +29,13 @@ xmltodict # MIT-license pip-tools # keeping dependencies updated traitlets>=4.3.2,<5.0.0 # via ipython, version 5.0+ is Python 3.7+ only xmltodict # MIT-license -pip-tools # keeping dependencies updated django-environ django-split-settings icecream rdflib django-sslserver +tblib django-watchman - +django-debug-toolbar +pip-tools>6 diff --git a/requirements.txt b/requirements.txt index 235ab8ee..999147c3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,76 +4,194 @@ # # pip-compile # -asgiref==3.2.10 # via django -asttokens==2.0.4 # via icecream -attrs==20.3.0 # via jsonschema -backcall==0.2.0 # via ipython -certifi==2020.6.20 # via elasticsearch, requests -chardet==3.0.4 # via requests -click==7.1.2 # via pip-tools -colorama==0.4.4 # via icecream -coverage==5.3 # via coveralls -coveralls==2.1.2 # via -r requirements.in -datacite==1.0.1 # via -r requirements.in -decorator==4.4.2 # via ipython, traitlets -django-environ==0.4.5 # via -r requirements.in -django-rainbowtests==0.6.0 # via -r requirements.in -django-split-settings==1.0.1 # via -r requirements.in -django-sslserver==0.22 # via -r requirements.in -django-watchman==1.2.0 # via -r requirements.in -djangorestframework==3.12.2 # via -r requirements.in -django==3.1.4 # via -r requirements.in, django-rainbowtests, django-sslserver, django-watchman, djangorestframework -docopt==0.6.2 # via coveralls -elasticsearch==7.10.0 # via -r requirements.in -executing==0.5.3 # via icecream -flake8==3.8.4 # via -r requirements.in -gevent==20.9.0 # via -r requirements.in -greenlet==0.4.17 # via gevent -gunicorn==20.0.4 # via -r requirements.in -hiredis==1.1.0 # via -r requirements.in -icecream==2.0.0 # via -r requirements.in -idna==2.10 # via requests -ipdb==0.13.4 # via -r requirements.in -ipython-genutils==0.2.0 # via traitlets -ipython==7.16.1 # via ipdb -isodate==0.6.0 # via rdflib -jedi==0.17.2 # via ipython -jsonschema==3.2.0 # via -r requirements.in, datacite -lxml==4.6.2 # via -r requirements.in, datacite, pyoai -mccabe==0.6.1 # via flake8 -parso==0.7.1 # via jedi -pexpect==4.8.0 # via ipython -pickleshare==0.7.5 # via ipython -pika==1.1.0 # via -r requirements.in -pip-tools==5.3.1 # via -r requirements.in -prompt-toolkit==3.0.8 # via ipython -psycopg2-binary==2.8.6 # via -r requirements.in -ptyprocess==0.6.0 # via pexpect -pycodestyle==2.6.0 # via flake8 -pyflakes==2.2.0 # via flake8 -pygments==2.7.2 # via icecream, ipython -pyjwt==1.7.1 # via -r requirements.in -pyoai==2.5.0 # via -r requirements.in -pyparsing==2.4.7 # via rdflib -pyrsistent==0.17.3 # via jsonschema -python-dateutil==2.8.1 # via -r requirements.in -python-simplexquery==1.0.5.3 # via -r requirements.in -pytz==2020.5 # via -r requirements.in, django -pyyaml==5.3.1 # via -r requirements.in -rdflib==5.0.0 # via -r requirements.in -redis==3.5.3 # via -r requirements.in -requests==2.25.0 # via -r requirements.in, coveralls, datacite, responses -responses==0.12.1 # via -r requirements.in -simplejson==3.17.2 # via -r requirements.in -six==1.15.0 # via asttokens, isodate, jsonschema, pip-tools, pyoai, python-dateutil, rdflib, responses, structlog, traitlets -sqlparse==0.4.1 # via django -structlog==20.1.0 # via -r requirements.in -traitlets==4.3.3 # via -r requirements.in, ipython -urllib3==1.25.11 # via -r requirements.in, elasticsearch, requests, responses -wcwidth==0.2.5 # via prompt-toolkit -xmltodict==0.12.0 # via -r requirements.in -zope.event==4.5.0 # via gevent -zope.interface==5.1.2 # via gevent +asgiref==3.2.10 + # via django +asttokens==2.0.4 + 
# via icecream +attrs==20.3.0 + # via jsonschema +backcall==0.2.0 + # via ipython +certifi==2020.6.20 + # via + # elasticsearch + # requests +chardet==3.0.4 + # via requests +click==7.1.2 + # via pip-tools +colorama==0.4.4 + # via icecream +coverage==5.3 + # via coveralls +coveralls==2.1.2 + # via -r requirements.in +datacite==1.0.1 + # via -r requirements.in +decorator==4.4.2 + # via + # ipython + # traitlets +django-debug-toolbar==3.2 + # via -r requirements.in +django-environ==0.4.5 + # via -r requirements.in +django-rainbowtests==0.6.0 + # via -r requirements.in +django-split-settings==1.0.1 + # via -r requirements.in +django-sslserver==0.22 + # via -r requirements.in +django-watchman==1.2.0 + # via -r requirements.in +django==3.1.4 + # via + # -r requirements.in + # django-debug-toolbar + # django-rainbowtests + # django-sslserver + # django-watchman + # djangorestframework +djangorestframework==3.12.2 + # via -r requirements.in +docopt==0.6.2 + # via coveralls +elasticsearch==7.10.0 + # via -r requirements.in +executing==0.5.3 + # via icecream +flake8==3.8.4 + # via -r requirements.in +gevent==20.9.0 + # via -r requirements.in +greenlet==0.4.17 + # via gevent +gunicorn==20.0.4 + # via -r requirements.in +hiredis==1.1.0 + # via -r requirements.in +icecream==2.0.0 + # via -r requirements.in +idna==2.10 + # via requests +ipdb==0.13.4 + # via -r requirements.in +ipython-genutils==0.2.0 + # via traitlets +ipython==7.16.1 + # via ipdb +isodate==0.6.0 + # via rdflib +jedi==0.17.2 + # via ipython +jsonschema==3.2.0 + # via + # -r requirements.in + # datacite +lxml==4.6.2 + # via + # -r requirements.in + # datacite + # pyoai +mccabe==0.6.1 + # via flake8 +parso==0.7.1 + # via jedi +pep517==0.10.0 + # via pip-tools +pexpect==4.8.0 + # via ipython +pickleshare==0.7.5 + # via ipython +pika==1.1.0 + # via -r requirements.in +pip-tools==6.0.1 + # via -r requirements.in +prompt-toolkit==3.0.8 + # via ipython +psycopg2-binary==2.8.6 + # via -r requirements.in +ptyprocess==0.6.0 + # via pexpect +pycodestyle==2.6.0 + # via flake8 +pyflakes==2.2.0 + # via flake8 +pygments==2.7.2 + # via + # icecream + # ipython +pyjwt==1.7.1 + # via -r requirements.in +pyoai==2.5.0 + # via -r requirements.in +pyparsing==2.4.7 + # via rdflib +pyrsistent==0.17.3 + # via jsonschema +python-dateutil==2.8.1 + # via -r requirements.in +python-simplexquery==1.0.5.3 + # via -r requirements.in +pytz==2020.5 + # via + # -r requirements.in + # django +pyyaml==5.3.1 + # via -r requirements.in +rdflib==5.0.0 + # via -r requirements.in +redis==3.5.3 + # via -r requirements.in +requests==2.25.0 + # via + # -r requirements.in + # coveralls + # datacite + # responses +responses==0.12.1 + # via -r requirements.in +simplejson==3.17.2 + # via -r requirements.in +six==1.15.0 + # via + # asttokens + # isodate + # jsonschema + # pyoai + # python-dateutil + # rdflib + # responses + # structlog + # traitlets +sqlparse==0.4.1 + # via + # django + # django-debug-toolbar +structlog==20.1.0 + # via -r requirements.in +tblib==1.7.0 + # via -r requirements.in +toml==0.10.2 + # via pep517 +traitlets==4.3.3 + # via + # -r requirements.in + # ipython +urllib3==1.25.11 + # via + # -r requirements.in + # elasticsearch + # requests + # responses +wcwidth==0.2.5 + # via prompt-toolkit +xmltodict==0.12.0 + # via -r requirements.in +zope.event==4.5.0 + # via gevent +zope.interface==5.1.2 + # via gevent # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/src/metax_api/management/commands/fix_file_counts.py 
b/src/metax_api/management/commands/fix_file_counts.py index 121f968e..8bdc7a53 100644 --- a/src/metax_api/management/commands/fix_file_counts.py +++ b/src/metax_api/management/commands/fix_file_counts.py @@ -8,9 +8,15 @@ class Command(BaseCommand): def handle(self, *args, **options): - dirs_with_no_files = Directory.objects.filter(file_count=0, parent_directory=None) - logger.info(f"fix_file_counts command found {dirs_with_no_files.count()} directories with file_count=0") + dirs_with_no_files = Directory.objects_unfiltered.all() + dir_sum = dirs_with_no_files.count() + logger.info(f"fix_file_counts command found {dir_sum} directories") + i=0 for dir in dirs_with_no_files: - dir.calculate_byte_size_and_file_count() - logger.info(f"folder has {dir.file_count} files after recalculation") + i += 1 + try: + dir.calculate_byte_size_and_file_count() + except Exception as e: + logger.error(f"can't fix filecount for directory {i}/{dir_sum}") + logger.info(f"folder {i}/{dir_sum} has {dir.file_count} files after recalculation") logger.info(f"fix_file_counts command executed successfully") \ No newline at end of file diff --git a/src/metax_api/management/commands/fix_file_counts_cr.py b/src/metax_api/management/commands/fix_file_counts_cr.py new file mode 100644 index 00000000..6768ffc7 --- /dev/null +++ b/src/metax_api/management/commands/fix_file_counts_cr.py @@ -0,0 +1,19 @@ +import logging + +from django.core.management.base import BaseCommand + +from metax_api.models import CatalogRecord + +logger = logging.getLogger(__name__) + +class Command(BaseCommand): + def handle(self, *args, **options): + CRS = CatalogRecord.objects.all() + crs_sum = CRS.count() + logger.info(f"fix_file_counts command found {crs_sum} catalog records with file_count=0 and byte_size=0") + i = 1 + for catalog_record in CRS: + logger.info(f"Calculating {i}/{crs_sum} {catalog_record.identifier} ") + catalog_record.calculate_directory_byte_sizes_and_file_counts() + i += 1 + logger.info(f"fix_file_counts command executed successfully") \ No newline at end of file diff --git a/src/metax_api/management/commands/fix_file_counts_for_cr.py b/src/metax_api/management/commands/fix_file_counts_for_cr.py new file mode 100644 index 00000000..2409ae14 --- /dev/null +++ b/src/metax_api/management/commands/fix_file_counts_for_cr.py @@ -0,0 +1,15 @@ +import logging + +from django.core.management.base import BaseCommand + +from metax_api.models import CatalogRecord + +logger = logging.getLogger(__name__) + +class Command(BaseCommand): + def handle(self, *args, **options): + CRS = CatalogRecord.objects.all() + for catalog_record in CRS: + catalog_record.calculate_directory_byte_sizes_and_file_counts() + logger.info(f"Calculating {catalog_record.identifier} ") + logger.info(f"fix_file_counts command executed successfully") \ No newline at end of file diff --git a/src/metax_api/management/commands/load_data_to_TTV.py b/src/metax_api/management/commands/load_data_to_TTV.py new file mode 100644 index 00000000..874886aa --- /dev/null +++ b/src/metax_api/management/commands/load_data_to_TTV.py @@ -0,0 +1,47 @@ +# This file is part of the Metax API service +# +# Copyright 2017-2018 Ministry of Education and Culture, Finland +# +# :author: CSC - IT Center for Science Ltd., Espoo Finland +# :license: MIT + +import logging + +from django.core.management.base import BaseCommand +from metax_api.models import CatalogRecord +from metax_api.services import RabbitMQService + +logger = logging.getLogger(__name__) + +# serializer needs these in context so give 
them. +# definitely not the way to do it but still, here it is.. +class User: + def __init__(self): + self.is_service = True + +class Request: + def __init__(self, user): + self.user = user + self.query_params = [] + self.method = 'POST' + +class Command(BaseCommand): + + help = "Upload all existing data to TTV's RabbitMQ queue" + + def handle(self, *args, **options): + catalog_records = CatalogRecord.objects.filter(state='published') + aff_rows = 0 + user = User() + request = Request(user) + context = {'request': request} + + for catalog_record in catalog_records: + serializer = catalog_record.serializer_class + cr_json = serializer(catalog_record, context=context).data + cr_json['data_catalog'] = {'catalog_json': catalog_record.data_catalog.catalog_json} + + RabbitMQService.publish(cr_json, routing_key='create', exchange="TTV-datasets") + aff_rows += 1 + + logger.info("All catalog records published to TTV exchange") diff --git a/src/metax_api/models/catalog_record.py b/src/metax_api/models/catalog_record.py index 36544d6f..6d829952 100755 --- a/src/metax_api/models/catalog_record.py +++ b/src/metax_api/models/catalog_record.py @@ -1267,8 +1267,7 @@ def _post_create_operations(self): super().save() - if self.state == self.STATE_PUBLISHED: - self.add_post_request_callable(RabbitMQPublishRecord(self, 'create')) + self.add_post_request_callable(RabbitMQPublishRecord(self, 'create')) _logger.info( 'Created a new # :license: MIT - import logging from collections import defaultdict from os import getpid @@ -510,7 +509,7 @@ def _mark_datasets_as_deprecated(file_ids): from metax_api.models.catalog_record import RabbitMQPublishRecord for cr in deprecated_records: - CallableService.add_post_request_callable(RabbitMQPublishRecord(cr, 'update')) + cr.add_post_request_callable(RabbitMQPublishRecord(cr, 'update')) @classmethod def get_directory_contents(cls, identifier=None, path=None, project_identifier=None, diff --git a/src/metax_api/settings/__init__.py b/src/metax_api/settings/__init__.py index 42dccade..d20975b7 100755 --- a/src/metax_api/settings/__init__.py +++ b/src/metax_api/settings/__init__.py @@ -27,12 +27,12 @@ ELASTIC_SEARCH_USE_SSL=(bool, False), ENABLE_V1_ENDPOINTS=(bool, True), ENABLE_V2_ENDPOINTS=(bool, True), - ERROR_FILES_PATH=(str, join(BASE_DIR, "log", "errors")), + ERROR_FILES_PATH=(str, join("/var", "log", "metax-api", "errors")), ES_CONFIG_DIR=(str, join(REFDATA_INDEXER_PATH, "resources", "es-config/")), LOCAL_REF_DATA_FOLDER=(str,join(REFDATA_INDEXER_PATH, "resources", "local-refdata/"),), - LOGGING_DEBUG_HANDLER_FILE=(str, join(BASE_DIR, "log", "metax_api.log")), - LOGGING_GENERAL_HANDLER_FILE=(str, join(BASE_DIR, "log", "metax_api.log")), - LOGGING_JSON_FILE_HANDLER_FILE=(str, join(BASE_DIR, "log", "metax_api.json.log")), + LOGGING_DEBUG_HANDLER_FILE=(str, join("/var", "log", "metax-api", "metax_api.log")), + LOGGING_GENERAL_HANDLER_FILE=(str, join("/var", "log", "metax-api", "metax_api.log")), + LOGGING_JSON_FILE_HANDLER_FILE=(str, join("/var", "log", "metax-api", "metax_api.json.log")), METAX_DATABASE_HOST=(str, "localhost"), METAX_DATABASE_PORT=(str, 5432), METAX_ENV=(str, "local_development"), @@ -73,6 +73,5 @@ # optional('environments/legacy.py'), ] ic(ENV) - # Include settings: include(*base_settings) diff --git a/src/metax_api/settings/environments/local.py b/src/metax_api/settings/environments/local.py index 33d1d00f..ca3cf163 100755 --- a/src/metax_api/settings/environments/local.py +++ b/src/metax_api/settings/environments/local.py @@ -1,6 +1,6 @@ from watchman 
import constants as watchman_constants -from metax_api.settings.components.common import INSTALLED_APPS, ALLOWED_HOSTS +from metax_api.settings.components.common import INSTALLED_APPS, ALLOWED_HOSTS, MIDDLEWARE, DEBUG INSTALLED_APPS += ["watchman"] @@ -47,3 +47,20 @@ "vhost": "metax", }, ] +if 'debug_toolbar' not in INSTALLED_APPS: + INSTALLED_APPS += ['debug_toolbar'] +if 'debug_toolbar.middleware.DebugToolbarMiddleware' not in MIDDLEWARE: + MIDDLEWARE = ['debug_toolbar.middleware.DebugToolbarMiddleware'] + MIDDLEWARE +INTERNAL_IPS = [ + '127.0.0.1', + '0.0.0.0' +] +def show_toolbar(request): + if DEBUG: + return True + else: + return False +DEBUG_TOOLBAR_CONFIG = { + "SHOW_TOOLBAR_CALLBACK" : show_toolbar, +} + diff --git a/src/metax_api/settings/environments/remote.py b/src/metax_api/settings/environments/remote.py index 72ff9bc1..fc3c528b 100644 --- a/src/metax_api/settings/environments/remote.py +++ b/src/metax_api/settings/environments/remote.py @@ -1,4 +1,7 @@ from metax_api.settings import env +from metax_api.settings.components.common import API_USERS + +API_USERS.clear() API_USERS = [ { diff --git a/src/metax_api/urls.py b/src/metax_api/urls.py index 9fd05f99..30c1f749 100755 --- a/src/metax_api/urls.py +++ b/src/metax_api/urls.py @@ -22,7 +22,7 @@ """ from django.conf import settings as django_settings from django.conf.urls import url, include -from django.urls import re_path +from django.urls import re_path, path from metax_api.api.oaipmh.base.view import oaipmh_view as oaipmh from metax_api.api.rest.base.router import api_urlpatterns as rest_api_v1 @@ -30,6 +30,8 @@ from metax_api.api.rpc.base.router import api_urlpatterns as rpc_api_v1 from metax_api.api.rpc.v2.router import api_urlpatterns as rpc_api_v2 from metax_api.views.router import view_urlpatterns +import debug_toolbar + v1_urls = [ url('', include(view_urlpatterns)), @@ -55,4 +57,5 @@ urlpatterns += [ re_path(r'^watchman/', include('watchman.urls')), + path('__debug__/', include(debug_toolbar.urls)), ] diff --git a/swagger/v1/swagger.yaml b/swagger/v1/swagger.yaml index ec997116..e91f7d3f 100755 --- a/swagger/v1/swagger.yaml +++ b/swagger/v1/swagger.yaml @@ -5,7 +5,7 @@ info: host: __METAX_ENV_DOMAIN__ schemes: - https -basePath: /rest/v1/ +basePath: '' produces: - application/json paths:
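A note on the `urls.py` hunk above: `debug_toolbar` is imported unconditionally even though it is only added to `INSTALLED_APPS` in the local settings profile. A common alternative, shown here only as a hedged sketch and not as part of this diff, mounts the toolbar routes conditionally:

```python
# sketch: mount django-debug-toolbar only when it is actually installed,
# so settings profiles without it do not fail at import time
from django.conf import settings as django_settings
from django.urls import include, path

if "debug_toolbar" in django_settings.INSTALLED_APPS:
    import debug_toolbar

    urlpatterns += [path("__debug__/", include(debug_toolbar.urls))]
```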