diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d09c8c37..0de37e73 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,7 +1,7 @@ workflow: rules: - - if: '$CI_COMMIT_BRANCH =~ /^(demo|stable|staging|test)$/' - - if: '$CI_PIPELINE_SOURCE == "merge_request_event"' + - if: $CI_COMMIT_BRANCH =~ /^(demo|stable|staging|test)$/ + - if: $CI_PIPELINE_SOURCE == "merge_request_event" && $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME !~ /^(demo|master|stable|staging|test)$/ stages: - deploy @@ -31,11 +31,9 @@ clean_test: script: - ansible-playbook -i $ANSIBLE_INVENTORY $DELETE_PLAYBOOK -e "build_id=$CI_COMMIT_SHORT_SHA" rules: - - if: $CI_PIPELINE_SOURCE == "merge_request_event" && $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME =~ /^(demo|stable|staging|test)$/ - when: never - if: $CI_PIPELINE_SOURCE == "merge_request_event" when: always - - when: on_failure + - when: never clean_gitlab_env: stage: clean_env @@ -47,7 +45,7 @@ clean_gitlab_env: script: - echo "Cleaning deleted branches from environments" rules: - - if: $CI_PIPELINE_SOURCE == "merge_request_event" && $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME !~ /^(demo|stable|staging|test)$/ + - if: $CI_PIPELINE_SOURCE == "merge_request_event" when: manual - when: never @@ -57,9 +55,9 @@ update_proxy: script: - ansible-playbook -i $ANSIBLE_INVENTORY $UPDATE_PROXY_PLAYBOOK -e "build_id=$CI_COMMIT_SHORT_SHA" rules: - - if: '$CI_PIPELINE_SOURCE == "merge_request_event"' - when: never - - when: always + - if: $CI_COMMIT_BRANCH =~ /^(demo|stable|staging|test)$/ + when: always + - when: never clean_previous_build: stage: clean_build @@ -67,6 +65,6 @@ clean_previous_build: script: - ansible-playbook -i $ANSIBLE_INVENTORY $DELETE_PLAYBOOK -e "build_id=${CI_COMMIT_BEFORE_SHA:0:8}" rules: - - if: '$CI_PIPELINE_SOURCE == "merge_request_event"' - when: never - - when: always + - if: $CI_COMMIT_BRANCH =~ /^(demo|stable|staging|test)$/ + when: always + - when: never diff --git a/.travis-deploy.sh b/.travis-deploy.sh deleted file mode 100755 index 0b94dc82..00000000 --- a/.travis-deploy.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash -xeu - -# Run after the tests are successfully completed in travis build. - -if [[ "$TRAVIS_BRANCH" == "master" || "$TRAVIS_PULL_REQUEST" != "false" ]]; then - exit 0 -fi - -# pip install ansible -# git clone https://github.com/CSCfi/metax-ops -# cd metax-ops/ansible/ - -# if [[ "$TRAVIS_BRANCH" == "test" && "$TRAVIS_PULL_REQUEST" == "false" ]]; then -# echo "Deploying to test.." -# ansible-galaxy -r requirements.yml install --roles-path=roles -# ansible-playbook -vv -i inventories/test/hosts site_deploy.yml --extra-vars "ssh_user=metax-deploy-user server_domain_name=metax.fd-test.csc.fi" -# elif [[ "$TRAVIS_BRANCH" == "stable" && "$TRAVIS_PULL_REQUEST" == "false" ]]; then -# echo "Deploying to stable.." -# ansible-galaxy -r requirements.yml install --roles-path=roles -# ansible-playbook -vv -i inventories/stable/hosts site_deploy.yml --extra-vars "ssh_user=metax-deploy-user server_domain_name=metax.fd-stable.csc.fi" -# fi - -# # Make sure the last command to run before this part is the ansible-playbook command -# if [ $? 
-eq 0 ] -# then -# exit 0 -# else -# exit 1 -# fi -exit 0 diff --git a/.travis.yml b/.travis.yml deleted file mode 100755 index 0fe692cc..00000000 --- a/.travis.yml +++ /dev/null @@ -1,65 +0,0 @@ -dist: trusty - -language: python -python: 3.6 - -cache: pip - -addons: - apt: - packages: - - xqilla - - libxqilla-dev - postgresql: '9.6' - -services: - - postgresql - - redis-server - -env: - global: - - AUTH_SERVER_LOGOUT_URL=http://logout - - DATACITE_PREFIX=10.12345 - - DATACITE_URL=http://mockurl.com - - DATACITE_ETSIN_URL_TEMPLATE=http://mockurl.com/%s - - DATACITE_PASSWORD=not_set - - DATACITE_USERNAME=not_set - - DEBUG=false - - DJANGO_ENV=travis - - DJANGO_SECRET_KEY=lxpRfm8.JOUa7K.eOxZGtL*o+.zt.ybj - - DRAFT_ENABLED=true - - ELASTIC_SEARCH_HOSTS=https://metax.fd-test.csc.fi/es - - ELASTIC_SEARCH_USE_SSL=true - - METAX_DATABASE=metax_db_test - - METAX_DATABASE_PASSWORD= - - METAX_DATABASE_USER=metax_test - - OAI_BASE_URL=http://mockurl.com - - OAI_ETSIN_URL_TEMPLATE=http://mockurl.com/%s - - OAI_ADMIN_EMAIL=mocked@mail.com - - REMS_ENABLED=true - - REMS_API_KEY=not_set - - REMS_BASE_URL=http://mockurl.com - - REMS_ETSIN_URL_TEMPLATE=http://mockurl.com/%s - - REMS_METAX_USER=not_set - - REMS_REPORTER_USER=not_set - - REMS_AUTO_APPROVER=not_set - - REMS_FORM_ID=99 - - ENABLE_V1_ENDPOINTS=true - - ENABLE_V2_ENDPOINTS=true - -install: -- pip install -r requirements.txt - -before_script: -- psql -U postgres < resources/sql/init_test.sql -- sudo mkdir -p /var/log/metax-api/errors -- sudo chown -R $USER:$USER /var/log/metax-api - -script: -- cd src && python manage.py migrate metax_api -- coverage run --source="." manage.py test metax_api -- COVERALLS_REPO_TOKEN=$coveralls_token coveralls - -notifications: - flowdock: - secure: j1VbRwvbQVl5IQ38NDFlVGgQOs/gbzfDVb7YF65ouJ/ad6+J/zSOBosD8QLnpz7d3Py97+4oy4ZnL7U6QMyW8hKezSCa7maAB5q/vPI6bDXpA8LQ8vvR8EcGa6gQHaWvgChlorDF2hNlyBCL8/LqU8p8c2sbETA+FCKgPJv9g4Zb9ag9eyq/BTC8YSVXqx8nd2knprLA5R8Wk/V93wJo5DGo4CVVXLba847ovt8NQ+pkVv2LskRA7wN/y7H+Un9fIpTjSFR0spD/JLHwMRRe463U45PZJ3S197leuomSscCpxTDNFFokLFJEird3NoDzR9Ny/2rmk+jsJKOf0P1iTniS7lZHzHG+GmWzoXWZaihJAnxYAyzyjLGkaz8aIJjNi/w5wuIKQXj1yLBaGJvply5O2VmNrr9sdhGmcbNZI0ZDTjKvmwir/uu1KGek1oKCh2M//6Zw/Ht5oCRkID5Qs21U5/onKJFJo/U7ZsVc8AJVKH0SqqVuQR24hYNlMr7aNYznXfVyuMX0oJXXKbtHGb+CPEqoMTRGYGUVMz1oMwlEbO0n51nbmmtF6co+WrzdHCyeHU+lC8MBR1cEpc2ZFIQ89ESJVDImKJw8+pJOpgu8DsiLFwrnwZ0c0xysQXEDS2pR+My0O4e0bY/WnADpA+JBy2e6fR/wVhXdUpRD2vs= diff --git a/Dockerfile b/Dockerfile index 6f23920d..54133ab5 100755 --- a/Dockerfile +++ b/Dockerfile @@ -5,16 +5,14 @@ ENV PYTHONDONTWRITEBYTECODE 1 RUN mkdir -p /var/log/metax-api/errors && touch /var/log/metax-api/metax-api.json.log -WORKDIR /code - -COPY requirements.txt /code/ - RUN apt-get update && apt install xqilla libxerces-c-dev build-essential libssl-dev libffi-dev python-dev libxqilla-dev -y -RUN pip install --upgrade pip wheel -RUN pip install -r requirements.txt +RUN pip install --upgrade pip wheel poetry +COPY pyproject.toml poetry.lock /code/ +WORKDIR /code +RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --extras "docs simplexquery" EXPOSE 8008 EXPOSE 8006 -CMD ["python", "manage.py", "runserver", "0.0.0.0:8008"] \ No newline at end of file +CMD ["python", "manage.py", "runserver", "0.0.0.0:8008"] diff --git a/ENV_VARS.md b/ENV_VARS.md index 2b4a4a2e..13ea7db0 100755 --- a/ENV_VARS.md +++ b/ENV_VARS.md @@ -2,61 +2,61 @@ copy .env.template to .env and fill the required values from below table. 
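For reference, a minimal `.env` for local development might look like the sketch below; every value is a placeholder and only a subset of the variables from the table that follows is shown:

```bash
# Hypothetical minimal .env for local development -- all values are placeholders
DJANGO_ENV=local
DJANGO_SECRET_KEY=replace-with-a-long-random-string
METAX_DATABASE=metax_db_test
METAX_DATABASE_USER=metax_test
METAX_DATABASE_PASSWORD=replace-me
METAX_DATABASE_HOST=localhost
ELASTIC_SEARCH_HOSTS=localhost
RABBIT_MQ_HOSTS=localhost
REDIS_HOST=localhost
```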
Required column tells if you have to have the variable in the .env file -| Name | Required | Default | Description | -| ------------------------------ | -------- | ------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | -| ADDITIONAL_USER_PROJECTS_PATH | unknown | "" | No clue if this is important | -| ALLOWED_HOSTS | no | | defines which IP-addresses are allowed to access metax, DJANGO_ENV=local overrides this | -| AUTH_SERVER_LOGOUT_URL | unknown | | Requires testing if this is needed | -| DATACITE_ETSIN_URL_TEMPLATE | yes | | -| DATACITE_PASSWORD | yes | | -| DATACITE_PREFIX | yes | | -| DATACITE_URL | yes | | -| DATACITE_USERNAME | yes | | -| DEBUG | no | False | -| DJANGO_ENV | no | local | Specifies the environment, corresponds with the environments found in src/metax_api/settings/environments/ | -| DJANGO_SECRET_KEY | yes | | -| DRAFT_ENABLED | no | | -| ELASTIC_SEARCH_HOSTS | yes | | Elastic Search instance IP and port | -| ELASTIC_SEARCH_PORT | no | 9200 | Is not used currently, but should be in the future | -| ELASTIC_SEARCH_USE_SSL | yes | | Should Elastic Search queries use https | -| ERROR_FILES_PATH | no | src/log/errors | Error file folder | -| ES_CONFIG_DIR | no | src/metax_api/tasks/refdata/refdata_indexer/resources/es-config | metax-ops compatibility | -| LOCAL_REF_DATA_FOLDER | no | src/metax_api/tasks/refdata/refdata_indexer/resources/local-refdata | metax-ops compatibility | -| LOGGING_DEBUG_HANDLER_FILE | no | src/log/metax_api.log | metax-ops compatibility | -| LOGGING_GENERAL_HANDLER_FILE | no | src/log/metax_api.log | metax-ops compatibility | -| LOGGING_JSON_FILE_HANDLER_FILE | no | src/log/metax_api.json.log | metax-ops compatibility | -| METAX_API_ROOT | yes | | Metax internal variable, must be https | -| METAX_DATABASE | yes | | Postgres database name | -| METAX_DATABASE_PASSWORD | yes | | Postgres database password | -| METAX_DATABASE_PORT | no | 5432 | Postgres instance exposed port | -| METAX_DATABASE_USER | yes | | Postgres user which owns the database | -| METAX_ENV | no | local_development | Used by Metax internally, should be replaced with DJANGO_ENV in the future | -| OAI_BASE_URL | yes | | -| ORG_FILE_PATH | yes | src/metax_api/tasks/refdata/refdata_indexer/resources/organizations/organizations.csv | metax-ops compatibility | -| RABBIT_MQ_HOSTS | yes | | RabbitMQ instance IP | -| RABBIT_MQ_PASSWORD | no | guest | -| RABBIT_MQ_PORT | no | 5672 | -| RABBIT_MQ_TTV_ENABLED | no | | -| RABBIT_MQ_USER | no | guest | -| RABBIT_MQ_VHOST | no | | -| RABBIT_MQ_VHOST_TTV | no | | -| REDIS_HOST | yes | | Redis instance IP | -| REDIS_LOCALHOST_PORT | unknown | 6379 | Not sure if all references to this are gone | -| REDIS_PASSWORD | no | | -| REDIS_PORT | no | 6379 | -| REDIS_TEST_DB | yes | | Pick a number, any number | -| REDIS_USE_PASSWORD | no | false | -| REMS_API_KEY | no | -| REMS_AUTO_APPROVER | no | -| REMS_BASE_URL | no | -| REMS_ENABLED | no | -| REMS_ETSIN_URL_TEMPLATE | yes | | -| REMS_FORM_ID | yes | | -| REMS_METAX_USER | no | -| REMS_REPORTER_USER | no | -| SERVER_DOMAIN_NAME | no | -| TRAVIS | no | false | Metax internal used to figure out if Metax is running on Travis virtual machine | -| V1_ENABLED | no | | -| V2_ENABLED | no | | -| VALIDATE_TOKEN_URL | yes | | -| WKT_FILENAME | no | src/metax_api/tasks/refdata/refdata_indexer/resources/uri_to_wkt.json | \ No newline at end of file +| Name | Required | 
Default | Description | +| --------------------------------------- | -------- | ------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | +| ALWAYS_RELOAD_REFERENCE_DATA_ON_RESTART | no | True +| ADDITIONAL_USER_PROJECTS_PATH | no | "" | Defines the file location where additional projects can be given for specific endusers | +| ALLOWED_HOSTS | no | [] | Defines which IP-addresses are allowed to access metax, DJANGO_ENV=local overrides this | +| AUTH_SERVER_LOGOUT_URL | yes | | URL on the auth server where logout button on /secure page will finally redirect the user | +| DATACITE_ETSIN_URL_TEMPLATE | yes | | Landing page URL for the dataset for Datacite service. Must contain '%s' | +| DATACITE_PASSWORD | yes | | +| DATACITE_PREFIX | yes | | +| DATACITE_URL | yes | | +| DATACITE_USERNAME | yes | | +| DEBUG | no | False | +| DJANGO_ENV | no | local | Specifies the environment, corresponds with the environments found in src/metax_api/settings/environments/ | +| DJANGO_SECRET_KEY | yes | | +| ELASTIC_SEARCH_HOSTS | no | localhost | Elastic Search instance IPs | +| ELASTIC_SEARCH_PORT | no | 9200 | +| ELASTIC_SEARCH_USE_SSL | no | False | Should Elastic Search queries use https | +| ERROR_FILES_PATH | no | src/log/metax-api/errors | Error file folder | +| ES_CONFIG_DIR | no | src/metax_api/tasks/refdata/refdata_indexer/resources/es-config | metax-ops compatibility | +| LOCAL_REF_DATA_FOLDER | no | src/metax_api/tasks/refdata/refdata_indexer/resources/local-refdata | metax-ops compatibility | +| LOGGING_DEBUG_HANDLER_FILE | no | /var/log/metax-api/metax_api.log | metax-ops compatibility | +| LOGGING_GENERAL_HANDLER_FILE | no | /var/log/metax-api/metax_api.log | metax-ops compatibility | +| LOGGING_JSON_FILE_HANDLER_FILE | no | /var/log/metax-api/metax_api.json.log | metax-ops compatibility | +| METAX_DATABASE | yes | | Postgres database name, not required in docker stack configuration | +| METAX_DATABASE_HOST | no | localhost | Postgres database host | +| METAX_DATABASE_PASSWORD | yes | | Postgres database password, not required in docker stack configuration | +| METAX_DATABASE_PORT | no | 5432 | Postgres instance exposed port | +| METAX_DATABASE_USER | yes | | Postgres user which owns the database, not required in docker stack configuration | +| OAI_BASE_URL | no | https://metax.fd-dev.csc.fi/oai/ | Metax OAI server base url | +| OAI_BATCH_SIZE | no | 25 | Batch size of the oai response | +| OAI_REPOSITORY_NAME | no | Metax | Repository name of OAI server | +| OAI_ETSIN_URL_TEMPLATE | yes | | Landing page URL of the dataset. 
Must contain '%s' | +| OAI_ADMIN_EMAIL | yes | | +| ORG_FILE_PATH | no | src/metax_api/tasks/refdata/refdata_indexer/resources/organizations/organizations.csv | metax-ops compatibility | +| RABBIT_MQ_HOSTS | no | localhost | RabbitMQ instance IPs | +| RABBIT_MQ_PASSWORD | no | guest | +| RABBIT_MQ_PORT | no | 5672 | +| RABBIT_MQ_USER | no | guest | +| RABBIT_MQ_USE_VHOST | no | False | +| RABBIT_MQ_VHOST | no | | Required if RABBIT_MQ_USE_VHOST is True | +| REDIS_HOST | no | localhost | Redis instance IPs | +| REDIS_PASSWORD | no | | Required if REDIS_USE_PASSWORD is True | +| REDIS_PORT | no | 6379 | +| REDIS_TEST_DB | no | 15 | Pick a number, any number | +| REDIS_USE_PASSWORD | no | False | +| REMS_API_KEY | no | | Required if REMS is enabled | +| REMS_AUTO_APPROVER | no | | Required if REMS is enabled | +| REMS_BASE_URL | no | | Required if REMS is enabled | +| REMS_ENABLED | no | False | +| REMS_ETSIN_URL_TEMPLATE | no | | Landing page URL of the dataset. Required if REMS is enabled. Must contain '%s' | +| REMS_FORM_ID | no | | Required if REMS is enabled | +| REMS_METAX_USER | no | | Required if REMS is enabled | +| REMS_REPORTER_USER | no | | Required if REMS is enabled | +| SERVER_DOMAIN_NAME | no | metax.fd-dev.csc.fi | +| ENABLE_V1_ENDPOINTS | no | True | +| ENABLE_V2_ENDPOINTS | no | True | +| VALIDATE_TOKEN_URL | no | https://127.0.0.1/secure/validate_token | URL where bearer tokens get validated | +| WKT_FILENAME | no | src/metax_api/tasks/refdata/refdata_indexer/resources/uri_to_wkt.json | \ No newline at end of file diff --git a/README.md index cd781101..737e40ae 100755 --- a/README.md +++ b/README.md @@ -11,6 +11,24 @@ Licensed under [GNU GPLv2 License](LICENSE) You can also set up the development environment with [Docker-swarm setup](/docs/docker-stack.md) or with [standalone Docker-containers setup](/docs/single-docker-images.md). +### Python dependencies + +Install [Poetry](https://python-poetry.org/docs/) for your OS. Navigate to the repository root and run the command `poetry install`. This will create and activate a new Python virtualenv, installing all necessary Python packages into it. + +You can generate a traditional requirements.txt file with `poetry export --dev -E simplexquery --without-hashes -f requirements.txt --output requirements.txt` + +### Managing dependencies + +__NOTICE: Please remember to execute `poetry export --dev -E simplexquery --without-hashes -f requirements.txt --output requirements.txt` after any additions, updates or removals.__ + +Developer dependencies can be added with the command `poetry add -D <package>` +Application dependencies can be added with the command `poetry add <package>` + +Dependencies can be updated using `poetry update`. Please note that this will update all packages and their dependencies, respecting the dependency constraints defined in pyproject.toml. + +Dependencies can be removed with `poetry remove (-D) <package>` + +
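Putting the commands above together, a typical dependency change might look like the following sketch (the package names are purely illustrative):

```bash
# Add a runtime dependency and a development-only dependency (illustrative package names)
poetry add requests
poetry add -D ipdb

# Re-export requirements.txt so it stays in sync with pyproject.toml and poetry.lock
poetry export --dev -E simplexquery --without-hashes -f requirements.txt --output requirements.txt
```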
### Required environmental variables copy `src/metax_api/settings/.env.template` as `src/metax_api/settings/.env` and fill in the required variables; you can find examples in ENV_VARS.md diff --git a/config-swap-stack.yml b/config-swap-stack.yml index 590672c6..8b4ae404 100644 --- a/config-swap-stack.yml +++ b/config-swap-stack.yml @@ -1,43 +1,53 @@ version: "3.8" services: - metax: + metax-web: image: fairdata-docker.artifactory.ci.csc.fi/fairdata-metax-web + hostname: metax-web ports: - 8008:8008 - 8000:8000 - volumes: - - ./src:/code environment: DEBUG: 'true' - METAX_DATABASE: 'metax_db_test' - METAX_DATABASE_PASSWORD: 'YMDLekQMqrVKcs3' - METAX_DATABASE_USER: 'metax_test' - REDIS_HOST: metax-redis - RABBIT_MQ_HOSTS: metax-rabbitmq - ELASTIC_SEARCH_HOSTS: metax-elasticsearch + METAX_DATABASE: metax_db_test + METAX_DATABASE_PASSWORD: YMDLekQMqrVKcs3 + METAX_DATABASE_USER: metax_test METAX_DATABASE_HOST: metax-db + ELASTIC_SEARCH_HOSTS: metax-elasticsearch + RABBIT_MQ_HOSTS: metax-rabbitmq + REDIS_HOST: metax-redis + volumes: + - ./src:/code + stdin_open: True + tty: True metax-redis: image: redis + hostname: metax-redis volumes: - metax-redis:/data + metax-db: image: postgres:9 + hostname: metax-db environment: - POSTGRES_USER: 'metax_test' - POSTGRES_PASSWORD: 'YMDLekQMqrVKcs3' - POSTGRES_DB: 'metax_db_test' + POSTGRES_USER: metax_test + POSTGRES_PASSWORD: YMDLekQMqrVKcs3 + POSTGRES_DB: metax_db_test volumes: - metax-postgres:/var/lib/postgresql/data + metax-elasticsearch: image: elasticsearch:7.9.2 + hostname: metax-elasticsearch environment: - discovery.type: 'single-node' + discovery.type: single-node volumes: - metax-es:/usr/share/elasticsearch/data + metax-rabbitmq: image: rabbitmq:3-management + hostname: metax-rabbitmq volumes: - metax-rabbitmq:/var/lib/rabbitmq @@ -50,6 +60,3 @@ volumes: external: true metax-redis: external: true - - - diff --git a/containers/nginx-docker.yml b/containers/nginx-docker.yml deleted index 602fc888..00000000 --- a/containers/nginx-docker.yml +++ /dev/null @@ -1,42 +0,0 @@ -version: "3.8" - -services: - metax-nginx: - image: nginx:latest - configs: - - source: metax-nginx-config - target: '/etc/nginx/nginx.conf' - - source: fairdata-ssl-certificate - target: '/etc/nginx/ssl_certs/fd-dev.csc.fi.crt.pem' - - source: fairdata-ssl-certificate-key - target: '/etc/nginx/ssl_certs/fd-dev.csc.fi.key.pem' - - source: metax-nginx-elastic-headers-config - target: '/etc/nginx/elastic_headers.conf' - - source: metax-nginx-shared-headers-config - target: '/etc/nginx/shared_headers.conf' - - source: metax-nginx-api-response-headers-config - target: '/etc/nginx/api_response_headers.conf' - - source: metax-nginx-static-file-headers-config - target: '/etc/nginx/static_file_headers.conf' - - source: metax-nginx-dh-param-config - target: '/etc/nginx/ssl_certs/nginx_dhparam.pem' - ports: - - 443:443 - -configs: - fairdata-ssl-certificate: - external: True - fairdata-ssl-certificate-key: - external: True - metax-nginx-config: - external: True - metax-nginx-elastic-headers-config: - external: True - metax-nginx-shared-headers-config: - external: True - metax-nginx-api-response-headers-config: - external: True - metax-nginx-static-file-headers-config: - external: True - metax-nginx-dh-param-config: - external: True \ No newline at end of file diff --git 
a/deploy-key.enc b/deploy-key.enc deleted file mode 100755 index fe247fb1..00000000 Binary files a/deploy-key.enc and /dev/null differ diff --git a/docker-compose.yml b/docker-compose.yml index 76fc8108..0373b215 100755 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,62 +1,92 @@ version: "3.8" services: - metax: + metax-web: image: fairdata-docker.artifactory.ci.csc.fi/fairdata-metax-web + hostname: metax-web ports: - 8008:8008 - 8000:8000 - volumes: - - ./src:/code environment: DEBUG: 'true' - METAX_DATABASE: 'metax_db_test' - METAX_DATABASE_PASSWORD: 'YMDLekQMqrVKcs3' - METAX_DATABASE_USER: 'metax_test' - REDIS_HOST: metax-redis - RABBIT_MQ_HOSTS: metax-rabbitmq - ELASTIC_SEARCH_HOSTS: metax-elasticsearch + METAX_DATABASE: metax_db_test + METAX_DATABASE_PASSWORD: YMDLekQMqrVKcs3 + METAX_DATABASE_USER: metax_test METAX_DATABASE_HOST: metax-db + ELASTIC_SEARCH_HOSTS: metax-elasticsearch + RABBIT_MQ_HOSTS: metax-rabbitmq + REDIS_HOST: metax-redis + volumes: + - ./src:/code configs: - source: metax-web-config - target: '/code/metax_api/settings/.env' + target: /code/metax_api/settings/.env + stdin_open: True + tty: True metax-redis: image: redis + hostname: metax-redis volumes: - metax-redis:/data metax-db: image: postgres:9 + hostname: metax-db environment: - POSTGRES_USER: 'metax_test' - POSTGRES_PASSWORD: 'YMDLekQMqrVKcs3' - POSTGRES_DB: 'metax_db_test' + POSTGRES_USER: metax_test + POSTGRES_PASSWORD: YMDLekQMqrVKcs3 + POSTGRES_DB: metax_db_test volumes: - metax-postgres:/var/lib/postgresql/data metax-elasticsearch: image: elasticsearch:7.9.2 + hostname: metax-elasticsearch environment: - discovery.type: 'single-node' + discovery.type: single-node volumes: - metax-es:/usr/share/elasticsearch/data metax-rabbitmq: image: rabbitmq:3-management + hostname: metax-rabbitmq volumes: - metax-rabbitmq:/var/lib/rabbitmq + fairdata-nginx: + image: nginx:latest + configs: + - source: fairdata-nginx-config + target: /etc/nginx/nginx.conf + - source: fairdata-ssl-certificate + target: /etc/pki/tls/certs/ssl.crt.pem + - source: fairdata-ssl-certificate-key + target: /etc/pki/tls/private/ssl.key.pem + - source: metax-nginx-config + target: /etc/nginx/sites-enabled/metax + ports: + - 443:443 + volumes: metax-rabbitmq: - external: true + external: True metax-es: - external: true + external: True metax-postgres: - external: true + external: True metax-redis: - external: true + external: True + configs: metax-web-config: external: True + fairdata-nginx-config: + external: True + fairdata-ssl-certificate: + external: True + fairdata-ssl-certificate-key: + external: True + metax-nginx-config: + external: True diff --git a/docs/docker-stack.md b/docs/docker-stack.md index 889bc898..61b6bae5 100644 --- a/docs/docker-stack.md +++ b/docs/docker-stack.md @@ -6,17 +6,17 @@ After installing [Docker prerequisites](docker-prerequisites.md), build the meta `docker build -t fairdata-docker.artifactory.ci.csc.fi/fairdata-metax-web .` -## Building httpd-image + ## Pushing metax-image to Artifactory `docker push fairdata-docker.artifactory.ci.csc.fi/fairdata-metax-web` -## Pushing httpd-image to Artifactory + ## Running the stack locally @@ -25,29 +25,22 @@ In the repository root, run `docker stack deploy -c docker-compose.yml --resolve-image=always --with-registry-auth metax-dev` +This stack contains the common Fairdata nginx proxy. 
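Once the stack has been deployed, one way to check that the services came up (a sketch, assuming the stack name `metax-dev` used above and the service name `metax-dev_metax-web` from the compose file):

```bash
# List the services in the stack and their replica counts
docker stack services metax-dev

# Follow the logs of the web service while it starts up
docker service logs -f metax-dev_metax-web
```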
+ ## Running the stack without predefined docker-configs `docker stack deploy -c config-swap-stack.yml --resolve-image=always --with-registry-auth metax-dev` -## Adding nginx to the stack - -`docker stack deploy -c docker-compose.yml -c containers/nginx-docker.yml --resolve-image=always --with-registry-auth metax-dev` - -## Running all services - -`docker stack deploy --resolve-image=always --with-registry-auth -c docker-compose.yml -c containers/nginx-docker.yml -c containers/apache-docker.yml metax-dev` - ## Running Metax management commands -To run Metax management commands, locate the running metax-dev_metax container and open terminal inside it with: +To run Metax management commands, locate the running metax-dev_metax-web container and open terminal inside it with: `docker exec -it <container_id> bash` ## Adding docker-config to the stack -`docker service update --config-add source=metax-web-stable-config,target=/code/metax_api/settings/.env metax-dev_metax` +`docker service update --config-add source=metax-web-stable-config,target=/code/metax_api/settings/.env metax-dev_metax-web` ## Swapping docker-config in the stack -`docker service update --config-rm <old-config-name> --config-add source=<new-config-name>,target=/code/metax_api/settings/.env metax-dev_metax` - +`docker service update --config-rm <old-config-name> --config-add source=<new-config-name>,target=/code/metax_api/settings/.env metax-dev_metax-web` diff --git a/docs/manage-commands.md index 39ed3160..f772cc0a 100644 --- a/docs/manage-commands.md +++ b/docs/manage-commands.md @@ -30,4 +30,4 @@ ## Execute management commands against docker swarm metax-api container -`docker exec $(docker ps -q -f name=metax-dev_metax) python manage.py check` +`docker exec $(docker ps -q -f name=metax-dev_metax-web) python manage.py check` diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..e0c09740 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1663 @@ +[[package]] +name = "alabaster" +version = "0.7.12" +description = "A configurable sidebar-enabled Sphinx theme" +category = "main" +optional = true +python-versions = "*" + +[[package]] +name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "appnope" +version = "0.1.2" +description = "Disable App Nap on macOS >= 10.9" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "asgiref" +version = "3.3.4" +description = "ASGI specs, helper code, and adapters" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"] + +[[package]] +name = "asttokens" +version = "2.0.5" +description = "Annotate AST trees with source code positions" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + +[package.extras] +test = ["astroid", "pytest"] + +[[package]] +name = "attrs" +version = "20.3.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] + +[[package]] +name = "autosemver" +version = "0.5.5" +description = "Tools to handle automatic semantic versioning in python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +dulwich = ">=0.19.6,<0.20" + +[[package]] +name = "babel" +version = "2.9.1" +description = "Internationalization utilities" +category = "main" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +pytz = ">=2015.7" + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "black" +version = "20.8b1" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +appdirs = "*" +click = ">=7.1.2" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.6,<1" +regex = ">=2020.1.8" +toml = ">=0.10.1" +typed-ast = ">=1.4.0" +typing-extensions = ">=3.7.4" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] + +[[package]] +name = "certifi" +version = "2020.12.5" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "chardet" +version = "4.0.0" +description = "Universal encoding detector for Python 2 and 3" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "click" +version = "7.1.2" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "datacite" +version = "1.1.1" +description = "Python API wrapper for the DataCite Metadata Store API." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +idutils = ">=1.0.0" +jsonschema = ">=3.0.0" +lxml = ">=4.5.0" +requests = ">=2.5.0" + +[package.extras] +all = ["Sphinx (>=3)", "responses (>=0.10.6)", "mock (>=1.3.0)", "pytest-invenio (>=1.4.0)"] +docs = ["Sphinx (>=3)"] +tests = ["responses (>=0.10.6)", "mock (>=1.3.0)", "pytest-invenio (>=1.4.0)"] + +[[package]] +name = "decorator" +version = "5.0.7" +description = "Decorators for Humans" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "django" +version = "3.1.8" +description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +asgiref = ">=3.2.10,<4" +pytz = "*" +sqlparse = ">=0.2.2" + +[package.extras] +argon2 = ["argon2-cffi (>=16.1.0)"] +bcrypt = ["bcrypt"] + +[[package]] +name = "django-debug-toolbar" +version = "3.2.1" +description = "A configurable set of panels that display various debug information about the current request/response." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Django = ">=2.2" +sqlparse = ">=0.2.0" + +[[package]] +name = "django-environ" +version = "0.4.5" +description = "Django-environ allows you to utilize 12factor inspired environment variables to configure your Django application." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "django-rainbowtests" +version = "0.6.0" +description = "A colorful Django Test Runner." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +django = "*" + +[[package]] +name = "django-split-settings" +version = "1.0.1" +description = "Organize Django settings into multiple files and directories. Easily override and modify settings. Use wildcards and optional settings files." +category = "main" +optional = false +python-versions = ">=3.6,<4.0" + +[[package]] +name = "django-watchman" +version = "1.2.0" +description = "django-watchman exposes a status endpoint for your backing services" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +django = ">=2.0" + +[[package]] +name = "djangorestframework" +version = "3.12.4" +description = "Web APIs for Django, made easy." 
+category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +django = ">=2.2" + +[[package]] +name = "docutils" +version = "0.16" +description = "Docutils -- Python Documentation Utilities" +category = "main" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "dulwich" +version = "0.19.16" +description = "Python Git Library" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +certifi = "*" +urllib3 = ">=1.24.1" + +[package.extras] +fastimport = ["fastimport"] +pgp = ["gpg"] +https = ["urllib3[secure] (>=1.24.1)"] + +[[package]] +name = "elasticsearch" +version = "7.12.1" +description = "Python client for Elasticsearch" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" + +[package.dependencies] +certifi = "*" +urllib3 = ">=1.21.1,<2" + +[package.extras] +async = ["aiohttp (>=3,<4)"] +develop = ["requests (>=2.0.0,<3.0.0)", "coverage", "mock", "pyyaml", "pytest", "pytest-cov", "sphinx (<1.7)", "sphinx-rtd-theme", "black", "jinja2"] +docs = ["sphinx (<1.7)", "sphinx-rtd-theme"] +requests = ["requests (>=2.4.0,<3.0.0)"] + +[[package]] +name = "executing" +version = "0.6.0" +description = "Get the currently executing AST node of a frame, and other information" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "gunicorn" +version = "20.1.0" +description = "WSGI HTTP Server for UNIX" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +eventlet = ["eventlet (>=0.24.1)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +tornado = ["tornado (>=0.2)"] + +[[package]] +name = "icecream" +version = "2.1.0" +description = "Never use print() to debug again; inspect variables, expressions, and program execution with a single, simple function call." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +asttokens = ">=2.0.1" +colorama = ">=0.3.9" +executing = ">=0.3.1" +pygments = ">=2.2.0" + +[[package]] +name = "idna" +version = "2.10" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "idutils" +version = "1.1.8" +description = "Small library for persistent identifiers used in scholarly communication." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +isbnid-fork = ">=0.4.4" +six = ">=1.10" + +[package.extras] +all = ["Sphinx (>=1.4.2)", "check-manifest (>=0.25)", "coverage (>=4.0)", "isort (>=4.2.2)", "pydocstyle (>=1.0)", "pytest-cache (>=1.0)", "pytest-cov (>=1.8.0)", "pytest-pep8 (>=1.0.6)", "pytest-runner (>=2.6.2)", "pytest (>=3.6.0)"] +docs = ["Sphinx (>=1.4.2)"] +tests = ["check-manifest (>=0.25)", "coverage (>=4.0)", "isort (>=4.2.2)", "pydocstyle (>=1.0)", "pytest-cache (>=1.0)", "pytest-cov (>=1.8.0)", "pytest-pep8 (>=1.0.6)", "pytest-runner (>=2.6.2)", "pytest (>=3.6.0)"] + +[[package]] +name = "imagesize" +version = "1.2.0" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "main" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "importlib-metadata" +version = "4.0.1" +description = "Read metadata from Python packages" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] + +[[package]] +name = "ipdb" +version = "0.13.7" +description = "IPython-enabled pdb" +category = "dev" +optional = false +python-versions = ">=2.7" + +[package.dependencies] +ipython = {version = ">=7.17.0", markers = "python_version > \"3.6\""} +toml = {version = ">=0.10.2", markers = "python_version > \"3.6\""} + +[[package]] +name = "ipython" +version = "7.22.0" +description = "IPython: Productive Interactive Computing" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +pygments = "*" +traitlets = ">=4.2" + +[package.extras] +all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", "ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.16)", "pygments", "qtconsole", "requests", "testpath"] +doc = ["Sphinx (>=1.3)"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["notebook", "ipywidgets"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.16)"] + +[[package]] +name = "ipython-genutils" +version = "0.2.0" +description = "Vestigial utilities from IPython" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "isbnid-fork" +version = "0.5.2" +description = "Python ISBN ids" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +autosemver = ">=0.2,<1.0" + +[package.extras] +tests = ["pytest-pep8 (>=1.0.6)", "pytest (>=3.0.4)"] + +[[package]] +name = "isodate" +version = "0.6.0" +description = "An ISO 8601 date/time/duration parser and formatter" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = 
"*" + +[[package]] +name = "isort" +version = "5.8.0" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + +[package.extras] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +requirements_deprecated_finder = ["pipreqs", "pip-api"] +colors = ["colorama (>=0.4.3,<0.5.0)"] + +[[package]] +name = "jedi" +version = "0.18.0" +description = "An autocompletion tool for Python that can be used for text editors." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +parso = ">=0.8.0,<0.9.0" + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<6.0.0)"] + +[[package]] +name = "jinja2" +version = "2.11.3" +description = "A very fast and expressive template engine." +category = "main" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +MarkupSafe = ">=0.23" + +[package.extras] +i18n = ["Babel (>=0.8)"] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +attrs = ">=17.4.0" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +pyrsistent = ">=0.14.0" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] + +[[package]] +name = "livereload" +version = "2.6.3" +description = "Python LiveReload is an awesome tool for web developers" +category = "main" +optional = true +python-versions = "*" + +[package.dependencies] +six = "*" +tornado = {version = "*", markers = "python_version > \"2.7\""} + +[[package]] +name = "lxml" +version = "4.6.3" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["beautifulsoup4"] +source = ["Cython (>=0.29.7)"] + +[[package]] +name = "markupsafe" +version = "1.1.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "main" +optional = true +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "packaging" +version = "20.9" +description = "Core utilities for Python packages" +category = "main" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +pyparsing = ">=2.0.2" + +[[package]] +name = "parso" +version = "0.8.2" +description = "A Python Parser" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "pathspec" +version = "0.8.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pika" +version = "1.2.0" +description = "Pika Python AMQP Client Library" +category = "main" +optional = false +python-versions = "*" + +[package.extras] +gevent = ["gevent"] +tornado = ["tornado"] +twisted = ["twisted"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.18" +description = "Library for building powerful interactive command lines in Python" +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psycopg2-binary" +version = "2.8.6" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pygments" +version = "2.8.1" +description = "Pygments is a syntax highlighting package written in Python." +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "pyjwt" +version = "2.1.0" +description = "JSON Web Token implementation in Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +crypto = ["cryptography (>=3.3.1,<4.0.0)"] +dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1,<4.0.0)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] + +[[package]] +name = "pyoai" +version = "2.5.0" +description = "The oaipmh module is a Python implementation of an \"Open Archives\nInitiative Protocol for Metadata Harvesting\" (version 2) client and server.\nThe protocol is described here:\n\nhttp://www.openarchives.org/OAI/openarchivesprotocol.html" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +lxml = "*" +six = "*" + +[[package]] +name = "pyparsing" +version = "2.4.7" +description = "Python parsing module" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "pyrsistent" +version = "0.17.3" +description = "Persistent/Functional/Immutable data structures" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "python-box" +version = "5.3.0" +description = "Advanced Python dictionaries with dot notation access" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +pyyaml = ["pyyaml"] +all = ["ruamel.yaml", "toml", "msgpack"] +msgpack = ["msgpack"] +"ruamel.yaml" = ["ruamel.yaml"] +toml = ["toml"] + +[[package]] +name = "python-dateutil" +version = "2.8.1" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = 
"python-simplexquery" +version = "1.0.5.3" +description = "A simple native XQuery processing module using xqilla." +category = "main" +optional = true +python-versions = "*" + +[[package]] +name = "pytz" +version = "2021.1" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "rdflib" +version = "5.0.0" +description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +isodate = "*" +pyparsing = "*" +six = "*" + +[package.extras] +docs = ["sphinx (<3)", "sphinxcontrib-apidoc"] +html = ["html5lib"] +sparql = ["requests"] +tests = ["html5lib", "networkx", "nose", "doctest-ignore-unicode"] + +[[package]] +name = "redis" +version = "3.5.3" +description = "Python client for Redis key-value store" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +hiredis = ["hiredis (>=0.1.3)"] + +[[package]] +name = "regex" +version = "2021.4.4" +description = "Alternative regular expression module, to replace re." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "requests" +version = "2.25.1" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +certifi = ">=2017.4.17" +chardet = ">=3.0.2,<5" +idna = ">=2.5,<3" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] + +[[package]] +name = "responses" +version = "0.13.3" +description = "A utility library for mocking out the `requests` Python library." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +requests = ">=2.0" +six = "*" +urllib3 = ">=1.25.10" + +[package.extras] +tests = ["coverage (>=3.7.1,<6.0.0)", "pytest-cov", "pytest-localserver", "flake8", "pytest (>=4.6,<5.0)", "pytest (>=4.6)", "mypy"] + +[[package]] +name = "six" +version = "1.15.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "snowballstemmer" +version = "2.1.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+category = "main" +optional = true +python-versions = "*" + +[[package]] +name = "sphinx" +version = "3.5.4" +description = "Python documentation generator" +category = "main" +optional = true +python-versions = ">=3.5" + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=1.3" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.12,<0.17" +imagesize = "*" +Jinja2 = ">=2.3" +packaging = "*" +Pygments = ">=2.0" +requests = ">=2.5.0" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = "*" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = "*" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.800)", "docutils-stubs"] +test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] + +[[package]] +name = "sphinx-autobuild" +version = "2021.3.14" +description = "Rebuild Sphinx documentation on changes, with live-reload in the browser." +category = "main" +optional = true +python-versions = ">=3.6" + +[package.dependencies] +colorama = "*" +livereload = "*" +sphinx = "*" + +[package.extras] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "sphinx-rtd-theme" +version = "0.5.2" +description = "Read the Docs theme for Sphinx" +category = "main" +optional = true +python-versions = "*" + +[package.dependencies] +docutils = "<0.17" +sphinx = "*" + +[package.extras] +dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "main" +optional = true +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +category = "main" +optional = true +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "1.0.3" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "main" +optional = true +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest", "html5lib"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "main" +optional = true +python-versions = ">=3.5" + +[package.extras] +test = ["pytest", "flake8", "mypy"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +category = "main" +optional = true +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.4" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +category = "main" +optional = true +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sqlparse" +version = "0.4.1" +description = "A non-validating SQL parser." 
+category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "structlog" +version = "21.1.0" +description = "Structured Logging for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +dev = ["coverage", "freezegun (>=0.2.8)", "pretend", "pytest-asyncio", "pytest-randomly", "pytest (>=6.0)", "simplejson", "furo", "sphinx", "sphinx-toolbox", "twisted", "pre-commit"] +docs = ["furo", "sphinx", "sphinx-toolbox", "twisted"] +tests = ["coverage", "freezegun (>=0.2.8)", "pretend", "pytest-asyncio", "pytest-randomly", "pytest (>=6.0)", "simplejson"] + +[[package]] +name = "tblib" +version = "1.7.0" +description = "Traceback serialization library." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "tornado" +version = "6.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +category = "main" +optional = true +python-versions = ">= 3.5" + +[[package]] +name = "traitlets" +version = "5.0.5" +description = "Traitlets Python configuration system" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +ipython-genutils = "*" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "typed-ast" +version = "1.4.3" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "typing-extensions" +version = "3.7.4.3" +description = "Backported and Experimental Type Hints for Python 3.5+" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "urllib3" +version = "1.26.4" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotlipy (>=0.6.0)"] + +[[package]] +name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "xmltodict" +version = "0.12.0" +description = "Makes working with XML feel like you are working with JSON" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "zipp" +version = "3.4.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[extras] +docs = ["Sphinx", "sphinx-autobuild", "sphinx-rtd-theme"] +simplexquery = ["python-simplexquery"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.7" +content-hash = "428eeb62e466c2d516b96fb115be858ef72eaa17d932fcf714f8ba7da863ffaf" + +[metadata.files] +alabaster = [ + {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, + {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, +] +appdirs = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] +appnope = [ + {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, + {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, +] +asgiref = [ + {file = "asgiref-3.3.4-py3-none-any.whl", hash = "sha256:92906c611ce6c967347bbfea733f13d6313901d54dcca88195eaeb52b2a8e8ee"}, + {file = "asgiref-3.3.4.tar.gz", hash = "sha256:d1216dfbdfb63826470995d31caed36225dcaf34f182e0fa257a4dd9e86f1b78"}, +] +asttokens = [ + {file = "asttokens-2.0.5-py2.py3-none-any.whl", hash = "sha256:0844691e88552595a6f4a4281a9f7f79b8dd45ca4ccea82e5e05b4bbdb76705c"}, + {file = "asttokens-2.0.5.tar.gz", hash = "sha256:9a54c114f02c7a9480d56550932546a3f1fe71d8a02f1bc7ccd0ee3ee35cf4d5"}, +] +attrs = [ + {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, + {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, +] +autosemver = [ + {file = "autosemver-0.5.5.tar.gz", hash = "sha256:0af1e8a9c3604545c067311f1c26403e8f0d60b5d9561c0217e14eee21c98b02"}, +] +babel = [ + {file = "Babel-2.9.1-py2.py3-none-any.whl", hash = "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9"}, + {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"}, +] +backcall = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = 
"sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] +black = [ + {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, +] +certifi = [ + {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, + {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, +] +chardet = [ + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, +] +click = [ + {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, + {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +datacite = [ + {file = "datacite-1.1.1-py2.py3-none-any.whl", hash = "sha256:842b52b47380b658c728f4596dcab4fdaf524ebcfb895a337be267b4a2e9b1bf"}, + {file = "datacite-1.1.1.tar.gz", hash = "sha256:4e3d3153d849f0a5f331ba585bf4d2f6fcc87f63738bbf9621f19f3727ae3e4d"}, +] +decorator = [ + {file = "decorator-5.0.7-py3-none-any.whl", hash = "sha256:945d84890bb20cc4a2f4a31fc4311c0c473af65ea318617f13a7257c9a58bc98"}, + {file = "decorator-5.0.7.tar.gz", hash = "sha256:6f201a6c4dac3d187352661f508b9364ec8091217442c9478f1f83c003a0f060"}, +] +django = [ + {file = "Django-3.1.8-py3-none-any.whl", hash = "sha256:c348b3ddc452bf4b62361f0752f71a339140c777ebea3cdaaaa8fdb7f417a862"}, + {file = "Django-3.1.8.tar.gz", hash = "sha256:f8393103e15ec2d2d313ccbb95a3f1da092f9f58d74ac1c61ca2ac0436ae1eac"}, +] +django-debug-toolbar = [ + {file = "django-debug-toolbar-3.2.1.tar.gz", hash = "sha256:a5ff2a54f24bf88286f9872836081078f4baa843dc3735ee88524e89f8821e33"}, + {file = "django_debug_toolbar-3.2.1-py3-none-any.whl", hash = "sha256:e759e63e3fe2d3110e0e519639c166816368701eab4a47fed75d7de7018467b9"}, +] +django-environ = [ + {file = "django-environ-0.4.5.tar.gz", hash = "sha256:6c9d87660142608f63ec7d5ce5564c49b603ea8ff25da595fd6098f6dc82afde"}, + {file = "django_environ-0.4.5-py2.py3-none-any.whl", hash = "sha256:c57b3c11ec1f319d9474e3e5a79134f40174b17c7cc024bbb2fad84646b120c4"}, +] +django-rainbowtests = [ + {file = "django-rainbowtests-0.6.0.tar.gz", hash = "sha256:0700ee1386935822dca296d323d67b0563cb2e5012b553ebca7c9391f2298cd9"}, +] +django-split-settings = [ + {file = "django-split-settings-1.0.1.tar.gz", hash = "sha256:2da16cd967cd38315ec7ff0ae0c9db8488f8528bb2e5de26cd898328dc4bbeac"}, + {file = "django_split_settings-1.0.1-py3-none-any.whl", hash = "sha256:8d636649023289d0ef0ba08b0a4f37761adc94a29ee0ebfe65922c3cb0594ede"}, +] +django-watchman = [ + {file = "django-watchman-1.2.0.tar.gz", hash = "sha256:c38830c58984b8eb29db30a3e332968d1c7e235dee3f5c0a907d8d79a37a3125"}, + {file = "django_watchman-1.2.0-py2.py3-none-any.whl", hash = "sha256:6c0b8889456ed644fcfe2f7c3294cb4ef8568a85772b7f95842e2d8c9b4bbff5"}, +] +djangorestframework = [ + {file = 
"djangorestframework-3.12.4-py3-none-any.whl", hash = "sha256:6d1d59f623a5ad0509fe0d6bfe93cbdfe17b8116ebc8eda86d45f6e16e819aaf"}, + {file = "djangorestframework-3.12.4.tar.gz", hash = "sha256:f747949a8ddac876e879190df194b925c177cdeb725a099db1460872f7c0a7f2"}, +] +docutils = [ + {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"}, + {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"}, +] +dulwich = [ + {file = "dulwich-0.19.16-cp27-cp27m-win32.whl", hash = "sha256:267160904e9a1cb6c248c5efc53597a35d038ecc6f60bdc4546b3053bed11982"}, + {file = "dulwich-0.19.16-cp27-cp27m-win_amd64.whl", hash = "sha256:4e3aba5e4844e7c700721c1fc696987ea820ee3528a03604dc4e74eff4196826"}, + {file = "dulwich-0.19.16-cp35-cp35m-win_amd64.whl", hash = "sha256:60bb2c2c92f5025c1b53a556304008f0f624c98ae36f22d870e056b2d4236c11"}, + {file = "dulwich-0.19.16-cp36-cp36m-win_amd64.whl", hash = "sha256:f00d132082b8fcc2eb0d722abc773d4aeb5558c1475d7edd1f0f571146c29db9"}, + {file = "dulwich-0.19.16-py2-none-any.whl", hash = "sha256:10699277c6268d0c16febe141a5b1c1a6e9744f3144c2d2de1706f4b1adafe63"}, + {file = "dulwich-0.19.16-py3-none-any.whl", hash = "sha256:dddae02d372fc3b5cfb0046d0f62246ef281fa0c088df7601ab5916607add94b"}, + {file = "dulwich-0.19.16.tar.gz", hash = "sha256:f74561c448bfb6f04c07de731c1181ae4280017f759b0bb04fa5770aa84ca850"}, +] +elasticsearch = [ + {file = "elasticsearch-7.12.1-py2.py3-none-any.whl", hash = "sha256:1840fea8c305224b8c28acabc8697f739cdfb03618f2d2427b42838971a787f6"}, + {file = "elasticsearch-7.12.1.tar.gz", hash = "sha256:df35d8c638f946f098a74681b18611bdf27ba469fa2063e3dfc8bdc290b11419"}, +] +executing = [ + {file = "executing-0.6.0-py2.py3-none-any.whl", hash = "sha256:a2f10f802b4312b92bd256279b43720271b0d9b540a0dbab7be4c28fbc536479"}, + {file = "executing-0.6.0.tar.gz", hash = "sha256:a07046e608c56948a899e1c7dc45327ed84ee67edf245041eb8c6722658c14e3"}, +] +gunicorn = [ + {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, +] +icecream = [ + {file = "icecream-2.1.0-py2.py3-none-any.whl", hash = "sha256:4c441862751e9a8b52c1a5b551056a3430361230b8c845a7953f8b6e400a4f27"}, + {file = "icecream-2.1.0.tar.gz", hash = "sha256:c2e7b74c1c12caa2cfde050f2e636493ee77a9fb4a494b5593418ab359924a24"}, +] +idna = [ + {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, + {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, +] +idutils = [ + {file = "IDUtils-1.1.8-py2.py3-none-any.whl", hash = "sha256:6a032009da9dad262c2d1ad4978100b5937749e27862845a7bf49c38a39e3fff"}, + {file = "IDUtils-1.1.8.tar.gz", hash = "sha256:21497bf279b64aadce923a11b1ed9b601c0bf01eb82c3f952877eef026586d78"}, +] +imagesize = [ + {file = "imagesize-1.2.0-py2.py3-none-any.whl", hash = "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1"}, + {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, +] +importlib-metadata = [ + {file = "importlib_metadata-4.0.1-py3-none-any.whl", hash = "sha256:d7eb1dea6d6a6086f8be21784cc9e3bcfa55872b52309bc5fad53a8ea444465d"}, + {file = "importlib_metadata-4.0.1.tar.gz", hash = "sha256:8c501196e49fb9df5df43833bdb1e4328f64847763ec8a50703148b73784d581"}, +] +ipdb = [ + {file = "ipdb-0.13.7.tar.gz", hash = 
"sha256:178c367a61c1039e44e17c56fcc4a6e7dc11b33561261382d419b6ddb4401810"}, +] +ipython = [ + {file = "ipython-7.22.0-py3-none-any.whl", hash = "sha256:c0ce02dfaa5f854809ab7413c601c4543846d9da81010258ecdab299b542d199"}, + {file = "ipython-7.22.0.tar.gz", hash = "sha256:9c900332d4c5a6de534b4befeeb7de44ad0cc42e8327fa41b7685abde58cec74"}, +] +ipython-genutils = [ + {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, + {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, +] +isbnid-fork = [ + {file = "isbnid_fork-0.5.2.tar.gz", hash = "sha256:8d878866aa0e7f06e700a37fce586c7398ce4837da8bca39683db7028a9c3837"}, +] +isodate = [ + {file = "isodate-0.6.0-py2.py3-none-any.whl", hash = "sha256:aa4d33c06640f5352aca96e4b81afd8ab3b47337cc12089822d6f322ac772c81"}, + {file = "isodate-0.6.0.tar.gz", hash = "sha256:2e364a3d5759479cdb2d37cce6b9376ea504db2ff90252a2e5b7cc89cc9ff2d8"}, +] +isort = [ + {file = "isort-5.8.0-py3-none-any.whl", hash = "sha256:2bb1680aad211e3c9944dbce1d4ba09a989f04e238296c87fe2139faa26d655d"}, + {file = "isort-5.8.0.tar.gz", hash = "sha256:0a943902919f65c5684ac4e0154b1ad4fac6dcaa5d9f3426b732f1c8b5419be6"}, +] +jedi = [ + {file = "jedi-0.18.0-py2.py3-none-any.whl", hash = "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93"}, + {file = "jedi-0.18.0.tar.gz", hash = "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707"}, +] +jinja2 = [ + {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, + {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, +] +jsonschema = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] +livereload = [ + {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, +] +lxml = [ + {file = "lxml-4.6.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2"}, + {file = "lxml-4.6.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f"}, + {file = "lxml-4.6.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d"}, + {file = "lxml-4.6.3-cp27-cp27m-win32.whl", hash = "sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106"}, + {file = "lxml-4.6.3-cp27-cp27m-win_amd64.whl", hash = "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee"}, + {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f"}, + {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4"}, + {file = "lxml-4.6.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51"}, + {file = "lxml-4.6.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586"}, + {file = "lxml-4.6.3-cp35-cp35m-win32.whl", hash = 
"sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2"}, + {file = "lxml-4.6.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4"}, + {file = "lxml-4.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4"}, + {file = "lxml-4.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3"}, + {file = "lxml-4.6.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d"}, + {file = "lxml-4.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:820628b7b3135403540202e60551e741f9b6d3304371712521be939470b454ec"}, + {file = "lxml-4.6.3-cp36-cp36m-win32.whl", hash = "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04"}, + {file = "lxml-4.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:92e821e43ad382332eade6812e298dc9701c75fe289f2a2d39c7960b43d1e92a"}, + {file = "lxml-4.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654"}, + {file = "lxml-4.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0"}, + {file = "lxml-4.6.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3"}, + {file = "lxml-4.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2"}, + {file = "lxml-4.6.3-cp37-cp37m-win32.whl", hash = "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade"}, + {file = "lxml-4.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b"}, + {file = "lxml-4.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa"}, + {file = "lxml-4.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a"}, + {file = "lxml-4.6.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927"}, + {file = "lxml-4.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791"}, + {file = "lxml-4.6.3-cp38-cp38-win32.whl", hash = "sha256:89b8b22a5ff72d89d48d0e62abb14340d9e99fd637d046c27b8b257a01ffbe28"}, + {file = "lxml-4.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7"}, + {file = "lxml-4.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0"}, + {file = "lxml-4.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1"}, + {file = "lxml-4.6.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f8380c03e45cf09f8557bdaa41e1fa7c81f3ae22828e1db470ab2a6c96d8bc23"}, + {file = "lxml-4.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:884ab9b29feaca361f7f88d811b1eea9bfca36cf3da27768d28ad45c3ee6f969"}, + {file = "lxml-4.6.3-cp39-cp39-win32.whl", hash = "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f"}, + {file = "lxml-4.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83"}, + {file = "lxml-4.6.3.tar.gz", hash = 
"sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468"}, +] +markupsafe = [ + {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-win32.whl", hash = "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b"}, + {file = "MarkupSafe-1.1.1-cp27-cp27m-win_amd64.whl", hash = "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e"}, + {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f"}, + {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-win32.whl", hash = "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21"}, + {file = "MarkupSafe-1.1.1-cp34-cp34m-win_amd64.whl", hash = "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"}, + {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"}, + {file = 
"MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"}, + {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +packaging = [ + {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, + {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, +] +parso = [ + {file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"}, + {file = "parso-0.8.2.tar.gz", hash = "sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"}, +] +pathspec = [ + {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, + {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, +] +pexpect = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] +pickleshare = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] +pika = [ + {file = "pika-1.2.0-py2.py3-none-any.whl", hash = "sha256:59da6701da1aeaf7e5e93bb521cc03129867f6e54b7dd352c4b3ecb2bd7ec624"}, + {file = "pika-1.2.0.tar.gz", hash = "sha256:f023d6ac581086b124190cb3dc81dd581a149d216fa4540ac34f9be1e3970b89"}, +] +prompt-toolkit = [ + {file = "prompt_toolkit-3.0.18-py3-none-any.whl", hash = "sha256:bf00f22079f5fadc949f42ae8ff7f05702826a97059ffcc6281036ad40ac6f04"}, + {file = "prompt_toolkit-3.0.18.tar.gz", hash = "sha256:e1b4f11b9336a28fa11810bc623c357420f69dfdb6d2dac41ca2c21a55c033bc"}, +] +psycopg2-binary = [ + {file = "psycopg2-binary-2.8.6.tar.gz", hash = "sha256:11b9c0ebce097180129e422379b824ae21c8f2a6596b159c7659e2e5a00e1aa0"}, + {file = 
"psycopg2_binary-2.8.6-cp27-cp27m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:d14b140a4439d816e3b1229a4a525df917d6ea22a0771a2a78332273fd9528a4"}, + {file = "psycopg2_binary-2.8.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1fabed9ea2acc4efe4671b92c669a213db744d2af8a9fc5d69a8e9bc14b7a9db"}, + {file = "psycopg2_binary-2.8.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f5ab93a2cb2d8338b1674be43b442a7f544a0971da062a5da774ed40587f18f5"}, + {file = "psycopg2_binary-2.8.6-cp27-cp27m-win32.whl", hash = "sha256:b4afc542c0ac0db720cf516dd20c0846f71c248d2b3d21013aa0d4ef9c71ca25"}, + {file = "psycopg2_binary-2.8.6-cp27-cp27m-win_amd64.whl", hash = "sha256:e74a55f6bad0e7d3968399deb50f61f4db1926acf4a6d83beaaa7df986f48b1c"}, + {file = "psycopg2_binary-2.8.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:0deac2af1a587ae12836aa07970f5cb91964f05a7c6cdb69d8425ff4c15d4e2c"}, + {file = "psycopg2_binary-2.8.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ad20d2eb875aaa1ea6d0f2916949f5c08a19c74d05b16ce6ebf6d24f2c9f75d1"}, + {file = "psycopg2_binary-2.8.6-cp34-cp34m-win32.whl", hash = "sha256:950bc22bb56ee6ff142a2cb9ee980b571dd0912b0334aa3fe0fe3788d860bea2"}, + {file = "psycopg2_binary-2.8.6-cp34-cp34m-win_amd64.whl", hash = "sha256:b8a3715b3c4e604bcc94c90a825cd7f5635417453b253499664f784fc4da0152"}, + {file = "psycopg2_binary-2.8.6-cp35-cp35m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:d1b4ab59e02d9008efe10ceabd0b31e79519da6fb67f7d8e8977118832d0f449"}, + {file = "psycopg2_binary-2.8.6-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:ac0c682111fbf404525dfc0f18a8b5f11be52657d4f96e9fcb75daf4f3984859"}, + {file = "psycopg2_binary-2.8.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7d92a09b788cbb1aec325af5fcba9fed7203897bbd9269d5691bb1e3bce29550"}, + {file = "psycopg2_binary-2.8.6-cp35-cp35m-win32.whl", hash = "sha256:aaa4213c862f0ef00022751161df35804127b78adf4a2755b9f991a507e425fd"}, + {file = "psycopg2_binary-2.8.6-cp35-cp35m-win_amd64.whl", hash = "sha256:c2507d796fca339c8fb03216364cca68d87e037c1f774977c8fc377627d01c71"}, + {file = "psycopg2_binary-2.8.6-cp36-cp36m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:ee69dad2c7155756ad114c02db06002f4cded41132cc51378e57aad79cc8e4f4"}, + {file = "psycopg2_binary-2.8.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e82aba2188b9ba309fd8e271702bd0d0fc9148ae3150532bbb474f4590039ffb"}, + {file = "psycopg2_binary-2.8.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d5227b229005a696cc67676e24c214740efd90b148de5733419ac9aaba3773da"}, + {file = "psycopg2_binary-2.8.6-cp36-cp36m-win32.whl", hash = "sha256:a0eb43a07386c3f1f1ebb4dc7aafb13f67188eab896e7397aa1ee95a9c884eb2"}, + {file = "psycopg2_binary-2.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:e1f57aa70d3f7cc6947fd88636a481638263ba04a742b4a37dd25c373e41491a"}, + {file = "psycopg2_binary-2.8.6-cp37-cp37m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:833709a5c66ca52f1d21d41865a637223b368c0ee76ea54ca5bad6f2526c7679"}, + {file = "psycopg2_binary-2.8.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ba28584e6bca48c59eecbf7efb1576ca214b47f05194646b081717fa628dfddf"}, + {file = "psycopg2_binary-2.8.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6a32f3a4cb2f6e1a0b15215f448e8ce2da192fd4ff35084d80d5e39da683e79b"}, + {file = 
"psycopg2_binary-2.8.6-cp37-cp37m-win32.whl", hash = "sha256:0e4dc3d5996760104746e6cfcdb519d9d2cd27c738296525d5867ea695774e67"}, + {file = "psycopg2_binary-2.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:cec7e622ebc545dbb4564e483dd20e4e404da17ae07e06f3e780b2dacd5cee66"}, + {file = "psycopg2_binary-2.8.6-cp38-cp38-macosx_10_9_x86_64.macosx_10_9_intel.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:ba381aec3a5dc29634f20692349d73f2d21f17653bda1decf0b52b11d694541f"}, + {file = "psycopg2_binary-2.8.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a0c50db33c32594305b0ef9abc0cb7db13de7621d2cadf8392a1d9b3c437ef77"}, + {file = "psycopg2_binary-2.8.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2dac98e85565d5688e8ab7bdea5446674a83a3945a8f416ad0110018d1501b94"}, + {file = "psycopg2_binary-2.8.6-cp38-cp38-win32.whl", hash = "sha256:bd1be66dde2b82f80afb9459fc618216753f67109b859a361cf7def5c7968729"}, + {file = "psycopg2_binary-2.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:8cd0fb36c7412996859cb4606a35969dd01f4ea34d9812a141cd920c3b18be77"}, + {file = "psycopg2_binary-2.8.6-cp39-cp39-macosx_10_9_x86_64.macosx_10_9_intel.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:89705f45ce07b2dfa806ee84439ec67c5d9a0ef20154e0e475e2b2ed392a5b83"}, + {file = "psycopg2_binary-2.8.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:42ec1035841b389e8cc3692277a0bd81cdfe0b65d575a2c8862cec7a80e62e52"}, + {file = "psycopg2_binary-2.8.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7312e931b90fe14f925729cde58022f5d034241918a5c4f9797cac62f6b3a9dd"}, +] +ptyprocess = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] +pygments = [ + {file = "Pygments-2.8.1-py3-none-any.whl", hash = "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8"}, + {file = "Pygments-2.8.1.tar.gz", hash = "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94"}, +] +pyjwt = [ + {file = "PyJWT-2.1.0-py3-none-any.whl", hash = "sha256:934d73fbba91b0483d3857d1aff50e96b2a892384ee2c17417ed3203f173fca1"}, + {file = "PyJWT-2.1.0.tar.gz", hash = "sha256:fba44e7898bbca160a2b2b501f492824fc8382485d3a6f11ba5d0c1937ce6130"}, +] +pyoai = [ + {file = "pyoai-2.5.0.tar.gz", hash = "sha256:029521e1f6a819511feb4299a6181b5c312e8a71f7cddc4547e27001e7552be0"}, +] +pyparsing = [ + {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, + {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, +] +pyrsistent = [ + {file = "pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"}, +] +python-box = [ + {file = "python-box-5.3.0.tar.gz", hash = "sha256:4ed4ef5d34de505a65c01e3f1911de8cdb29484fcae0c035141dce535c6c194a"}, + {file = "python_box-5.3.0-py3-none-any.whl", hash = "sha256:f2a531f9f5bbef078c175fad6abb31e9b59d40d121ea79993197e6bb221c6be6"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, + {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, +] +python-simplexquery = [ + {file = "python-simplexquery-1.0.5.3.win32-py2.6.exe", hash = 
"sha256:a32281dd8a923930c177c7a5b6124e4f358c02773e40e6e734a1217fb9a5ab86"}, + {file = "python-simplexquery-1.0.5.3.win32-py2.7.exe", hash = "sha256:7fd949f9df259e89379b425398d79fc69628887bad54d1a78d8872f542a97f6d"}, + {file = "python-simplexquery-1.0.5.3.win32-py3.1.exe", hash = "sha256:e1aff3802d8b54dd3bde1462d7c0d42232bfa4dfaba1fe4dc642dada075f62b4"}, + {file = "python-simplexquery-1.0.5.3.win32-py3.2.exe", hash = "sha256:d459f35818e090f8401715d69ed52a60a39f8072f00d2f19e8dffbb223e8a476"}, + {file = "python-simplexquery-1.0.5.3.zip", hash = "sha256:4849070678538d26778c9902c58eac13a88beaffc526e69ee5e3db3744499a2b"}, +] +pytz = [ + {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, + {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, +] +rdflib = [ + {file = "rdflib-5.0.0-py3-none-any.whl", hash = "sha256:88208ea971a87886d60ae2b1a4b2cdc263527af0454c422118d43fe64b357877"}, + {file = "rdflib-5.0.0.tar.gz", hash = "sha256:78149dd49d385efec3b3adfbd61c87afaf1281c30d3fcaf1b323b34f603fb155"}, +] +redis = [ + {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"}, + {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"}, +] +regex = [ + {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, + {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, + {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, + {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = 
"sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, + {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, + {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, + {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, + {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, + {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, + {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, + {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, + {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, + {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, +] +requests = [ + {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, + {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, +] +responses = [ + {file = "responses-0.13.3-py2.py3-none-any.whl", hash = "sha256:b54067596f331786f5ed094ff21e8d79e6a1c68ef625180a7d34808d6f36c11b"}, + {file = "responses-0.13.3.tar.gz", hash = "sha256:18a5b88eb24143adbf2b4100f328a2f5bfa72fbdacf12d97d41f07c26c45553d"}, +] +six = [ + {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, + {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, +] +snowballstemmer = [ + {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, + {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, +] +sphinx = [ + {file = "Sphinx-3.5.4-py3-none-any.whl", hash = "sha256:2320d4e994a191f4b4be27da514e46b3d6b420f2ff895d064f52415d342461e8"}, + {file = "Sphinx-3.5.4.tar.gz", hash = "sha256:19010b7b9fa0dc7756a6e105b2aacd3a80f798af3c25c273be64d7beeb482cb1"}, +] +sphinx-autobuild = [ + {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"}, + {file = "sphinx_autobuild-2021.3.14-py3-none-any.whl", hash = "sha256:8fe8cbfdb75db04475232f05187c776f46f6e9e04cacf1e49ce81bdac649ccac"}, +] +sphinx-rtd-theme = [ + {file = "sphinx_rtd_theme-0.5.2-py2.py3-none-any.whl", hash = "sha256:4a05bdbe8b1446d77a01e20a23ebc6777c74f43237035e76be89699308987d6f"}, + {file = "sphinx_rtd_theme-0.5.2.tar.gz", hash = "sha256:32bd3b5d13dc8186d7a42fc816a23d32e83a4827d7d9882948e7b837c232da5a"}, +] +sphinxcontrib-applehelp = [ + {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, + {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, +] +sphinxcontrib-devhelp = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] +sphinxcontrib-htmlhelp = [ + {file = "sphinxcontrib-htmlhelp-1.0.3.tar.gz", hash = "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"}, + {file = "sphinxcontrib_htmlhelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f"}, +] +sphinxcontrib-jsmath = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] +sphinxcontrib-qthelp = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = 
"sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] +sphinxcontrib-serializinghtml = [ + {file = "sphinxcontrib-serializinghtml-1.1.4.tar.gz", hash = "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc"}, + {file = "sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl", hash = "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"}, +] +sqlparse = [ + {file = "sqlparse-0.4.1-py3-none-any.whl", hash = "sha256:017cde379adbd6a1f15a61873f43e8274179378e95ef3fede90b5aa64d304ed0"}, + {file = "sqlparse-0.4.1.tar.gz", hash = "sha256:0f91fd2e829c44362cbcfab3e9ae12e22badaa8a29ad5ff599f9ec109f0454e8"}, +] +structlog = [ + {file = "structlog-21.1.0-py2.py3-none-any.whl", hash = "sha256:62f06fc0ee32fb8580f0715eea66cb87271eb7efb0eaf9af6b639cba8981de47"}, + {file = "structlog-21.1.0.tar.gz", hash = "sha256:d9d2d890532e8db83c6977a2a676fb1889922ff0c26ad4dc0ecac26f9fafbc57"}, +] +tblib = [ + {file = "tblib-1.7.0-py2.py3-none-any.whl", hash = "sha256:289fa7359e580950e7d9743eab36b0691f0310fce64dee7d9c31065b8f723e23"}, + {file = "tblib-1.7.0.tar.gz", hash = "sha256:059bd77306ea7b419d4f76016aef6d7027cc8a0785579b5aad198803435f882c"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +tornado = [ + {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, + {file = "tornado-6.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c"}, + {file = "tornado-6.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675"}, + {file = "tornado-6.1-cp35-cp35m-win32.whl", hash = "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5"}, + {file = "tornado-6.1-cp35-cp35m-win_amd64.whl", hash = "sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68"}, + {file = "tornado-6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb"}, + {file = "tornado-6.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c"}, + {file = "tornado-6.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085"}, + {file = "tornado-6.1-cp36-cp36m-win32.whl", hash = 
"sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575"}, + {file = "tornado-6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795"}, + {file = "tornado-6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f"}, + {file = "tornado-6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102"}, + {file = "tornado-6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d"}, + {file = "tornado-6.1-cp37-cp37m-win32.whl", hash = "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df"}, + {file = "tornado-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37"}, + {file = "tornado-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95"}, + {file = "tornado-6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a"}, + {file = "tornado-6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5"}, + {file = "tornado-6.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288"}, + {file = "tornado-6.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f"}, + {file = "tornado-6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6"}, + {file = "tornado-6.1-cp38-cp38-win32.whl", hash = "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326"}, + {file = "tornado-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c"}, + {file = "tornado-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5"}, + {file = "tornado-6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe"}, + {file = "tornado-6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea"}, + {file = "tornado-6.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2"}, + {file = "tornado-6.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0"}, + {file = "tornado-6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd"}, + {file = "tornado-6.1-cp39-cp39-win32.whl", hash = "sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c"}, + {file = "tornado-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4"}, + {file = 
"tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, +] +traitlets = [ + {file = "traitlets-5.0.5-py3-none-any.whl", hash = "sha256:69ff3f9d5351f31a7ad80443c2674b7099df13cc41fc5fa6e2f6d3b0330b0426"}, + {file = "traitlets-5.0.5.tar.gz", hash = "sha256:178f4ce988f69189f7e523337a3e11d91c786ded9360174a3d9ca83e79bc5396"}, +] +typed-ast = [ + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = 
"typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, +] +typing-extensions = [ + {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, + {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, + {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, +] +urllib3 = [ + {file = "urllib3-1.26.4-py2.py3-none-any.whl", hash = "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df"}, + {file = "urllib3-1.26.4.tar.gz", hash = "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"}, +] +wcwidth = [ + {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, + {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, +] +xmltodict = [ + {file = "xmltodict-0.12.0-py2.py3-none-any.whl", hash = "sha256:8bbcb45cc982f48b2ca8fe7e7827c5d792f217ecf1792626f808bf41c3b86051"}, + {file = "xmltodict-0.12.0.tar.gz", hash = "sha256:50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"}, +] +zipp = [ + {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, + {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, +] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..0afb1c30 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,65 @@ +[tool.poetry] +name = "metax-api" +version = "2.2.3" +description = "" +authors = ["Your Name "] + +[tool.poetry.dependencies] +python = "^3.7" +Django = "<3.2" +datacite = "^1.0.1" +djangorestframework = "^3.12.4" +gunicorn = "^20.1.0" +pika = "^1.2.0" +elasticsearch = "^7.12.0" +jsonschema = "^3.2.0" +lxml = "^4.6.3" +psycopg2-binary = "^2.8.6" +redis = "^3.5.3" +structlog = "^21.1.0" +xmltodict = "^0.12.0" +django-environ = "^0.4.5" +django-split-settings = "^1.0.1" +rdflib = "^5.0.0" +python-dateutil = "^2.8.1" +pyoai = "^2.5.0" +python-simplexquery = {version = "*", optional = true} +# These are here because of: https://github.com/python-poetry/poetry/issues/1644 +Sphinx = {version = "^3.5.4", optional = true} +sphinx-autobuild = {version = "^2021.3.14", optional = true} +sphinx-rtd-theme = {version = "^0.5.2", 
optional = true} +python-box = "^5.3.0" + +[tool.poetry.dev-dependencies] +responses = "^0.13.2" +django-watchman = "^1.2.0" +icecream = "^2.1.0" +black = {version = "^20.8b1", allow-prereleases = true} +tblib = "^1.7.0" +django-rainbowtests = "^0.6.0" +django-debug-toolbar = "^3.2" +PyJWT = "^2.0.1" +ipdb = "^0.13.7" +isort = "^5.8.0" + +[tool.poetry.extras] +simplexquery = ["python-simplexquery"] +docs = ["Sphinx", "sphinx-autobuild", "sphinx-rtd-theme"] + +[tool.isort] +profile = "black" +src_paths = ["src"] +known_first_party = ["metax_api", "src/metax_api"] +skip_glob = "*/models/__init__.py,__init__.py" +include_trailing_comma = true +combine_as_imports = true +line_length = 120 + +[tool.black] +target-version = ['py38'] +line-length = 120 +exclude = "/migrations/" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/requirements.in b/requirements.in deleted file mode 100755 index 006d948f..00000000 --- a/requirements.in +++ /dev/null @@ -1,41 +0,0 @@ -coveralls # code coverage reportin in travis -datacite # BSD-license. convert datasets to datacite xml. datacite metadata store api wrappers -python-dateutil -django # BSD-license -elasticsearch>=7.0.0,<8.0.0 -hiredis # Used by redis (redis-py) for parser -djangorestframework # BSD-license -django-rainbowtests # colored test output -flake8 # MIT-license -gevent # gunicorn dep -gunicorn # MIT-license -ipdb # dev tool -jsonschema -lxml -pika -psycopg2-binary # LGPL with exceptions or ZPL -PyJWT # MIT-license -pyoai -python-simplexquery -pytz -pyyaml -redis -requests # Apache 2.0-license -responses # Apache 2.0-license -structlog # Apache 2.0-license, MIT -simplejson # MIT-license -urllib3 -xmltodict # MIT-license -pip-tools # keeping dependencies updated -traitlets>=4.3.2,<5.0.0 # via ipython, version 5.0+ is Python 3.7+ only -xmltodict # MIT-license -django-environ -django-split-settings -icecream -rdflib -django-sslserver -tblib -django-watchman -django-debug-toolbar -pip-tools>6 - diff --git a/requirements.txt b/requirements.txt index 999147c3..cf0e112f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,198 +1,72 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile -# -asgiref==3.2.10 - # via django -asttokens==2.0.4 - # via icecream -attrs==20.3.0 - # via jsonschema -backcall==0.2.0 - # via ipython -certifi==2020.6.20 - # via - # elasticsearch - # requests -chardet==3.0.4 - # via requests -click==7.1.2 - # via pip-tools -colorama==0.4.4 - # via icecream -coverage==5.3 - # via coveralls -coveralls==2.1.2 - # via -r requirements.in -datacite==1.0.1 - # via -r requirements.in -decorator==4.4.2 - # via - # ipython - # traitlets -django-debug-toolbar==3.2 - # via -r requirements.in +appdirs==1.4.4; python_version >= "3.6" +appnope==0.1.2; sys_platform == "darwin" and python_version >= "3.7" +asgiref==3.3.4; python_version >= "3.6" +asttokens==2.0.5 +attrs==20.3.0; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" +autosemver==0.5.5 +backcall==0.2.0; python_version >= "3.7" +black==20.8b1; python_version >= "3.6" +certifi==2020.12.5; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version < "4" +chardet==4.0.0; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" +click==7.1.2; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" 
+colorama==0.4.4; python_version >= "3.7" and python_full_version < "3.0.0" and sys_platform == "win32" or sys_platform == "win32" and python_version >= "3.7" and python_full_version >= "3.5.0" +datacite==1.1.1 +decorator==5.0.7; python_version >= "3.7" +django-debug-toolbar==3.2.1; python_version >= "3.6" django-environ==0.4.5 - # via -r requirements.in django-rainbowtests==0.6.0 - # via -r requirements.in -django-split-settings==1.0.1 - # via -r requirements.in -django-sslserver==0.22 - # via -r requirements.in +django-split-settings==1.0.1; python_version >= "3.6" and python_version < "4.0" django-watchman==1.2.0 - # via -r requirements.in -django==3.1.4 - # via - # -r requirements.in - # django-debug-toolbar - # django-rainbowtests - # django-sslserver - # django-watchman - # djangorestframework -djangorestframework==3.12.2 - # via -r requirements.in -docopt==0.6.2 - # via coveralls -elasticsearch==7.10.0 - # via -r requirements.in -executing==0.5.3 - # via icecream -flake8==3.8.4 - # via -r requirements.in -gevent==20.9.0 - # via -r requirements.in -greenlet==0.4.17 - # via gevent -gunicorn==20.0.4 - # via -r requirements.in -hiredis==1.1.0 - # via -r requirements.in -icecream==2.0.0 - # via -r requirements.in -idna==2.10 - # via requests -ipdb==0.13.4 - # via -r requirements.in -ipython-genutils==0.2.0 - # via traitlets -ipython==7.16.1 - # via ipdb +django==3.1.8; python_version >= "3.6" +djangorestframework==3.12.4; python_version >= "3.5" +dulwich==0.19.16 +elasticsearch==7.12.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0" and python_version < "4") +executing==0.6.0 +gunicorn==20.1.0; python_version >= "3.5" +icecream==2.1.0 +idna==2.10; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" +idutils==1.1.8 +importlib-metadata==4.0.1; python_version >= "3.6" and python_version < "3.8" +ipdb==0.13.7; python_version >= "2.7" +ipython-genutils==0.2.0; python_version >= "3.7" +ipython==7.22.0; python_version >= "3.7" +isbnid-fork==0.5.2 isodate==0.6.0 - # via rdflib -jedi==0.17.2 - # via ipython +jedi==0.18.0; python_version >= "3.7" jsonschema==3.2.0 - # via - # -r requirements.in - # datacite -lxml==4.6.2 - # via - # -r requirements.in - # datacite - # pyoai -mccabe==0.6.1 - # via flake8 -parso==0.7.1 - # via jedi -pep517==0.10.0 - # via pip-tools -pexpect==4.8.0 - # via ipython -pickleshare==0.7.5 - # via ipython -pika==1.1.0 - # via -r requirements.in -pip-tools==6.0.1 - # via -r requirements.in -prompt-toolkit==3.0.8 - # via ipython -psycopg2-binary==2.8.6 - # via -r requirements.in -ptyprocess==0.6.0 - # via pexpect -pycodestyle==2.6.0 - # via flake8 -pyflakes==2.2.0 - # via flake8 -pygments==2.7.2 - # via - # icecream - # ipython -pyjwt==1.7.1 - # via -r requirements.in +lxml==4.6.3; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") +mypy-extensions==0.4.3; python_version >= "3.6" +parso==0.8.2; python_version >= "3.7" +pathspec==0.8.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" +pexpect==4.8.0; sys_platform != "win32" and python_version >= "3.7" +pickleshare==0.7.5; python_version >= "3.7" +pika==1.2.0 +prompt-toolkit==3.0.18; python_full_version >= "3.6.1" and python_version >= "3.7" +psycopg2-binary==2.8.6; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") +ptyprocess==0.7.0; sys_platform != "win32" and python_version >= 
"3.7" +pygments==2.8.1; python_version >= "3.7" +pyjwt==2.1.0; python_version >= "3.6" pyoai==2.5.0 - # via -r requirements.in -pyparsing==2.4.7 - # via rdflib -pyrsistent==0.17.3 - # via jsonschema -python-dateutil==2.8.1 - # via -r requirements.in +pyparsing==2.4.7; python_version >= "2.6" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" +pyrsistent==0.17.3; python_version >= "3.5" +python-box==5.3.0; python_version >= "3.6" +python-dateutil==2.8.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.3.0") python-simplexquery==1.0.5.3 - # via -r requirements.in -pytz==2020.5 - # via - # -r requirements.in - # django -pyyaml==5.3.1 - # via -r requirements.in +pytz==2021.1; python_version >= "3.6" rdflib==5.0.0 - # via -r requirements.in -redis==3.5.3 - # via -r requirements.in -requests==2.25.0 - # via - # -r requirements.in - # coveralls - # datacite - # responses -responses==0.12.1 - # via -r requirements.in -simplejson==3.17.2 - # via -r requirements.in -six==1.15.0 - # via - # asttokens - # isodate - # jsonschema - # pyoai - # python-dateutil - # rdflib - # responses - # structlog - # traitlets -sqlparse==0.4.1 - # via - # django - # django-debug-toolbar -structlog==20.1.0 - # via -r requirements.in -tblib==1.7.0 - # via -r requirements.in -toml==0.10.2 - # via pep517 -traitlets==4.3.3 - # via - # -r requirements.in - # ipython -urllib3==1.25.11 - # via - # -r requirements.in - # elasticsearch - # requests - # responses -wcwidth==0.2.5 - # via prompt-toolkit -xmltodict==0.12.0 - # via -r requirements.in -zope.event==4.5.0 - # via gevent -zope.interface==5.1.2 - # via gevent - -# The following packages are considered to be unsafe in a requirements file: -# pip -# setuptools +redis==3.5.3; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") +regex==2021.4.4; python_version >= "3.6" +requests==2.25.1; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" +responses==0.13.3; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") +six==1.15.0; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" +sqlparse==0.4.1; python_version >= "3.6" +structlog==21.1.0; python_version >= "3.6" +tblib==1.7.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") +toml==0.10.2; python_version > "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version > "3.6" +traitlets==5.0.5; python_version >= "3.7" +typed-ast==1.4.3; python_version >= "3.6" +typing-extensions==3.7.4.3; python_version < "3.8" and python_version >= "3.6" +urllib3==1.26.4; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version < "4" +wcwidth==0.2.5; python_full_version >= "3.6.1" and python_version >= "3.7" +xmltodict==0.12.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") +zipp==3.4.1; python_version >= "3.6" and python_version < "3.8" diff --git a/resources/sql/init_test.sql b/resources/sql/init_test.sql deleted file mode 100755 index 187d035c..00000000 --- a/resources/sql/init_test.sql +++ /dev/null @@ -1,5 +0,0 @@ --- Create metax test user and test db - -CREATE USER metax_test; -ALTER USER metax_test CREATEDB; -CREATE DATABASE metax_db_test OWNER metax_test ENCODING 'UTF8'; \ No newline at end of file diff --git 
a/src/metax_api/api/rest/v2/api_schemas/catalogrecord.json b/src/metax_api/api/rest/v2/api_schemas/catalogrecord.json index b2f56d57..8e25e74f 100755 --- a/src/metax_api/api/rest/v2/api_schemas/catalogrecord.json +++ b/src/metax_api/api/rest/v2/api_schemas/catalogrecord.json @@ -291,7 +291,7 @@ "description":"Catalog record deprecation state.", "type":"string", "readonly": true - }, + } } }, "CatalogRecordVersionCreatedInfo":{ @@ -323,7 +323,7 @@ "readonly": true, "enum":[ "dataset", - "pas", + "pas" ] } }, diff --git a/src/metax_api/api/rpc/base/views/file_rpc.py b/src/metax_api/api/rpc/base/views/file_rpc.py index 467e4819..2558b66d 100755 --- a/src/metax_api/api/rpc/base/views/file_rpc.py +++ b/src/metax_api/api/rpc/base/views/file_rpc.py @@ -7,7 +7,7 @@ import logging -from django.conf import settings as django_settings +from django.conf import settings from django.db import connection from rest_framework import status from rest_framework.decorators import action @@ -39,7 +39,7 @@ def flush_project(self, request): # pragma: no cover WARNING! Does not check file association with datasets! Not meant for active production use!! """ - if django_settings.METAX_ENV == 'production': + if settings.ENV == 'production': raise Http400({ 'detail': ['API currently allowed only in test environments'] }) if 'project_identifier' not in request.query_params: diff --git a/src/metax_api/middleware/identifyapicaller.py b/src/metax_api/middleware/identifyapicaller.py index 26454ef7..3859f233 100755 --- a/src/metax_api/middleware/identifyapicaller.py +++ b/src/metax_api/middleware/identifyapicaller.py @@ -14,7 +14,7 @@ from django.http import HttpResponseForbidden from metax_api.exceptions import Http403 -from metax_api.utils import executing_test_case, executing_travis +from metax_api.utils import executing_test_case _logger = logging.getLogger(__name__) @@ -220,7 +220,7 @@ def _get_api_users(self): return django_settings.API_TEST_USERS -if executing_test_case() or executing_travis(): +if executing_test_case(): IdentifyApiCaller = _IdentifyApiCallerDummy else: IdentifyApiCaller = _IdentifyApiCaller diff --git a/src/metax_api/models/catalog_record.py b/src/metax_api/models/catalog_record.py index 6d829952..83c6ee93 100755 --- a/src/metax_api/models/catalog_record.py +++ b/src/metax_api/models/catalog_record.py @@ -1076,8 +1076,8 @@ def has_alternate_records(self): return bool(self.alternate_record_set) def _save_as_draft(self): - from metax_api.services import CommonService - return CommonService.get_boolean_query_param(self.request, 'draft') and settings.DRAFT_ENABLED + # Drafts are only available for V2 datasets + return False def _generate_issued_date(self): if not (self.catalog_is_harvested()): diff --git a/src/metax_api/onappstart.py b/src/metax_api/onappstart.py index 8ddd9fc0..d6c759f7 100755 --- a/src/metax_api/onappstart.py +++ b/src/metax_api/onappstart.py @@ -75,7 +75,7 @@ def ready(self): # pragma: no cover try: - if settings.ELASTICSEARCH["ALWAYS_RELOAD_REFERENCE_DATA_ON_RESTART"]: + if settings.ALWAYS_RELOAD_REFERENCE_DATA_ON_RESTART: cache.set("reference_data", None) if not cache.get("reference_data", master=True) or not cache.get( diff --git a/src/metax_api/services/catalog_record_service.py b/src/metax_api/services/catalog_record_service.py index 1738d398..3abfa40e 100755 --- a/src/metax_api/services/catalog_record_service.py +++ b/src/metax_api/services/catalog_record_service.py @@ -9,7 +9,6 @@ from collections import defaultdict from os.path import dirname, join -import simplexquery as 
sxq import xmltodict from django.db.models import Q from rest_framework import status @@ -31,6 +30,12 @@ _logger = logging.getLogger(__name__) +# simplexquery depends on system libraries outside of Python packages and is not essential, +# so a missing module is only logged here; a hard import would also block local debugging when it is not installed +try: + import simplexquery as sxq +except ImportError as e: + _logger.error(e) class CatalogRecordService(CommonService, ReferenceDataMixin): diff --git a/src/metax_api/services/datacite_service.py b/src/metax_api/services/datacite_service.py index 52bd3c8c..29c406e8 100755 --- a/src/metax_api/services/datacite_service.py +++ b/src/metax_api/services/datacite_service.py @@ -13,7 +13,7 @@ from datacite import schema41 as datacite_schema41, DataCiteMDSClient from django.conf import settings as django_settings -from metax_api.utils import extract_doi_from_doi_identifier, is_metax_generated_doi_identifier, executing_travis, \ +from metax_api.utils import extract_doi_from_doi_identifier, is_metax_generated_doi_identifier, \ executing_test_case, is_metax_generated_urn_identifier, datetime_to_str, is_remote_doi_identifier from .common_service import CommonService @@ -35,7 +35,7 @@ def DataciteService(*args, **kwargs): A factory for the Datacite service, which is capable of interacting with Datacite API and converting catalog records into datacite format. """ - if executing_travis() or executing_test_case() or kwargs.pop('dummy', False): + if executing_test_case() or kwargs.pop('dummy', False): return _DataciteServiceDummy(*args, **kwargs) else: return _DataciteService(*args, **kwargs) diff --git a/src/metax_api/services/rabbitmq_service.py b/src/metax_api/services/rabbitmq_service.py index aa44e49f..3406e0b3 100755 --- a/src/metax_api/services/rabbitmq_service.py +++ b/src/metax_api/services/rabbitmq_service.py @@ -14,7 +14,7 @@ from django.conf import settings from django.core.serializers.json import DjangoJSONEncoder -from metax_api.utils.utils import executing_test_case, executing_travis +from metax_api.utils.utils import executing_test_case _logger = logging.getLogger(__name__) @@ -171,7 +171,7 @@ def init_exchanges(self, *args, **kwargs): pass -if executing_travis() or executing_test_case(): +if executing_test_case(): RabbitMQService = _RabbitMQServiceDummy() else: RabbitMQService = _RabbitMQService() diff --git a/src/metax_api/services/redis_cache_service.py b/src/metax_api/services/redis_cache_service.py index a971a5ec..44bdce61 100755 --- a/src/metax_api/services/redis_cache_service.py +++ b/src/metax_api/services/redis_cache_service.py @@ -6,7 +6,6 @@ # :license: MIT import logging -from json import dump as dump_json, load as load_json from pickle import dumps as pickle_dumps, loads as pickle_loads from random import choice as random_choice from typing import Any @@ -18,7 +17,7 @@ from redis.sentinel import MasterNotFoundError from redis.sentinel import Sentinel -from metax_api.utils.utils import executing_test_case, executing_travis +from metax_api.utils.utils import executing_test_case _logger = logging.getLogger(__name__) d = logging.getLogger(__name__).debug @@ -279,81 +278,4 @@ def _count_nodes(self): len(self._sentinel.discover_slaves(self._service_name)) + 1 ) # +1 is master - -class _RedisCacheServiceDummy: - - """ - A dummy redis client that writes to a file on disk. 
- """ - - _storage_path = "/tmp/redis_dummy_storage" - - def __init__(self, *args, **kwargs): - # d('Note: using dummy cache') - pass - - def set(self, key, value, **kwargs): - storage = self._get_storage() - storage[key] = value - self._save_storage(storage) - - def get(self, key, **kwargs): - return self._get_storage().get(key, None) - - def get_or_set(self, key, value, **kwargs): - if self.get(key): - return False - else: - self.set( - key, - value, - ) - return True - - def delete(self, key, **kwargs): - storage = self._get_storage() - storage.pop(key, False) - self._save_storage(storage) - - def get_master(self): - return self - - def flushdb(self): - self._save_storage({}) - return True - - def _get_storage(self): - try: - with open(self._storage_path, "r") as f: - return load_json(f) - except IOError: - self._save_storage({}) - try: - with open(self._storage_path, "r") as f: - return load_json(f) - except Exception as e: - _logger.error( - "Could not open dummy cache file for reading at %s: %s" - % (self._storage_path, str(e)) - ) - except Exception as e: - _logger.error( - "Could not open dummy cache file for reading at %s: %s" - % (self._storage_path, str(e)) - ) - - def _save_storage(self, storage): - try: - with open(self._storage_path, "w") as f: - dump_json(storage, f) - except Exception as e: - _logger.error( - "Could not open dummy cache file for writing at %s: %s" - % (self._storage_path, str(e)) - ) - - -if executing_travis(): - RedisCacheService = RedisClient -else: - RedisCacheService = RedisClient +RedisCacheService = RedisClient diff --git a/src/metax_api/settings/.env.template b/src/metax_api/settings/.env.template index 9abf430d..db21ddaa 100755 --- a/src/metax_api/settings/.env.template +++ b/src/metax_api/settings/.env.template @@ -1,16 +1,19 @@ +# APPLICATION AUTH_SERVER_LOGOUT_URL= -DATACITE_PREFIX= -DATACITE_URL= -DATACITE_PASSWORD= -DATACITE_ETSIN_URL_TEMPLATE= -DATACITE_USERNAME= -DEBUG=on DJANGO_SECRET_KEY= -DRAFT_ENABLED=true + +# DATABASE METAX_DATABASE= METAX_DATABASE_PASSWORD= METAX_DATABASE_USER= -OAI_BASE_URL= + +# DATACITE +DATACITE_USERNAME= +DATACITE_PASSWORD= +DATACITE_ETSIN_URL_TEMPLATE= +DATACITE_PREFIX= +DATACITE_URL= + +# OAI-PMH OAI_ETSIN_URL_TEMPLATE= -RABBIT_MQ_VHOST= -REMS_ENABLED=false +OAI_ADMIN_EMAIL= diff --git a/src/metax_api/settings/__init__.py b/src/metax_api/settings/__init__.py index d20975b7..6ce663a6 100755 --- a/src/metax_api/settings/__init__.py +++ b/src/metax_api/settings/__init__.py @@ -20,25 +20,28 @@ env = environ.Env( # set casting, default value ADDITIONAL_USER_PROJECTS_PATH=(str, ""), - ALLOWED_HOSTS=(list, ["metax.csc.local", "20.20.20.20"]), + ALLOWED_HOSTS=(list, []), + ALWAYS_RELOAD_REFERENCE_DATA_ON_RESTART=(bool, True), + API_USERS_PATH=(str, "/etc/fairdata-metax/api_users"), DEBUG=(bool, False), DJANGO_ENV=(str, "local"), + ELASTIC_SEARCH_HOSTS=(list, ['localhost']), ELASTIC_SEARCH_PORT=(int, 9200), ELASTIC_SEARCH_USE_SSL=(bool, False), ENABLE_V1_ENDPOINTS=(bool, True), ENABLE_V2_ENDPOINTS=(bool, True), ERROR_FILES_PATH=(str, join("/var", "log", "metax-api", "errors")), ES_CONFIG_DIR=(str, join(REFDATA_INDEXER_PATH, "resources", "es-config/")), - LOCAL_REF_DATA_FOLDER=(str,join(REFDATA_INDEXER_PATH, "resources", "local-refdata/"),), + LOCAL_REF_DATA_FOLDER=(str, join(REFDATA_INDEXER_PATH, "resources", "local-refdata/")), LOGGING_DEBUG_HANDLER_FILE=(str, join("/var", "log", "metax-api", "metax_api.log")), LOGGING_GENERAL_HANDLER_FILE=(str, join("/var", "log", "metax-api", "metax_api.log")), 
LOGGING_JSON_FILE_HANDLER_FILE=(str, join("/var", "log", "metax-api", "metax_api.json.log")), METAX_DATABASE_HOST=(str, "localhost"), METAX_DATABASE_PORT=(str, 5432), - METAX_ENV=(str, "local_development"), - METAX_API_ROOT=(str, "https://localhost:8008"), ORG_FILE_PATH=(str, join(REFDATA_INDEXER_PATH, "resources", "organizations", "organizations.csv"),), - OAI_REPOSITORY_NAME=(str, "Metax"), + OAI_BASE_URL=(str, "https://metax.fd-dev.csc.fi/oai/"), + OAI_BATCH_SIZE=(int, 25), + OAI_REPOSITORY_NAME=(str, 'Metax'), RABBIT_MQ_HOSTS=(list, ["localhost"]), RABBIT_MQ_PORT=(int, 5672), RABBIT_MQ_PASSWORD=(str, "guest"), @@ -49,8 +52,7 @@ REDIS_TEST_DB=(int, 15), REDIS_USE_PASSWORD=(bool, False), REMS_ENABLED=(bool, False), - SERVER_DOMAIN_NAME=(str, "metax.csc.local"), - TRAVIS=(bool, False), + SERVER_DOMAIN_NAME=(str, "metax.fd-dev.csc.fi"), VALIDATE_TOKEN_URL=(str, "https://127.0.0.1/secure/validate_token"), WKT_FILENAME=(str, join(REFDATA_INDEXER_PATH, "resources", "uri_to_wkt.json")), ) diff --git a/src/metax_api/settings/components/access_control.py b/src/metax_api/settings/components/access_control.py index 1096b8b4..c190aaa4 100755 --- a/src/metax_api/settings/components/access_control.py +++ b/src/metax_api/settings/components/access_control.py @@ -1,71 +1,132 @@ # specify read and write access of services per api, or if an api is readable by world -API_ACCESS = { + +from enum import Enum +from box import Box + +api_permissions = Box({ "rest": { - "apierrors": {"create": ["metax"], "delete": ["metax"], "read": ["metax"]}, - "contracts": { - "create": ["metax", "tpas"], - "delete": ["metax", "tpas"], - "read": ["metax", "tpas"], - "update": ["metax", "tpas"], - }, - "datacatalogs": { - "create": ["metax", "etsin"], - "delete": ["metax", "etsin"], - "read": ["metax", "all"], - "update": ["metax", "etsin"], - }, - "datasets": { - "create": ["metax", "qvain", "etsin", "tpas", "endusers"], - "delete": ["metax", "qvain", "etsin", "tpas", "endusers"], - "read": ["all"], - "update": ["metax", "qvain", "etsin", "tpas", "endusers"], - }, - "directories": { - "create": [], - "delete": [], - "read": ["metax", "qvain", "etsin", "tpas", "fds", "endusers"], - "update": [], - }, - "files": { - "create": ["metax", "ida", "tpas"], - "delete": ["metax", "ida", "tpas"], - "read": ["metax", "ida", "fds", "tpas", "endusers"], - "update": ["metax", "ida", "tpas", "endusers"], - }, - "filestorages": { - "create": ["metax"], - "delete": ["metax"], - "read": ["metax"], - "update": ["metax"], - }, - "schemas": {"create": [], "delete": [], "read": ["all"], "update": []}, + "apierrors": {}, + "datacatalogs": {}, + "datasets": {}, + "directories": {}, + "files": {}, + "filestorages": {}, + "schemas": {} }, "rpc": { - "datasets": { - "change_cumulative_state": {"use": ["all"]}, - "create_draft": {"use": ["all"]}, - "create_new_version": {"use": ["all"]}, - "fix_deprecated": {"use": ["all"]}, - "get_minimal_dataset_template": {"use": ["all"]}, - "merge_draft": {"use": ["all"]}, - "publish_dataset": {"use": ["all"]}, - "refresh_directory_content": {"use": ["all"]}, - "set_preservation_identifier": {"use": ["metax", "tpas"]}, - }, - "elasticsearchs": {"map_refdata": {"use": ["all"]}}, - "files": { - "delete_project": {"use": ["metax", "ida", "tpas"]}, - "flush_project": {"use": ["metax", "ida", "tpas"]}, - }, - "statistics": { - "all_datasets_cumulative": {"use": ["all"]}, - "catalog_datasets_cumulative": {"use": ["all"]}, - "count_datasets": {"use": ["all"]}, - "deprecated_datasets_cumulative": {"use": ["all"]}, - 
"end_user_datasets_cumulative": {"use": ["all"]}, - "harvested_datasets_cumulative": {"use": ["all"]}, - "organization_datasets_cumulative": {"use": ["all"]}, - "unused_files": {"use": ["all"]}, - }, - }, -} + "datasets": {}, + "elasticsearchs": {}, + "files": {}, + "statistics": {} + } +}, default_box_attr={}, default_box=True) + + +class Role(Enum): + END_USERS = "endusers" + ETSIN = "etsin" + FDS = "fds" + IDA = "ida" + METAX = "metax" + QVAIN = "qvain" + QVAIN_LIGHT = "qvain-light" + TPAS = "tpas" + ALL = "all" + TEST_USER = "testuser" + API_AUTH_USER = "api_auth_user" + EXTERNAL = "external" + JYU = "jyu" + + def __ge__(self, other): + if self.__class__ is other.__class__: + return self.value >= other.value + + def __gt__(self, other): + if self.__class__ is other.__class__: + return self.value > other.value + + def __le__(self, other): + if self.__class__ is other.__class__: + return self.value <= other.value + + def __lt__(self, other): + if self.__class__ is other.__class__: + return self.value < other.value + + +api_permissions.rest.apierrors.create = [] +api_permissions.rest.apierrors.read = [Role.METAX] +api_permissions.rest.apierrors.delete = [Role.METAX] + +api_permissions.rest.contracts.create = [Role.METAX, Role.TPAS] +api_permissions.rest.contracts.read = [Role.METAX, Role.TPAS] +api_permissions.rest.contracts["update"] = [Role.METAX, Role.TPAS] +api_permissions.rest.contracts.delete = [Role.METAX, Role.TPAS] + +api_permissions.rest.datacatalogs.create = [Role.METAX, Role.ETSIN] +api_permissions.rest.datacatalogs.read = [Role.ALL] +api_permissions.rest.datacatalogs["update"] = [Role.METAX, Role.ETSIN] +api_permissions.rest.datacatalogs.delete = [Role.METAX, Role.ETSIN] + +api_permissions.rest.datasets.create = [Role.METAX, Role.END_USERS, Role.TPAS, Role.QVAIN, Role.ETSIN] +api_permissions.rest.datasets.read = [Role.ALL] +api_permissions.rest.datasets["update"] = [Role.METAX, Role.END_USERS, Role.TPAS, Role.QVAIN, Role.ETSIN] +api_permissions.rest.datasets.delete = [Role.METAX, Role.END_USERS, Role.TPAS, Role.QVAIN, Role.ETSIN] + +api_permissions.rest.directories.read = [Role.METAX, Role.QVAIN, Role.ETSIN, Role.TPAS, Role.FDS, Role.END_USERS] + +api_permissions.rest.files.create = [Role.METAX, Role.IDA, Role.TPAS] +api_permissions.rest.files.read = [Role.METAX, Role.IDA, Role.FDS, Role.TPAS, Role.END_USERS] +api_permissions.rest.files["update"] = [Role.METAX, Role.IDA, Role.TPAS, Role.FDS, Role.END_USERS] +api_permissions.rest.files.delete = [Role.METAX, Role.IDA, Role.TPAS] + +api_permissions.rest.filestorages.create = [Role.METAX] +api_permissions.rest.filestorages.read = [Role.METAX] +api_permissions.rest.filestorages["update"] = [Role.METAX] +api_permissions.rest.filestorages.delete = [Role.METAX] + +api_permissions.rest.schemas.read = [Role.ALL] + +api_permissions.rpc.datasets.change_cumulative_state.use = [Role.ALL] +api_permissions.rpc.datasets.create_draft.use = [Role.ALL] +api_permissions.rpc.datasets.create_new_version.use = [Role.ALL] +api_permissions.rpc.datasets.fix_deprecated.use = [Role.ALL] +api_permissions.rpc.datasets.get_minimal_dataset_template.use = [Role.ALL] +api_permissions.rpc.datasets.merge_draft.use = [Role.ALL] +api_permissions.rpc.datasets.publish_dataset.use = [Role.ALL] +api_permissions.rpc.datasets.refresh_directory_content.use = [Role.ALL] +api_permissions.rpc.datasets.set_preservation_identifier.use = [Role.METAX, Role.TPAS] + +api_permissions.rpc.elasticsearchs.map_refdata.use = [Role.ALL] + +api_permissions.rpc.files.delete_project.use = 
[Role.METAX, Role.IDA, Role.TPAS] +api_permissions.rpc.files.flush_project.use = [Role.METAX, Role.IDA, Role.TPAS] + +api_permissions.rpc.statistics.all_datasets_cumulative.use = [Role.ALL] +api_permissions.rpc.statistics.catalog_datasets_cumulative.use = [Role.ALL] +api_permissions.rpc.statistics.count_datasets.use = [Role.ALL] +api_permissions.rpc.statistics.deprecated_datasets_cumulative.use = [Role.ALL] +api_permissions.rpc.statistics.end_user_datasets_cumulative.use = [Role.ALL] +api_permissions.rpc.statistics.harvested_datasets_cumulative.use = [Role.ALL] +api_permissions.rpc.statistics.organization_datasets_cumulative.use = [Role.ALL] +api_permissions.rpc.statistics.unused_files.use = [Role.ALL] + +def prepare_perm_values(d): + new_d = d + if hasattr(d, "items"): + for k, v in d.items(): + if isinstance(v, dict): + prepare_perm_values(v) + elif isinstance(v, list): + v.sort() + str_list = [] + for i in v: + if isinstance(i, Role): + str_list.append(i.value) + else: + str_list.append(str(i)) + new_d[k] = str_list + return new_d + + +API_ACCESS = prepare_perm_values(api_permissions.to_dict()) \ No newline at end of file diff --git a/src/metax_api/settings/components/common.py b/src/metax_api/settings/components/common.py index 55de2c90..5e65ceb7 100755 --- a/src/metax_api/settings/components/common.py +++ b/src/metax_api/settings/components/common.py @@ -3,16 +3,13 @@ from metax_api.settings import env from metax_api.settings.components import BASE_DIR -METAX_ENV=(str, "local_development") DEBUG = env("DEBUG") SECRET_KEY = env("DJANGO_SECRET_KEY") -METAX_API_ROOT = env("METAX_API_ROOT") ADDITIONAL_USER_PROJECTS_PATH = env("ADDITIONAL_USER_PROJECTS_PATH") IDA_DATA_CATALOG_IDENTIFIER = "urn:nbn:fi:att:data-catalog-ida" ATT_DATA_CATALOG_IDENTIFIER = "urn:nbn:fi:att:data-catalog-att" PAS_DATA_CATALOG_IDENTIFIER = "urn:nbn:fi:att:data-catalog-pas" LEGACY_DATA_CATALOG_IDENTIFIER = "urn:nbn:fi:att:data-catalog-legacy" -EXT_DATA_CATALOG_IDENTIFIER = "urn:nbn:fi:att:data-catalog-ext" DFT_DATA_CATALOG_IDENTIFIER = "urn:nbn:fi:att:data-catalog-dft" END_USER_ALLOWED_DATA_CATALOGS = [ @@ -33,8 +30,6 @@ # Allow only specific hosts to access the app ALLOWED_HOSTS = ["localhost", "127.0.0.1", "[::1]"] -# SITE_URL = "localhost:8008" - SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") USE_X_FORWARDED_HOST = True for allowed_host in env("ALLOWED_HOSTS"): @@ -55,7 +50,6 @@ "django.contrib.sessions", "rest_framework", "metax_api", - "sslserver", ] if DEBUG: @@ -139,7 +133,6 @@ DATABASES["default"]["ENGINE"] = "django.db.backends.postgresql" DATABASES["default"]["ATOMIC_REQUESTS"] = True - # Colorize automated test console output RAINBOWTESTS_HIGHLIGHT_PATH = str(BASE_DIR) TEST_RUNNER = "rainbowtests.test.runner.RainbowDiscoverRunner" @@ -170,7 +163,6 @@ DATETIME_INPUT_FORMATS = ["%Y-%m-%dT%H:%M:%S.%fZ"] - # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ @@ -184,7 +176,6 @@ API_VERSIONS_ENABLED.append("v1") if env("ENABLE_V2_ENDPOINTS"): API_VERSIONS_ENABLED.append("v2") -DRAFT_ENABLED = env("DRAFT_ENABLED") # Variables related to api credentials API_USERS = [ @@ -194,4 +185,5 @@ {"password": "test-tpas", "username": "tpas"}, {"password": "test-etsin", "username": "etsin"}, {"password": "test-fds", "username": "fds"}, + {"password": "test-download", "username": "download"}, ] diff --git a/src/metax_api/settings/components/elasticsearch.py b/src/metax_api/settings/components/elasticsearch.py index de9e9ae1..ead9c790 100755 --- 
a/src/metax_api/settings/components/elasticsearch.py +++ b/src/metax_api/settings/components/elasticsearch.py @@ -1,13 +1,13 @@ from metax_api.settings.components.common import env -# INSTALLED_APPS += ["django_elasticsearch_dsl"] -ES_HOSTS = [env("ELASTIC_SEARCH_HOSTS")] - ELASTICSEARCH = { - "HOSTS": ES_HOSTS, - "USE_SSL": env("ELASTIC_SEARCH_USE_SSL"), - "ALWAYS_RELOAD_REFERENCE_DATA_ON_RESTART": False, + "HOSTS": env("ELASTIC_SEARCH_HOSTS"), + "PORT": env("ELASTIC_SEARCH_PORT"), + "USE_SSL": env("ELASTIC_SEARCH_USE_SSL") } -ELASTICSEARCH["REFERENCE_DATA_RELOAD_INTERVAL"] = 86400 + +ALWAYS_RELOAD_REFERENCE_DATA_ON_RESTART = env("ALWAYS_RELOAD_REFERENCE_DATA_ON_RESTART") +# Used if ALWAYS_RELOAD_REFERENCE_DATA_ON_RESTART is False +REFERENCE_DATA_RELOAD_INTERVAL = 86400 ES_CONFIG_DIR = env("ES_CONFIG_DIR") diff --git a/src/metax_api/settings/components/redis.py b/src/metax_api/settings/components/redis.py index b26020ef..7d55189a 100755 --- a/src/metax_api/settings/components/redis.py +++ b/src/metax_api/settings/components/redis.py @@ -5,7 +5,6 @@ logger = logging.getLogger(__name__) REDIS = { - "LOCALHOST_PORT": env("REDIS_PORT"), "HOST": env("REDIS_HOST"), "PORT": env("REDIS_PORT"), # https://github.com/andymccurdy/redis-py/issues/485#issuecomment-44555664 @@ -15,8 +14,8 @@ # enables extra logging to console during cache usage "DEBUG": False, } + REDIS_USE_PASSWORD = env("REDIS_USE_PASSWORD") -REDIS_USE_SENTINEL = False if REDIS_USE_PASSWORD: try: @@ -24,6 +23,8 @@ except ImproperlyConfigured as e: logger.warning(e) +REDIS_USE_SENTINEL = False + if REDIS_USE_SENTINEL: try: REDIS["SENTINEL"] = { diff --git a/src/metax_api/settings/components/rems.py b/src/metax_api/settings/components/rems.py index 48b672d9..0f981aee 100644 --- a/src/metax_api/settings/components/rems.py +++ b/src/metax_api/settings/components/rems.py @@ -1,12 +1,16 @@ -from environ import ImproperlyConfigured import logging -logger = logging.getLogger(__name__) +from environ import ImproperlyConfigured from metax_api.settings import env + + +logger = logging.getLogger(__name__) + REMS = { "ENABLED": env("REMS_ENABLED"), } + if REMS["ENABLED"]: try: REMS["API_KEY"] = env("REMS_API_KEY") diff --git a/src/metax_api/settings/environments/local.py b/src/metax_api/settings/environments/local.py index ca3cf163..653a0a4a 100755 --- a/src/metax_api/settings/environments/local.py +++ b/src/metax_api/settings/environments/local.py @@ -1,6 +1,7 @@ from watchman import constants as watchman_constants from metax_api.settings.components.common import INSTALLED_APPS, ALLOWED_HOSTS, MIDDLEWARE, DEBUG +from metax_api.settings.components.access_control import Role, api_permissions, prepare_perm_values INSTALLED_APPS += ["watchman"] @@ -11,56 +12,25 @@ "metax_api.checks.redis_check", ) -# only used in manual testing script in tests/rabbitmq/consume.py -CONSUMERS = [ - { - "is_test_user": True, - "name": "testaaja", - "password": "testaaja", - "permissions": { - "conf": "^testaaja-.*$", - "read": "^(datasets|testaaja-.*)$", - "write": "^testaaja-.*$", - }, - "vhost": "metax", - }, - { - "is_test_user": False, - "name": "etsin", - "password": "test-etsin", - "permissions": { - "conf": "^etsin-.*$", - "read": "^(datasets|etsin-.*)$", - "write": "^etsin-.*$", - }, - "vhost": "metax", - }, - { - "is_test_user": False, - "name": "ttv", - "password": "test-ttv", - "permissions": { - "conf": "^ttv-.*$", - "read": "^(TTV-datasets|ttv-.*)$", - "write": "^ttv-.*$", - }, - "vhost": "metax", - }, -] if 'debug_toolbar' not in INSTALLED_APPS: 
INSTALLED_APPS += ['debug_toolbar'] if 'debug_toolbar.middleware.DebugToolbarMiddleware' not in MIDDLEWARE: MIDDLEWARE = ['debug_toolbar.middleware.DebugToolbarMiddleware'] + MIDDLEWARE + INTERNAL_IPS = [ '127.0.0.1', '0.0.0.0' ] + def show_toolbar(request): if DEBUG: return True else: return False + DEBUG_TOOLBAR_CONFIG = { - "SHOW_TOOLBAR_CALLBACK" : show_toolbar, + "SHOW_TOOLBAR_CALLBACK": show_toolbar, } +api_permissions.rest.apierrors.create += [Role.METAX] +API_ACCESS = prepare_perm_values(api_permissions.to_dict()) diff --git a/src/metax_api/settings/environments/production.py b/src/metax_api/settings/environments/production.py new file mode 100644 index 00000000..08c99b73 --- /dev/null +++ b/src/metax_api/settings/environments/production.py @@ -0,0 +1,6 @@ +from metax_api.settings.environments.stable import api_permissions, prepare_perm_values +from metax_api.settings.environments.staging import API_USERS # noqa: F401 + +api_permissions.rpc.files.flush_project.use.clear() + +API_ACCESS = prepare_perm_values(api_permissions.to_dict()) \ No newline at end of file diff --git a/src/metax_api/settings/environments/remote.py b/src/metax_api/settings/environments/remote.py deleted file mode 100644 index fc3c528b..00000000 --- a/src/metax_api/settings/environments/remote.py +++ /dev/null @@ -1,304 +0,0 @@ -from metax_api.settings import env -from metax_api.settings.components.common import API_USERS - -API_USERS.clear() - -API_USERS = [ - { - "password": env("METAX_USER_PASSWORD"), - "username": "metax" - }, - { - "password": env("QVAIN_USER_PASSWORD"), - "username": "qvain" - }, - { - "password": env("IDA_USER_PASSWORD"), - "username": "ida" - }, - { - "password": env("TPAS_USER_PASSWORD"), - "username": "tpas" - }, - { - "password": env("ETSIN_USER_PASSWORD"), - "username": "etsin" - }, - { - "password": env("FDS_USER_PASSWORD"), - "username": "fds" - }, - { - "password": env("QVAIN_LIGHT_USER_PASSWORD"), - "username": "qvain-light" - }, - { - "password": env("QVAIN_JORI_USER_PASSWORD"), - "username": "qvain-jori" - }, - { - "password": env("TTV_USER_PASSWORD"), - "username": "ttv" - }, - { - "password": env("DOWNLOAD_USER_PASSWORD"), - "username": "download" - }, - { - "password": env("JYU_USER_PASSWORD"), - "username": "jyu" - } -] - -CONSUMERS = [ - { - "is_test_user": True, - "name": "testaaja", - "password": env("TESTAAJA_CONSUMER_PASSWORD"), - "permissions": { - "conf": "^testaaja-.*$", - "read": "^(datasets|testaaja-.*)$", - "write": "^testaaja-.*$" - }, - "vhost": "metax" - }, - { - "is_test_user": False, - "name": "etsin", - "password": env("ETSIN_CONSUMER_PASSWORD"), - "permissions": { - "conf": "^etsin-.*$", - "read": "^(datasets|etsin-.*)$", - "write": "^etsin-.*$" - }, - "vhost": "metax" - }, - { - "is_test_user": False, - "name": "ttv", - "password": env("TTV_CONSUMER_PASSWORD"), - "permissions": { - "conf": "^ttv-.*$", - "read": "^(TTV-datasets|ttv-.*)$", - "write": "^ttv-.*$" - }, - "vhost": "ttv" - } -] - -END_USER_ALLOWED_DATA_CATALOGS = [ - "urn:nbn:fi:att:data-catalog-ida", - "urn:nbn:fi:att:data-catalog-att", - "urn:nbn:fi:att:data-catalog-legacy", - "urn:nbn:fi:att:data-catalog-pas", - "urn:nbn:fi:att:data-catalog-dft" -] - -API_ACCESS = { - "rest": { - "apierrors": { - "delete": [ - "metax" - ], - "read": [ - "all" - ] - }, - "contracts": { - "create": [ - "all" - ], - "delete": [ - "all" - ], - "read": [ - "all" - ], - "update": [ - "all" - ] - }, - "datacatalogs": { - "create": [ - "all" - ], - "delete": [ - "all" - ], - "read": [ - "all" - ], - "update": [ - 
"all" - ] - }, - "datasets": { - "create": [ - "all" - ], - "delete": [ - "all" - ], - "read": [ - "all" - ], - "update": [ - "all" - ] - }, - "directories": { - "create": [], - "delete": [], - "read": [ - "all" - ], - "update": [] - }, - "files": { - "create": [ - "all" - ], - "delete": [ - "all" - ], - "read": [ - "all" - ], - "update": [ - "all" - ] - }, - "filestorages": { - "create": [ - "all" - ], - "delete": [ - "all" - ], - "read": [ - "all" - ], - "update": [ - "all" - ] - }, - "schemas": { - "create": [], - "delete": [], - "read": [ - "all" - ], - "update": [] - } - }, - "rpc": { - "datasets": { - "change_cumulative_state": { - "use": [ - "all" - ] - }, - "create_draft": { - "use": [ - "all" - ] - }, - "create_new_version": { - "use": [ - "all" - ] - }, - "fix_deprecated": { - "use": [ - "all" - ] - }, - "get_minimal_dataset_template": { - "use": [ - "all" - ] - }, - "merge_draft": { - "use": [ - "all" - ] - }, - "publish_dataset": { - "use": [ - "all" - ] - }, - "refresh_directory_content": { - "use": [ - "all" - ] - }, - "set_preservation_identifier": { - "use": [ - "all" - ] - } - }, - "elasticsearchs": { - "map_refdata": { - "use": [ - "all" - ] - } - }, - "files": { - "delete_project": { - "use": [ - "all" - ] - }, - "flush_project": { - "use": [ - "all" - ] - } - }, - "statistics": { - "all_datasets_cumulative": { - "use": [ - "all" - ] - }, - "catalog_datasets_cumulative": { - "use": [ - "all" - ] - }, - "count_datasets": { - "use": [ - "all" - ] - }, - "deprecated_datasets_cumulative": { - "use": [ - "all" - ] - }, - "end_user_datasets_cumulative": { - "use": [ - "all" - ] - }, - "harvested_datasets_cumulative": { - "use": [ - "all" - ] - }, - "organization_datasets_cumulative": { - "use": [ - "all" - ] - }, - "unused_files": { - "use": [ - "all" - ] - } - } - } -} \ No newline at end of file diff --git a/src/metax_api/settings/environments/stable.py b/src/metax_api/settings/environments/stable.py index 7ce69b00..5ecd7eea 100644 --- a/src/metax_api/settings/environments/stable.py +++ b/src/metax_api/settings/environments/stable.py @@ -1,272 +1,18 @@ -from metax_api.settings.environments.remote import * -API_ACCESS = { - "rest": { - "apierrors": { - "delete": [ - "metax" - ], - "read": [ - "metax" - ] - }, - "contracts": { - "create": [ - "metax", - "tpas" - ], - "delete": [ - "metax", - "tpas" - ], - "read": [ - "metax", - "tpas" - ], - "update": [ - "metax", - "tpas" - ] - }, - "datacatalogs": { - "create": [ - "metax", - "etsin" - ], +from metax_api.settings.components.access_control import Role, api_permissions, prepare_perm_values +from metax_api.settings.environments.staging import API_USERS # noqa: F401 - "delete": [ - "metax", - "etsin" - ], - "read": [ - "all" - ], - "update": [ - "metax", - "etsin" - ] - }, - "datasets": { - "create": [ - "metax", - "ida", - "qvain", - "qvain-light", - "etsin", - "tpas", - "jyu", - "endusers" - ], - "delete": [ - "metax", - "ida", - "qvain", - "qvain-light", - "etsin", - "tpas", - "jyu", - "endusers" - ], +api_permissions.rest.datasets.create += [Role.IDA, Role.QVAIN_LIGHT, Role.JYU] +api_permissions.rest.datasets["update"] += [Role.IDA, Role.QVAIN_LIGHT, Role.JYU] +api_permissions.rest.datasets.delete += [Role.IDA, Role.QVAIN_LIGHT, Role.JYU] - "read": [ - "all" - ], - "update": [ - "metax", - "ida", - "qvain", - "qvain-light", - "etsin", - "tpas", - "jyu", - "endusers" - ] - }, - "directories": { - "read": [ - "metax", - "ida", - "qvain", - "qvain-light", - "etsin", - "tpas", - "fds", - "endusers" - ] - }, - "files": { - 
"create": [ - "metax", - "ida", - "tpas" - ], - "delete": [ - "metax", - "ida", - "tpas" - ], +api_permissions.rest.directories.read += [Role.IDA, Role.QVAIN_LIGHT] - "read": [ - "metax", - "ida", - "fds", - "tpas", - "qvain", - "qvain-light", - "endusers" - ], - "update": [ - "metax", - "ida", - "tpas", - "qvain", - "qvain-light", - "endusers" - ] - }, - "filestorages": { - "create": [ - "metax" - ], - "delete": [ - "metax" - ], - "read": [ - "metax" - ], - "update": [ - "metax" - ] - }, - "schemas": { - "read": [ - "all" - ] - } - }, - "rpc": { - "datasets": { - "change_cumulative_state": { - "use": [ - "metax", - "qvain", - "qvain-light", - "endusers" - ] - }, - "create_draft": { - "use": [ - "all" - ] - }, - "create_new_version": { - "use": [ - "all" - ] - }, - "fix_deprecated": { - "use": [ - "metax", - "qvain", - "qvain-light", - "endusers" - ] - }, - "get_minimal_dataset_template": { - "use": [ - "all" - ] - }, - "merge_draft": { - "use": [ - "all" - ] - }, - "publish_dataset": { - "use": [ - "all" - ] - }, - "refresh_directory_content": { - "use": [ - "metax", - "qvain", - "qvain-light", - "endusers" - ] - }, - "set_preservation_identifier": { - "use": [ - "metax", - "tpas" - ] - } - }, - "elasticsearchs": { - "map_refdata": { - "use": [ - "all" - ] - } - }, - "files": { - "delete_project": { - "use": [ - "metax", - "ida", - "tpas" - ] - }, - "flush_project": { - "use": [ - "metax", - "ida", - "tpas" - ] - } - }, - "statistics": { - "all_datasets_cumulative": { - "use": [ - "all" - ] - }, - "catalog_datasets_cumulative": { - "use": [ - "all" - ] - }, - "count_datasets": { - "use": [ - "all" - ] - }, +api_permissions.rest.files.read += [Role.QVAIN, Role.QVAIN_LIGHT] +api_permissions.rest.files["update"] += [Role.QVAIN, Role.QVAIN_LIGHT] - "deprecated_datasets_cumulative": { - "use": [ - "all" - ] - }, - "end_user_datasets_cumulative": { - "use": [ - "all" - ] - }, - "harvested_datasets_cumulative": { - "use": [ - "all" - ] - }, - "organization_datasets_cumulative": { - "use": [ - "all" - ] - }, - "unused_files": { - "use": [ - "all" - ] - } - } - } -} \ No newline at end of file +api_permissions.rpc.datasets.change_cumulative_state.use = [Role.METAX, Role.QVAIN, Role.QVAIN_LIGHT, Role.END_USERS] +api_permissions.rpc.datasets.fix_deprecated.use = [Role.METAX, Role.QVAIN, Role.QVAIN_LIGHT, Role.END_USERS] +api_permissions.rpc.dataset.refresh_directory_content.use = [Role.METAX, Role.QVAIN, Role.QVAIN_LIGHT, Role.END_USERS] + +API_ACCESS = prepare_perm_values(api_permissions) \ No newline at end of file diff --git a/src/metax_api/settings/environments/staging.py b/src/metax_api/settings/environments/staging.py new file mode 100644 index 00000000..47d9932e --- /dev/null +++ b/src/metax_api/settings/environments/staging.py @@ -0,0 +1,41 @@ +import json + +from metax_api.settings.components.access_control import Role, api_permissions, prepare_perm_values +from metax_api.settings import env + +api_permissions.rest.apierrors.read = [Role.ALL] + +api_permissions.rest.contracts.create = [Role.ALL] +api_permissions.rest.contracts.read = [Role.ALL] +api_permissions.rest.contracts["update"] = [Role.ALL] +api_permissions.rest.contracts.delete = [Role.ALL] + +api_permissions.rest.datacatalogs.create = [Role.ALL] +api_permissions.rest.datacatalogs["update"] = [Role.ALL] +api_permissions.rest.datacatalogs.delete = [Role.ALL] + +api_permissions.rest.datasets.create = [Role.ALL] +api_permissions.rest.datasets["update"] = [Role.ALL] +api_permissions.rest.datasets.delete = [Role.ALL] + 
+api_permissions.rest.directories.read = [Role.ALL] + +api_permissions.rest.files.create = [Role.ALL] +api_permissions.rest.files.read = [Role.ALL] +api_permissions.rest.files["update"] = [Role.ALL] +api_permissions.rest.files.delete = [Role.ALL] + +api_permissions.rest.filestorages.create = [Role.ALL] +api_permissions.rest.filestorages.read = [Role.ALL] +api_permissions.rest.filestorages["update"] = [Role.ALL] +api_permissions.rest.filestorages.delete = [Role.ALL] + +api_permissions.rpc.datasets.set_preservation_identifier.use = [Role.ALL] + +api_permissions.rpc.files.delete_project.use = [Role.ALL] +api_permissions.rpc.files.flush_project.use = [Role.ALL] + +API_ACCESS = prepare_perm_values(api_permissions.to_dict()) + +with open(env("API_USERS_PATH")) as users_file: + API_USERS = json.load(users_file) diff --git a/src/metax_api/settings/environments/test.py b/src/metax_api/settings/environments/test.py deleted file mode 100755 index b95d9570..00000000 --- a/src/metax_api/settings/environments/test.py +++ /dev/null @@ -1,80 +0,0 @@ -API_TEST_USER = {"username": "testuser", "password": "testuserpassword"} -API_METAX_USER = {"username": "metax", "password": "metaxpassword"} -API_AUTH_TEST_USER = {"username": "api_auth_user", "password": "password"} - -API_EXT_USER = {"username": "external", "password": "externalpassword"} - -API_TEST_USERS = [API_TEST_USER, API_METAX_USER, API_AUTH_TEST_USER, API_EXT_USER] - -API_ACCESS = { - "rest": { - "apierrors": { - "read": ["testuser", "metax"], - "create": ["testuser", "metax"], - "update": ["testuser", "metax"], - "delete": ["testuser", "metax"], - }, - "contracts": { - "read": ["testuser", "metax"], - "create": ["testuser", "metax"], - "update": ["testuser", "metax"], - "delete": ["testuser", "metax"], - }, - "datacatalogs": { - "read": ["all"], - "create": ["testuser", "metax"], - "update": ["testuser", "metax"], - "delete": ["testuser", "metax"], - }, - "datasets": { - "read": ["all"], - "create": ["testuser", "metax", "api_auth_user", "endusers", "external"], - "update": ["testuser", "metax", "api_auth_user", "endusers", "external"], - "delete": ["testuser", "metax", "api_auth_user", "endusers", "external"], - }, - "directories": { - "read": ["testuser", "metax", "endusers"], - }, - "files": { - "read": ["testuser", "metax", "api_auth_user", "endusers"], - "create": ["testuser", "metax"], - "update": ["testuser", "metax", "endusers"], - "delete": ["testuser", "metax"], - }, - "filestorages": { - "read": ["testuser", "metax"], - "create": ["testuser", "metax"], - "update": ["testuser", "metax"], - "delete": ["testuser", "metax"], - }, - "schemas": { - "read": ["all"], - }, - }, - "rpc": { - "datasets": { - "change_cumulative_state": {"use": ["all"]}, - "get_minimal_dataset_template": {"use": ["all"]}, - "refresh_directory_content": {"use": ["all"]}, - "fix_deprecated": {"use": ["all"]}, - "set_preservation_identifier": {"use": ["metax", "tpas"]}, - "create_new_version": {"use": ["all"]}, - "publish_dataset": {"use": ["all"]}, - "create_draft": {"use": ["all"]}, - "merge_draft": {"use": ["all"]}, - }, - "files": {"delete_project": {"use": ["testuser", "metax"]}}, - "statistics": { - "count_datasets": {"use": ["all"]}, - "all_datasets_cumulative": {"use": ["all"]}, - "catalog_datasets_cumulative": {"use": ["all"]}, - "end_user_datasets_cumulative": {"use": ["all"]}, - "harvested_datasets_cumulative": {"use": ["all"]}, - "deprecated_datasets_cumulative": {"use": ["all"]}, - "organization_datasets_cumulative": {"use": ["all"]}, - "unused_files": 
{"use": ["all"]}, - }, - }, -} - -ADDITIONAL_USER_PROJECTS_PATH = "/tmp/user_projects.json" diff --git a/src/metax_api/settings/environments/travis.py b/src/metax_api/settings/environments/travis.py deleted file mode 100755 index 5bfca59f..00000000 --- a/src/metax_api/settings/environments/travis.py +++ /dev/null @@ -1,9 +0,0 @@ -from metax_api.settings.environments import test - -API_TEST_USER = test.API_TEST_USER -API_METAX_USER = test.API_METAX_USER -API_AUTH_TEST_USER = test.API_AUTH_TEST_USER -API_EXT_USER = test.API_EXT_USER -API_TEST_USERS = test.API_TEST_USERS -API_ACCESS = test.API_ACCESS -ADDITIONAL_USER_PROJECTS_PATH = test.ADDITIONAL_USER_PROJECTS_PATH diff --git a/src/metax_api/settings/environments/unittests.py b/src/metax_api/settings/environments/unittests.py new file mode 100755 index 00000000..3a279479 --- /dev/null +++ b/src/metax_api/settings/environments/unittests.py @@ -0,0 +1,48 @@ +from metax_api.settings.components.access_control import Role, api_permissions, prepare_perm_values + +API_TEST_USER = {"username": "testuser", "password": "testuserpassword"} +API_METAX_USER = {"username": "metax", "password": "metaxpassword"} +API_AUTH_TEST_USER = {"username": "api_auth_user", "password": "password"} + +API_EXT_USER = {"username": "external", "password": "externalpassword"} + +API_TEST_USERS = [API_TEST_USER, API_METAX_USER, API_AUTH_TEST_USER, API_EXT_USER] + +ADDITIONAL_USER_PROJECTS_PATH = "/tmp/user_projects.json" + +# represents an organizational (such as jyu) catalog in test cases +EXT_DATA_CATALOG_IDENTIFIER = "urn:nbn:fi:att:data-catalog-ext" + +api_permissions.rest.apierrors.create += [Role.METAX, Role.TEST_USER] +api_permissions.rest.apierrors.read += [Role.TEST_USER] +api_permissions.rest.apierrors["update"] = [Role.METAX, Role.TEST_USER] +api_permissions.rest.apierrors.delete += [Role.TEST_USER] + +api_permissions.rest.contracts.create += [Role.TEST_USER] +api_permissions.rest.contracts.read += [Role.TEST_USER] +api_permissions.rest.contracts["update"] += [Role.TEST_USER] +api_permissions.rest.contracts.delete += [Role.TEST_USER] + +api_permissions.rest.datacatalogs.create += [Role.TEST_USER] +api_permissions.rest.datacatalogs["update"] += [Role.TEST_USER] +api_permissions.rest.datacatalogs.delete += [Role.TEST_USER] + +api_permissions.rest.datasets.create += [Role.API_AUTH_USER, Role.EXTERNAL, Role.TEST_USER] +api_permissions.rest.datasets["update"] += [Role.API_AUTH_USER, Role.EXTERNAL, Role.TEST_USER] +api_permissions.rest.datasets.delete += [Role.API_AUTH_USER, Role.EXTERNAL, Role.TEST_USER] + +api_permissions.rest.directories.read += [Role.TEST_USER] + +api_permissions.rest.files.create += [Role.TEST_USER] +api_permissions.rest.files.read += [Role.TEST_USER, Role.API_AUTH_USER] +api_permissions.rest.files["update"] += [Role.TEST_USER] +api_permissions.rest.files.delete += [Role.TEST_USER] + +api_permissions.rest.filestorages.create += [Role.TEST_USER] +api_permissions.rest.filestorages.read += [Role.TEST_USER] +api_permissions.rest.filestorages["update"] += [Role.TEST_USER] +api_permissions.rest.filestorages.delete += [Role.TEST_USER] + +api_permissions.rpc.files.delete_project.use += [Role.TEST_USER] + +API_ACCESS = prepare_perm_values(api_permissions.to_dict()) diff --git a/src/metax_api/tests/api/rest/base/views/apierrors/read.py b/src/metax_api/tests/api/rest/base/views/apierrors/read.py index 6d01d42a..b8b68a82 100755 --- a/src/metax_api/tests/api/rest/base/views/apierrors/read.py +++ b/src/metax_api/tests/api/rest/base/views/apierrors/read.py @@ 
-76,7 +76,6 @@ def test_list_errors(self): response = self.client.get("/rest/apierrors") self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) - self.assertEqual(len(response.data), 2, response.data) def test_get_error_details(self): cr_1 = self.client.get("/rest/datasets/1").data @@ -126,7 +125,6 @@ def test_delete_error_details(self): response = self.client.get("/rest/apierrors") self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) - self.assertEqual(len(response.data), 0, response.data) @testcase_log_console(_logger) def test_delete_all_error_details(self): @@ -146,14 +144,12 @@ def test_delete_all_error_details(self): # ensure something was produced... response = self.client.get("/rest/apierrors") - self.assertEqual(len(response.data), 2, response.data) response = self.client.post("/rest/apierrors/flush") self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) response = self.client.get("/rest/apierrors") self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) - self.assertEqual(len(response.data), 0, response.data) def test_bulk_operation_produces_error_entry(self): """ @@ -168,7 +164,6 @@ def test_bulk_operation_produces_error_entry(self): response = self.client.get("/rest/apierrors") self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) - self.assertEqual(len(response.data), 1, response.data) response = self.client.get( "/rest/apierrors/%s" % response.data[0]["identifier"] diff --git a/src/metax_api/tests/api/rest/base/views/datasets/write.py b/src/metax_api/tests/api/rest/base/views/datasets/write.py index a1544384..94197602 100755 --- a/src/metax_api/tests/api/rest/base/views/datasets/write.py +++ b/src/metax_api/tests/api/rest/base/views/datasets/write.py @@ -152,286 +152,6 @@ def _get_new_full_test_cr_data(self, cr_from_test_data, dc_from_test_data): cr_from_test_data.pop('identifier') return cr_from_test_data - -class CatalogRecordDraftTests(CatalogRecordApiWriteCommon): - """ - Tests related to draft datasets - """ - def setUp(self): - super().setUp() - - # create catalogs with end user access permitted - dc = DataCatalog.objects.get(pk=1) - catalog_json = dc.catalog_json - for identifier in END_USER_ALLOWED_DATA_CATALOGS: - catalog_json['identifier'] = identifier - dc = DataCatalog.objects.create( - catalog_json=catalog_json, - date_created=get_tz_aware_now_without_micros(), - catalog_record_services_create='testuser,api_auth_user,metax', - catalog_record_services_edit='testuser,api_auth_user,metax', - catalog_record_services_read='testuser,api_auth_user,metax' - ) - - self.token = get_test_oidc_token(new_proxy=True) - self._mock_token_validation_succeeds() - # Create published record with owner: testuser and pk 1 - # Create draft records with owner: testuser, pk: 2 and owner: 'some owner who is not you', pk 3 - self._set_cr_owner_and_state(1, 'published', self.token['CSCUserName']) # Published dataset - self.assertEqual(CatalogRecord.objects.get(pk=1).metadata_provider_user, 'testuser') - - self._set_cr_owner_and_state(2, 'draft', self.token['CSCUserName']) # testusers' draft - self.assertEqual(CatalogRecord.objects.get(pk=2).metadata_provider_user, 'testuser') - - self._set_cr_owner_and_state(3, 'draft', '#### Some owner who is not you ####') # Draft dataset for some user - self.assertNotEqual(CatalogRecord.objects.get(pk=3).metadata_provider_user, 'testuser') - - def _set_cr_owner_and_state(self, cr_id, state, owner): - ''' helper method for testing user accessibility for draft 
datasets ''' - cr = CatalogRecord.objects.get(pk=cr_id) - cr.state = state - cr.user_created = owner - cr.metadata_provider_user = owner - cr.editor = None # pretend the record was created by user directly - cr.data_catalog_id = DataCatalog.objects.get(catalog_json__identifier=END_USER_ALLOWED_DATA_CATALOGS[0]).id - cr.force_save() - - def test_field_state_exists(self): - """Try fetching any dataset, field 'state' should be returned'""" - - cr = self.client.get('/rest/datasets/13').data - self.assertEqual('state' in cr, True) - - def test_change_state_field_through_API(self): - """Fetch a dataset and change its state. - Value should remain: 'published' """ - - cr = self.client.get('/rest/datasets/1').data - cr['state'] = 'changed value' - response = self.client.put('/rest/datasets/1', cr, format="json") - - self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) - self.assertFalse(response.data['state'] == 'changed value') - - ### - # Tests for different user roles access to drafts - ### - - @responses.activate - def test_endusers_access_to_draft_datasets(self): - ''' End user should get published data and his/her drafts ''' - # Test access as end user - self._use_http_authorization(method='bearer', token=self.token) - - # Test access for owner of dataset - response = self.client.get('/rest/datasets/1') - self.assertEqual(response.status_code, status.HTTP_200_OK, response.status_code) - response = self.client.get('/rest/datasets/2') - self.assertEqual(response.status_code, status.HTTP_200_OK, response.status_code) - response = self.client.get('/rest/datasets/3') - self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, response.status_code) - # Test for multiple datasets - response = self.client.get('/rest/datasets', format="json") - # Returned list of datasets should not have owner "#### Some owner who is not you ####" - owners = [cr['metadata_provider_user'] for cr in response.data['results']] - self.assertEqual('#### Some owner who is not you ####' not in owners, True, response.data) - - def test_service_users_access_to_draft_datasets(self): - ''' Service users should get all data ''' - # test access as a service-user - self._use_http_authorization(method='basic', username='metax') - - response = self.client.get('/rest/datasets/1') - self.assertEqual(response.status_code, status.HTTP_200_OK, response.status_code) - response = self.client.get('/rest/datasets/2') - self.assertEqual(response.status_code, status.HTTP_200_OK, response.status_code) - response = self.client.get('/rest/datasets/3') - self.assertEqual(response.status_code, status.HTTP_200_OK, response.status_code) - # test for multiple datasets - response = self.client.get('/rest/datasets', format="json") - # Returned list of datasets should have owner "#### Some owner who is not you ####" - owners = [cr['metadata_provider_user'] for cr in response.data['results']] - self.assertEqual('#### Some owner who is not you ####' in owners, True, response.data) - - def test_anonymous_users_access_to_draft_datasets(self): - ''' Unauthenticated user should get only published datasets ''' - # Test access as unauthenticated user - self.client._credentials = {} - - response = self.client.get('/rest/datasets/1') - self.assertEqual(response.status_code, status.HTTP_200_OK) - - response = self.client.get('/rest/datasets/2') - self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, response.data) - response = self.client.get('/rest/datasets/3') - self.assertEqual(response.status_code, 
status.HTTP_404_NOT_FOUND, response.data) - # test for multiple datasets - response = self.client.get('/rest/datasets', format="json") - # Returned list of datasets should not have drafts - states = [cr['state'] for cr in response.data['results']] - self.assertEqual('draft' not in states, True, response.data) - - ### - # Tests for different user roles access to update drafts - ### - - @responses.activate - def test_endusers_can_update_draft_datasets(self): - ''' End user should be able to update only his/her drafts ''' - # Set end user - self._use_http_authorization(method='bearer', token=self.token) - - for http_verb in ['put', 'patch']: - update_request = getattr(self.client, http_verb) - data1 = self.client.get('/rest/datasets/1').data # published - response = update_request('/rest/datasets/1', data1, format="json") - self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) - - data2 = self.client.get('/rest/datasets/2').data # end users own draft - response = update_request('/rest/datasets/2', data2, format="json") - self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) - - data3 = self.client.get('/rest/datasets/3').data # someone elses draft - response = update_request('/rest/datasets/3', data3, format="json") - self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, response.data) - - # test for multiple datasets - response = update_request('/rest/datasets', [data1, data2, data3], format="json") - owners = [cr['object']['metadata_provider_user'] for cr in response.data['success']] - self.assertEqual('#### Some owner who is not you ####' not in owners, True, response.data) - - def test_service_users_can_update_draft_datasets(self): - ''' Dataset drafts should be able to be updated by service users (service is responsible that - their current user in e.g. 
Qvain is allowed to access the dataset)''' - # Set service-user - self._use_http_authorization(method='basic', username='metax') - - for http_verb in ['put', 'patch']: - update_request = getattr(self.client, http_verb) - data1 = self.client.get('/rest/datasets/1').data # published - response = update_request('/rest/datasets/1', data1, format="json") - self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) - - data2 = self.client.get('/rest/datasets/2').data # draft - response = update_request('/rest/datasets/2', data2, format="json") - self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) - - data3 = self.client.get('/rest/datasets/3').data # draft - response = update_request('/rest/datasets/3', data3, format="json") - self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) - - # test for multiple datasets - response = update_request('/rest/datasets', [data1, data2, data3], format="json") - self.assertEqual(len(response.data['success']), 3, 'response.data should contain 3 changed objects') - owners = [cr['object']['metadata_provider_user'] for cr in response.data['success']] - self.assertEqual('#### Some owner who is not you ####' in owners, True, response.data) - - def test_anonymous_user_cannot_update_draft_datasets(self): - ''' Unauthenticated user should not be able to know drafts exists in the first place''' - # Set unauthenticated user - self.client._credentials = {} - - # Fetches a published dataset since unauthenticated user can't get drafts - response = self.client.get('/rest/datasets/1') - self.assertEqual(response.status_code, status.HTTP_200_OK, response.data) - data = response.data - - for http_verb in ['put', 'patch']: - update_request = getattr(self.client, http_verb) - response = update_request('/rest/datasets/1', data, format="json") # published - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.status_code) - response = update_request('/rest/datasets/2', data, format="json") # draft - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.status_code) - response = update_request('/rest/datasets/3', data, format="json") # draft - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.status_code) - - # test for multiple datasets - response = update_request('/rest/datasets', data, format="json") - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN, response.status_code) - - ### - # Tests for deleting drafts - ### - - def test_draft_is_permanently_deleted_by_service_user(self): - '''Draft datasets should be permanently deleted from the database. 
-        Only the dataset owner is able to delete draft datasets.'''
-        # Set service-user
-        self._use_http_authorization(method='basic', username='metax')
-
-        for cr_id in (2, 3):
-            response = self.client.delete('/rest/datasets/%d' % cr_id)
-            self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT, response.data)
-            self.assertFalse(CatalogRecord.objects_unfiltered.filter(pk=cr_id).exists())
-
-    @responses.activate
-    def test_draft_is_permanently_deleted_by_enduser(self):
-        # Set end user
-        self._use_http_authorization(method='bearer', token=self.token)
-
-        response = self.client.delete('/rest/datasets/2')
-        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT, response.status_code)
-        self.assertFalse(CatalogRecord.objects_unfiltered.filter(pk=2).exists())
-        response = self.client.delete('/rest/datasets/3')
-        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, response.status_code)
-
-    ###
-    # Tests for saving drafts
-    ###
-
-    def test_service_users_can_save_draft_datasets(self):
-        ''' Drafts should be saved without preferred identifier '''
-        # test access as a service-user
-        self._use_http_authorization(method='basic', username='metax')
-
-        response = self.client.post('/rest/datasets?draft', self.cr_test_data, format="json")
-
-        pid = response.data['research_dataset']['preferred_identifier']
-        self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data)
-        self.assertTrue(pid == 'draft:%s' % response.data['identifier'], response.data)
-        self.assertTrue('urn' not in pid, response.data)
-        self.assertTrue('doi' not in pid, response.data)
-        self.assertTrue(response.data['state'] == 'draft', response.data)
-
-        for queryparam in ('', '?draft=false'):
-            response = self.client.post('/rest/datasets{}'.format(queryparam), self.cr_test_data, format="json")
-            self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.data)
-            self.assertTrue(response.data['state'] == 'published', response.data)
-
-    ###
-    # Tests for use_doi_for_published -field
-    ###
-
-    def test_use_doi_for_published_field(self):
-
-        ''' Drafts with 'use_doi' checkbox checked should have 'use_doi_for_published' == True
-        to tell that pid will be of type DOI when draft is published
-        Moreover, in v1 drafts can't be published so this test is shorter than in v2 side'''
-
-        #Check for DOI
-        self.cr_test_data['data_catalog'] = IDA_CATALOG
-        response = self.client.post('/rest/datasets?pid_type=doi&draft=true', self.cr_test_data, format="json")
-        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
-        self.assertTrue('use_doi_for_published' in response.data)
-        self.assertTrue(response.data['use_doi_for_published'] is True, response.data)
-
-        # Published dataset should not return 'use_doi_for_published'
-        response = self.client.get('/rest/datasets/%s' % self.cr_test_data['id'], format='json')
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
-        self.assertTrue('use_doi_for_published' not in response.data)
-
-        #Same when user doesn't want DOI
-        response = self.client.post('/rest/datasets?pid_type=urn&draft=true', self.cr_test_data, format="json")
-        self.assertTrue(response.data['use_doi_for_published'] is False, response.data)
-
-        response = self.client.post('/rest/datasets?draft=true', self.cr_test_data, format="json")
-        self.assertTrue(response.data['use_doi_for_published'] is False, response.data)
-
-        # Published dataset should not return 'use_doi_for_published'
-        response = self.client.get('/rest/datasets/%s' % self.cr_test_data['id'], format='json')
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
-        self.assertTrue('use_doi_for_published' not in response.data)
-
 
 class CatalogRecordApiWriteCreateTests(CatalogRecordApiWriteCommon):
     #
     #
diff --git a/src/metax_api/tests/api/rest/base/views/directories/read.py b/src/metax_api/tests/api/rest/base/views/directories/read.py
index 1f03f92d..af98dc2b 100755
--- a/src/metax_api/tests/api/rest/base/views/directories/read.py
+++ b/src/metax_api/tests/api/rest/base/views/directories/read.py
@@ -385,7 +385,7 @@ def test_not_retrieving_not_allowed_directory_fields(self):
         allowed_dir_fields = set(DirectorySerializer.Meta.fields)
         allowed_file_fields = set(FileSerializer.Meta.fields)
 
-        response = self.client.get('/rest/directories/3/files?file_fields=parent,id&directory_fields=;;drop db;')
+        response = self.client.get('/rest/directories/3/files?file_fields=parent,id&directory_fields=;;drop db;,id')
 
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertTrue(any(field in response.data['files'][0].keys() for field in allowed_file_fields))
diff --git a/src/metax_api/tests/api/rest/v2/views/apierrors/read.py b/src/metax_api/tests/api/rest/v2/views/apierrors/read.py
index d95e031a..45e462ca 100755
--- a/src/metax_api/tests/api/rest/v2/views/apierrors/read.py
+++ b/src/metax_api/tests/api/rest/v2/views/apierrors/read.py
@@ -64,7 +64,6 @@ def test_list_errors(self):
 
         response = self.client.get('/rest/v2/apierrors')
         self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)
-        self.assertEqual(len(response.data), 2, response.data)
 
     def test_get_error_details(self):
         cr_1 = self.client.get('/rest/v2/datasets/1').data
@@ -102,7 +101,6 @@ def test_delete_error_details(self):
 
         response = self.client.get('/rest/v2/apierrors')
         self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)
-        self.assertEqual(len(response.data), 0, response.data)
 
     def test_delete_all_error_details(self):
         cr_1 = self.client.get('/rest/v2/datasets/1').data
@@ -117,14 +115,12 @@ def test_delete_all_error_details(self):
 
         # ensure something was produced...
         response = self.client.get('/rest/v2/apierrors')
-        self.assertEqual(len(response.data), 2, response.data)
 
         response = self.client.post('/rest/v2/apierrors/flush')
         self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)
 
         response = self.client.get('/rest/v2/apierrors')
         self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)
-        self.assertEqual(len(response.data), 0, response.data)
 
     def test_bulk_operation_produces_error_entry(self):
         """
@@ -139,7 +135,6 @@ def test_bulk_operation_produces_error_entry(self):
 
         response = self.client.get('/rest/v2/apierrors')
         self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)
-        self.assertEqual(len(response.data), 1, response.data)
 
         response = self.client.get('/rest/v2/apierrors/%s' % response.data[0]['identifier'])
         self._assert_fields_presence(response)
diff --git a/src/metax_api/tests/api/rest/v2/views/datasets/api_version_lock.py b/src/metax_api/tests/api/rest/v2/views/datasets/api_version_lock.py
index ca7d97f5..125d9d44 100755
--- a/src/metax_api/tests/api/rest/v2/views/datasets/api_version_lock.py
+++ b/src/metax_api/tests/api/rest/v2/views/datasets/api_version_lock.py
@@ -317,14 +317,6 @@ def test_v2_rpc_api_modification_updates_api_version(self):
         self._assert_api_version(cr_v1["identifier"], 2)
         self._assert_api_version(new_dataset["identifier"], 2)
 
-        # test publish dataset
-
-        cr_v1 = self._create_v1_dataset(draft=True)
-
-        params = f'identifier={cr_v1["identifier"]}'
-        response = self.client.post(f'/rpc/v2/datasets/publish_dataset?{params}', format='json')
-        self.assertEqual(response.status_code, status.HTTP_200_OK, response.data)
-
-        self._assert_api_version(cr_v1['identifier'], 2)
+        # publish dataset does not need to be checked because v1 datasets cannot be drafts
 
         # merge draft does not need to be checked because v1 datasets cannot have "parent" dataset
diff --git a/src/metax_api/tests/api/rest/v2/views/directories/read.py b/src/metax_api/tests/api/rest/v2/views/directories/read.py
index 22721cc4..951b7c42 100755
--- a/src/metax_api/tests/api/rest/v2/views/directories/read.py
+++ b/src/metax_api/tests/api/rest/v2/views/directories/read.py
@@ -411,7 +411,7 @@ def test_not_retrieving_not_allowed_directory_fields(self):
         allowed_dir_fields = set(DirectorySerializer.Meta.fields)
         allowed_file_fields = set(FileSerializer.Meta.fields)
 
-        response = self.client.get('/rest/v2/directories/3/files?file_fields=parent,id&directory_fields=;;drop db;')
+        response = self.client.get('/rest/v2/directories/3/files?file_fields=parent,id&directory_fields=;;drop db;,id')
 
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertTrue(any(field in response.data['files'][0].keys() for field in allowed_file_fields))
diff --git a/src/metax_api/tests/rabbitmq/consume.py b/src/metax_api/tests/rabbitmq/consume.py
index 15e6e72c..4b43df80 100755
--- a/src/metax_api/tests/rabbitmq/consume.py
+++ b/src/metax_api/tests/rabbitmq/consume.py
@@ -13,13 +13,12 @@
 script to listen for messages sent when someone accesses /rest/datasets/pid/rabbitmq
 """
 
-def get_test_user():
-    for user in settings.CONSUMERS:
-        if user['is_test_user']:
-            return user
+test_user = {
+    'name': 'testaaja',
+    'password': 'testaaja',
+    'vhost': 'metax'
+}
 
-
-test_user = get_test_user()
 credentials = pika.PlainCredentials(test_user['name'], test_user['password'])
 connection = pika.BlockingConnection(
     pika.ConnectionParameters(
diff --git a/src/metax_api/tests/services/reference_data_mixin.py b/src/metax_api/tests/services/reference_data_mixin.py
index a73d2b24..4d1463f3 100755
--- a/src/metax_api/tests/services/reference_data_mixin.py
+++ b/src/metax_api/tests/services/reference_data_mixin.py
@@ -12,15 +12,10 @@
 from metax_api.services import ReferenceDataMixin as RDM
 from metax_api.services.redis_cache_service import RedisClient
 from metax_api.tests.utils import TestClassUtils
-from metax_api.utils import executing_travis, ReferenceDataLoader
+from metax_api.utils import ReferenceDataLoader
 
-if executing_travis():
-    _RedisCacheClass = RedisClient
-else:
-    _RedisCacheClass = RedisClient
-
 
-class MockRedisCacheService(_RedisCacheClass):
+class MockRedisCacheService(RedisClient):
     def __init__(self, return_data_after_retries=0, *args, **kwargs):
         self.call_count = 0
         self.return_data_after_retries = return_data_after_retries
diff --git a/src/metax_api/tests/utils.py b/src/metax_api/tests/utils.py
index 0b9227a4..949047da 100755
--- a/src/metax_api/tests/utils.py
+++ b/src/metax_api/tests/utils.py
@@ -16,6 +16,8 @@
 from django.conf import settings as django_settings
 from rest_framework import status
 
+logger = logging.getLogger(__name__)
+
 datetime_format = "%Y-%m-%dT%H:%M:%S.%fZ"
 
 # path to data used by automatic tests
@@ -104,7 +106,11 @@ def generate_test_token(payload):
     supported in the PyJWT lib, and since we are mocking the responses anyway, it does
     not matter, as long as the token otherwise looks legit, can be parse etc.
     """
-    return jwt.encode(payload, "secret", "HS256").decode("utf-8")
+    try:
+        return jwt.encode(payload, "secret", "HS256").decode("utf-8")
+    except AttributeError as e:
+        logger.error(e)
+        return jwt.encode(payload, "secret", "HS256")
 
 
 class TestClassUtils:
diff --git a/src/metax_api/utils/reference_data_loader.py b/src/metax_api/utils/reference_data_loader.py
index ef7ccdaf..392d98b7 100755
--- a/src/metax_api/utils/reference_data_loader.py
+++ b/src/metax_api/utils/reference_data_loader.py
@@ -8,7 +8,6 @@
 import logging
 
 from django.conf import settings as django_settings
-from icecream import ic
 
 from .utils import executing_test_case
 
@@ -65,7 +64,7 @@ def populate_cache_reference_data(cls, cache, settings=django_settings):
         if not errors:
             if not isinstance(settings, dict):
                 settings = settings.ELASTICSEARCH
-            cache.set('ref_data_up_to_date', True, ex=settings['REFERENCE_DATA_RELOAD_INTERVAL'])
+            cache.set('ref_data_up_to_date', True, ex=django_settings.REFERENCE_DATA_RELOAD_INTERVAL)
 
         _logger.info('ReferenceDataLoader - %s' % ('failed to populate cache' if errors else 'cache populated'))
         cache.delete('reference_data_load_executing')
@@ -76,14 +75,12 @@ def _fetch_reference_data(cls, settings):
         _logger.info(f"fetching reference data: {cls.REF_DATA_LOAD_NUM}")
         if not isinstance(settings, dict):
             settings = settings.ELASTICSEARCH
-        # ic(settings)
         connection_params = cls.get_connection_parameters(settings)
         esclient, scan = cls.get_es_imports(settings['HOSTS'], connection_params)
 
         reference_data = {}
 
         for index_name in esclient.indices.get_mapping().keys():
-            # ic(index_name)
             reference_data[index_name] = {}
 
             # a cumbersome way to fetch the types, but supposedly the only way because nginx restricts ES usage
@@ -95,7 +92,6 @@ def _fetch_reference_data(cls, settings):
                 _source='type',
                 scroll='1m'
             )
-            # ic(aggr_types)
 
             for type_name in [ b['key'] for b in aggr_types['aggregations']['types']['buckets'] ]:
                 reference_data[index_name][type_name] = []
@@ -168,9 +164,8 @@ def get_connection_parameters(settings):
             if settings.get('USE_SSL', False):
                 conf.update({ 'port': 443, 'use_ssl': True, 'verify_certs': True, })
             if settings.get('PORT', False):
-                conf.update('port', settings['PORT'])
+                conf.update({ 'port': settings['PORT'] })
             return conf
-        ic()
 
         _logger.warning("returning empty connection parameters")
         return {}
diff --git a/src/metax_api/utils/utils.py b/src/metax_api/utils/utils.py
index 998a0106..759427ca 100755
--- a/src/metax_api/utils/utils.py
+++ b/src/metax_api/utils/utils.py
@@ -5,7 +5,6 @@
 # :author: CSC - IT Center for Science Ltd., Espoo Finland
 # :license: MIT
 
-import os
 import sys
 from datetime import datetime
 from enum import Enum
@@ -44,13 +43,6 @@ def executing_test_case():
     return 'test' in sys.argv
 
 
-def executing_travis():
-    """
-    Returns True whenever code is being executed by travis
-    """
-    return True if os.getenv('TRAVIS', False) else False
-
-
 def datetime_to_str(date_obj):
     if isinstance(date_obj, datetime):
         return date_obj.strftime('%Y-%m-%dT%H:%M:%SZ')
@@ -207,7 +199,7 @@ def leave_keys_in_dict(dict_obj, fields_to_leave):
             del dict_obj[key]
 
 
-if executing_test_case() or executing_travis():
+if executing_test_case():
     class TestJsonLogger():
 
         def info(self, *args, **kwargs):