From a5a42eb4b0ab8f86b8be0ca7b4412e915e8b960b Mon Sep 17 00:00:00 2001 From: Tomi Mickelsson Date: Sat, 16 Mar 2024 10:58:28 +0200 Subject: [PATCH] ansible scripts --- Dockerfile | 8 +- README.md | 155 +++++++++---------------------- ansible/deploy.yaml | 33 +++++++ ansible/install-app.yaml | 101 ++++++++++++++++++++ ansible/install-db.yaml | 43 +++++++++ ansible/install-redis.yaml | 8 ++ ansible/myhosts.ini | 11 +++ ansible/vars.yml | 11 +++ conf/pydaemon.service | 2 +- conf/server-config-localdev.json | 18 ++++ conf/server-config.json | 16 ++-- conf/uwsgi.ini | 12 ++- fabfile.py | 125 ------------------------- requirements.txt | 1 + rsync.sh | 2 +- scripts/dbmigrate.py | 2 + 16 files changed, 290 insertions(+), 258 deletions(-) create mode 100644 ansible/deploy.yaml create mode 100644 ansible/install-app.yaml create mode 100644 ansible/install-db.yaml create mode 100644 ansible/install-redis.yaml create mode 100644 ansible/myhosts.ini create mode 100644 ansible/vars.yml create mode 100644 conf/server-config-localdev.json delete mode 100644 fabfile.py diff --git a/Dockerfile b/Dockerfile index 5e23015..cbc8c58 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,7 @@ RUN set -ex \ htop \ ' \ && apt-get update && apt-get install -y $buildDeps $deps --no-install-recommends && rm -rf /var/lib/apt/lists/* \ - && pip install uWSGI==2.0.21 \ + && pip install uWSGI==2.0.24 \ && apt-get purge -y --auto-remove $buildDeps \ && find /usr/local -depth \ \( \ @@ -39,8 +39,8 @@ COPY conf/loginscript.sh /etc/profile # background spooler dir RUN mkdir /tmp/pysrv_spooler -# we don't need this file with Docker but uwsgi looks for it -RUN echo `date +%s` >/app/VERSION +# we don't need this file with Docker (autoload is enabled) but uwsgi looks for it +RUN echo `date +%s` >/app/RESTART EXPOSE 80 @@ -51,7 +51,7 @@ EXPOSE 80 # - here I use the sample template from repo # - it is also possible to override the config with env variables, either here # or in Amazon ECS or Kubernetes configuration -COPY conf/server-config.json /app/real-server-config.json +# COPY conf/server-config-localdev.json /app/real-server-config.json # ENV PYSRV_DATABASE_HOST host.docker.internal # ENV PYSRV_REDIS_HOST host.docker.internal # ENV PYSRV_DATABASE_PASSWORD x diff --git a/README.md b/README.md index 8a24c1f..0f96299 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,9 @@ Open sourced on Sep 2018 after years of production use at multiple sites. Update Sep 2020: Run in Raspberry with an SQLite database. -Update May 2023: Python and libraries updated to recent versions. Python 3.11 in use. Still a fine foundation for a new project - the architecture does not age, and simple outlives complex. +Update May 2023: Python and libraries updated, Python 3.11 into use. Still a fine foundation for a new project - the architecture does not age, and simple outlives complex. + +Update March 2024: Ansible scripts for automatic install to cloud. **Table of contents** @@ -28,8 +30,7 @@ Update May 2023: Python and libraries updated to recent versions. 
Python 3.11 in * [Mules: extra servers](#mules) * [Logging](#logging) * [Tests](#tests) -* [Deploy to VPS](#deploy-to-vps) -* [Setup VPS server](#setup-vps-server) +* [Deploy to cloud](#deploy-to-cloud) * [Nginx](#nginx) * [Security](#security) * [Scaling up](#scaling-up) @@ -148,12 +149,14 @@ Source files The whole of this server fits into a small set of files: ``` +├── /ansible/ # ansible files for automated cloud install ├── /conf/ # configuration files │ ├── /favicon.ico # site icon │ ├── /loginscript.sh # docker shell login script, sets paths -│ ├── /pydaemon.service # systemd daemon config (if you run in a VPS) +│ ├── /pydaemon.service # systemd daemon config │ ├── /robots.txt # deny all from robots -│ ├── /server-config.json # main server config: db, redis, etc +│ ├── /server-config-localdev.json # main server config for localdev in docker +│ ├── /server-config.json # main server config, ansible fills and copies to cloud │ └── /uwsgi.ini # uwsgi daemon config, for localdev & server ├── /migrations/ # db migrations - postgresql │ ├── /001_users.py # users table, the foundation @@ -187,9 +190,8 @@ The whole of this server fits into a small set of files: │ └── /sample.log.txt # sample logging output from api test ├── build.sh # build Docker image in dev mode ├── Dockerfile # docker image config -├── fabfile.py # automation tasks: rsync deploy, migrations ├── requirements.txt # python 3rd party dependencies -├── rsync.sh # rsync sources to server and reload (instead of fabfile) +├── rsync.sh # rsync sources to server and reload (instead of ansible) ├── run.sh # run server locally with Docker in dev mode └── shell.sh # run interactive shell inside docker instance ``` @@ -211,17 +213,13 @@ So how do you get started with your own project? I suggest to take this route: Run locally with Docker ----------------------- -The fastest and easiest way to test drive RESTPie3 on your machine is to use -[Docker](https://www.docker.com/). The server fully supports Docker - the -Docker image is created with this [Dockerfile](Dockerfile). - +RESTPie3 is easy to run locally via Docker. The base image is an [official python image](https://hub.docker.com/_/python) -variant **python:3.9-slim-buster**, a recent and small Debian. +variant **python:3.11-slim-bullseye**. If you already have Docker installed, the quick steps to run RESTPie3 with SQLite and Redis are: - # download latest redis version 5.x docker pull redis:5 # create + start the redis instance @@ -239,7 +237,7 @@ SQLite and Redis are: docker exec -it restpie-dev bash -l -c 'python /app/scripts/dbmigrate.py' -If all went OK, RESTPie3 + Redis are running and you should be able to list +If all went OK, RESTPie3, SQLite and Redis are running and you should be able to list the REST API at http://localhost:8100/api/list The SQLite database is empty at this point so empty lists are returned from @@ -251,17 +249,16 @@ uses curl to do a signup and insert a new movie in the database: For a serious setup you want to have full PostgreSQL. Do the setup like this: - # download latest postgresql version 12.x - docker pull postgres:12 + docker pull postgres:15 # create + start a postgres instance - use your own db + password! 
- # the params here must match the ones in conf/server-config.json + # the params here must match the ones in conf/server-config-localdev.json docker run -d --name pos-restpie -p 5432:5432 -e POSTGRES_DB=tmdb -e POSTGRES_USER=tm -e POSTGRES_PASSWORD=MY_PASSWORD postgres:12 # activate the uuid extension docker exec -it pos-restpie psql -U tm -d tmdb -c 'create extension "uuid-ossp"' - # and then in server-config.json + # and then in server-config-localdev.json # set PYSRV_DATABASE_HOST (see PYSRV_DATABASE_HOST_POSTGRESQL) To start and stop these docker instances, invoke: @@ -273,10 +270,6 @@ To start and stop these docker instances, invoke: docker stop pos-restpie docker stop restpie-dev -If you don't want to use docker, you can install Redis, PostgreSQL, python3 -and the required python libs on your local machine too. On OSX, -[Homebrew](https://brew.sh/) is a good installation tool. These steps are not -documented here, but it's not that hard. Develop locally with Docker @@ -639,101 +632,44 @@ Run tests inside the DEV instance: docker exec -it restpie-dev bash -l -c 'python /app/test/test_redis.py' -Deploy to Linux server running Docker -------------------------------------- - -To be written. Docker compose, rsync+reload script etc. - - -Deploy to VPS -------------- - -Even though the world is crazy about Docker, I still often like to deploy code -directly and quickly to VPS servers, especially during project start and early -development. Setting up the components and the environment at server requires -some initial effort but you then have absolute control and visibility to -the whole server. Not every project needs a big cluster first. - -Setting up a whole cluster at [AWS ECS](https://aws.amazon.com/ecs/) is no -easy feat, you need to learn and configure A LOT. -[Dokku](http://dokku.viewdocs.io/dokku/) seems nice but has limitations, -allowing to run only a single Docker image. I wish the container/Kubernetes industry still matures more and provides a -[Heroku](https://www.heroku.com/)-like effortless deployments of Docker -images. - -So if you have plain VPS servers, and want to have super speedy updates from -localhost to the servers, I have created a single Python script -[fabfile.py](fabfile.py) that automates the deployment. It relies on [Fabric tool](http://www.fabfile.org/) that rsyncs the source code securely over SSH. - -The deployment is activated just with: - - fab deploy - -This transfers only the changed source files from localhost to a server, -performs database migrations and restarts the Python server. All in just -4 seconds. This makes the core dev/test loop really fast. - -You can also ignore fabfile.py and just run ./rsync.sh. - -In any case, this is just an optional script. If you have a big environment, -you most likely have a Continous Integration / Deployment solution in place. - - -Setup VPS server ----------------- - -Here are rough steps about how to setup a VPS server for this Python server. -This is not a tutorial, I assume you know Linux and SSH basics. - -During the development of this project I used latest Ubuntu 18.04 myself. -These steps should work for Ubuntu. Steps will vary depending on your OS and -version. Python3.x comes pre-installed in recent Ubuntus. - -Install PostgreSQL and Redis at server: - - sudo apt-get update - sudo apt-get install redis-server - sudo apt-get install postgresql - sudo apt-get install python3-pip - sudo apt-get install rsync (on Debian) - mkdir /app/ +Deploy to Cloud +--------------- -Redis does not require more setup. 
For PostgreSQL, create the database and
-the user: (pick your own names and secrets for the capital parts!)
+There are endless ways to deploy server software to the cloud - countless tools and countless different online services. For an early startup or a hobby project my advice is this: start small and easy - buy a cheap virtual private server (VPS), or a few of them, from a big or a small cloud vendor and run your backend services there. Have control of your infra. Have simple scripts to automate things. Then automate more via GitHub Actions and so on. There is time later to grow your infra if you get past the product-market-fit challenge and actually enjoy a bigger business.
-
-    sudo su - postgres
-    createuser MY_USER
-    createdb -O tm MY_DATABASE
-    psql MY_DATABASE
-    alter user MY_USER with encrypted password 'MY_PASSWORD';
-    create extension if not exists "uuid-ossp";
+
+[Ansible](https://www.ansible.com/) is one of the best lightweight tools to automate infra tasks. It simply runs commands over SSH against all your servers.
-
-Write the IP-address or server name locally in your fabfile.py as
-TEST_SERVER. Plus add your SSH credentials and a path to your public key.
-Then transfer source files to the server, and install the
-[systemd daemon](conf/pydaemon.service):
+
+I have prepared 4 Ansible scripts to set up RESTPie3 quickly on one or more Linux servers. These steps have been verified to work with Debian 12.5 "bookworm".
-
-    # locally
-    fab deploy
-    fab deploy_mydaemon
+
+Prerequisites:
+- Have root SSH access to your server(s) via an SSH public key, and use ssh-agent to store your private key so you don't have to type the passphrase all the time. After setup you should disable root access to the server.
+- Update your server to the latest versions: `apt update && apt upgrade`
+- Locally create a new SSH key for the user-level Linux account that is later used for deploys:
+  `ssh-keygen -t rsa -f ~/.ssh/id_myapp -C 'restpie3 deploy account'`
+- If you run OSX, update the local rsync to the latest 3.x: `brew install rsync`
+- Install the Ansible tools and extensions locally (OSX: `brew install ansible`)
+- Put your desired config/secrets into [vars.yml](ansible/vars.yml). Note: do NOT commit secrets into git! Move this file elsewhere or add it to `.gitignore`.
+- Write the IP address or domain name of your server into [myhosts.ini](ansible/myhosts.ini). If you have multiple servers, write them all here.
 
-Install Python libraries:
+Then run these 3 scripts to set up Redis + PostgreSQL + RESTPie3 on your server(s):
 
-    sudo pip3 install -r /app/requirements.txt
+    cd restpie3/ansible/
+    ansible-playbook -i myhosts.ini install-redis.yaml
+    ansible-playbook -i myhosts.ini install-db.yaml
+    ansible-playbook -i myhosts.ini install-app.yaml
 
-Edit the json config file at server, write PostgreSQL credentials:
+Now you should have RESTPie3 running on the server you configured. Point your browser there.
 
-    cd /app/
-    cp conf/server-config.json real-server-config.json
-    pico /app/real-server-config.json
+Code updates from your local machine to the server can then be deployed with:
 
-And finally re-deploy: (does database migration, server restart)
+    ansible-playbook -i myhosts.ini deploy.yaml
 
-    # locally
-    fab deploy
+Later you might want to run this playbook from a GitHub Actions workflow for consistent automation - see the sketch below.
 
-For a production setup you must also configure uwsgi to run as a lower
-privileged user and not as a root! Check the [uwsgi guide](https://uwsgi-docs.readthedocs.io/en/latest/). 
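+
+A rough sketch of such a workflow is below. This is only an illustration, not part of this repo: the file path, the `SSH_PRIVATE_KEY` secret name, and the assumption that a filled-in `ansible/vars.yml` and `myhosts.ini` are available to the runner are all placeholders to adapt to your own environment.
+
+    # .github/workflows/deploy.yml - hypothetical example, adapt before use
+    name: deploy
+    on:
+      push:
+        branches: [main]
+    jobs:
+      deploy:
+        runs-on: ubuntu-latest
+        steps:
+          - uses: actions/checkout@v4
+          - name: Install Ansible
+            run: pip install ansible
+          - name: Run the deploy playbook
+            env:
+              ANSIBLE_HOST_KEY_CHECKING: "False"
+              SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
+            run: |
+              # write the deploy key from the repo secret to a file
+              echo "$SSH_PRIVATE_KEY" > deploy_key && chmod 600 deploy_key
+              cd ansible
+              ansible-playbook -i myhosts.ini deploy.yaml --private-key ../deploy_key
+
+The nice property of this approach is that the exact same playbook runs locally and in CI, so there is only one deploy procedure to maintain.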
+Finally, do enhance the security of this simple setup:
+- Have a load balancer as a service and then run your servers in a private network. At least install a proxy server in front of RESTPie3, like [Nginx](https://www.nginx.com/) or [Caddy](https://caddyserver.com/). Only allow HTTPS traffic.
+- Run uwsgi as a low-privilege user behind the proxy, see [uwsgi.ini](conf/uwsgi.ini).
+- Disable ssh root login after the initial setup.
 
 
 Nginx
@@ -934,13 +870,6 @@ scratch. Only HTML and CSS is used with zero lines of Javascript. It is easy
 to start the project with them and create something fancier later if needed.
 
-If you want inspiration of larger front-ends, you could take a look of my two
-open-source starter kits although they are getting a bit old already. The tech
-stacks nextjs and nuxtjs are still relevant in 2021 though.
-
-* [React/Nextjs starter](https://github.com/tomimick/tm-nextjs-starter)
-* [Vue/Nuxtjs starter](https://github.com/tomimick/tm-nuxtjs-starter)
-
 Need help?
 ----------
diff --git a/ansible/deploy.yaml b/ansible/deploy.yaml
new file mode 100644
index 0000000..88b91ee
--- /dev/null
+++ b/ansible/deploy.yaml
@@ -0,0 +1,33 @@
+
+- name: Deploy software to server
+  hosts: apphosts
+  # note: deploy happens via a regular user, not root, which should be disabled
+
+  vars_files:
+    - vars.yml
+
+  tasks:
+    - name: Sync files to server
+      ansible.posix.synchronize:
+        src: "{{ src_folder }}"
+        dest: /app/
+        rsync_opts:
+          - "--exclude=.git"
+          - "--chown={{ app_user }}:staff"
+
+    - name: DB migration
+      ansible.builtin.shell:
+        cmd: /app/PYENV/bin/python3 /app/scripts/dbmigrate.py
+        chdir: /app/
+      environment:
+        PYTHONPATH: /app/py/
+        PYSRV_CONFIG_PATH: /app/real-server-config.json
+        PATH: "/app/PYENV/bin/:{{ ansible_env.PATH }}"
+      when: 0
+      # flip when to 1/true to run the db migration as part of the deploy
+
+    - name: Restart server
+      ansible.builtin.file:
+        path: /app/RESTART
+        state: touch
+        mode: u=rw,g=r,o=r
diff --git a/ansible/install-app.yaml b/ansible/install-app.yaml
new file mode 100644
index 0000000..14aabda
--- /dev/null
+++ b/ansible/install-app.yaml
@@ -0,0 +1,101 @@
+
+- name: Install our python app server
+  hosts: apphostsroot
+
+  vars_files:
+    - vars.yml
+
+  tasks:
+    - name: Create unix user
+      ansible.builtin.user:
+        name: "{{ app_user }}"
+        groups: "sudo"
+
+    - name: Upload ssh pubkey to authorized keys
+      ansible.posix.authorized_key:
+        user: "{{ app_user }}"
+        state: present
+        key: "{{ lookup('file', '~/.ssh/id_myapp.pub') }}"
+
+    - name: Create /app dir
+      ansible.builtin.file:
+        path: /app/
+        state: directory
+        owner: "{{ app_user }}"
+        # directories need the execute bit to be traversable
+        mode: u=rwx,g=rx,o=rx
+
+    - name: Install packages
+      ansible.builtin.apt:
+        pkg:
+          - python3-pip
+          - python3-venv
+          - uwsgi-core
+          - uwsgi-plugin-python3
+          - htop
+
+    - name: Create python virtual env
+      ansible.builtin.command: "python3 -m venv /app/PYENV/"
+
+    - name: Copy source files to server
+      ansible.posix.synchronize:
+        src: "{{ src_folder }}"
+        dest: /app/
+        owner: false
+        group: false
+        rsync_opts:
+          - "--exclude=.git"
+
+    - name: Create RESTART file
+      ansible.builtin.file:
+        path: /app/RESTART
+        state: touch
+
+    - name: Recursively change ownership of dir
+      ansible.builtin.file:
+        path: /app/
+        state: directory
+        recurse: yes
+        owner: "{{ app_user }}"
+        group: "staff"
+
+    - name: Install py libs
+      ansible.builtin.pip:
+        requirements: /app/requirements.txt
+        virtualenv: /app/PYENV/
+
+    - name: Copy srv config file with correct data
+      ansible.builtin.template:
+        src: "{{ src_folder }}/conf/server-config.json"
+        dest: /app/real-server-config.json
+        owner: "{{ app_user }}"
+        
group: root + mode: u=rw,g=r,o=r + + - name: Copy service file + ansible.builtin.copy: + src: "{{ src_folder }}/conf/pydaemon.service" + dest: /etc/systemd/system/ + owner: root + group: root + mode: u=rwx,g=rx,o=rx + + - name: Enable pydaemon service + ansible.builtin.systemd_service: + name: pydaemon + enabled: true + masked: no + + - name: Start pydaemon + ansible.builtin.systemd_service: + daemon_reload: true + state: started + name: pydaemon + + - name: Init db schema + ansible.builtin.shell: + cmd: /app/PYENV/bin/python3 /app/scripts/dbmigrate.py + chdir: /app/ + environment: + PYTHONPATH: /app/py/ + PYSRV_CONFIG_PATH: /app/real-server-config.json + PATH: "/app/PYENV/bin/:{{ ansible_env.PATH }}" diff --git a/ansible/install-db.yaml b/ansible/install-db.yaml new file mode 100644 index 0000000..3991be3 --- /dev/null +++ b/ansible/install-db.yaml @@ -0,0 +1,43 @@ + +- name: Install PostgreSQL database server + hosts: dbhost + become: yes + + vars_files: + - vars.yml + + tasks: + - name: Install PostgreSQL server + ansible.builtin.apt: + name: postgresql + + - name: Install pip + ansible.builtin.apt: + pkg: + - python3-pip + + - name: Install psycopg2 for community plugin below + ansible.builtin.pip: + name: psycopg2-binary + extra_args: --break-system-packages + + - name: Create db user + community.postgresql.postgresql_user: + name: "{{ db_user }}" + password: "{{ db_password }}" + become: yes + become_user: postgres + + - name: Create app db + community.postgresql.postgresql_db: + name: "{{ db_name }}" + owner: "{{ db_user }}" + become: yes + become_user: postgres + + - name: Add uuid extension + community.postgresql.postgresql_ext: + name: uuid-ossp + db: "{{ db_name }}" + become: yes + become_user: postgres diff --git a/ansible/install-redis.yaml b/ansible/install-redis.yaml new file mode 100644 index 0000000..31d6446 --- /dev/null +++ b/ansible/install-redis.yaml @@ -0,0 +1,8 @@ + +- name: Install Redis server + hosts: redishost + + tasks: + - name: Install Redis + ansible.builtin.apt: + name: redis diff --git a/ansible/myhosts.ini b/ansible/myhosts.ini new file mode 100644 index 0000000..5a20bad --- /dev/null +++ b/ansible/myhosts.ini @@ -0,0 +1,11 @@ +[apphosts] +10.10.10.10 ansible_ssh_user=myapp + +[apphostsroot] +10.10.10.10 ansible_ssh_user=root + +[redishost] +10.10.10.10 ansible_ssh_user=root + +[dbhost] +10.10.10.10 ansible_ssh_user=root diff --git a/ansible/vars.yml b/ansible/vars.yml new file mode 100644 index 0000000..a1c1de9 --- /dev/null +++ b/ansible/vars.yml @@ -0,0 +1,11 @@ +--- +redis_host: localhost +db_host: localhost +src_folder: /Users/x/restpie3/ +app_user: myapp +db_user: my-db-account +db_password: my-password-here +db_name: mydb + +# this file has your secrets - keep it safe, do not commit into repository! +# do not use the default values here! 
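+
+# a hedged suggestion (not part of the original setup): instead of keeping this
+# file around in plaintext, you could encrypt it with ansible-vault, e.g.
+#
+#   ansible-vault encrypt vars.yml
+#   ansible-playbook -i myhosts.ini install-app.yaml --ask-vault-pass
+#
+# ansible-playbook then decrypts the vars on the fly with the vault password.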
diff --git a/conf/pydaemon.service b/conf/pydaemon.service index 452f289..d612e6a 100644 --- a/conf/pydaemon.service +++ b/conf/pydaemon.service @@ -16,7 +16,7 @@ User=root #Group=mygroup # note: create /tmp/pysrv_spooler on reboot ExecStartPre=/bin/mkdir -p /tmp/pysrv_spooler; -ExecStart=/usr/local/bin/uwsgi --ini /app/conf/uwsgi.ini:uwsgi-production +ExecStart=uwsgi --ini /app/conf/uwsgi.ini:uwsgi-production RuntimeDirectory=mydaemon Restart=always RestartSec=3 diff --git a/conf/server-config-localdev.json b/conf/server-config-localdev.json new file mode 100644 index 0000000..b6241cd --- /dev/null +++ b/conf/server-config-localdev.json @@ -0,0 +1,18 @@ +{ + "name": "python server config, for localdev", + + "PYSRV_IS_PRODUCTION": "", + + "PYSRV_DATABASE_HOST": "/app/data/mydb.sqlite", + "PYSRV_DATABASE_HOST_POSTGRESQL": "host.docker.internal", + "PYSRV_DATABASE_HOST_SQLITE": "/app/data/mydb.sqlite", + "PYSRV_DATABASE_PORT": "5432", + "PYSRV_DATABASE_NAME": "tmdb", + "PYSRV_DATABASE_USER": "tm", + "PYSRV_DATABASE_PASSWORD": "MY_PASSWORD", + + "PYSRV_COOKIE_HTTPS_ONLY": false, + "PYSRV_REDIS_HOST": "host.docker.internal:6379", + "PYSRV_DOMAIN_NAME": "", + "PYSRV_CORS_ALLOW_ORIGIN": "*" +} diff --git a/conf/server-config.json b/conf/server-config.json index 19a3e73..f346080 100644 --- a/conf/server-config.json +++ b/conf/server-config.json @@ -1,18 +1,16 @@ { - "name": "python server config template - rename me", + "name": "python server config, modified by ansible", "PYSRV_IS_PRODUCTION": "", - "PYSRV_DATABASE_HOST": "/app/data/mydb.sqlite", - "PYSRV_DATABASE_HOST_POSTGRESQL": "host.docker.internal", - "PYSRV_DATABASE_HOST_SQLITE": "/app/data/mydb.sqlite", - "PYSRV_DATABASE_PORT": "54320", - "PYSRV_DATABASE_NAME": "tmdb", - "PYSRV_DATABASE_USER": "tm", - "PYSRV_DATABASE_PASSWORD": "MY_PASSWORD", + "PYSRV_DATABASE_HOST": "{{ db_host }}", + "PYSRV_DATABASE_PORT": "5432", + "PYSRV_DATABASE_NAME": "{{ db_name }}", + "PYSRV_DATABASE_USER": "{{ db_user }}", + "PYSRV_DATABASE_PASSWORD": "{{ db_password }}", "PYSRV_COOKIE_HTTPS_ONLY": false, - "PYSRV_REDIS_HOST": "host.docker.internal:6379", + "PYSRV_REDIS_HOST": "{{ redis_host }}", "PYSRV_DOMAIN_NAME": "", "PYSRV_CORS_ALLOW_ORIGIN": "*" } diff --git a/conf/uwsgi.ini b/conf/uwsgi.ini index f2ed6ba..29c650c 100644 --- a/conf/uwsgi.ini +++ b/conf/uwsgi.ini @@ -30,7 +30,7 @@ static-map = /favicon.ico=conf/favicon.ico # local dev with docker - py-autoreload enabled [uwsgi-debug-docker] env = FLASK_ENV=development -env = PYSRV_CONFIG_PATH=/app/conf/server-config.json +env = PYSRV_CONFIG_PATH=/app/conf/server-config-localdev.json http = 0.0.0.0:80 # if using nginx and uwsgi_pass: # uwsgi-socket = localhost:8010 @@ -60,8 +60,9 @@ mule = py/mule1.py # test/production server config - plain python and docker [uwsgi-production] -env = PYSRV_CONFIG_PATH=/app/real-server-config.json -http = 0.0.0.0:80 +plugins = corerouter,python3,logfile,spooler,http +env=PYSRV_CONFIG_PATH=/app/real-server-config.json +http = :80 # if using nginx and uwsgi_pass: # uwsgi-socket = localhost:8010 master = 1 @@ -70,8 +71,9 @@ callable = app processes = 4 chdir = /app/ pythonpath = /app/py/ +virtualenv = /app/PYENV/ # deploy-script touches this file and uwsgi restarts -touch-reload=/app/VERSION +touch-reload=/app/RESTART harakiri=20 disable-logging=1 spooler-quiet=1 @@ -84,7 +86,7 @@ vacuum = true # log to file (and stdout too so docker run locally works) logger = file:/app/app.log logger = stdio -# run as this user - MUST SET LOWER PRIVILEGES! 
+# run as this user - MUST SET LOWER PRIVILEGES! (Port 80 requires root) ; uid=appuser ; gid=appgroup # workers live max this many requests and secs diff --git a/fabfile.py b/fabfile.py deleted file mode 100644 index 9cd91fa..0000000 --- a/fabfile.py +++ /dev/null @@ -1,125 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - -# fabfile.py: automated tasks -# - deploy sources from local machine to test/production server -# - migrate local/server database -# -# Author: Tomi.Mickelsson@iki.fi - -import sys -import os -import time -import io - -from fabric.api import env, run, task, sudo, local, put -from fabric.contrib.console import confirm -from fabric.contrib.project import rsync_project -from fabric.operations import prompt - - -# write your own server info here: -TEST_SERVER = "testserver.mydomain.com" -PRODUCTION_SERVER = "www.mydomain.com" -SSH_USER = "" -SSH_PRIVATE_KEY = "~/.ssh/xxx_rsa" - - -# -------------------------------------------------------------------------- -# fabric reads these - -env.hosts = [TEST_SERVER] -env.use_ssh_config = True -env.user = SSH_USER -env.remotedir = "/app/" -env.port = 22 -env.key_filename = SSH_PRIVATE_KEY - -# -------------------------------------------------------------------------- -# DATABASE TASKS - -@task -def postgres_migrate_local(): - """Local database migrate""" - local("python scripts/dbmigrate.py") - -@task -def postgres_migrate_remote(): - """Server database migrate""" - dir = env.remotedir - cmd = "cd {}; PYTHONPATH={}py PYSRV_CONFIG_PATH={} python3 scripts/dbmigrate.py".format(dir, dir, dir+"real-server-config.json") - run(cmd) - -@task -def postgres_run_server(): - local("postgres -D /usr/local/var/postgres") - -@task -def postgres_list_tables(): - sql = "SELECT * FROM pg_catalog.pg_tables WHERE schemaname = 'public'" - local("psql -d tmdb -c \"{}\"".format(sql)) - -@task -def postgres_list_users(): - sql = "SELECT * FROM users" - local("psql -d tmdb -c \"{}\"".format(sql)) - -@task -def postgres_gen_models(): - """Generate peewee models from database: generated-models.py""" - - cmd = "pwiz.py -e postgresql -u tm -P tmdb >generated-models.py" - local(cmd) - - -# -------------------------------------------------------------------------- -# DEPLOY TASKS - -@task -def production(): - """Set target host to production server""" - - if confirm("DEPLOY PRODUCTION, YOU SURE ??????", default=False): - env.hosts = [PRODUCTION_SERVER] - print("Deploying soon... ", env.hosts[0].upper()) - # wait a little so you can still stop... 
- time.sleep(5) - else: - print("Exiting") - sys.exit(1) - -@task -def deploy(): - """Deploy current local sources to server + db migration""" - - rsync_files() - - postgres_migrate_remote() - - # touch VERSION, uwsgi will then restart automatically - data = io.StringIO("%d" % time.time()) - put(data, "/app/VERSION", use_sudo=False) - - -def rsync_files(): - """rsync source files to remote server""" - - exclude_list = ['*.pyc', '.git', '.DS_Store', 'node_modules', '__pycache__', - 'doc', 'trash'] - - rsync_project(env.remotedir, local_dir=".", delete=False, - default_opts='-hrvz', exclude=exclude_list, - extra_opts=' -O --no-perms --checksum') - -@task -def deploy_mydaemon(): - """Update uwsgi master config conf/pydaemon.service, then restart""" - - sudo("systemctl stop pydaemon", warn_only=True) - - put("conf/pydaemon.service", "/etc/systemd/system/", use_sudo=True) - - sudo("systemctl enable pydaemon") - sudo("systemctl daemon-reload") - sudo("systemctl start pydaemon") - diff --git a/requirements.txt b/requirements.txt index 2e080fc..bdcac44 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,3 +7,4 @@ psycopg2-binary==2.9.6 pytz==2022.7.1 redis==4.5.4 requests==2.31.0 +uwsgidecorators==1.1.0 diff --git a/rsync.sh b/rsync.sh index 11e4e00..e085b03 100755 --- a/rsync.sh +++ b/rsync.sh @@ -10,5 +10,5 @@ sleep 3 rsync -av --exclude '.git' --exclude '__pycache__' --exclude '*.pyc' --exclude '*.sqlite' * $HOST:/app/ # ask python server to reload sources -ssh $HOST touch /app/VERSION +ssh $HOST touch /app/RESTART diff --git a/scripts/dbmigrate.py b/scripts/dbmigrate.py index fe2dc34..1f67d43 100644 --- a/scripts/dbmigrate.py +++ b/scripts/dbmigrate.py @@ -6,6 +6,7 @@ # # Author: Tomi.Mickelsson@iki.fi +import sys import os import config @@ -31,3 +32,4 @@ else: print("migrate OK") +sys.exit(ret)