Refactoring #2

Open
wants to merge 4 commits into main
129 changes: 129 additions & 0 deletions .github/workflows/build_and_publish.yml
@@ -0,0 +1,129 @@
name: Build, publish and deploy docker

on:
  push:
    branches: [ 'main' ]
    tags:
      - 'v*'

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-and-push-image:
    name: Build and push
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Log in to the Container registry
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=ref,event=tag,enable=${{ startsWith(github.ref, 'refs/tags/v') }}
            type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/v') }}
            type=raw,value=test,enable=true
      - name: Build and push Docker image
        uses: docker/build-push-action@v4
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

  deploy-testing:
    name: Deploy Testing
    needs: build-and-push-image
    runs-on: [ self-hosted, Linux, testing ]
    environment:
      name: Testing
    env:
      CONTAINER_NAME: com_profcomff_tgbot_print_test
    permissions:
      packages: read
    steps:
      - name: Pull new version
        run: docker pull ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:test
      - name: Migrate DB
        run: |
          docker run \
            --rm \
            --env DB_DSN=${{ secrets.DB_DSN }} \
            --env TG_TOKEN=${{ secrets.BOT_TOKEN }} \
            --env MARKETING_URL=${{ vars.MARKETING_URL }} \
            --env DEFAULT_PARE_iD=${{ vars.DEFAULT_PARE_iD }} \
            --env ADMIN_TG_ID=${{ vars.ADMIN_TG_ID }} \
            --env DEFAULT_PARE_USERNAME=${{ vars.DEFAULT_PARE_USERNAME }} \
            --name ${{ env.CONTAINER_NAME }}_migration \
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:test \
            alembic upgrade head
      - name: Run new version
        run: |
          docker stop ${{ env.CONTAINER_NAME }} || true && docker rm ${{ env.CONTAINER_NAME }} || true
          docker run \
            --detach \
            --restart on-failure:3 \
            --env DB_DSN=${{ secrets.DB_DSN }} \
            --env TG_TOKEN=${{ secrets.BOT_TOKEN }} \
            --env MARKETING_URL=${{ vars.MARKETING_URL }} \
            --env DEFAULT_PARE_iD=${{ vars.DEFAULT_PARE_iD }} \
            --env ADMIN_TG_ID=${{ vars.ADMIN_TG_ID }} \
            --env DEFAULT_PARE_USERNAME=${{ vars.DEFAULT_PARE_USERNAME }} \
            --name ${{ env.CONTAINER_NAME }} \
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:test

  deploy-production:
    name: Deploy Production
    needs: build-and-push-image
    if: startsWith(github.ref, 'refs/tags/v')
    runs-on: [ self-hosted, Linux, production ]
    environment:
      name: Production
    env:
      CONTAINER_NAME: com_profcomff_tgbot_print
    permissions:
      packages: read
    steps:
      - name: Pull new version
        run: docker pull ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
      - name: Migrate DB
        run: |
          docker run \
            --rm \
            --env DB_DSN=${{ secrets.DB_DSN }} \
            --env TG_TOKEN=${{ secrets.BOT_TOKEN }} \
            --env MARKETING_URL=${{ vars.MARKETING_URL }} \
            --env DEFAULT_PARE_iD=${{ vars.DEFAULT_PARE_iD }} \
            --env ADMIN_TG_ID=${{ vars.ADMIN_TG_ID }} \
            --env DEFAULT_PARE_USERNAME=${{ vars.DEFAULT_PARE_USERNAME }} \
            --name ${{ env.CONTAINER_NAME }}_migration \
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest \
            alembic upgrade head
      - name: Run new version
        run: |
          docker stop ${{ env.CONTAINER_NAME }} || true && docker rm ${{ env.CONTAINER_NAME }} || true
          docker run \
            --detach \
            --restart always \
            --env DB_DSN=${{ secrets.DB_DSN }} \
            --env TG_TOKEN=${{ secrets.BOT_TOKEN }} \
            --env MARKETING_URL=${{ vars.MARKETING_URL }} \
            --env DEFAULT_PARE_iD=${{ vars.DEFAULT_PARE_iD }} \
            --env ADMIN_TG_ID=${{ vars.ADMIN_TG_ID }} \
            --env DEFAULT_PARE_USERNAME=${{ vars.DEFAULT_PARE_USERNAME }} \
            --name ${{ env.CONTAINER_NAME }} \
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
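The tag rules in the metadata step resolve as follows: a push to main produces only the "test" image tag (its enable flag is hard-coded to true), while pushing a v* tag produces the version tag, "latest", and "test" as well, since the "test" rule is never disabled. The Python snippet below is only a rough illustration of that resolution logic; it is not docker/metadata-action's actual implementation.

# Illustration only: mimics how the three tag rules above resolve for a git ref.
def resolve_tags(ref: str) -> list[str]:
    tags = []
    is_version_tag = ref.startswith("refs/tags/v")
    if is_version_tag:
        # type=ref,event=tag -> the pushed tag name, e.g. v1.2.3
        tags.append(ref.removeprefix("refs/tags/"))
        # type=raw,value=latest,enable=startsWith(github.ref, 'refs/tags/v')
        tags.append("latest")
    # type=raw,value=test,enable=true -> applied on every build, releases included
    tags.append("test")
    return tags


print(resolve_tags("refs/heads/main"))   # ['test']
print(resolve_tags("refs/tags/v1.2.3"))  # ['v1.2.3', 'latest', 'test']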
24 changes: 24 additions & 0 deletions .github/workflows/checks.yml
@@ -0,0 +1,24 @@
name: Python package

on:
  pull_request:


jobs:
  linting:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: 3.11
      - uses: isort/isort-action@master
        with:
          requirementsFiles: "requirements.txt requirements.dev.txt"
      - uses: psf/black@stable
      - name: Comment if linting failed
        if: ${{ failure() }}
        uses: thollander/actions-comment-pull-request@v2
        with:
          message: |
            :poop: Code linting failed, use `black` and `isort` to fix it.
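The lint job only verifies formatting: isort (pointed at both requirements files so it can tell third-party packages from project code) checks import ordering, black checks code style, and a failure posts the pull-request comment rather than auto-fixing anything. Purely as an illustration, an import block that satisfies isort's default grouping (standard library, then third-party, then first-party, each group alphabetized) looks like this:

# Illustrative only: import grouping that passes isort's default rules.
import os  # standard library

from sqlalchemy import create_engine  # third-party
from sqlalchemy.orm import Session, declarative_base

from data.config import DB_DSN  # first-party project code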
53 changes: 0 additions & 53 deletions .github/workflows/deploy.yml

This file was deleted.

53 changes: 0 additions & 53 deletions .github/workflows/test_deploy.yml

This file was deleted.

102 changes: 102 additions & 0 deletions alembic.ini
@@ -0,0 +1,102 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
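Note that sqlalchemy.url above is left as a placeholder, while both deploy jobs run alembic upgrade head with only DB_DSN in the container environment, which implies the Alembic env.py overrides the URL at runtime. migrations/env.py is not part of this diff, so the snippet below is an assumed sketch of that common pattern, not the project's confirmed code.

# Hypothetical excerpt of migrations/env.py (file not shown in this PR):
# replace alembic.ini's placeholder sqlalchemy.url with the DB_DSN env variable.
import os

from alembic import context

config = context.config

db_dsn = os.getenv("DB_DSN")
if db_dsn:
    config.set_main_option("sqlalchemy.url", db_dsn)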
6 changes: 3 additions & 3 deletions controllerBD/add_info_to_db.py
@@ -9,9 +9,9 @@
 
 
 def add_gender_info():
-    if not db_session.query(exists().where(
-        Gender.gender_name == 'Не указано'
-    )).scalar():
+    if not db_session.query(
+        exists().where(Gender.gender_name == "Не указано")
+    ).scalar():
 
         db_session.add_all([el1, el2, el3])
         db_session.commit()
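The rewritten condition is a pure formatting change (black's line splitting and double quotes); the behavior is still "seed the gender table only when the default 'Не указано' row is missing". Below is a self-contained sketch of that exists() guard against a stand-in model; the real Gender model and the seed rows el1, el2, el3 live elsewhere in the project.

# Minimal sketch of the exists() guard; the model and seed row are stand-ins.
from sqlalchemy import Column, Integer, String, create_engine, exists
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Gender(Base):
    __tablename__ = "gender"  # assumed table name, for the example only
    id = Column(Integer, primary_key=True)
    gender_name = Column(String, unique=True)


engine = create_engine("sqlite://")  # throwaway in-memory database
Base.metadata.create_all(engine)

with Session(engine) as db_session:
    if not db_session.query(
        exists().where(Gender.gender_name == "Не указано")
    ).scalar():
        db_session.add(Gender(gender_name="Не указано"))  # real code adds el1, el2, el3
        db_session.commit()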
6 changes: 4 additions & 2 deletions controllerBD/db_loader.py
@@ -1,7 +1,9 @@
 from sqlalchemy import create_engine
-from sqlalchemy.orm import declarative_base, Session
+from sqlalchemy.orm import Session, declarative_base
 
-engine = create_engine('sqlite:///db/coffee_database.db')
+from data.config import DB_DSN
+
+engine = create_engine(DB_DSN)
 engine.connect()
 db_session = Session(bind=engine)
 
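db_loader.py now takes its connection string from data.config instead of hardcoding the SQLite file, which is what lets the deploy jobs above point the same image at different databases through the DB_DSN environment variable. data/config.py itself is not included in this diff; a minimal sketch of what it presumably does, reading the variables that the docker run commands inject, could look like this:

# Hypothetical data/config.py (not part of this PR): settings come from the
# environment variables passed to docker run in the deploy jobs.
import os

DB_DSN = os.getenv("DB_DSN", "sqlite:///db/coffee_database.db")  # local fallback
TG_TOKEN = os.getenv("TG_TOKEN")
MARKETING_URL = os.getenv("MARKETING_URL")
ADMIN_TG_ID = os.getenv("ADMIN_TG_ID")
DEFAULT_PARE_iD = os.getenv("DEFAULT_PARE_iD")
DEFAULT_PARE_USERNAME = os.getenv("DEFAULT_PARE_USERNAME")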