support new data model (#14)
* Bump cryptography from 41.0.4 to 42.0.4 (#13)

Bumps [cryptography](https://github.com/pyca/cryptography) from 41.0.4 to 42.0.4.
- [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst)
- [Commits](pyca/cryptography@41.0.4...42.0.4)

---
updated-dependencies:
- dependency-name: cryptography
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>

* Bump fastapi from 0.103.1 to 0.109.1 (#15)

Bumps [fastapi](https://github.com/tiangolo/fastapi) from 0.103.1 to 0.109.1.
- [Release notes](https://github.com/tiangolo/fastapi/releases)
- [Commits](fastapi/fastapi@0.103.1...0.109.1)

---
updated-dependencies:
- dependency-name: fastapi
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>

* Bump starlette from 0.27.0 to 0.36.2 (#16)

Bumps [starlette](https://github.com/encode/starlette) from 0.27.0 to 0.36.2.
- [Release notes](https://github.com/encode/starlette/releases)
- [Changelog](https://github.com/encode/starlette/blob/master/docs/release-notes.md)
- [Commits](encode/starlette@0.27.0...0.36.2)

---
updated-dependencies:
- dependency-name: starlette
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>

* add tables

* testing & new data

* added skills

* added activity

---------

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: extreme4all <>
extreme4all and dependabot[bot] authored Mar 24, 2024
1 parent e08f3ea commit 4d70b82
Showing 33 changed files with 1,376 additions and 230 deletions.
17 changes: 17 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,17 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.3.0
    hooks:
      - id: check-yaml
      - id: end-of-file-fixer
      - id: trailing-whitespace
  - repo: https://github.com/psf/black
    rev: 22.10.0
    hooks:
      - id: black
  - repo: https://github.com/pycqa/isort
    rev: 5.12.0
    hooks:
      - id: isort
        name: isort (python)
        args: ["--profile", "black"]
27 changes: 18 additions & 9 deletions Makefile
@@ -58,30 +58,39 @@ pre-commit: ## Run pre-commit
test-setup:
	python3 -m pip install pytest

create-venv:
	python3 -m venv .venv
	source .venv/bin/activate

requirements:
	python3 -m pip install -r requirements.txt
	python3 -m pip install pytest-asyncio==0.23.6
	python3 -m pip install httpx==0.27.0
	python3 -m pip install pre-commit==3.6.2
	python3 -m pip install ruff==0.1.15
	pre-commit install

docker-down:
	docker-compose down

docker-rebuild: docker-down
	docker-compose --verbose up --build
docker-restart:
	docker compose down
	docker compose up --build -d

docker-force-rebuild:
	docker-compose --verbose up --build --force-recreate
docker-test:
	docker compose down
	docker compose up --build -d
	pytest

api-setup:
	python3 -m pip install "fastapi[all]"

env-setup:
	touch .env
	echo "KAFKA_HOST= 'localhost:9092'" >> .env
	echo "DATABASE_URL= 'mysql+aiomysql://root:root_bot_buster@localhost:3306/playerdata'" >> .env
	echo "DATABASE_URL= 'mysql+aiomysql://root:root_bot_buster@localhost:3307/playerdata'" >> .env
	echo "ENV='DEV'" >> .env
	echo "POOL_RECYCLE='60'" >> .env
	echo "POOL_TIMEOUT='30'" >> .env

docs:
	open http://localhost:5000/docs
	xdg-open http://localhost:5000/docs
	. http://localhost:5000/docs
52 changes: 1 addition & 51 deletions docker-compose.yaml
@@ -1,55 +1,5 @@
version: '3'
services:
  # kafka:
  #   container_name: kafka
  #   image: bitnami/kafka:3.5.1-debian-11-r3
  #   environment:
  #     - ALLOW_PLAINTEXT_LISTENER=yes
  #     - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093,EXTERNAL://:9094
  #     - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,EXTERNAL:PLAINTEXT
  #     - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092,EXTERNAL://localhost:9094
  #     - KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=false
  #   # volumes:
  #   #   - ./kafka:/bitnami/kafka:rw
  #   ports:
  #     - 9094:9094
  #     - 9092:9092
  #   healthcheck:
  #     test: ["CMD", "kafka-topics.sh", "--list", "--bootstrap-server", "localhost:9092"]
  #     interval: 30s
  #     timeout: 10s
  #     retries: 5
  #   networks:
  #     - botdetector-network

  # kafdrop:
  #   container_name: kafdrop
  #   image: obsidiandynamics/kafdrop:latest
  #   environment:
  #     - KAFKA_BROKERCONNECT=kafka:9092
  #     - JVM_OPTS=-Xms32M -Xmx64M
  #     - SERVER_SERVLET_CONTEXTPATH=/
  #   ports:
  #     - 9000:9000
  #   restart: on-failure
  #   networks:
  #     - botdetector-network
  #   depends_on:
  #     kafka:
  #       condition: service_healthy

  # kafka_setup:
  #   container_name: kafka_setup
  #   build:
  #     context: ./kafka_setup
  #   environment:
  #     - KAFKA_BROKER=kafka:9092
  #   networks:
  #     - botdetector-network
  #   depends_on:
  #     kafka:
  #       condition: service_healthy

  mysql:
    container_name: database
    build:
@@ -61,7 +11,7 @@ services:
      - ./mysql/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
      # - ./mysql/mount:/var/lib/mysql # creates persistence
    ports:
      - 3306:3306
      - 3307:3306
    networks:
      - botdetector-network
49 changes: 49 additions & 0 deletions mysql/docker-entrypoint-initdb.d/01_tables.sql
@@ -393,3 +393,52 @@ CREATE TABLE playerHiscoreDataXPChange (
  KEY IDX_xpChange_Player_id_ts_date (Player_id,ts_date) USING BTREE,
  CONSTRAINT fk_phd_xp_pl FOREIGN KEY (Player_id) REFERENCES Players (id) ON DELETE RESTRICT ON UPDATE RESTRICT
);

CREATE TABLE `scraper_data` (
  `scraper_id` bigint unsigned NOT NULL AUTO_INCREMENT,
  `created_at` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `player_id` int unsigned NOT NULL,
  `record_date` date GENERATED ALWAYS AS (cast(`created_at` as date)) STORED,
  PRIMARY KEY (`scraper_id`),
  UNIQUE KEY `unique_player_per_day` (`player_id`,`record_date`)
);
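
-- Example (ours, not part of this commit): `record_date` is generated from
-- `created_at`, so a scrape run inserts only the player id and
-- `unique_player_per_day` rejects a second row for the same player on the
-- same day. The player id below is hypothetical.
INSERT INTO scraper_data (player_id) VALUES (42);
-- Running the same insert again on the same day fails with a duplicate-key
-- error (1062) on unique_player_per_day.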

CREATE TABLE `scraper_data_latest` (
  `scraper_id` bigint unsigned NOT NULL AUTO_INCREMENT,
  `created_at` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `record_date` date GENERATED ALWAYS AS (cast(`created_at` as date)) STORED,
  `player_id` int unsigned NOT NULL,
  PRIMARY KEY (`player_id`),
  KEY `idx_scraper_id` (`scraper_id`),
  KEY `idx_record_date` (`record_date`)
);
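
-- Example (ours, not part of this commit): with `player_id` as the primary
-- key, `scraper_data_latest` holds at most one row per player, so it can be
-- refreshed with an upsert along these lines; both ids are hypothetical.
INSERT INTO scraper_data_latest (scraper_id, player_id)
VALUES (1001, 42)
ON DUPLICATE KEY UPDATE
  scraper_id = VALUES(scraper_id),
  created_at = CURRENT_TIMESTAMP;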

CREATE TABLE skills (
  skill_id TINYINT UNSIGNED AUTO_INCREMENT PRIMARY KEY, # < 255
  skill_name VARCHAR(50) NOT NULL,
  UNIQUE KEY unique_skill_name (skill_name)
);
CREATE TABLE activities (
  activity_id TINYINT UNSIGNED AUTO_INCREMENT PRIMARY KEY, # < 255
  activity_name VARCHAR(50) NOT NULL,
  UNIQUE KEY unique_activity_name (activity_name)
);
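
-- Example (ours, not part of this commit): the unique keys on the name
-- columns make seeding idempotent, so the lookup tables can be populated
-- with INSERT IGNORE. The names below are a small illustrative sample, not
-- the list shipped by this commit.
INSERT IGNORE INTO skills (skill_name) VALUES ('attack'), ('defence'), ('magic');
INSERT IGNORE INTO activities (activity_name) VALUES ('zulrah'), ('vorkath');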


CREATE TABLE player_skills (
  scraper_id BIGINT UNSIGNED NOT NULL,
  skill_id TINYINT UNSIGNED NOT NULL,
  skill_value INT UNSIGNED NOT NULL DEFAULT 0, # < 200 000 000
  FOREIGN KEY (scraper_id) REFERENCES scraper_data(scraper_id) ON DELETE CASCADE,
  FOREIGN KEY (skill_id) REFERENCES skills(skill_id) ON DELETE CASCADE,
  PRIMARY KEY (scraper_id, skill_id)
);

CREATE TABLE player_activities (
  scraper_id BIGINT UNSIGNED NOT NULL,
  activity_id TINYINT UNSIGNED NOT NULL,
  activity_value INT UNSIGNED NOT NULL DEFAULT 0, # kill counts can exceed 65k, so SMALLINT is too small
  FOREIGN KEY (scraper_id) REFERENCES scraper_data(scraper_id) ON DELETE CASCADE,
  FOREIGN KEY (activity_id) REFERENCES activities(activity_id) ON DELETE CASCADE,
  PRIMARY KEY (scraper_id, activity_id)
);
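
-- Example (ours, not part of this commit): with one row per (scrape, skill),
-- reading a player's most recent skill values back out is a pair of joins
-- through scraper_data_latest; the player id is hypothetical.
SELECT sk.skill_name, ps.skill_value
FROM scraper_data_latest sdl
JOIN player_skills ps ON ps.scraper_id = sdl.scraper_id
JOIN skills sk ON sk.skill_id = ps.skill_id
WHERE sdl.player_id = 42;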