.github/workflows/autoupdate.yaml #11015
Workflow file for this run

# This workflow is triggered every hour
# and runs:
#   bash generate-csvs.sh && python generate-jsons.py
# to update the data files
# and then publishes the updated data to GitHub Pages
on:
  schedule:
    - cron: '11 * * * *'
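    # i.e. at minute 11 of every hour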
  # and on push
  push:
    branches:
      - main
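# pages: write and id-token: write are required by the Pages deployment
# actions below; contents: read is enough since nothing is committed back.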
permissions:
  contents: read
  pages: write
  id-token: write
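# Serialize Pages deployments; an in-flight deployment is allowed to finish
# rather than being cancelled.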
concurrency:
  group: "pages"
  cancel-in-progress: false
jobs:
  update:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Generate CSVs
        run: |
          bash data/generate-csvs.sh
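          # generate-csvs.sh refreshes data/routes.csv and data/airports.csv,
          # which are compared against the published copies below.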
          # Check if there are any changes by downloading the latest
          # published files and comparing.
          wget -O old_routes.csv "https://vrs-standing-data.adsb.lol/routes.csv" || true
          wget -O old_airports.csv "https://vrs-standing-data.adsb.lol/airports.csv" || true
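          # "|| true" tolerates a missing or unreachable remote file (e.g. the
          # very first run); cmp then reports a difference and the data is published.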
          # check if old_routes.csv and routes.csv are different
          DIFFERENT=0
          if ! cmp -s old_routes.csv data/routes.csv; then
            DIFFERENT=1
          fi
          if ! cmp -s old_airports.csv data/airports.csv; then
            DIFFERENT=1
          fi
          rm old_routes.csv old_airports.csv || true
          # If DIFFERENT=0 we exit with code 1, which fails the job
          if [ $DIFFERENT -eq 0 ]; then
            echo "No changes detected"
            # Only for scheduled runs do we exit with code 1; pushes continue and redeploy
            if [ "${{ github.event_name }}" = "schedule" ]; then
              exit 1
            fi
          fi
          # Compress the CSVs; --keep leaves the originals so both .csv and .csv.gz are published
          gzip --best --keep data/*csv
      - name: Generate JSONs
        run: |
          python data/generate-jsons.py
      - name: Copy LICENSE and README to data
        run: |
          cp LICENSE README.md data/
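      # Everything under data/ is what gets published; the next step generates
      # directory listings so the files are browsable on the Pages site.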
      - name: Generate Directory Listings
        uses: jayanta525/[email protected]
        with:
          FOLDER: data
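      # configure-pages enables GitHub Pages for the repository (if needed) and
      # prepares the deployment used by the upload/deploy steps below.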
      - name: Setup Pages
        uses: actions/configure-pages@v3
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v1
        with:
          # Upload only the generated data directory (not the whole repository)
          path: 'data'
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v1